gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.program.model.data; import static org.junit.Assert.*; import org.junit.Test; import generic.test.AbstractGenericTest; import ghidra.docking.settings.*; import ghidra.program.model.address.Address; import ghidra.program.model.mem.ByteMemBufferImpl; import ghidra.program.model.mem.MemBuffer; public class IntegerDataTypeTest extends AbstractGenericTest { private static byte[] arr(int... vals) { byte[] result = new byte[vals.length]; for (int i = 0; i < vals.length; i++) { result[i] = (byte) vals[i]; } return result; } private static MemBuffer buf(boolean bigEndian, int... vals) { return new ByteMemBufferImpl(Address.NO_ADDRESS, arr(vals), bigEndian); } private static Settings format(FormatSettingsDefinition setDef) { Settings settings = new SettingsImpl(); setDef.setChoice(settings, setDef.getChoice(null)); return settings; } // NB. 
Need at least one byte to appear "initialized" private static final MemBuffer BE = buf(true, 0); private static final MemBuffer LE = buf(false, 0); private static final Settings HEX = format(FormatSettingsDefinition.DEF_HEX); private static final Settings DEC = format(FormatSettingsDefinition.DEF_DECIMAL); private static final Settings BIN = format(FormatSettingsDefinition.DEF_BINARY); private static final Settings OCT = format(FormatSettingsDefinition.DEF_OCTAL); private static final Settings CHR = format(FormatSettingsDefinition.DEF_CHAR); private interface EncodeRunnable { void run() throws Exception; } private static void assertFails(EncodeRunnable r) throws Exception { try { r.run(); } catch (DataTypeEncodeException e) { return; // pass } fail(); } @Test public void testEncodeValueUnsignedByteBE() throws Exception { DataType type = AbstractIntegerDataType.getUnsignedDataType(1, null); // Technically, these two are exactly the same test, just different Java syntax assertArrayEquals(arr(0xff), type.encodeValue((byte) 0xff, BE, HEX, 1)); assertArrayEquals(arr(0xff), type.encodeValue((byte) -1, BE, HEX, 1)); assertFails(() -> type.encodeValue((short) 0x100, BE, HEX, 1)); assertFails(() -> type.encodeValue((short) -1, BE, HEX, 1)); assertArrayEquals(arr(0xff), type.encodeValue(0xff, BE, HEX, 1)); // This fails, because (int)-1 is 4294967295 when treated unsigned assertFails(() -> type.encodeValue(-1, BE, HEX, 1)); } @Test public void testEncodeRepresentationUnsignedByteHexBE() throws Exception { DataType type = AbstractIntegerDataType.getUnsignedDataType(1, null); // Sanity check: Renders unsigned assertEquals("80h", type.getRepresentation(buf(true, 0x80), HEX, 1)); assertArrayEquals(arr(0x00), type.encodeRepresentation("0h", BE, HEX, 1)); assertArrayEquals(arr(0x7f), type.encodeRepresentation("7fh", BE, HEX, 1)); assertArrayEquals(arr(0x80), type.encodeRepresentation("80h", BE, HEX, 1)); assertArrayEquals(arr(0xff), type.encodeRepresentation("ffh", BE, HEX, 1)); 
assertFails(() -> type.encodeRepresentation("100h", BE, HEX, 1)); assertFails(() -> type.encodeRepresentation("-1h", BE, HEX, 1)); } @Test public void testEncodeRepresentationSignedShortHexBE() throws Exception { DataType type = AbstractIntegerDataType.getSignedDataType(2, null); // Sanity check: Negative hex values render unsigned assertEquals("8000h", type.getRepresentation(buf(true, 0x80, 0x00), HEX, 2)); assertArrayEquals(arr(0x00, 0x00), type.encodeRepresentation("0h", BE, HEX, 2)); assertArrayEquals(arr(0x7f, 0xff), type.encodeRepresentation("7fffh", BE, HEX, 2)); assertArrayEquals(arr(0x80, 0x00), type.encodeRepresentation("8000h", BE, HEX, 2)); assertArrayEquals(arr(0xff, 0xff), type.encodeRepresentation("ffffh", BE, HEX, 2)); assertArrayEquals(arr(0xff, 0xff), type.encodeRepresentation("-1h", BE, HEX, 2)); assertArrayEquals(arr(0x80, 0x00), type.encodeRepresentation("-8000h", BE, HEX, 2)); assertFails(() -> type.encodeRepresentation("10000h", BE, HEX, 2)); assertFails(() -> type.encodeRepresentation("-8001h", BE, HEX, 2)); } @Test public void testEncodeRepresentationSignedShortHexLE() throws Exception { DataType type = AbstractIntegerDataType.getSignedDataType(2, null); // Sanity check: Negative hex values render unsigned assertEquals("8000h", type.getRepresentation(buf(false, 0x00, 0x80), HEX, 2)); assertArrayEquals(arr(0x00, 0x00), type.encodeRepresentation("0h", LE, HEX, 2)); assertArrayEquals(arr(0xff, 0x7f), type.encodeRepresentation("7fffh", LE, HEX, 2)); assertArrayEquals(arr(0x00, 0x80), type.encodeRepresentation("8000h", LE, HEX, 2)); assertArrayEquals(arr(0xff, 0xff), type.encodeRepresentation("ffffh", LE, HEX, 2)); assertArrayEquals(arr(0xff, 0xff), type.encodeRepresentation("-1h", LE, HEX, 2)); assertArrayEquals(arr(0x00, 0x80), type.encodeRepresentation("-8000h", LE, HEX, 2)); assertFails(() -> type.encodeRepresentation("10000h", LE, HEX, 2)); assertFails(() -> type.encodeRepresentation("-8001h", LE, HEX, 2)); } @Test public void 
testEncodeRepresentationUnsignedShortHexBE() throws Exception { DataType type = AbstractIntegerDataType.getUnsignedDataType(2, null); // Sanity check: Renders unsigned assertEquals("8000h", type.getRepresentation(buf(true, 0x80, 0x00), HEX, 2)); assertArrayEquals(arr(0x00, 0x00), type.encodeRepresentation("0h", BE, HEX, 2)); assertArrayEquals(arr(0x7f, 0xff), type.encodeRepresentation("7fffh", BE, HEX, 2)); assertArrayEquals(arr(0x80, 0x00), type.encodeRepresentation("8000h", BE, HEX, 2)); assertArrayEquals(arr(0xff, 0xff), type.encodeRepresentation("ffffh", BE, HEX, 2)); assertFails(() -> type.encodeRepresentation("-1h", BE, HEX, 2)); assertFails(() -> type.encodeRepresentation("-8000h", BE, HEX, 2)); assertFails(() -> type.encodeRepresentation("10000h", BE, HEX, 2)); assertFails(() -> type.encodeRepresentation("-8001h", BE, HEX, 2)); } @Test public void testEncodeRepresentationSignedShortDecBE() throws Exception { DataType type = AbstractIntegerDataType.getSignedDataType(2, null); // Sanity check: Negative hex values render signed assertEquals("-32768", type.getRepresentation(buf(true, 0x80, 0x00), DEC, 2)); assertArrayEquals(arr(0x00, 0x00), type.encodeRepresentation("0", BE, DEC, 2)); assertArrayEquals(arr(0x7f, 0xff), type.encodeRepresentation("32767", BE, DEC, 2)); assertArrayEquals(arr(0x80, 0x00), type.encodeRepresentation("-32768", BE, DEC, 2)); assertArrayEquals(arr(0xff, 0xff), type.encodeRepresentation("-1", BE, DEC, 2)); assertFails(() -> type.encodeRepresentation("32768", BE, DEC, 2)); assertFails(() -> type.encodeRepresentation("-32769", BE, DEC, 2)); } @Test public void testEncodeRepresentationUnsignedShortDecBE() throws Exception { DataType type = AbstractIntegerDataType.getUnsignedDataType(2, null); // Sanity check: Renders unsigned assertEquals("32768", type.getRepresentation(buf(true, 0x80, 0x00), DEC, 2)); assertArrayEquals(arr(0x00, 0x00), type.encodeRepresentation("0", BE, DEC, 2)); assertArrayEquals(arr(0x7f, 0xff), 
type.encodeRepresentation("32767", BE, DEC, 2)); assertArrayEquals(arr(0x80, 0x00), type.encodeRepresentation("32768", BE, DEC, 2)); assertArrayEquals(arr(0xff, 0xff), type.encodeRepresentation("65535", BE, DEC, 2)); assertFails(() -> type.encodeRepresentation("-1", BE, DEC, 2)); assertFails(() -> type.encodeRepresentation("65536", BE, DEC, 2)); } @Test public void testEncodeRepresentationSignedShortBinBE() throws Exception { DataType type = AbstractIntegerDataType.getUnsignedDataType(2, null); // Sanity check assertEquals("100000011b", type.getRepresentation(buf(true, 0x01, 0x03), BIN, 2)); assertArrayEquals(arr(0x01, 0x03), type.encodeRepresentation("100000011b", BE, BIN, 2)); } @Test public void testEncodeRepresentationSignedShortOctBE() throws Exception { DataType type = AbstractIntegerDataType.getUnsignedDataType(2, null); // Sanity check assertEquals("403o", type.getRepresentation(buf(true, 0x01, 0x03), OCT, 2)); assertArrayEquals(arr(0x01, 0x03), type.encodeRepresentation("403o", BE, OCT, 2)); } @Test public void testEncodeRepresentationChar() throws Exception { DataType stype = AbstractIntegerDataType.getSignedDataType(1, null); DataType utype = AbstractIntegerDataType.getUnsignedDataType(1, null); // Sanity check assertEquals("'A'", stype.getRepresentation(buf(true, 0x41), CHR, 1)); assertEquals("'A'", utype.getRepresentation(buf(true, 0x41), CHR, 1)); assertArrayEquals(arr(0x41), stype.encodeRepresentation("'A'", BE, CHR, 1)); assertArrayEquals(arr(0x41), utype.encodeRepresentation("'A'", BE, CHR, 1)); } }
/*
 * Copyright 2008 the original author or authors.
 * Copyright 2005 Sun Microsystems, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package sorcer.provider.boot;

import edu.emory.mathcs.util.classloader.URIClassLoader;
import net.jini.loader.ClassAnnotation;

import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.MalformedURLException;
import java.util.Properties;

/**
 * The ServiceClassLoader overrides getURLs(), ensuring all classes that need to
 * be annotated with specific location(s) are returned appropriately
 *
 * @author Dennis Reedy
 */
public class ServiceClassLoader extends URIClassLoader implements ClassAnnotation {
    /** URIs this loader searches for classes; may be null. */
    private final URI[] searchPath;
    /** The ClassAnnotator to use; may be null (no annotation URLs then). */
    private final ClassAnnotator annotator;
    /** Meta data associated with the classloader */
    private final Properties metaData = new Properties();

    /**
     * Constructs a new ServiceClassLoader for the specified URLs having the
     * given parent. The constructor takes two sets of URLs. The first set is
     * where the class loader loads classes from, the second set is what it
     * returns when getURLs() is called.
     *
     * @param searchPath Array of URIs to search for classes
     * @param annotator Array of URLs to use for the codebase
     * @param parent Parent ClassLoader to delegate to
     */
    public ServiceClassLoader(URI[] searchPath, ClassAnnotator annotator,
                              ClassLoader parent) {
        this(searchPath, annotator, parent, null);
    }

    /**
     * Constructs a new ServiceClassLoader for the specified URLs having the
     * given parent. The constructor takes two sets of URLs. The first set is
     * where the class loader loads classes from, the second set is what it
     * returns when getURLs() is called.
     *
     * @param searchPath Array of URIs to search for classes
     * @param annotator Array of URLs to use for the codebase
     * @param parent Parent ClassLoader to delegate to
     * @param metaData Optional meta data associated with the classloader
     */
    public ServiceClassLoader(URI[] searchPath, ClassAnnotator annotator,
                              ClassLoader parent, Properties metaData) {
        super(searchPath, parent);
        // A null annotator is deliberately tolerated; getURLs() and
        // getClassAnnotation() then return null.
        this.annotator = annotator;
        this.searchPath = searchPath;
        if (metaData != null) {
            this.metaData.putAll(metaData);
        }
    }

    /**
     * Get the {@link sorcer.provider.boot.ClassAnnotator} created at construction
     * time
     *
     * @return The ClassAnnotator
     */
    public ClassAnnotator getClassAnnotator() {
        return annotator;
    }

    /**
     * Get the meta data associated with this classloader
     *
     * @return A Properties object representing any meta data associated with
     * this classloader. A new Properties object is created each time
     */
    public Properties getMetaData() {
        return new Properties(metaData);
    }

    /**
     * Add meta data associated with the classloader
     *
     * @param metaData Properties to associate to this classloader. If the
     * property already exists in the managed metaData, it will be replaced.
     * New properties will be added. A null parameter will be ignored.
     */
    public void addMetaData(Properties metaData) {
        if (metaData == null) {
            return;
        }
        this.metaData.putAll(metaData);
    }

    /**
     * Get the URLs to be used for class annotations as determined by the
     * {@link sorcer.provider.boot.ClassAnnotator}
     *
     * <p>NOTE(review): returns {@code null} when no annotator was supplied;
     * callers that expect the usual non-null {@code getURLs()} contract of
     * URL class loaders must guard against this.
     */
    public URL[] getURLs() {
        return annotator != null ? annotator.getURLs() : null;
    }

    /**
     * Get the search path of URLs for loading classes and resources
     *
     * @return The array of <code>URL[]</code> which corresponds to the search
     * path for the class loader; that is, the array elements are the locations
     * from which the class loader will load requested classes.
     *
     * @throws MalformedURLException If any of the URIs cannot be transformed
     * to URLs
     */
    public URL[] getSearchPath() throws MalformedURLException {
        if (searchPath == null) {
            return new URL[0];
        }
        URL[] urls = new URL[searchPath.length];
        for (int i = 0; i < urls.length; i++) {
            urls[i] = searchPath[i].toURL();
        }
        return urls;
    }

    /**
     * Appends the specified URLs to the list of URLs to search for classes and
     * resources.
     *
     * @param urls The URLs to add
     */
    public void addURLs(URL[] urls) {
        URI[] uris = new URI[0];
        try {
            uris = getURIs(urls);
        }
        catch (URISyntaxException e) {
            // NOTE(review): conversion failures are logged and the add is
            // silently skipped — preserved best-effort behavior.
            e.printStackTrace();
        }
        for (URI uri : uris) {
            super.addURI(uri);
        }
    }

    /**
     * Get the class annotations as determined by the
     * {@link sorcer.provider.boot.ClassAnnotator}
     *
     * @see net.jini.loader.ClassAnnotation#getClassAnnotation
     */
    public String getClassAnnotation() {
        return annotator != null ? annotator.getClassAnnotation() : null;
    }

    /**
     * Returns a String representation of this class loader.
     **/
    public String toString() {
        return ServiceClassLoader.class.getName() + " " +
               "ClassPath : [" + ClassAnnotator.urisToPath(searchPath) + "] " +
               "Codebase : [" + getClassAnnotation() + "]";
    }

    /**
     * Convert a <code>URL[]</code> into a <code>URI[]</code>
     *
     * @param urls Array of URLs to convert
     *
     * @return Converted array of URIs
     *
     * @throws URISyntaxException If there are errors converting the URLs to
     * URIs
     */
    public static URI[] getURIs(URL[] urls) throws URISyntaxException {
        if (urls == null) {
            throw new IllegalArgumentException("urls array must not be null");
        }
        URI[] uris = new URI[urls.length];
        for (int i = 0; i < urls.length; i++) {
            if (urls[i].getProtocol().equals("file")) {
                File f = new File(urls[i].getFile());
                // Undo URL-style space escaping in file paths; literal
                // replace() suffices — no regex needed for "%20".
                if (f.getAbsolutePath().contains("%20")) {
                    f = new File(f.getAbsolutePath().replace("%20", " "));
                }
                uris[i] = f.toURI();
            }
            else {
                uris[i] = urls[i].toURI();
            }
        }
        return uris;
    }
}
package nl.pascaldevink.jotify.gui.listeners;

import java.util.ArrayList;
import java.util.List;

import nl.pascaldevink.jotify.gui.JotifyPlaybackQueue;
import de.felixbruns.jotify.media.Album;
import de.felixbruns.jotify.media.Artist;
import de.felixbruns.jotify.media.Playlist;
import de.felixbruns.jotify.media.Result;
import de.felixbruns.jotify.media.Track;

/**
 * Singleton event hub: listeners register once, and fire* methods broadcast
 * playlist, queue, search, browse, control, player and selection events to
 * every registered listener of the matching type.
 *
 * <p>NOTE(review): listener lists are plain ArrayLists — registration and
 * firing are assumed to happen on a single (UI) thread.
 */
public class JotifyBroadcast {
	private List<PlaylistListener> playlistListeners;
	private List<QueueListener> queueListeners;
	private List<SearchListener> searchListeners;
	private List<BrowseListener> browseListeners;
	private List<ControlListener> controlListeners;
	private List<PlayerListener> playerListeners;
	// Renamed from clearSelectionListener for consistency with the other
	// (plural) listener-list fields.
	private List<ClearSelectionListener> clearSelectionListeners;

	private static JotifyBroadcast instance;

	static {
		instance = new JotifyBroadcast();
	}

	/** @return the single shared broadcast instance */
	public static JotifyBroadcast getInstance() {
		return instance;
	}

	private JotifyBroadcast() {
		this.playlistListeners = new ArrayList<PlaylistListener>();
		this.queueListeners = new ArrayList<QueueListener>();
		this.searchListeners = new ArrayList<SearchListener>();
		this.browseListeners = new ArrayList<BrowseListener>();
		this.controlListeners = new ArrayList<ControlListener>();
		this.playerListeners = new ArrayList<PlayerListener>();
		this.clearSelectionListeners = new ArrayList<ClearSelectionListener>();
	}

	public void addPlaylistListener(PlaylistListener listener) {
		this.playlistListeners.add(listener);
	}

	public void addQueueListener(QueueListener listener) {
		this.queueListeners.add(listener);
	}

	public void addSearchListener(SearchListener listener) {
		this.searchListeners.add(listener);
	}

	public void addBrowseListener(BrowseListener listener) {
		this.browseListeners.add(listener);
	}

	public void addControlListener(ControlListener listener) {
		this.controlListeners.add(listener);
	}

	public void addPlayerListener(PlayerListener listener) {
		this.playerListeners.add(listener);
	}

	public void addClearSelectionListener(ClearSelectionListener listener) {
		this.clearSelectionListeners.add(listener);
	}

	public void firePlaylistAdded(Playlist playlist) {
		for (PlaylistListener listener : this.playlistListeners) {
			listener.playlistAdded(playlist);
		}
	}

	public void firePlaylistRemoved(Playlist playlist) {
		for (PlaylistListener listener : this.playlistListeners) {
			listener.playlistRemoved(playlist);
		}
	}

	public void firePlaylistUpdated(Playlist playlist) {
		for (PlaylistListener listener : this.playlistListeners) {
			listener.playlistUpdated(playlist);
		}
	}

	public void firePlaylistSelected(Playlist playlist) {
		for (PlaylistListener listener : this.playlistListeners) {
			listener.playlistSelected(playlist);
		}
	}

	public void fireQueueSelected(JotifyPlaybackQueue queue) {
		for (QueueListener listener : this.queueListeners) {
			listener.queueSelected(queue);
		}
	}

	public void fireQueueUpdated(JotifyPlaybackQueue queue) {
		for (QueueListener listener : this.queueListeners) {
			listener.queueUpdated(queue);
		}
	}

	public void fireSearchResultReceived(Result result) {
		for (SearchListener listener : this.searchListeners) {
			listener.searchResultReceived(result);
		}
	}

	public void fireSearchResultSelected(Result result) {
		for (SearchListener listener : this.searchListeners) {
			listener.searchResultSelected(result);
		}
	}

	public void fireBrowsedArtist(Artist artist) {
		for (BrowseListener listener : this.browseListeners) {
			listener.browsedArtist(artist);
		}
	}

	public void fireBrowsedAlbum(Album album) {
		for (BrowseListener listener : this.browseListeners) {
			listener.browsedAlbum(album);
		}
	}

	public void fireBrowsedTracks(Result result) {
		for (BrowseListener listener : this.browseListeners) {
			listener.browsedTracks(result);
		}
	}

	public void fireControlPlay() {
		for (ControlListener listener : this.controlListeners) {
			listener.controlPlay();
		}
	}

	public void fireControlPause() {
		for (ControlListener listener : this.controlListeners) {
			listener.controlPause();
		}
	}

	public void fireControlPrevious() {
		for (ControlListener listener : this.controlListeners) {
			listener.controlPrevious();
		}
	}

	public void fireControlNext() {
		for (ControlListener listener : this.controlListeners) {
			listener.controlNext();
		}
	}

	public void fireControlVolume(float volume) {
		for (ControlListener listener : this.controlListeners) {
			listener.controlVolume(volume);
		}
	}

	public void fireControlSeek(float percent) {
		for (ControlListener listener : this.controlListeners) {
			listener.controlSeek(percent);
		}
	}

	public void fireControlSelect(Track track) {
		for (ControlListener listener : this.controlListeners) {
			listener.controlSelect(track);
		}
	}

	public void fireControlSelect(List<Track> tracks) {
		for (ControlListener listener : this.controlListeners) {
			listener.controlSelect(tracks);
		}
	}

	public void fireControlAddTracksToQueue(List<Track> tracks) {
		for (ControlListener listener : this.controlListeners) {
			listener.addTracks(tracks);
		}
	}

	public void fireControlQueue(Track track) {
		for (ControlListener listener : this.controlListeners) {
			listener.controlQueue(track);
		}
	}

	public void firePlayerTrackChanged(Track track) {
		for (PlayerListener listener : this.playerListeners) {
			listener.playerTrackChanged(track);
		}
	}

	public void firePlayerStatusChanged(PlayerListener.Status status) {
		for (PlayerListener listener : this.playerListeners) {
			listener.playerStatusChanged(status);
		}
	}

	public void firePlayerPositionChanged(int position) {
		for (PlayerListener listener : this.playerListeners) {
			listener.playerPositionChanged(position);
		}
	}

	public void fireClearSelection() {
		for (ClearSelectionListener listener : this.clearSelectionListeners) {
			listener.clearSelection();
		}
	}
}
package script.file;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;

import chat.encrypter.EncryptService;
import chat.logs.LoggerEx;

/**
 * Abstraction over a file store addressed as protocol://host:port/path?query
 *
 * hdfs://namenode:9000/user/mac/a.txt
 *
 * file://192.168.1.200:9000/user/mac/a.txt
 *
 * @author aplomb
 */
public abstract class FileAdapter {
	/** Optional service used to encrypt on write / decrypt on read; may be null. */
	private EncryptService encryptService;

	public enum READWRITE {
		READ, WRITE;
	}

	/**
	 * Copies {@code is} to {@code os}, decrypting (READ) or encrypting (WRITE)
	 * when an EncryptService is configured; falls back to a plain copy otherwise.
	 *
	 * @param path      path being read or written (currently unused here)
	 * @param is        source stream
	 * @param os        destination stream
	 * @param readWrite direction, selecting decrypt vs. encrypt
	 * @throws IOException on copy or encryption failure (WRITE path)
	 */
	protected void copy(PathEx path, InputStream is, OutputStream os, READWRITE readWrite) throws IOException {
		boolean encrypted = false;
		// Simplified: original declared a null local and immediately
		// null-checked it before falling back to encryptService.
		EncryptService encrypterListener = encryptService;
		if (encrypterListener != null) {
			switch (readWrite) {
				case READ:
					try {
						encrypterListener.decrypt(is, os);
						encrypted = true;
					}
					catch (Throwable e) {
						// NOTE(review): after a failed decrypt, `is` may be
						// partially consumed before the plain copy below —
						// verify the fallback actually sees the full stream.
						LoggerEx.error("D", "rollback...");
					}
					break;
				case WRITE:
					encrypterListener.encrypt(is, os);
					encrypted = true;
					break;
			}
		}
		if (!encrypted)
			IOUtils.copyLarge(is, os);
	}

	public enum FileReplaceStrategy {
		REPLACE, DONTREPLACE;
	}

	public interface SaveFileCachedListener {
		/**
		 * File saved to local
		 *
		 * @param entity
		 * @throws IOException
		 */
		public void fileCached(FileEntity entity) throws IOException;

		/**
		 * File saved to S3 or other remote system.
		 *
		 * @param entity
		 * @throws IOException
		 */
		public void fileSaved(FileEntity entity) throws IOException;

		/**
		 * File saved to S3 or other remote system failed.
		 *
		 * @param entity
		 * @throws IOException
		 */
		public void saveFailed(FileEntity entity, Throwable e) throws IOException;
	}

	/** Type/target metadata attached to a stored path. */
	public static class MetadataEx {
		private String type;
		private String targetId;

		public static final String FIELD_TYPE = "type";
		public static final String FIELD_TARGETID = "tid";

		public String getType() {
			return type;
		}

		public void setType(String type) {
			this.type = type;
		}

		/**
		 * @deprecated misspelled accessor kept for backward compatibility;
		 * use {@link #getTargetId()}.
		 */
		@Deprecated
		public String etTargetId() {
			return getTargetId();
		}

		public String getTargetId() {
			return targetId;
		}

		public void setTargetId(String targetId) {
			this.targetId = targetId;
		}

		public String toString() {
			return "type " + type + "; targetId " + targetId;
		}

		/** @return map form using FIELD_* keys; null fields are omitted */
		public Map<String, String> toMap() {
			Map<String, String> map = new HashMap<>();
			if (targetId != null)
				map.put(FIELD_TARGETID, targetId);
			if (type != null)
				map.put(FIELD_TYPE, type);
			return map;
		}
	}

	/** A store path plus optional hash key and metadata. */
	public static class PathEx {
		public static final String ACU_PATH = "path";
		public static final String ACU_PATH_HASH_KEY = "hashkey";
		public static final String ACU_PATH_ENCRYPTTYPE = "et";
		public static final String ACU_PATH_TYPE = "type";

		private String path;
		private String hashKey;
		private MetadataEx metadata;

		@Override
		public String toString() {
			return path + ", hashKey " + hashKey + ", metadata " + metadata;
		}

		public PathEx(String path) {
			this.path = path;
		}

		public PathEx(String path, String hashKey, MetadataEx metadata) {
			this(path);
			this.hashKey = hashKey;
			this.setMetadata(metadata);
		}

		public String getPath() {
			return path;
		}

		public void setPath(String path) {
			this.path = path;
		}

		public String getHashKey() {
			return hashKey;
		}

		public void setHashKey(String hashKey) {
			this.hashKey = hashKey;
		}

		public MetadataEx getMetadata() {
			return metadata;
		}

		public void setMetadata(MetadataEx metadata) {
			this.metadata = metadata;
		}
	}

	public static final String DOC_ROOT_PATH = "resources/";
	public static final String DOC_STICKER_SUIT_ROOT_PATH = "stickersuit/";

	public abstract FileEntity saveFile(InputStream is, PathEx path, FileReplaceStrategy strategy,
			SaveFileCachedListener listener) throws IOException;

	public abstract FileEntity saveFile(InputStream is, PathEx path, FileReplaceStrategy strategy) throws IOException;

	public abstract FileEntity saveFile(File file, PathEx path, FileReplaceStrategy strategy,
			SaveFileCachedListener listener, boolean isNeedMd5) throws IOException;

	public abstract FileEntity saveFile(InputStream is, long length, PathEx path, FileReplaceStrategy strategy,
			SaveFileCachedListener listener) throws IOException;

	public abstract boolean deleteFile(PathEx path) throws IOException;

	public abstract boolean moveFile(PathEx sourcePath, PathEx destPath) throws IOException;

	public abstract boolean readFile(PathEx path, OutputStream os) throws IOException;

	public abstract boolean readFile(PathEx path, OutputStream os, Integer offset, Integer length) throws IOException;

	public abstract boolean isFileExist(PathEx path) throws IOException;

	public abstract Long getLastModificationTime(PathEx path) throws IOException;

	public abstract FileEntity saveDirectory(PathEx path) throws IOException;

	public abstract boolean deleteDirectory(PathEx path) throws IOException;

	public abstract boolean moveDirectory(PathEx sourcePath, PathEx destPath) throws IOException;

	public abstract boolean isDirectoryExist(PathEx path) throws IOException;

	public abstract FileEntity getFileEntity(PathEx path) throws IOException;

	/**
	 * This is used for generate the download url for directly downloading against file servers.
	 *
	 * @param path
	 * @param fileName
	 * @param contentType
	 * @param useragent
	 * @return
	 * @throws IOException
	 */
	public abstract String generateDownloadUrl(PathEx path, String fileName, String contentType, String useragent)
			throws IOException;

	/**
	 * Check the file adapter support download url or not.
	 *
	 * @return
	 */
	public abstract boolean isSupportDownloadUrl();

	/**
	 * The return string if end with /, then it's a directory, otherwise is a file.
	 *
	 * For example,
	 * user/mac/afile.txt
	 * user/mac/adirectory/
	 *
	 * @param path
	 * @return
	 * @throws IOException
	 */
	public abstract List<FileEntity> getFilesInDirectory(PathEx path, String[] extensions, boolean recursive)
			throws IOException;

	public EncryptService getEncryptService() {
		return encryptService;
	}

	public void setEncryptService(EncryptService encryptService) {
		this.encryptService = encryptService;
	}

	/** Snapshot of a stored file or directory's attributes. */
	public static class FileEntity {
		public static final int TYPE_FILE = 1;
		public static final int TYPE_DIRECTORY = 2;

		private long length;
		private long lastModificationTime;
		private int type;
		private String absolutePath;
		private String md5;

		public void setLength(long length) {
			this.length = length;
		}

		public long getLength() {
			return length;
		}

		public void setLastModificationTime(long lastModificationTime) {
			this.lastModificationTime = lastModificationTime;
		}

		public long getLastModificationTime() {
			return lastModificationTime;
		}

		public void setType(int type) {
			this.type = type;
		}

		public int getType() {
			return type;
		}

		public void setAbsolutePath(String absolutePath) {
			this.absolutePath = absolutePath;
		}

		public String getAbsolutePath() {
			return absolutePath;
		}

		/** Populates this entity from a local java.io.File; no-op if it doesn't exist. */
		public void fromFile(File file) {
			if (file.exists()) {
				if (file.isFile()) {
					type = TYPE_FILE;
					length = file.length();
				}
				else {
					type = TYPE_DIRECTORY;
				}
				absolutePath = FilenameUtils.separatorsToUnix(file.getAbsolutePath());
				lastModificationTime = file.lastModified();
			}
		}

		public String getMd5() {
			return md5;
		}

		public void setMd5(String md5) {
			this.md5 = md5;
		}
	}

	/** Releases adapter resources; default is a no-op. */
	public void close() {
	}
}
package freerunningapps.veggietizer.view; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.res.Resources; import android.view.LayoutInflater; import android.view.View; import android.widget.ExpandableListAdapter; import android.widget.ExpandableListView; import android.widget.ImageView; import android.widget.TextView; import freerunningapps.veggietizer.R; import freerunningapps.veggietizer.controller.FontManager; import freerunningapps.veggietizer.controller.adapter.DetailsInfoAdapter; import freerunningapps.veggietizer.model.Achievement; import freerunningapps.veggietizer.model.enums.Category; import freerunningapps.veggietizer.model.util.Formatter; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; /** * Class to show a popup from everywhere. * Uses static methods to show popups. * * @author Lukas Gebhard <freerunningapps@gmail.com>, Matthias Heim <freerunningapps@gmail.com> */ public final class Popup { /** * Prevents from instantiating this class. */ private Popup() {} /** * Shows a popup after inserting a new meat dish. * The popup gives instant feedback about how much impact the inserted meat dish has. * * @param carbonSaved The amount of CO2 saved. * @param waterSaved The amount of water saved. * @param feedSaved The amount of feed saved. * @param context The application context. * @param onClickListener To be executed when submitting the popup (for example, if another dialog * is to be shown after this one, it could be invoked by this listener). * Can be <code>null</code>, then the dialog just closes. * @return The popup dialog. 
 */
public static AlertDialog showFeedbackOnInsert(float meatSaved, float carbonSaved, float waterSaved,
        float feedSaved, Context context, DialogInterface.OnClickListener onClickListener) {
    Resources resources = context.getResources();
    String grammes = resources.getString(R.string.unitGrammes);
    String kilogrammes = resources.getString(R.string.unitKilogrammes);
    String litres = resources.getString(R.string.unitLitres);
    String millilitres = resources.getString(R.string.unitMillilitres);
    int numberOfDecimals = 2;

    // Construct the popup
    View popupLayout;
    AlertDialog.Builder popupBuilder = new AlertDialog.Builder(context);
    // Set layout for popup
    LayoutInflater inflater = LayoutInflater.from(context);
    // Custom Title
    TextView customTitle = (TextView) inflater.inflate(R.layout.popup_title, null);
    customTitle.setText(R.string.instant_feedback_popup_title);

    String meatSavedFormatted, carbonSavedFormatted, waterSavedFormatted, feedSavedFormatted;
    // Format impact values
    meatSavedFormatted = Formatter.format(meatSaved, grammes, kilogrammes, Formatter.KILO, numberOfDecimals);
    carbonSavedFormatted = Formatter.format(carbonSaved, grammes, kilogrammes, Formatter.KILO, numberOfDecimals);

    // If meat other than fish was selected, also add water and feed savings.
    // NOTE(review): fish is detected implicitly by zero water AND feed savings -- confirm.
    if (waterSaved != 0 && feedSaved != 0) {
        popupLayout = inflater.inflate(R.layout.popup_instant_feedback, null);
        // Format impact values
        waterSavedFormatted = Formatter.format(waterSaved, millilitres, litres, Formatter.KILO, 0);
        feedSavedFormatted = Formatter.format(feedSaved, grammes, kilogrammes, Formatter.KILO, numberOfDecimals);
        // Set saved values
        TextView waterText = ((TextView) popupLayout.findViewById(R.id.instant_feedback_popup_water));
        TextView feedText = ((TextView) popupLayout.findViewById(R.id.instant_feedback_popup_feed));
        waterText.setText(waterSavedFormatted);
        feedText.setText(feedSavedFormatted);
        Utility.setFont(FontManager.Font.ROBOTO_LIGHT, new TextView[]{ waterText, feedText });
    } else {
        popupLayout = inflater.inflate(R.layout.popup_instant_feedback_fish, null);
    }

    // Set saved values
    TextView meatText = ((TextView) popupLayout.findViewById(R.id.instant_feedback_popup_meat));
    TextView carbonText = ((TextView) popupLayout.findViewById(R.id.instant_feedback_popup_carbon));
    meatText.setText(meatSavedFormatted);
    carbonText.setText(carbonSavedFormatted);
    Utility.setFont(FontManager.Font.ROBOTO_LIGHT, new TextView[] { customTitle, meatText, carbonText });

    popupBuilder.setView(popupLayout)
            .setCustomTitle(customTitle)
            .setNeutralButton(R.string.ok, onClickListener);
    AlertDialog dialog = popupBuilder.create();
    dialog.show();
    return dialog;
}

/**
 * Shows a popup window.
 *
 * @param title The title of the popup.
 * @param description The custom description.
 * @param context The context to show the popup in.
 * @param onSubmitListener Invoked when the neutral button is pressed; may be <code>null</code>.
 * @return The popup dialog.
 */
public static AlertDialog show(String title, String description, Context context,
        DialogInterface.OnClickListener onSubmitListener) {
    // Custom Title
    TextView customTitle = (TextView) LayoutInflater.from(context).inflate(R.layout.popup_title, null);
    customTitle.setText(title);
    AlertDialog.Builder popupBuilder = new AlertDialog.Builder(context);
    popupBuilder.setMessage(description)
            .setNeutralButton(R.string.ok, onSubmitListener)
            .setCustomTitle(customTitle);
    AlertDialog dialog = popupBuilder.create();
    dialog.show();
    // The message view only exists after show(); restyle it to match the app theme.
    TextView popupMessage = (TextView) dialog.findViewById(android.R.id.message);
    popupMessage.setTextColor(context.getResources().getColor(R.color.text_gray));
    Utility.setFont(FontManager.Font.ROBOTO_LIGHT, new TextView[]{ customTitle, popupMessage });
    return dialog;
}

/**
 * Shows the popup that is opened when the user clicks on the info action button of a details page.
 * An {@link ExpandableListView} is used as a layout.
 *
 * @param context The context to show the popup in.
 * @param category The category to inform about.
 * @param onSubmitListener Invoked when the neutral button is pressed; may be <code>null</code>.
 * @return The dialog.
*/ public static AlertDialog showDetailsInfo(Context context, Category category, DialogInterface.OnClickListener onSubmitListener) { Resources res = context.getResources(); AlertDialog dialog; switch (category) { case CO2: dialog = Popup.showDetailsInfo(context, res.getString(R.string.details_info_co2_title), res.getStringArray(R.array.details_info_co2_groups), res.getStringArray(R.array.details_info_co2_entries), onSubmitListener); break; case WATER: dialog = Popup.showDetailsInfo(context, res.getString(R.string.details_info_water_title), res.getStringArray(R.array.details_info_water_groups), res.getStringArray(R.array.details_info_water_entries), onSubmitListener); break; case FEED: dialog = Popup.showDetailsInfo(context, res.getString(R.string.details_info_feed_title), res.getStringArray(R.array.details_info_feed_groups), res.getStringArray(R.array.details_info_feed_entries), onSubmitListener); break; case MEAT: dialog = Popup.showDetailsInfo(context, res.getString(R.string.details_info_meat_title), res.getStringArray(R.array.details_info_meat_groups), res.getStringArray(R.array.details_info_meat_entries), onSubmitListener); break; default: throw new IllegalStateException("Unsupported category " + category); } return dialog; } /** * Shows the popup that is opened when the user clicks on the info action button of a details page. * An {@link ExpandableListView} is used as a layout. * * @param context The context to show the popup in. * @param title The popup's title. * @param groups The expandable categories. * @param entries Each category in <code>groups</code> shows one entry when expanded. * @return The dialog. 
 */
private static AlertDialog showDetailsInfo(Context context, String title, String[] groups,
        String[] entries, DialogInterface.OnClickListener onSubmitListener) {
    // groups[i] expands to exactly one entry entries[i]; mismatched lengths are a caller bug.
    if (groups.length != entries.length) {
        throw new IllegalArgumentException();
    }
    // Custom title
    TextView customTitle = (TextView) LayoutInflater.from(context).inflate(R.layout.popup_title, null);
    customTitle.setText(title);
    View popupDetailsInfo = LayoutInflater.from(context)
            .inflate(R.layout.popup_details_info, null);
    ExpandableListView listViewDetailsInfo
        = (ExpandableListView) popupDetailsInfo.findViewById(R.id.expandablelistview_popup_details_info);
    ExpandableListAdapter adapter = createDetailsInfoAdapter(context, groups, entries);
    listViewDetailsInfo.setAdapter(adapter);
    AlertDialog.Builder popupBuilder = new AlertDialog.Builder(context);
    popupBuilder.setView(popupDetailsInfo)
            .setNeutralButton(R.string.ok, onSubmitListener)
            .setCustomTitle(customTitle);
    AlertDialog dialog = popupBuilder.create();
    dialog.show();
    Utility.setFont(FontManager.Font.ROBOTO_LIGHT, new TextView[]{ customTitle });
    return dialog;
}

/**
 * Builds the two-level adapter backing the details-info list: one group per
 * entry in <code>groups</code>, each expanding to the matching <code>entries</code> text.
 */
private static ExpandableListAdapter createDetailsInfoAdapter(Context context, String[] groups,
        String[] entries) {
    // Creates the upper list level
    List<Map<String, String>> groupData = new LinkedList<>();
    String groupName = "GroupName";
    String[] groupFrom = new String[] {groupName};
    int[] groupTo = new int[] {R.id.component_details_info_group};
    for (String g : groups) {
        Map<String, String> group = new HashMap<>(1);
        group.put(groupName, g);
        groupData.add(group);
    }
    // Creates the lower list level
    List<List<Map<String, String>>> childData = new LinkedList<>();
    String groupContent = "Content";
    String[] childFrom = new String[] {groupContent};
    int[] childTo = new int[] {R.id.component_details_info_entry};
    for (String e : entries) {
        List<Map<String, String>> content = new LinkedList<>();
        Map<String, String> entry = new HashMap<>(1);
        entry.put(groupContent, e);
        content.add(entry);
        childData.add(content);
    }
    return new DetailsInfoAdapter(context, groupData, groupFrom, groupTo, childData, childFrom, childTo);
}

/**
 * Shared implementation for the achievement popups: renders heading, requirements,
 * description and icon, and wires the positive button to the share chooser.
 */
private static AlertDialog showAchievement(Achievement achievement, Context context,
        DialogInterface.OnClickListener onSubmitListener, String heading, String shareText,
        String requirementsHeading, String requirements) {
    AlertDialog.Builder popupBuilder = new AlertDialog.Builder(context);
    // Set layout for popup
    LayoutInflater inflater = LayoutInflater.from(context);
    View popupLayout = inflater.inflate(R.layout.popup_achievements, null);
    // Custom Title
    TextView customTitle = (TextView) inflater.inflate(R.layout.popup_title, null);
    customTitle.setText(heading);
    popupBuilder.setView(popupLayout);
    // Set requirements
    TextView requirementHeading
        = (TextView) popupLayout.findViewById(R.id.achievements_popup_requirements_heading);
    requirementHeading.setText(requirementsHeading);
    TextView req = ((TextView) popupLayout.findViewById(R.id.achievements_popup_requirements));
    req.setText(requirements);
    // Set description
    TextView des = ((TextView) popupLayout.findViewById(R.id.achievements_popup_description));
    des.setText(achievement.getDescription());
    Utility.setFont(FontManager.Font.ROBOTO_LIGHT, new TextView[]{ customTitle, req, des });
    // Set icon
    ((ImageView) popupLayout.findViewById(R.id.achievements_popup_image))
            .setImageResource(achievement.getIconUnlockedID());
    // Share button appends the Play Store URL to the achievement's share text.
    popupBuilder.setCustomTitle(customTitle)
            .setNeutralButton(R.string.ok, onSubmitListener)
            .setPositiveButton(context.getResources().getString(R.string.popup_share_achievement),
                    new ShareAchievementListener(context, shareText + "\n\n"
                            + context.getResources().getString(R.string.app_playstore_url)));
    AlertDialog dialog = popupBuilder.create();
    dialog.show();
    return dialog;
}

/**
 * Shows an already unlocked achievement listed in the achievement activity.
 *
 * @param achievement The unlocked achievement to show.
 * @param context The application context.
 * @return The achievement.
*/ public static AlertDialog showAchievement(Achievement achievement, Context context, DialogInterface.OnClickListener onSubmitListener) { return showAchievement(achievement, context, onSubmitListener, achievement.getHeading(), achievement.getShareText(), context.getResources().getString(R.string.achievement_requirements_heading_unlocked), achievement.getRequirements()); } /** * Shows the just unlocked <code>achievement</code> as a popup dialog. * * @param achievement The achievement to show. * @param context The context in which to open the achievement in. * @return The achievement as a popup dialog. */ public static AlertDialog showNewAchievement(Context context, Achievement achievement, DialogInterface.OnClickListener onSubmitListener) { String heading = context.getResources().getString(R.string.achievement_unlocked) + "\n" + achievement.getHeading(); String requirementsHeading = context.getResources().getString( R.string.achievement_requirements_heading_unlocked); String requirements = achievement.getRequirements(); String shareText = achievement.getShareText(); return showAchievement(achievement, context, onSubmitListener, heading, shareText, requirementsHeading, requirements); } /** * A listener to be invoked when the user wants to share an achievement. */ public static class ShareAchievementListener implements DialogInterface.OnClickListener { private Context context; private String shareText; public ShareAchievementListener(Context context, String shareText) { this.context = context; this.shareText = shareText; } @Override public void onClick(DialogInterface dialogInterface, int buttonType) { Intent shareIntent = new Intent(Intent.ACTION_SEND); shareIntent.setType("text/plain"); shareIntent.putExtra(Intent.EXTRA_TEXT, shareText); context.startActivity(Intent.createChooser(shareIntent, context.getResources().getString(R.string .popup_share_achievement))); } } }
/* * Copyright (c) 2012 - 2015, Clark & Parsia, LLC. <http://www.clarkparsia.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.utah.ece.async.sboldesigner.sbol.editor.dialog; import java.awt.Component; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.io.File; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.regex.PatternSyntaxException; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTable; import javax.swing.JTextField; import javax.swing.ListSelectionModel; import javax.swing.RowFilter; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import javax.swing.table.TableModel; import javax.swing.table.TableRowSorter; import org.sbolstandard.core2.ComponentDefinition; import org.sbolstandard.core2.SBOLDocument; import org.sbolstandard.core2.SBOLReader; import org.sbolstandard.core2.SBOLValidationException; import org.sbolstandard.core2.SequenceOntology; import org.sbolstandard.core2.TopLevel; import org.synbiohub.frontend.IdentifiedMetadata; import 
org.synbiohub.frontend.SynBioHubException;
import org.synbiohub.frontend.SynBioHubFrontend;
import org.synbiohub.frontend.WebOfRegistriesData;

import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;

import edu.utah.ece.async.sboldesigner.sbol.CharSequenceUtil;
import edu.utah.ece.async.sboldesigner.sbol.SBOLUtils;
import edu.utah.ece.async.sboldesigner.sbol.SBOLUtils.Types;
import edu.utah.ece.async.sboldesigner.sbol.editor.Part;
import edu.utah.ece.async.sboldesigner.sbol.editor.Parts;
import edu.utah.ece.async.sboldesigner.sbol.editor.Registries;
import edu.utah.ece.async.sboldesigner.sbol.editor.Registry;
import edu.utah.ece.async.sboldesigner.sbol.editor.SBOLEditorPreferences;
import edu.utah.ece.async.sboldesigner.sbol.editor.SynBioHubFrontends;
import edu.utah.ece.async.sboldesigner.swing.ComboBoxRenderer;
import edu.utah.ece.async.sboldesigner.swing.FormBuilder;

/**
 * Dialog for selecting a part from a registry (built-in parts, the working
 * document, an SBOL file on disk, or a SynBioHub instance).
 *
 * @author Evren Sirin
 */
public class RegistryInputDialog extends InputDialog<SBOLDocument> {

	// Renders a registry as "name (shortened location)" in the registry combo.
	private final ComboBoxRenderer<Registry> registryRenderer = new ComboBoxRenderer<Registry>() {
		@Override
		protected String getLabel(Registry registry) {
			StringBuilder sb = new StringBuilder();
			if (registry != null) {
				sb.append(registry.getName());
				if (!registry.getLocation().equals("N/A")) {
					sb.append(" (");
					sb.append(CharSequenceUtil.shorten(registry.getLocation(), 30));
					sb.append(")");
				}
			}
			return sb.toString();
		}

		@Override
		protected String getToolTip(Registry registry) {
			return registry == null ? "" : registry.getDescription();
		}
	};

	// Renders a collection by its display id in the collection combo.
	private final ComboBoxRenderer<IdentifiedMetadata> collectionsRenderer = new ComboBoxRenderer<IdentifiedMetadata>() {
		@Override
		protected String getLabel(IdentifiedMetadata collection) {
			if (collection != null) {
				return collection.getDisplayId();
			} else {
				return "Unknown";
			}
		}

		@Override
		protected String getToolTip(IdentifiedMetadata collection) {
			return collection == null ? "" : collection.getDescription();
		}
	};

	private static final String TITLE = "Select a part from registry";

	// Sentinel "no role filter" part shown at the top of the role combo.
	public static final Part ALL_PARTS = new Part("All parts", "All");

	// represents what part we should display in role selection
	private Part part;
	// represents the role of the template CD, could be used in roleRefinement
	private URI refinementRole;
	private JComboBox<Part> roleSelection;
	private JComboBox<String> roleRefinement;
	private ActionListener roleRefinementListener = new ActionListener() {
		@Override
		public void actionPerformed(ActionEvent event) {
			updateTable();
		}
	};
	private Types type;
	// Document being edited; may be null, in which case it is loaded from disk.
	private SBOLDocument workingDoc;
	private JComboBox<Types> typeSelection;
	private JComboBox<IdentifiedMetadata> collectionSelection;
	// Guard flag: false while collectionSelection is mutated programmatically.
	private boolean updateCollection = true;
	private ActionListener collectionSelectionListener = new ActionListener() {
		@Override
		public void actionPerformed(ActionEvent event) {
			// only update collectionSelection when we aren't programmatically
			// modifying it in collectionSelectionListener
			if (updateCollection) {
				updateCollectionSelection(false, null);
				updateTable();
			}
		}
	};
	// Per-registry stack of collections the user has drilled into; static, so the
	// path survives across dialog instances.
	private static HashMap<Registry, ArrayList<IdentifiedMetadata>> collectionPaths = new HashMap<>();

	private JTable table;
	private JLabel tableLabel;
	private JScrollPane scroller;

	final JTextField filterSelection = new JTextField();

	/*
	 * Determines whether the table should be refreshed when a user types in
	 * filter text. This is true when the results from SynBioHubQuery exceeds
	 * the QUERY_LIMIT.
	 */
	private boolean refreshSearch = false;

	/*
	 * Stores the filter text that produced the currently cached ArrayList<TableMetadata>.
	 */
	private String cacheKey = "";

	// If non-null, receives the root CD of the final selection.
	private ComponentDefinitionBox root;
	private static SynBioHubFrontend synBioHub;

	private boolean allowCollectionSelection = false;

	// Kind of TopLevel being browsed; "ComponentDefinition" by default.
	private String objectType = "ComponentDefinition";

	/**
	 * Allows a collection to be selected.
*/ public void allowCollectionSelection() { allowCollectionSelection = true; } public void setObjectType(String objectType) { this.objectType = objectType; } /** * For when the working document is known and the root is not needed. */ public RegistryInputDialog(final Component parent, final Part part, Types type, URI refinementRole, SBOLDocument workingDoc) { super(parent, TITLE); this.workingDoc = workingDoc; setup(null, part, type, refinementRole); } /** * For when the working document is unknown and the root is not needed. */ public RegistryInputDialog(final Component parent, final Part part, Types type, URI refinementRole) { super(parent, TITLE); this.workingDoc = null; setup(null, part, type, refinementRole); } /** * For when the working document is known and preferences node shouldn't be * used */ public RegistryInputDialog(final Component parent, ComponentDefinitionBox root, final Part part, Types type, URI refinementRole, SBOLDocument workingDoc) { super(parent, TITLE); this.workingDoc = workingDoc; setup(root, part, type, refinementRole); } /** * For when the working document is unknown and preferences node should be * used */ public RegistryInputDialog(final Component parent, ComponentDefinitionBox root, final Part part, Types type, URI refinementRole) { super(parent, TITLE); this.workingDoc = null; setup(root, part, type, refinementRole); } /** * root, if not null, will reference the root CD that was selected. 
*/ private void setup(ComponentDefinitionBox root, final Part part, Types type, URI refinementRole) { this.root = root; this.part = part; this.refinementRole = refinementRole; this.type = type; Registries registries = Registries.get(); int selectedRegistry = registries.getVersionRegistryIndex(); registrySelection = new JComboBox<Registry>(Iterables.toArray(registries, Registry.class)); if (registries.size() > 0) { registrySelection.setSelectedIndex(selectedRegistry); } registrySelection.addActionListener(actionListener); registrySelection.setRenderer(registryRenderer); builder.add("Registry", registrySelection); if (registries.size() == 0) { JOptionPane.showMessageDialog(this, "No parts registries are defined.\nPlease click 'Options' and add a parts registry."); location = null; uriPrefix = null; } else { location = registries.get(selectedRegistry).getLocation(); uriPrefix = registries.get(selectedRegistry).getUriPrefix(); } } @Override public void initFormPanel(FormBuilder builder) { // set up type selection typeSelection = new JComboBox<Types>(Types.values()); typeSelection.setSelectedItem(type); typeSelection.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { updateTable(); updateContext(); } }); if (objectType == "ComponentDefinition" || objectType == "Variant") { builder.add("Part type", typeSelection); } // set up collection selection collectionSelection = new JComboBox<IdentifiedMetadata>(); collectionSelection.setRenderer(collectionsRenderer); updateCollectionSelection(true, null); collectionSelection.addActionListener(collectionSelectionListener); builder.add("Collection", collectionSelection); // set up role selection List<Part> parts = Lists.newArrayList(Parts.sorted()); parts.add(0, ALL_PARTS); roleSelection = new JComboBox<Part>(parts.toArray(new Part[0])); roleSelection.setRenderer(new PartCellRenderer()); roleSelection.setSelectedItem(part); roleSelection.addActionListener(new ActionListener() { @Override 
public void actionPerformed(ActionEvent event) { part = (Part) roleSelection.getSelectedItem(); updateRoleRefinement(); updateTable(); } }); if (objectType == "ComponentDefinition" || objectType == "Variant") { builder.add("Part role", roleSelection); } // set up the JComboBox for role refinement roleRefinement = new JComboBox<String>(); updateRoleRefinement(); roleRefinement.removeActionListener(roleRefinementListener); if (refinementRole != null && refinementRole != part.getRole()) { String roleName = new SequenceOntology().getName(refinementRole); if (!comboBoxContains(roleRefinement, roleName)) { roleRefinement.addItem(roleName); } roleRefinement.setSelectedItem(roleName); } roleRefinement.addActionListener(roleRefinementListener); if (objectType == "ComponentDefinition" || objectType == "Variant") { builder.add("Role refinement", roleRefinement); } updateContext(); // set up the filter filterSelection.getDocument().addDocumentListener(new DocumentListener() { @Override public void removeUpdate(DocumentEvent paramDocumentEvent) { searchOrFilterTable(); } @Override public void insertUpdate(DocumentEvent paramDocumentEvent) { searchOrFilterTable(); } @Override public void changedUpdate(DocumentEvent paramDocumentEvent) { searchOrFilterTable(); } private void searchOrFilterTable() { /* * System.out.println(); * System.out.println("searchOrFilterTable"); * System.out.println("refreshSearch: " + refreshSearch); * System.out.println("cacheKey: " + cacheKey); * System.out.println("filter: " + filterSelection.getText()); */ if ((refreshSearch || filterSelection.getText().equals("") || !filterSelection.getText().contains(cacheKey)) && isMetadata()) { searchParts(part, synBioHub, filterSelection.getText()); } else { updateFilter(filterSelection.getText()); } } }); builder.add("Filter parts", filterSelection); } /** * Returns whether box contains s */ private boolean comboBoxContains(JComboBox<String> box, String s) { for (int i = 0; i < box.getItemCount(); i++) { if (s 
!= null && s.equals(roleRefinement.getItemAt(i))) { return true; } } return false; } @Override protected JPanel initMainPanel() { JPanel panel; Part part = null; if (roleSelection.isEnabled() && roleRefinement.isEnabled()) { String roleName = (String) roleRefinement.getSelectedItem(); if (roleName == null || roleName.equals("None")) { part = (Part) roleSelection.getSelectedItem(); } else { SequenceOntology so = new SequenceOntology(); URI role = so.getURIbyName(roleName); part = new Part(role, null, null); } } else { part = ALL_PARTS; } if (isMetadata()) { searchParts(part, synBioHub, filterSelection.getText()); TableMetadataTableModel tableModel = new TableMetadataTableModel(new ArrayList<TableMetadata>()); panel = createTablePanel(tableModel, "Matching parts (" + tableModel.getRowCount() + ")"); } else if(objectType == "Variant"){ List<TopLevel> topLevels = searchForPotentialVariants(part); TopLevelTableModel model = new TopLevelTableModel(topLevels); panel = createTablePanel(model, "Matching parts (" + model.getRowCount() + ")"); }else { List<ComponentDefinition> components = searchParts(part); ComponentDefinitionTableModel tableModel = new ComponentDefinitionTableModel(components); panel = createTablePanel(tableModel, "Matching parts (" + tableModel.getRowCount() + ")"); } table = (JTable) panel.getClientProperty("table"); tableLabel = (JLabel) panel.getClientProperty("label"); scroller = (JScrollPane) panel.getClientProperty("scroller"); return panel; } /** * Checks to see if the registry we are working on is represented by * IdentifiedMetadata. */ private boolean isMetadata() { return location.startsWith("http://") || location.startsWith("https://"); } private List<TopLevel> searchForPotentialVariants(Part part) { try { if (isMetadata()) { throw new Exception("Incorrect state. 
url isn't a path");
		}
		if (part.equals(ALL_PARTS)) {
			part = null; // null part means "no role filter" downstream
		}
		SBOLReader.setURIPrefix(SBOLEditorPreferences.INSTANCE.getUserInfo().getURI().toString());
		SBOLReader.setCompliant(true);
		SBOLDocument doc;
		Registry registry = (Registry) registrySelection.getSelectedItem();
		if (registry.equals(Registry.BUILT_IN)) {
			// read from BuiltInParts.xml
			doc = SBOLReader.read(Registry.class.getResourceAsStream("/BuiltInParts.xml"));
		} else if (registry.equals(Registry.WORKING_DOCUMENT)) {
			if (workingDoc != null) {
				// workingDoc is specified, so use that
				doc = workingDoc;
			} else {
				// read from SBOLUtils.setupFile();
				File file = SBOLUtils.setupFile();
				if (file.exists()) {
					doc = SBOLReader.read(file);
				} else {
					// JOptionPane.showMessageDialog(null, "The working
					// document could not be found on disk. Try opening the
					// file again.");
					return new ArrayList<TopLevel>();
				}
			}
		} else {
			// read from the location (path)
			doc = SBOLReader.read(location);
		}
		doc.setDefaultURIprefix(SBOLEditorPreferences.INSTANCE.getUserInfo().getURI().toString());
		return SBOLUtils.getCDCollectionsAndComboDerv(doc, part);
	} catch (Exception e) {
		e.printStackTrace();
		MessageDialog.showMessage(null, "Getting the SBOLDocument from path failed: ", e.getMessage());
		Registries registries = Registries.get();
		registries.setVersionRegistryIndex(0);
		registries.save();
		// NOTE(review): returning null forces callers (initMainPanel/updateTable) to
		// cope with a null list -- confirm the table models handle that.
		return null;
	}
}

/**
 * Gets the SBOLDocument from the path (file on disk) and returns all its
 * CDs.
 */
private List<ComponentDefinition> searchParts(Part part) {
	try {
		if (isMetadata()) {
			throw new Exception("Incorrect state. url isn't a path");
		}
		if (part.equals(ALL_PARTS)) {
			part = null; // null part means "no role filter" downstream
		}
		SBOLReader.setURIPrefix(SBOLEditorPreferences.INSTANCE.getUserInfo().getURI().toString());
		SBOLReader.setCompliant(true);
		SBOLDocument doc;
		Registry registry = (Registry) registrySelection.getSelectedItem();
		if (registry.equals(Registry.BUILT_IN)) {
			// read from BuiltInParts.xml
			doc = SBOLReader.read(Registry.class.getResourceAsStream("/BuiltInParts.xml"));
		} else if (registry.equals(Registry.WORKING_DOCUMENT)) {
			if (workingDoc != null) {
				// workingDoc is specified, so use that
				doc = workingDoc;
			} else {
				// read from SBOLUtils.setupFile();
				File file = SBOLUtils.setupFile();
				if (file.exists()) {
					doc = SBOLReader.read(file);
				} else {
					// JOptionPane.showMessageDialog(null, "The working
					// document could not be found on disk. Try opening the
					// file again.");
					return new ArrayList<ComponentDefinition>();
				}
			}
		} else {
			// read from the location (path)
			doc = SBOLReader.read(location);
		}
		doc.setDefaultURIprefix(SBOLEditorPreferences.INSTANCE.getUserInfo().getURI().toString());
		return SBOLUtils.getCDOfRole(doc, part);
	} catch (Exception e) {
		e.printStackTrace();
		MessageDialog.showMessage(null, "Getting the SBOLDocument from path failed: ", e.getMessage());
		Registries registries = Registries.get();
		registries.setVersionRegistryIndex(0);
		registries.save();
		// NOTE(review): same null-on-failure contract as searchForPotentialVariants.
		return null;
	}
}

/**
 * Queries SynBioHub for CDs matching the role(s), type(s), and
 * collection(s) of the part. Also filters by the filterText.
 */
private void searchParts(Part part, SynBioHubFrontend synbiohub, String filterText) {
	try {
		if (!isMetadata()) {
			throw new Exception("Incorrect state.
url is a path"); } if (synbiohub == null) { synbiohub = createSynBioHubFrontend(location, uriPrefix); } if (part != null) { // create the query IdentifiedMetadata selectedCollection = (IdentifiedMetadata) collectionSelection.getSelectedItem(); if (selectedCollection == null) { return; } Set<URI> setCollections = new HashSet<URI>(Arrays.asList(URI.create(selectedCollection.getUri()))); Set<URI> setRoles = new HashSet<URI>(part.getRoles()); Set<URI> setTypes = SBOLUtils.convertTypesToSet((Types) typeSelection.getSelectedItem()); String type = objectType; if(type == "Variant") type = "ComponentDefinition"; SynBioHubQuery query = new SynBioHubQuery(synbiohub, setRoles, setTypes, setCollections, filterText, type, new TableUpdater(), this); // non-blocking: will update using the TableUpdater query.execute(); } } catch (Exception e) { e.printStackTrace(); JOptionPane.showMessageDialog(null, "Querying this repository failed: " + e.getMessage() + "\n" + " Internet connection is required for importing from SynBioHub. 
Setting default registry to built-in parts, which doesn't require an internet connection."); Registries registries = Registries.get(); registries.setVersionRegistryIndex(0); registries.save(); } } public URI getSelectedURI() { TopLevel comp = null; int row = table.convertRowIndexToModel(table.getSelectedRow()); if (isMetadata()) { TableMetadata compMeta = ((TableMetadataTableModel) table.getModel()).getElement(row); return URI.create(compMeta.identified.getUri()); } else { comp = ((TopLevelTableModel) table.getModel()).getElement(row); return comp.getIdentity(); } } @Override protected SBOLDocument getSelection() { try { SBOLDocument document = null; TopLevel comp = null; int row = table.convertRowIndexToModel(table.getSelectedRow()); if (isMetadata()) { TableMetadata compMeta = ((TableMetadataTableModel) table.getModel()).getElement(row); if (synBioHub == null) { System.out.print(uriPrefix); synBioHub = createSynBioHubFrontend(location, uriPrefix); } if (compMeta.isCollection) { if(!allowCollectionSelection) { JOptionPane.showMessageDialog(getParent(), "Selecting collections is not allowed"); return new SBOLDocument(); }else { document = synBioHub.getSBOL(URI.create(compMeta.identified.getUri())); return document; } } if(!compMeta.identified.getUri().toString().startsWith(uriPrefix)) { Registries regs = Registries.get(); for(Registry reg : regs){ if(compMeta.identified.getUri().toString().startsWith(reg.getUriPrefix())) { synBioHub = createSynBioHubFrontend(reg.getLocation(), reg.getUriPrefix()); break; } } } document = synBioHub.getSBOL(URI.create(compMeta.identified.getUri())); comp = document.getComponentDefinition(URI.create(compMeta.identified.getUri())); if (comp == null) { // if cannot find it then return root component definition // from document for (ComponentDefinition cd : document.getRootComponentDefinitions()) { comp = cd; } } } else { document = new SBOLDocument(); comp = ((TopLevelTableModel) table.getModel()).getElement(row); document = 
document.createRecursiveCopy(comp);
		}
		// NOTE(review): root appears to be a result holder supplied by the caller;
		// record the selected ComponentDefinition on it — confirm against callers.
		if (root != null) {
			root.cd = document.getComponentDefinition(comp.getIdentity());
		}
		return document;
	} catch (SBOLValidationException | SynBioHubException e) {
		e.printStackTrace();
		MessageDialog.showMessage(null, "Getting this selection failed: ", e.getMessage());
		return null;
	}
}

/**
 * Reacts to the user choosing a different registry: recreates the SynBioHub
 * frontend (metadata registries only), enables/disables the login button,
 * resets the collection combo box, and refreshes the parts table.
 */
@Override
protected void registryChanged() {
	if (isMetadata()) {
		synBioHub = createSynBioHubFrontend(location, uriPrefix);
	}
	loginButton.setEnabled(isMetadata());
	updateCollectionSelection(true, null);
	updateTable();
}

/**
 * Rebuilds the collection combo box.
 *
 * @param registryChanged true when the registry itself changed; the box is
 *                        reset to the "All"/"Root" pseudo-collections and any
 *                        cached drill-down path for that registry is restored
 * @param newCollection   when non-null, a collection the user drilled into;
 *                        it is appended to the box and cached in collectionPaths
 */
private void updateCollectionSelection(boolean registryChanged, IdentifiedMetadata newCollection) {
	collectionSelection.setEnabled(isMetadata());
	if (!isMetadata()) {
		return;
	}
	if (synBioHub == null) {
		synBioHub = createSynBioHubFrontend(location, uriPrefix);
	}
	Registry registry = (Registry) registrySelection.getSelectedItem();
	// suppress collection-change handling while the box is being rebuilt
	updateCollection = false;
	if (registryChanged) {
		// display only "rootCollections"
		IdentifiedMetadata allCollections = new IdentifiedMetadata();
		allCollections.setName("All Collections");
		allCollections.setDisplayId("All Collections");
		allCollections.setUri("http://AllCollections");
		IdentifiedMetadata rootCollections = new IdentifiedMetadata();
		rootCollections.setName("Root Collections");
		rootCollections.setDisplayId("Root Collections");
		rootCollections.setUri("http://RootCollections");
		collectionSelection.removeAllItems();
		collectionSelection.addItem(allCollections);
		collectionSelection.addItem(rootCollections);
		collectionSelection.setSelectedItem(rootCollections);
		// restore/create cached collection path
		if (collectionPaths.containsKey(registry)) {
			for (IdentifiedMetadata collection : collectionPaths.get(registry)) {
				collectionSelection.addItem(collection);
				collectionSelection.setSelectedItem(collection);
			}
		} else {
			collectionPaths.put(registry, new ArrayList<>());
		}
	} else {
		// clicked on different collection
		if (newCollection != null) {
			collectionSelection.addItem(newCollection);
collectionSelection.setSelectedItem(newCollection); collectionPaths.get(registry).add(newCollection); } else { int stackMod = 1; if(collectionSelection.getSelectedIndex() == 0) { stackMod = 2; } while (collectionSelection.getSelectedIndex() + stackMod < collectionSelection.getItemCount()) { collectionSelection.removeItemAt(collectionSelection.getSelectedIndex() + stackMod); // TODO: new check if (collectionSelection.getSelectedIndex() < collectionPaths.get(registry).size()) { collectionPaths.get(registry).remove(collectionSelection.getSelectedIndex()); } } } } updateCollection = true; } private void updateRoleRefinement() { roleRefinement.removeActionListener(roleRefinementListener); roleRefinement.removeAllItems(); for (String s : SBOLUtils.createRefinements((Part) roleSelection.getSelectedItem())) { roleRefinement.addItem(s); } roleRefinement.addActionListener(roleRefinementListener); } private void updateContext() { boolean enableRoles = typeSelection.getSelectedItem() == Types.DNA || typeSelection.getSelectedItem() == Types.RNA; roleSelection.setEnabled(enableRoles); roleRefinement.setEnabled(enableRoles); } public void updateTable() { // create the part criteria Part part = null; if (roleSelection.isEnabled() && roleRefinement.isEnabled()) { String roleName = (String) roleRefinement.getSelectedItem(); if (roleName == null || roleName.equals("None")) { part = (Part) roleSelection.getSelectedItem(); } else { SequenceOntology so = new SequenceOntology(); URI role = so.getURIbyName(roleName); part = new Part(role, null, null); } } else { part = ALL_PARTS; } if (isMetadata()) { searchParts(part, synBioHub, filterSelection.getText()); } else if(objectType == "Variant"){ List<TopLevel> topLevels = searchForPotentialVariants(part); //topLevels = SBOLUtils.getTopLevelOfType(topLevels, (Types) typeSelection.getSelectedItem()); TopLevelTableModel tableModel = new TopLevelTableModel(topLevels); table = new JTable(tableModel); tableLabel.setText("Matching parts (" + 
topLevels.size() + ")"); refreshSearch = false; TableRowSorter<TableModel> sorter = new TableRowSorter<TableModel>(tableModel); table.setRowSorter(sorter); setWidthAsPercentages(table, tableModel.getWidths()); }else { List<ComponentDefinition> components = searchParts(part); components = SBOLUtils.getCDOfType(components, (Types) typeSelection.getSelectedItem()); ComponentDefinitionTableModel tableModel = new ComponentDefinitionTableModel(components); table = new JTable(tableModel); tableLabel.setText("Matching parts (" + components.size() + ")"); refreshSearch = false; TableRowSorter<TableModel> sorter = new TableRowSorter<TableModel>(tableModel); table.setRowSorter(sorter); setWidthAsPercentages(table, tableModel.getWidths()); } table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); table.getSelectionModel().addListSelectionListener(new ListSelectionListener() { @Override public void valueChanged(ListSelectionEvent event) { setSelectAllowed(table.getSelectedRow() >= 0); } }); table.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent e) { if (e.getClickCount() == 2 && table.getSelectedRow() >= 0) { handleTableSelection(false); } } }); scroller.setViewportView(table); } @Override protected void handleTableSelection(boolean select) { // handle collection selected if (isMetadata()) { int row = table.convertRowIndexToModel(table.getSelectedRow()); TableMetadata meta = ((TableMetadataTableModel) table.getModel()).getElement(row); if (meta.isCollection && (!select || !allowCollectionSelection)) { updateCollectionSelection(false, meta.identified); updateTable(); return; } } // otherwise a part was selected canceled = false; setVisible(false); } private void updateFilter(String filterText) { filterText = "(?i)" + filterText; if (isMetadata()) { TableRowSorter<TableMetadataTableModel> sorter = (TableRowSorter) table.getRowSorter(); if (filterText.length() == 0) { sorter.setRowFilter(null); } else { try { RowFilter<TableMetadataTableModel, Object> 
rf = RowFilter.regexFilter(filterText, 0, 1, 2, 4); sorter.setRowFilter(rf); } catch (PatternSyntaxException e) { sorter.setRowFilter(null); } } tableLabel.setText("Matching parts (" + sorter.getViewRowCount() + ")"); } else { TableRowSorter<TopLevelTableModel> sorter = (TableRowSorter) table.getRowSorter(); if (filterText.length() == 0) { sorter.setRowFilter(null); } else { try { RowFilter<TopLevelTableModel, Object> rf = RowFilter.regexFilter(filterText, 0, 1, 2, 4); sorter.setRowFilter(rf); } catch (PatternSyntaxException e) { sorter.setRowFilter(null); } } tableLabel.setText("Matching parts (" + sorter.getViewRowCount() + ")"); } } /** * Updates the table using the provided components. This lets the * SBOLStackQuery thread update the table. */ public class TableUpdater { public void updateTable(ArrayList<TableMetadata> identified, String filterText) { if (!filterSelection.getText().equals(filterText)) { // don't update if the filterSelection text has changed. return; } TableMetadataTableModel tableModel = new TableMetadataTableModel(identified); table = new JTable(tableModel); tableLabel.setText("Matching parts (" + identified.size() + ")"); refreshSearch = identified.size() >= SynBioHubQuery.QUERY_LIMIT; if (filterText != null && !refreshSearch) { cacheKey = filterText; } /* * System.out.println(); System.out.println("TableUpdater"); * System.out.println("refreshSearch: " + refreshSearch); * System.out.println("cacheKey: " + cacheKey); System.out.println( * "filter: " + filterSelection.getText()); */ TableRowSorter<TableModel> sorter = new TableRowSorter<TableModel>(tableModel); table.setRowSorter(sorter); setWidthAsPercentages(table, tableModel.getWidths()); table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); table.getSelectionModel().addListSelectionListener(new ListSelectionListener() { @Override public void valueChanged(ListSelectionEvent event) { setSelectAllowed(table.getSelectedRow() >= 0); } }); table.addMouseListener(new MouseAdapter() { 
public void mouseClicked(MouseEvent e) {
				// double-click confirms the row under the cursor
				if (e.getClickCount() == 2 && table.getSelectedRow() >= 0) {
					handleTableSelection(false);
				}
			}
		});
		scroller.setViewportView(table);
	}
}

/**
 * Wraps SynBioHubFrontend creation so legacy locations can be used.
 */
private SynBioHubFrontend createSynBioHubFrontend(String location, String uriPrefix) {
	// update location and SynBioHub location if not using https
	// FIX: compare Strings with equals(), not "==". The original reference
	// comparison only matched when location was the same interned literal, so
	// the legacy-location upgrade silently never ran for loaded/user values.
	if ("http://synbiohub.org".equals(location)) {
		location = "https://synbiohub.org";
		// This isn't elegant, but should work: keep user-added registries
		// (index 3 onward), reset the defaults, then re-append them.
		ArrayList<Registry> oldRegistries = new ArrayList<Registry>();
		for (int i = 3; i < Registries.get().size(); i++) {
			oldRegistries.add(Registries.get().get(i));
		}
		Registries.get().restoreDefaults();
		for (Registry r : oldRegistries) {
			Registries.get().add(r);
		}
		Registries.get().save();
	}
	// get logged in SynBioHubFrontend if possible
	SynBioHubFrontends frontends = new SynBioHubFrontends();
	if (frontends.hasFrontend(location)) {
		return frontends.getFrontend(location);
	}
	return new SynBioHubFrontend(location, uriPrefix);
}
}
package battleship.gui.client;

import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.util.Iterator;

import javax.swing.JPanel;

import battleship.client.HitMissMap;
import battleship.common.Ship;
import battleship.common.ShipConfiguration;
import battleship.gui.client.gamemodes.GameMode;

/**
 * Displays and manages the two game boards. The first game board shown is
 * the target map, and the second is the fleet map.
 */
public class BoardPanel extends JPanel {

	/** A pair of grid indices (column x, row y). */
	private static class GridCoord {
		public int x, y;

		public GridCoord(int x, int y) {
			this.x = x;
			this.y = y;
		}

		// FIX: the original cast obj without a type check (ClassCastException
		// for foreign types) and overrode equals() without hashCode().
		@Override
		public boolean equals(Object obj) {
			if (!(obj instanceof GridCoord)) {
				return false; // also covers obj == null
			}
			GridCoord b = (GridCoord) obj;
			return x == b.x && y == b.y;
		}

		@Override
		public int hashCode() {
			return 31 * x + y;
		}
	}

	/** Routes mouse releases on either board to the active game mode. */
	private class Mouse implements MouseListener {
		@Override
		public void mouseReleased(MouseEvent e) {
			GridCoord tc, fc;
			tc = mouseToGridCoord(e.getX(), e.getY(), target_tr);
			fc = mouseToGridCoord(e.getX(), e.getY(), fleet_tr);
			boolean primary = e.getButton() == MouseEvent.BUTTON1;
			if (tc != null) {
				gameMode.targetBoxClick(tc.x, tc.y, primary);
			}
			if (fc != null) {
				gameMode.fleetBoxClick(fc.x, fc.y, primary);
			}
		}

		@Override
		public void mouseExited(MouseEvent e) {
			// leaving the panel ends any hover state on both boards
			if (BoardPanel.this.target_coord != null) {
				gameMode.targetBoxOut();
				BoardPanel.this.target_coord = null;
			}
			if (BoardPanel.this.fleet_coord != null) {
				gameMode.fleetBoxOut();
				BoardPanel.this.fleet_coord = null;
			}
		}

		@Override
		public void mouseClicked(MouseEvent e) {}

		@Override
		public void mousePressed(MouseEvent e) {}

		@Override
		public void mouseEntered(MouseEvent e) {}
	}

	/** Tracks hover transitions between grid boxes on both boards. */
	private class MouseMotion implements MouseMotionListener {
		@Override
		public void mouseMoved(MouseEvent e) {
			GridCoord tc, fc;
			tc = mouseToGridCoord(e.getX(), e.getY(), target_tr);
			fc = mouseToGridCoord(e.getX(), e.getY(), fleet_tr);
			if (tc == null) {
				if (BoardPanel.this.target_coord != null) {
					gameMode.targetBoxOut();
				}
			} else if (!tc.equals(BoardPanel.this.target_coord)) {
				gameMode.targetBoxHover(tc.x, tc.y);
			}
			if (fc == null) {
				if (BoardPanel.this.fleet_coord != null) {
					gameMode.fleetBoxOut();
				}
			} else if (!fc.equals(BoardPanel.this.fleet_coord)) {
				gameMode.fleetBoxHover(fc.x, fc.y);
			}
			BoardPanel.this.target_coord = tc;
			BoardPanel.this.fleet_coord = fc;
		}

		@Override
		public void mouseDragged(MouseEvent e) {}
	}

	private static final long serialVersionUID = 1L;

	/** Null-object game mode so event handlers never need a null check. */
	private final GameMode NO_GAMEMODE = new GameMode() {
		@Override
		public void targetBoxOut() {}

		@Override
		public void targetBoxHover(int x, int y) {}

		@Override
		public void targetBoxClick(int x, int y, boolean primary) {}

		@Override
		public void fleetBoxOut() {}

		@Override
		public void fleetBoxHover(int x, int y) {}

		@Override
		public void fleetBoxClick(int x, int y, boolean primary) {}

		@Override
		public void draw(Graphics2D g2d, BoardPanelDraw target, BoardPanelDraw fleet) {}
	};

	private BoardTransform target_tr, fleet_tr;
	private ShipConfiguration shipConfiguration;
	private HitMissMap targetHitMiss, fleetHitMiss;
	private GameMode gameMode;
	private int grid_columns;
	private int grid_rows;
	// last hovered box on each board; null while the pointer is outside it
	private GridCoord target_coord, fleet_coord;

	public BoardPanel(ShipConfiguration shipConfiguration, HitMissMap targetHitMiss, HitMissMap fleetHitMiss) {
		this.shipConfiguration = shipConfiguration;
		this.targetHitMiss = targetHitMiss;
		this.fleetHitMiss = fleetHitMiss;
		this.gameMode = NO_GAMEMODE;
		this.grid_columns = shipConfiguration.getColumns();
		this.grid_rows = shipConfiguration.getRows();
		calculateTransforms(getWidth(), getHeight());
		addMouseListener(new Mouse());
		addMouseMotionListener(new MouseMotion());
	}

	public void setGameMode(GameMode gameMode) {
		this.gameMode = gameMode;
		repaint();
	}

	public void clearGameMode() {
		this.gameMode = NO_GAMEMODE;
		repaint();
	}

	@Override
	public void paintComponent(Graphics g) {
		Graphics2D g2d = DrawUtils.getGraphics2D(g);
		super.paintComponent(g);
		// recompute the board transforms in case the panel was resized
		calculateTransforms(getWidth(), getHeight());
		BoardPanelDraw drawTarget, drawFleet;
		drawTarget = new BoardPanelDraw(g2d, target_tr, grid_columns, grid_rows, false);
		drawFleet = new BoardPanelDraw(g2d, fleet_tr, grid_columns, grid_rows, true);
		drawTarget.grid();
		drawFleet.grid();
		g2d.setColor(new Color(64, 64, 128));
		for (Ship s : shipConfiguration.getShips()) {
			drawFleet.ship(s.getX(), s.getY(), s.getLength(), s.isHorizontal());
		}
		drawHitMiss(targetHitMiss, g2d, drawTarget);
		drawHitMiss(fleetHitMiss, g2d, drawFleet);
		gameMode.draw(g2d, drawTarget, drawFleet);
	}

	/**
	 * Maps a pixel position to a grid coordinate on the board described by
	 * {@code tr}, or returns null when the position is outside the grid.
	 */
	public GridCoord mouseToGridCoord(int m_x, int m_y, BoardTransform tr) {
		BoardTransform.Coord c = tr.transformInverse(m_x, m_y);
		int x = (int) Math.floor(c.x * grid_columns);
		int y = (int) Math.floor(c.y * grid_rows);
		boolean boxHit = x >= 0 && y >= 0 && x < grid_columns && y < grid_rows;
		if (boxHit) {
			return new GridCoord(x, y);
		} else {
			return null;
		}
	}

	/**
	 * Paints hit (red) and miss (black) markers for every recorded square,
	 * walking the hit/miss map in row-major order.
	 */
	private void drawHitMiss(HitMissMap hitMissMap, Graphics2D g2d, BoardPanelDraw drawTarget) {
		int columns = shipConfiguration.getColumns();
		int x = 0;
		int y = 0;
		Iterator<Byte> hisMissIter;
		for (hisMissIter = hitMissMap.iterator(); hisMissIter.hasNext();) {
			byte state = hisMissIter.next();
			Color color = null;
			switch (state) {
			case HitMissMap.HIT:
				color = new Color(255, 0, 0, 192);
				break;
			case HitMissMap.MISS:
				color = new Color(0, 0, 0, 192);
				break;
			}
			if (color != null) {
				g2d.setColor(color);
				drawTarget.square(x, y, 0);
			}
			x++;
			if (x >= columns) {
				x = 0;
				y++;
			}
		}
		// the map must cover exactly rows*columns squares
		assert (x == 0);
		assert (y == shipConfiguration.getRows());
	}

	/**
	 * Recomputes the target and fleet board transforms from the current panel
	 * size; the layout is designed against a 500x580 reference box scaled to
	 * the panel height and centered horizontally.
	 */
	private void calculateTransforms(int width, int height) {
		double box_w = 500;
		double box_h = 580;
		double scale = height / box_h;
		double x_off = (width - box_w * scale) / 2;
		double y_off = 0.0;
		// {x0, x1, x2, x3, y0, y1} corner data for each board
		double[] target_arr = { 80, 420, 100, 400, 10, 300 };
		double[] fleet_arr = { 120, 380, 10, 490, 330, 570 };
		int i;
		for (i = 0; i < 4; i++) {
			target_arr[i] = target_arr[i] * scale + x_off;
			fleet_arr[i] = fleet_arr[i] * scale + x_off;
		}
		for (; i < 6; i++) {
			target_arr[i] = target_arr[i] * scale + y_off;
			fleet_arr[i] = fleet_arr[i] * scale + y_off;
		}
		target_tr = new BoardTransform(target_arr[0], target_arr[1], target_arr[2], target_arr[3], target_arr[4],
				target_arr[5]);
		fleet_tr = new BoardTransform(fleet_arr[0], fleet_arr[1], fleet_arr[2], fleet_arr[3], fleet_arr[4],
				fleet_arr[5]);
	}
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.allocation.TransportClusterAllocationExplainAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsAction; import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import 
org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.TransportGetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.delete.TransportDeleteRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesAction; import org.elasticsearch.action.admin.cluster.repositories.get.TransportGetRepositoriesAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.put.TransportPutRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.TransportVerifyRepositoryAction; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryAction; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.settings.TransportClusterUpdateSettingsAction; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.shards.TransportClusterSearchShardsAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.create.TransportCreateSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.delete.TransportDeleteSnapshotAction; import 
org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.get.TransportGetSnapshotsAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.restore.TransportRestoreSnapshotAction; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusAction; import org.elasticsearch.action.admin.cluster.snapshots.status.TransportSnapshotsStatusAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.admin.cluster.state.TransportClusterStateAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportGetStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksAction; import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistAction; import org.elasticsearch.action.admin.indices.alias.exists.TransportAliasesExistAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import 
org.elasticsearch.action.admin.indices.alias.get.TransportGetAliasesAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.TransportClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.close.CloseIndexAction; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsAction; import org.elasticsearch.action.admin.indices.exists.indices.TransportIndicesExistsAction; import org.elasticsearch.action.admin.indices.exists.types.TransportTypesExistsAction; import org.elasticsearch.action.admin.indices.exists.types.TypesExistsAction; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.SyncedFlushAction; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; import org.elasticsearch.action.admin.indices.flush.TransportSyncedFlushAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.TransportForceMergeAction; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.TransportGetIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; import 
org.elasticsearch.action.admin.indices.mapping.get.TransportGetFieldMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.TransportGetFieldMappingsIndexAction; import org.elasticsearch.action.admin.indices.mapping.get.TransportGetMappingsAction; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.admin.indices.open.TransportOpenIndexAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; import org.elasticsearch.action.admin.indices.recovery.TransportRecoveryAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.TransportRefreshAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.rollover.TransportRolloverAction; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsAction; import org.elasticsearch.action.admin.indices.segments.TransportIndicesSegmentsAction; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction; import org.elasticsearch.action.admin.indices.settings.get.TransportGetSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresAction; import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction; import org.elasticsearch.action.admin.indices.shrink.ShrinkAction; import org.elasticsearch.action.admin.indices.shrink.TransportShrinkAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction; import 
org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.get.TransportGetIndexTemplatesAction; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; import org.elasticsearch.action.admin.indices.template.put.TransportPutIndexTemplateAction; import org.elasticsearch.action.admin.indices.upgrade.get.TransportUpgradeStatusAction; import org.elasticsearch.action.admin.indices.upgrade.get.UpgradeStatusAction; import org.elasticsearch.action.admin.indices.upgrade.post.TransportUpgradeAction; import org.elasticsearch.action.admin.indices.upgrade.post.TransportUpgradeSettingsAction; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeSettingsAction; import org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.TransportBulkAction; import org.elasticsearch.action.bulk.TransportShardBulkAction; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.explain.ExplainAction; import org.elasticsearch.action.explain.TransportExplainAction; import org.elasticsearch.action.fieldstats.FieldStatsAction; import org.elasticsearch.action.fieldstats.TransportFieldStatsAction; import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.MultiGetAction; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.get.TransportMultiGetAction; import 
org.elasticsearch.action.get.TransportShardMultiGetAction; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.ingest.DeletePipelineAction; import org.elasticsearch.action.ingest.DeletePipelineTransportAction; import org.elasticsearch.action.ingest.GetPipelineAction; import org.elasticsearch.action.ingest.GetPipelineTransportAction; import org.elasticsearch.action.ingest.PutPipelineAction; import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.action.ingest.SimulatePipelineAction; import org.elasticsearch.action.ingest.SimulatePipelineTransportAction; import org.elasticsearch.action.main.MainAction; import org.elasticsearch.action.main.TransportMainAction; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.MultiSearchAction; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchScrollAction; import org.elasticsearch.action.search.TransportClearScrollAction; import org.elasticsearch.action.search.TransportMultiSearchAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.termvectors.MultiTermVectorsAction; import org.elasticsearch.action.termvectors.TermVectorsAction; import org.elasticsearch.action.termvectors.TransportMultiTermVectorsAction; import org.elasticsearch.action.termvectors.TransportShardMultiTermsVectorAction; import org.elasticsearch.action.termvectors.TransportTermVectorsAction; import org.elasticsearch.action.update.TransportUpdateAction; 
import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.NamedRegistry; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.MapBinder; import org.elasticsearch.common.inject.multibindings.Multibinder; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ActionPlugin.ActionHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.action.RestFieldStatsAction; import org.elasticsearch.rest.action.RestMainAction; import org.elasticsearch.rest.action.admin.cluster.RestCancelTasksAction; import org.elasticsearch.rest.action.admin.cluster.RestClusterAllocationExplainAction; import org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction; import org.elasticsearch.rest.action.admin.cluster.RestClusterHealthAction; import org.elasticsearch.rest.action.admin.cluster.RestClusterRerouteAction; import org.elasticsearch.rest.action.admin.cluster.RestClusterSearchShardsAction; import org.elasticsearch.rest.action.admin.cluster.RestClusterStateAction; import org.elasticsearch.rest.action.admin.cluster.RestClusterStatsAction; import org.elasticsearch.rest.action.admin.cluster.RestClusterUpdateSettingsAction; import org.elasticsearch.rest.action.admin.cluster.RestCreateSnapshotAction; import org.elasticsearch.rest.action.admin.cluster.RestDeleteRepositoryAction; import 
org.elasticsearch.rest.action.admin.cluster.RestDeleteSnapshotAction; import org.elasticsearch.rest.action.admin.cluster.RestDeleteStoredScriptAction; import org.elasticsearch.rest.action.admin.cluster.RestGetRepositoriesAction; import org.elasticsearch.rest.action.admin.cluster.RestGetSnapshotsAction; import org.elasticsearch.rest.action.admin.cluster.RestGetStoredScriptAction; import org.elasticsearch.rest.action.admin.cluster.RestGetTaskAction; import org.elasticsearch.rest.action.admin.cluster.RestListTasksAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesHotThreadsAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesInfoAction; import org.elasticsearch.rest.action.admin.cluster.RestNodesStatsAction; import org.elasticsearch.rest.action.admin.cluster.RestPendingClusterTasksAction; import org.elasticsearch.rest.action.admin.cluster.RestPutRepositoryAction; import org.elasticsearch.rest.action.admin.cluster.RestPutStoredScriptAction; import org.elasticsearch.rest.action.admin.cluster.RestRestoreSnapshotAction; import org.elasticsearch.rest.action.admin.cluster.RestSnapshotsStatusAction; import org.elasticsearch.rest.action.admin.cluster.RestVerifyRepositoryAction; import org.elasticsearch.rest.action.admin.indices.RestAnalyzeAction; import org.elasticsearch.rest.action.admin.indices.RestClearIndicesCacheAction; import org.elasticsearch.rest.action.admin.indices.RestCloseIndexAction; import org.elasticsearch.rest.action.admin.indices.RestCreateIndexAction; import org.elasticsearch.rest.action.admin.indices.RestDeleteIndexAction; import org.elasticsearch.rest.action.admin.indices.RestDeleteIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.RestFlushAction; import org.elasticsearch.rest.action.admin.indices.RestForceMergeAction; import org.elasticsearch.rest.action.admin.indices.RestGetAliasesAction; import org.elasticsearch.rest.action.admin.indices.RestGetFieldMappingAction; import 
org.elasticsearch.rest.action.admin.indices.RestGetIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.RestGetIndicesAction; import org.elasticsearch.rest.action.admin.indices.RestGetMappingAction; import org.elasticsearch.rest.action.admin.indices.RestGetSettingsAction; import org.elasticsearch.rest.action.admin.indices.RestIndexDeleteAliasesAction; import org.elasticsearch.rest.action.admin.indices.RestIndexPutAliasAction; import org.elasticsearch.rest.action.admin.indices.RestIndicesAliasesAction; import org.elasticsearch.rest.action.admin.indices.RestIndicesSegmentsAction; import org.elasticsearch.rest.action.admin.indices.RestIndicesShardStoresAction; import org.elasticsearch.rest.action.admin.indices.RestIndicesStatsAction; import org.elasticsearch.rest.action.admin.indices.RestOpenIndexAction; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.RestPutMappingAction; import org.elasticsearch.rest.action.admin.indices.RestRecoveryAction; import org.elasticsearch.rest.action.admin.indices.RestRefreshAction; import org.elasticsearch.rest.action.admin.indices.RestRolloverIndexAction; import org.elasticsearch.rest.action.admin.indices.RestShrinkIndexAction; import org.elasticsearch.rest.action.admin.indices.RestSyncedFlushAction; import org.elasticsearch.rest.action.admin.indices.RestTypesExistsAction; import org.elasticsearch.rest.action.admin.indices.RestUpdateSettingsAction; import org.elasticsearch.rest.action.admin.indices.RestUpgradeAction; import org.elasticsearch.rest.action.admin.indices.RestValidateQueryAction; import org.elasticsearch.rest.action.cat.AbstractCatAction; import org.elasticsearch.rest.action.cat.RestAliasAction; import org.elasticsearch.rest.action.cat.RestAllocationAction; import org.elasticsearch.rest.action.cat.RestCatAction; import org.elasticsearch.rest.action.cat.RestFielddataAction; import 
org.elasticsearch.rest.action.cat.RestHealthAction; import org.elasticsearch.rest.action.cat.RestIndicesAction; import org.elasticsearch.rest.action.cat.RestMasterAction; import org.elasticsearch.rest.action.cat.RestNodeAttrsAction; import org.elasticsearch.rest.action.cat.RestNodesAction; import org.elasticsearch.rest.action.cat.RestPluginsAction; import org.elasticsearch.rest.action.cat.RestRepositoriesAction; import org.elasticsearch.rest.action.cat.RestSegmentsAction; import org.elasticsearch.rest.action.cat.RestShardsAction; import org.elasticsearch.rest.action.cat.RestSnapshotAction; import org.elasticsearch.rest.action.cat.RestTasksAction; import org.elasticsearch.rest.action.cat.RestTemplatesAction; import org.elasticsearch.rest.action.cat.RestThreadPoolAction; import org.elasticsearch.rest.action.document.RestBulkAction; import org.elasticsearch.rest.action.document.RestDeleteAction; import org.elasticsearch.rest.action.document.RestGetAction; import org.elasticsearch.rest.action.document.RestGetSourceAction; import org.elasticsearch.rest.action.document.RestIndexAction; import org.elasticsearch.rest.action.document.RestMultiGetAction; import org.elasticsearch.rest.action.document.RestMultiTermVectorsAction; import org.elasticsearch.rest.action.document.RestTermVectorsAction; import org.elasticsearch.rest.action.document.RestUpdateAction; import org.elasticsearch.rest.action.ingest.RestDeletePipelineAction; import org.elasticsearch.rest.action.ingest.RestGetPipelineAction; import org.elasticsearch.rest.action.ingest.RestPutPipelineAction; import org.elasticsearch.rest.action.ingest.RestSimulatePipelineAction; import org.elasticsearch.rest.action.search.RestClearScrollAction; import org.elasticsearch.rest.action.search.RestExplainAction; import org.elasticsearch.rest.action.search.RestMultiSearchAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.rest.action.search.RestSearchScrollAction; import 
org.elasticsearch.threadpool.ThreadPool;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;

import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableMap;

/**
 * Builds and binds the generic action map, all {@link TransportAction}s, and {@link ActionFilters}.
 */
public class ActionModule extends AbstractModule {

    private static final Logger logger = ESLoggerFactory.getLogger(ActionModule.class);

    // When true this node is a transport client: no REST layer is set up and
    // index auto-creation support is not bound (see constructor and configure()).
    private final boolean transportClient;
    private final Settings settings;
    private final IndexNameExpressionResolver indexNameExpressionResolver;
    private final IndexScopedSettings indexScopedSettings;
    private final ClusterSettings clusterSettings;
    private final SettingsFilter settingsFilter;
    private final List<ActionPlugin> actionPlugins;
    // Action name -> handler; built once from the core actions plus plugin-contributed ones.
    private final Map<String, ActionHandler<?, ?>> actions;
    private final List<Class<? extends ActionFilter>> actionFilters;
    private final AutoCreateIndex autoCreateIndex;             // null when transportClient
    private final DestructiveOperations destructiveOperations;
    private final RestController restController;               // null when transportClient

    /**
     * Sets up the action registry and action filters, and (for non-transport-client nodes)
     * the {@link RestController}, applying at most one plugin-supplied REST handler wrapper.
     *
     * @throws IllegalArgumentException if more than one plugin supplies a REST handler wrapper
     */
    public ActionModule(boolean transportClient, Settings settings,
            IndexNameExpressionResolver indexNameExpressionResolver, IndexScopedSettings indexScopedSettings,
            ClusterSettings clusterSettings, SettingsFilter settingsFilter, ThreadPool threadPool,
            List<ActionPlugin> actionPlugins, NodeClient nodeClient, CircuitBreakerService circuitBreakerService) {
        this.transportClient = transportClient;
        this.settings = settings;
        this.indexNameExpressionResolver = indexNameExpressionResolver;
        this.indexScopedSettings = indexScopedSettings;
        this.clusterSettings = clusterSettings;
        this.settingsFilter = settingsFilter;
        this.actionPlugins = actionPlugins;
        actions = setupActions(actionPlugins);
        actionFilters = setupActionFilters(actionPlugins);
        // Transport clients never auto-create indices on their own.
        autoCreateIndex = transportClient ? null
                : new AutoCreateIndex(settings, clusterSettings, indexNameExpressionResolver);
        destructiveOperations = new DestructiveOperations(settings, clusterSettings);
        // Union of all REST headers the installed plugins want copied into the thread context.
        Set<String> headers = actionPlugins.stream().flatMap(p -> p.getRestHeaders().stream())
                .collect(Collectors.toSet());
        // At most one plugin may wrap REST handlers (e.g. for security); two wrappers is a hard error.
        UnaryOperator<RestHandler> restWrapper = null;
        for (ActionPlugin plugin : actionPlugins) {
            UnaryOperator<RestHandler> newRestWrapper = plugin.getRestHandlerWrapper(threadPool.getThreadContext());
            if (newRestWrapper != null) {
                logger.debug("Using REST wrapper from plugin " + plugin.getClass().getName());
                if (restWrapper != null) {
                    throw new IllegalArgumentException("Cannot have more than one plugin implementing a REST wrapper");
                }
                restWrapper = newRestWrapper;
            }
        }
        if (transportClient) {
            restController = null;
        } else {
            restController = new RestController(settings, headers, restWrapper, nodeClient, circuitBreakerService);
        }
    }

    /** @return the immutable map of action name to handler built in the constructor */
    public Map<String, ActionHandler<?, ?>> getActions() {
        return actions;
    }

    /**
     * Registers every core action together with its transport implementation (and any
     * supporting shard/index-level transport actions), then appends plugin-contributed actions.
     *
     * @return an unmodifiable map of action name to {@link ActionHandler}
     */
    static Map<String, ActionHandler<?, ?>> setupActions(List<ActionPlugin> actionPlugins) {
        // Subclass NamedRegistry for easy registration
        class ActionRegistry extends NamedRegistry<ActionHandler<?, ?>> {
            ActionRegistry() {
                super("action");
            }

            public void register(ActionHandler<?, ?> handler) {
                register(handler.getAction().name(), handler);
            }

            public <Request extends ActionRequest, Response extends ActionResponse> void register(
                    GenericAction<Request, Response> action,
                    Class<? extends TransportAction<Request, Response>> transportAction,
                    Class<?>... supportTransportActions) {
                register(new ActionHandler<>(action, transportAction, supportTransportActions));
            }
        }
        ActionRegistry actions = new ActionRegistry();

        actions.register(MainAction.INSTANCE, TransportMainAction.class);

        // Node-level info/stats and task management
        actions.register(NodesInfoAction.INSTANCE, TransportNodesInfoAction.class);
        actions.register(NodesStatsAction.INSTANCE, TransportNodesStatsAction.class);
        actions.register(NodesHotThreadsAction.INSTANCE, TransportNodesHotThreadsAction.class);
        actions.register(ListTasksAction.INSTANCE, TransportListTasksAction.class);
        actions.register(GetTaskAction.INSTANCE, TransportGetTaskAction.class);
        actions.register(CancelTasksAction.INSTANCE, TransportCancelTasksAction.class);

        // Cluster-level administration
        actions.register(ClusterAllocationExplainAction.INSTANCE, TransportClusterAllocationExplainAction.class);
        actions.register(ClusterStatsAction.INSTANCE, TransportClusterStatsAction.class);
        actions.register(ClusterStateAction.INSTANCE, TransportClusterStateAction.class);
        actions.register(ClusterHealthAction.INSTANCE, TransportClusterHealthAction.class);
        actions.register(ClusterUpdateSettingsAction.INSTANCE, TransportClusterUpdateSettingsAction.class);
        actions.register(ClusterRerouteAction.INSTANCE, TransportClusterRerouteAction.class);
        actions.register(ClusterSearchShardsAction.INSTANCE, TransportClusterSearchShardsAction.class);
        actions.register(PendingClusterTasksAction.INSTANCE, TransportPendingClusterTasksAction.class);

        // Snapshot/repository management
        actions.register(PutRepositoryAction.INSTANCE, TransportPutRepositoryAction.class);
        actions.register(GetRepositoriesAction.INSTANCE, TransportGetRepositoriesAction.class);
        actions.register(DeleteRepositoryAction.INSTANCE, TransportDeleteRepositoryAction.class);
        actions.register(VerifyRepositoryAction.INSTANCE, TransportVerifyRepositoryAction.class);
        actions.register(GetSnapshotsAction.INSTANCE, TransportGetSnapshotsAction.class);
        actions.register(DeleteSnapshotAction.INSTANCE, TransportDeleteSnapshotAction.class);
        actions.register(CreateSnapshotAction.INSTANCE, TransportCreateSnapshotAction.class);
        actions.register(RestoreSnapshotAction.INSTANCE, TransportRestoreSnapshotAction.class);
        actions.register(SnapshotsStatusAction.INSTANCE, TransportSnapshotsStatusAction.class);

        // Index-level administration
        actions.register(IndicesStatsAction.INSTANCE, TransportIndicesStatsAction.class);
        actions.register(IndicesSegmentsAction.INSTANCE, TransportIndicesSegmentsAction.class);
        actions.register(IndicesShardStoresAction.INSTANCE, TransportIndicesShardStoresAction.class);
        actions.register(CreateIndexAction.INSTANCE, TransportCreateIndexAction.class);
        actions.register(ShrinkAction.INSTANCE, TransportShrinkAction.class);
        actions.register(RolloverAction.INSTANCE, TransportRolloverAction.class);
        actions.register(DeleteIndexAction.INSTANCE, TransportDeleteIndexAction.class);
        actions.register(GetIndexAction.INSTANCE, TransportGetIndexAction.class);
        actions.register(OpenIndexAction.INSTANCE, TransportOpenIndexAction.class);
        actions.register(CloseIndexAction.INSTANCE, TransportCloseIndexAction.class);
        actions.register(IndicesExistsAction.INSTANCE, TransportIndicesExistsAction.class);
        actions.register(TypesExistsAction.INSTANCE, TransportTypesExistsAction.class);
        actions.register(GetMappingsAction.INSTANCE, TransportGetMappingsAction.class);
        actions.register(GetFieldMappingsAction.INSTANCE, TransportGetFieldMappingsAction.class,
                TransportGetFieldMappingsIndexAction.class);
        actions.register(PutMappingAction.INSTANCE, TransportPutMappingAction.class);
        actions.register(IndicesAliasesAction.INSTANCE, TransportIndicesAliasesAction.class);
        actions.register(UpdateSettingsAction.INSTANCE, TransportUpdateSettingsAction.class);
        actions.register(AnalyzeAction.INSTANCE, TransportAnalyzeAction.class);
        actions.register(PutIndexTemplateAction.INSTANCE, TransportPutIndexTemplateAction.class);
        actions.register(GetIndexTemplatesAction.INSTANCE, TransportGetIndexTemplatesAction.class);
        actions.register(DeleteIndexTemplateAction.INSTANCE, TransportDeleteIndexTemplateAction.class);
        actions.register(ValidateQueryAction.INSTANCE, TransportValidateQueryAction.class);
        actions.register(RefreshAction.INSTANCE, TransportRefreshAction.class);
        actions.register(FlushAction.INSTANCE, TransportFlushAction.class);
        actions.register(SyncedFlushAction.INSTANCE, TransportSyncedFlushAction.class);
        actions.register(ForceMergeAction.INSTANCE, TransportForceMergeAction.class);
        actions.register(UpgradeAction.INSTANCE, TransportUpgradeAction.class);
        actions.register(UpgradeStatusAction.INSTANCE, TransportUpgradeStatusAction.class);
        actions.register(UpgradeSettingsAction.INSTANCE, TransportUpgradeSettingsAction.class);
        actions.register(ClearIndicesCacheAction.INSTANCE, TransportClearIndicesCacheAction.class);
        actions.register(GetAliasesAction.INSTANCE, TransportGetAliasesAction.class);
        actions.register(AliasesExistAction.INSTANCE, TransportAliasesExistAction.class);
        actions.register(GetSettingsAction.INSTANCE, TransportGetSettingsAction.class);

        // Document-level operations
        actions.register(IndexAction.INSTANCE, TransportIndexAction.class);
        actions.register(GetAction.INSTANCE, TransportGetAction.class);
        actions.register(TermVectorsAction.INSTANCE, TransportTermVectorsAction.class);
        actions.register(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class,
                TransportShardMultiTermsVectorAction.class);
        actions.register(DeleteAction.INSTANCE, TransportDeleteAction.class);
        actions.register(UpdateAction.INSTANCE, TransportUpdateAction.class);
        actions.register(MultiGetAction.INSTANCE, TransportMultiGetAction.class,
                TransportShardMultiGetAction.class);
        actions.register(BulkAction.INSTANCE, TransportBulkAction.class,
                TransportShardBulkAction.class);

        // Search
        actions.register(SearchAction.INSTANCE, TransportSearchAction.class);
        actions.register(SearchScrollAction.INSTANCE, TransportSearchScrollAction.class);
        actions.register(MultiSearchAction.INSTANCE, TransportMultiSearchAction.class);
        actions.register(ExplainAction.INSTANCE, TransportExplainAction.class);
        actions.register(ClearScrollAction.INSTANCE, TransportClearScrollAction.class);
        actions.register(RecoveryAction.INSTANCE, TransportRecoveryAction.class);

        //Indexed scripts
        actions.register(PutStoredScriptAction.INSTANCE, TransportPutStoredScriptAction.class);
        actions.register(GetStoredScriptAction.INSTANCE, TransportGetStoredScriptAction.class);
        actions.register(DeleteStoredScriptAction.INSTANCE, TransportDeleteStoredScriptAction.class);

        actions.register(FieldStatsAction.INSTANCE, TransportFieldStatsAction.class);

        // Ingest pipelines
        actions.register(PutPipelineAction.INSTANCE, PutPipelineTransportAction.class);
        actions.register(GetPipelineAction.INSTANCE, GetPipelineTransportAction.class);
        actions.register(DeletePipelineAction.INSTANCE, DeletePipelineTransportAction.class);
        actions.register(SimulatePipelineAction.INSTANCE, SimulatePipelineTransportAction.class);

        // Plugin-contributed actions come last; the registry rejects duplicate names.
        actionPlugins.stream().flatMap(p -> p.getActions().stream()).forEach(actions::register);

        return unmodifiableMap(actions.getRegistry());
    }

    /** Collects all action filter classes contributed by the installed plugins. */
    private List<Class<? extends ActionFilter>> setupActionFilters(List<ActionPlugin> actionPlugins) {
        return unmodifiableList(actionPlugins.stream()
                .flatMap(p -> p.getActionFilters().stream())
                .collect(Collectors.toList()));
    }

    /**
     * Instantiates every REST handler. Each handler receives the {@link RestController} in its
     * constructor (presumably registering itself there — confirm against the handler base class);
     * the consumer below only gathers the cat actions so they can be fed to {@link RestCatAction}.
     *
     * @param nodesInCluster supplies the current cluster nodes for task-related handlers
     */
    public void initRestHandlers(Supplier<DiscoveryNodes> nodesInCluster) {
        List<AbstractCatAction> catActions = new ArrayList<>();
        Consumer<RestHandler> registerHandler = a -> {
            if (a instanceof AbstractCatAction) {
                catActions.add((AbstractCatAction) a);
            }
        };
        registerHandler.accept(new RestMainAction(settings, restController));

        // Node info/stats
        registerHandler.accept(new RestNodesInfoAction(settings, restController, settingsFilter));
        registerHandler.accept(new RestNodesStatsAction(settings, restController));
        registerHandler.accept(new RestNodesHotThreadsAction(settings, restController));

        // Cluster-level administration
        registerHandler.accept(new RestClusterAllocationExplainAction(settings, restController));
        registerHandler.accept(new RestClusterStatsAction(settings, restController));
        registerHandler.accept(new RestClusterStateAction(settings, restController, settingsFilter));
        registerHandler.accept(new RestClusterHealthAction(settings, restController));
        registerHandler.accept(new RestClusterUpdateSettingsAction(settings, restController));
        registerHandler.accept(new RestClusterGetSettingsAction(settings, restController, clusterSettings, settingsFilter));
        registerHandler.accept(new RestClusterRerouteAction(settings, restController, settingsFilter));
        registerHandler.accept(new RestClusterSearchShardsAction(settings, restController));
        registerHandler.accept(new RestPendingClusterTasksAction(settings, restController));

        // Snapshot/repository management
        registerHandler.accept(new RestPutRepositoryAction(settings, restController));
        registerHandler.accept(new RestGetRepositoriesAction(settings, restController, settingsFilter));
        registerHandler.accept(new RestDeleteRepositoryAction(settings, restController));
        registerHandler.accept(new RestVerifyRepositoryAction(settings, restController));
        registerHandler.accept(new RestGetSnapshotsAction(settings, restController));
        registerHandler.accept(new RestCreateSnapshotAction(settings, restController));
        registerHandler.accept(new RestRestoreSnapshotAction(settings, restController));
        registerHandler.accept(new RestDeleteSnapshotAction(settings, restController));
        registerHandler.accept(new RestSnapshotsStatusAction(settings, restController));

        // Index-level administration
        registerHandler.accept(new RestTypesExistsAction(settings, restController));
        registerHandler.accept(new RestGetIndicesAction(settings, restController, indexScopedSettings, settingsFilter));
        registerHandler.accept(new RestIndicesStatsAction(settings, restController));
        registerHandler.accept(new RestIndicesSegmentsAction(settings, restController));
        registerHandler.accept(new RestIndicesShardStoresAction(settings, restController));
        registerHandler.accept(new RestGetAliasesAction(settings, restController));
        registerHandler.accept(new RestIndexDeleteAliasesAction(settings, restController));
        registerHandler.accept(new RestIndexPutAliasAction(settings, restController));
        registerHandler.accept(new RestIndicesAliasesAction(settings, restController));
        registerHandler.accept(new RestCreateIndexAction(settings, restController));
        registerHandler.accept(new RestShrinkIndexAction(settings, restController));
        registerHandler.accept(new RestRolloverIndexAction(settings, restController));
        registerHandler.accept(new RestDeleteIndexAction(settings, restController));
        registerHandler.accept(new RestCloseIndexAction(settings, restController));
        registerHandler.accept(new RestOpenIndexAction(settings, restController));
        registerHandler.accept(new RestUpdateSettingsAction(settings, restController));
        registerHandler.accept(new RestGetSettingsAction(settings, restController, indexScopedSettings, settingsFilter));
        registerHandler.accept(new RestAnalyzeAction(settings, restController));
        registerHandler.accept(new RestGetIndexTemplateAction(settings, restController));
        registerHandler.accept(new RestPutIndexTemplateAction(settings, restController));
        registerHandler.accept(new RestDeleteIndexTemplateAction(settings, restController));
        registerHandler.accept(new RestPutMappingAction(settings, restController));
        registerHandler.accept(new RestGetMappingAction(settings, restController));
        registerHandler.accept(new RestGetFieldMappingAction(settings, restController));
        registerHandler.accept(new RestRefreshAction(settings, restController));
        registerHandler.accept(new RestFlushAction(settings, restController));
        registerHandler.accept(new RestSyncedFlushAction(settings, restController));
        registerHandler.accept(new RestForceMergeAction(settings, restController));
        registerHandler.accept(new RestUpgradeAction(settings, restController));
        registerHandler.accept(new RestClearIndicesCacheAction(settings, restController));

        // Document-level operations
        registerHandler.accept(new RestIndexAction(settings, restController));
        registerHandler.accept(new RestGetAction(settings, restController));
        registerHandler.accept(new RestGetSourceAction(settings, restController));
        registerHandler.accept(new RestMultiGetAction(settings, restController));
        registerHandler.accept(new RestDeleteAction(settings, restController));
        registerHandler.accept(new org.elasticsearch.rest.action.document.RestCountAction(settings, restController));
        registerHandler.accept(new RestTermVectorsAction(settings, restController));
        registerHandler.accept(new RestMultiTermVectorsAction(settings, restController));
        registerHandler.accept(new RestBulkAction(settings, restController));
        registerHandler.accept(new RestUpdateAction(settings, restController));

        // Search
        registerHandler.accept(new RestSearchAction(settings, restController));
        registerHandler.accept(new RestSearchScrollAction(settings, restController));
        registerHandler.accept(new RestClearScrollAction(settings, restController));
        registerHandler.accept(new RestMultiSearchAction(settings, restController));
        registerHandler.accept(new RestValidateQueryAction(settings, restController));
        registerHandler.accept(new RestExplainAction(settings, restController));
        registerHandler.accept(new RestRecoveryAction(settings, restController));

        // Scripts API
        registerHandler.accept(new RestGetStoredScriptAction(settings, restController));
        registerHandler.accept(new RestPutStoredScriptAction(settings, restController));
        registerHandler.accept(new RestDeleteStoredScriptAction(settings, restController));

        registerHandler.accept(new RestFieldStatsAction(settings, restController));

        // Tasks API
        registerHandler.accept(new RestListTasksAction(settings, restController, nodesInCluster));
        registerHandler.accept(new RestGetTaskAction(settings, restController));
        registerHandler.accept(new RestCancelTasksAction(settings, restController, nodesInCluster));

        // Ingest API
        registerHandler.accept(new RestPutPipelineAction(settings, restController));
        registerHandler.accept(new RestGetPipelineAction(settings, restController));
        registerHandler.accept(new RestDeletePipelineAction(settings, restController));
        registerHandler.accept(new RestSimulatePipelineAction(settings, restController));

        // CAT API
        registerHandler.accept(new RestAllocationAction(settings, restController));
        registerHandler.accept(new RestShardsAction(settings, restController));
        registerHandler.accept(new RestMasterAction(settings, restController));
        registerHandler.accept(new RestNodesAction(settings, restController));
        registerHandler.accept(new RestTasksAction(settings, restController, nodesInCluster));
        registerHandler.accept(new RestIndicesAction(settings, restController, indexNameExpressionResolver));
        registerHandler.accept(new RestSegmentsAction(settings, restController));
        // Fully qualified to prevent interference with rest.action.count.RestCountAction
        registerHandler.accept(new org.elasticsearch.rest.action.cat.RestCountAction(settings, restController));
        // Fully qualified to prevent interference with rest.action.indices.RestRecoveryAction
        registerHandler.accept(new org.elasticsearch.rest.action.cat.RestRecoveryAction(settings, restController));
        registerHandler.accept(new RestHealthAction(settings, restController));
        registerHandler.accept(new org.elasticsearch.rest.action.cat.RestPendingClusterTasksAction(settings, restController));
        registerHandler.accept(new RestAliasAction(settings, restController));
        registerHandler.accept(new RestThreadPoolAction(settings, restController));
        registerHandler.accept(new RestPluginsAction(settings, restController));
        registerHandler.accept(new RestFielddataAction(settings, restController));
        registerHandler.accept(new RestNodeAttrsAction(settings, restController));
        registerHandler.accept(new RestRepositoriesAction(settings, restController));
        registerHandler.accept(new RestSnapshotAction(settings, restController));
        registerHandler.accept(new RestTemplatesAction(settings, restController));

        // Plugin-contributed REST handlers
        for (ActionPlugin plugin : actionPlugins) {
            for (RestHandler handler : plugin.getRestHandlers(settings, restController, clusterSettings,
                    indexScopedSettings, settingsFilter, indexNameExpressionResolver, nodesInCluster)) {
                registerHandler.accept(handler);
            }
        }
        // The _cat index endpoint needs the full list of cat actions collected above, so it goes last.
        registerHandler.accept(new RestCatAction(settings, restController, catActions));
    }

    /**
     * Guice wiring: binds every action filter into a set binder, the shared support classes,
     * and (on non-transport-client nodes) each transport action plus the
     * {@code GenericAction -> TransportAction} map consumed by {@link NodeClient}.
     */
    @Override
    protected void configure() {
        Multibinder<ActionFilter> actionFilterMultibinder = Multibinder.newSetBinder(binder(), ActionFilter.class);
        for (Class<? extends ActionFilter> actionFilter : actionFilters) {
            actionFilterMultibinder.addBinding().to(actionFilter);
        }
        bind(ActionFilters.class).asEagerSingleton();
        bind(DestructiveOperations.class).toInstance(destructiveOperations);

        if (false == transportClient) {
            // Supporting classes only used when not a transport client
            bind(AutoCreateIndex.class).toInstance(autoCreateIndex);
            bind(TransportLivenessAction.class).asEagerSingleton();

            // register GenericAction -> transportAction Map used by NodeClient
            @SuppressWarnings("rawtypes")
            MapBinder<GenericAction, TransportAction> transportActionsBinder =
                    MapBinder.newMapBinder(binder(), GenericAction.class, TransportAction.class);
            for (ActionHandler<?, ?> action : actions.values()) {
                // bind the action as eager singleton, so the map binder one will reuse it
                bind(action.getTransportAction()).asEagerSingleton();
                transportActionsBinder.addBinding(action.getAction()).to(action.getTransportAction()).asEagerSingleton();
                for (Class<?> supportAction : action.getSupportTransportActions()) {
                    bind(supportAction).asEagerSingleton();
                }
            }
        }
    }

    /** @return the REST controller, or {@code null} on a transport client */
    public RestController getRestController() {
        return restController;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; /** * The MBean interface for ColumnFamilyStore */ public interface ColumnFamilyStoreMBean { /** * @return the name of the column family */ public String getColumnFamilyName(); /** * Returns the total amount of data stored in the memtable, including * column related overhead. * * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#memtableDataSize * @return The size in bytes. * @deprecated */ @Deprecated public long getMemtableDataSize(); /** * Returns the total number of columns present in the memtable. * * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#memtableColumnsCount * @return The number of columns. */ @Deprecated public long getMemtableColumnsCount(); /** * Returns the number of times that a flush has resulted in the * memtable being switched out. 
     *
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#memtableSwitchCount
     * @return the number of memtable switches
     */
    @Deprecated
    public int getMemtableSwitchCount();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#recentSSTablesPerRead
     * @return a histogram of the number of sstable data files accessed per read: reading this property resets it
     */
    @Deprecated
    public long[] getRecentSSTablesPerReadHistogram();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#sstablesPerReadHistogram
     * @return a histogram of the number of sstable data files accessed per read
     */
    @Deprecated
    public long[] getSSTablesPerReadHistogram();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#readLatency
     * @return the number of read operations on this column family
     */
    @Deprecated
    public long getReadCount();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#readLatency
     * @return total read latency (divide by getReadCount() for average)
     */
    @Deprecated
    public long getTotalReadLatencyMicros();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#readLatency
     * @return an array representing the latency histogram
     */
    @Deprecated
    public long[] getLifetimeReadLatencyHistogramMicros();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#readLatency
     * @return an array representing the latency histogram
     */
    @Deprecated
    public long[] getRecentReadLatencyHistogramMicros();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#readLatency
     * @return average latency per read operation since the last call
     */
    @Deprecated
    public double getRecentReadLatencyMicros();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#writeLatency
     * @return the number of write operations on this column family
     */
    @Deprecated
    public long getWriteCount();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#writeLatency
     * @return total write latency (divide by getWriteCount() for average)
     */
    @Deprecated
    public long getTotalWriteLatencyMicros();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#writeLatency
     * @return an array representing the latency histogram
     */
    @Deprecated
    public long[] getLifetimeWriteLatencyHistogramMicros();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#writeLatency
     * @return an array representing the latency histogram
     */
    @Deprecated
    public long[] getRecentWriteLatencyHistogramMicros();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#writeLatency
     * @return average latency per write operation since the last call
     */
    @Deprecated
    public double getRecentWriteLatencyMicros();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#pendingTasks
     * @return the estimated number of tasks pending for this column family
     */
    @Deprecated
    public int getPendingTasks();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#liveSSTableCount
     * @return the number of SSTables on disk for this CF
     */
    @Deprecated
    public int getLiveSSTableCount();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#liveDiskSpaceUsed
     * @return disk space used by SSTables belonging to this CF
     */
    @Deprecated
    public long getLiveDiskSpaceUsed();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#totalDiskSpaceUsed
     * @return total disk space used by SSTables belonging to this CF, including obsolete ones waiting to be GC'd
     */
    @Deprecated
    public long getTotalDiskSpaceUsed();

    /**
     * force a major compaction of this column family
     */
    public void forceMajorCompaction() throws ExecutionException, InterruptedException;

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#minRowSize
     * @return the size of the smallest compacted row
     */
    @Deprecated
    public long getMinRowSize();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#maxRowSize
     * @return the size of the largest compacted row
     */
    @Deprecated
    public long getMaxRowSize();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#meanRowSize
     * @return the average row size across all the sstables
     */
    @Deprecated
    public long getMeanRowSize();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#bloomFilterFalsePositives
     * @return the number of bloom filter false positives
     */
    @Deprecated
    public long getBloomFilterFalsePositives();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#recentBloomFilterFalsePositives
     * @return the number of bloom filter false positives since the last call
     */
    @Deprecated
    public long getRecentBloomFilterFalsePositives();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#bloomFilterFalseRatio
     * @return the bloom filter false positive ratio
     */
    @Deprecated
    public double getBloomFilterFalseRatio();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#recentBloomFilterFalseRatio
     * @return the bloom filter false positive ratio since the last call
     */
    @Deprecated
    public double getRecentBloomFilterFalseRatio();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#bloomFilterDiskSpaceUsed
     * @return disk space used by the bloom filter
     */
    @Deprecated
    public long getBloomFilterDiskSpaceUsed();

    /**
     * Gets the minimum number of sstables in queue before compaction kicks off
     */
    public int getMinimumCompactionThreshold();

    /**
     * Sets the minimum number of sstables in queue before compaction kicks off
     */
    public void setMinimumCompactionThreshold(int threshold);

    /**
     * Gets the maximum number of sstables in queue before compaction kicks off
     */
    public int getMaximumCompactionThreshold();

    /**
     * Sets the minimum and maximum number of SSTables in queue before compaction kicks off
     */
    public void setCompactionThresholds(int minThreshold, int maxThreshold);

    /**
     * Sets the maximum number of sstables in queue before compaction kicks off
     */
    public void setMaximumCompactionThreshold(int threshold);

    /**
     * Sets the compaction strategy by class name
     * @param className the name of the compaction strategy class
     */
    public void setCompactionStrategyClass(String className);

    /**
     * Gets the compaction strategy class name
     */
    public String getCompactionStrategyClass();

    /**
     * Get the compression parameters
     */
    public Map<String,String> getCompressionParameters();

    /**
     * Set the compression parameters
     * @param opts map of string names to values
     */
    public void setCompressionParameters(Map<String,String> opts);

    /**
     * Set new crc check chance
     */
    public void setCrcCheckChance(double crcCheckChance);

    public boolean isAutoCompactionDisabled();

    /** Number of tombstoned cells retrieved during the last slice query */
    @Deprecated
    public double getTombstonesPerSlice();

    /** Number of live cells retrieved during the last slice query */
    @Deprecated
    public double getLiveCellsPerSlice();

    /** Estimate of the number of keys in this column family. */
    public long estimateKeys();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#estimatedRowSizeHistogram
     * @return a histogram of estimated row sizes
     */
    @Deprecated
    public long[] getEstimatedRowSizeHistogram();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#estimatedColumnCountHistogram
     * @return a histogram of estimated per-row column counts
     */
    @Deprecated
    public long[] getEstimatedColumnCountHistogram();

    /**
     * @see org.apache.cassandra.metrics.ColumnFamilyMetrics#compressionRatio
     * @return the compression ratio achieved for this column family
     */
    @Deprecated
    public double getCompressionRatio();

    /**
     * Returns a list of the names of the built column indexes for current store
     * @return list of the index names
     */
    public List<String> getBuiltIndexes();

    /**
     * Returns a list of filenames that contain the given key on this node
     * @param key
     * @return list of filenames containing the key
     */
    public List<String> getSSTablesForKey(String key);

    /**
     * Scan through Keyspace/ColumnFamily's data directory
     * determine which SSTables should be loaded and load them
     */
    public void loadNewSSTables();

    /**
     * @return the number of SSTables in L0.  Always return 0 if Leveled compaction is not enabled.
     */
    public int getUnleveledSSTables();

    /**
     * @return sstable count for each level. null unless leveled compaction is used.
     *         array index corresponds to level(int[0] is for level 0, ...).
     */
    public int[] getSSTableCountPerLevel();

    /**
     * Get the ratio of droppable tombstones to real columns (and non-droppable tombstones)
     * @return ratio
     */
    public double getDroppableTombstoneRatio();
}
package com.tinkerpop.blueprints.oupls.sail;

import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Edge;
import com.tinkerpop.blueprints.TransactionalGraph;
import com.tinkerpop.blueprints.Vertex;
import info.aduna.iteration.CloseableIteration;
import net.fortytwo.sesametools.CompoundCloseableIteration;
import net.fortytwo.sesametools.SailConnectionTripleSource;
import org.openrdf.model.Namespace;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.impl.NamespaceImpl;
import org.openrdf.query.BindingSet;
import org.openrdf.query.Dataset;
import org.openrdf.query.QueryEvaluationException;
import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.algebra.evaluation.TripleSource;
import org.openrdf.query.algebra.evaluation.impl.EvaluationStrategyImpl;
import org.openrdf.sail.SailException;
import org.openrdf.sail.helpers.DefaultSailChangedEvent;
import org.openrdf.sail.helpers.NotifyingSailConnectionBase;
import org.openrdf.sail.inferencer.InferencerConnection;

import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;

/**
 * A stateful connection to a BlueprintsSail RDF store interface.
 * <p>
 * Statements are stored as graph edges; RDF values (URIs, blank nodes, literals) are stored as
 * vertices.  Lookups go through the store's bit-indexed {@code matchers} array, where bits
 * 0x1/0x2/0x4/0x8 indicate whether subject/predicate/object/context, respectively, are bound.
 *
 * @author Joshua Shinavier (http://fortytwo.net)
 */
public class GraphSailConnection extends NotifyingSailConnectionBase implements InferencerConnection {
    // Used to iterate the "null context" exactly once when no explicit contexts are given.
    private static final Resource[] NULL_CONTEXT_ARRAY = {null};

    // Property key under which the empty-string namespace prefix is stored.
    private static final String DEFAULT_NAMESPACE_PREFIX_KEY = "default.namespace";

    private final GraphSail.DataStore store;

    // Write operations queued while a stable iteration holds the write semaphore (see below).
    private final Collection<WriteAction> writeBuffer = new LinkedList<WriteAction>();

    // Dirty flags used to fire a SailChangedEvent on commit.
    private boolean statementsAdded;
    private boolean statementsRemoved;

    public GraphSailConnection(final GraphSail.DataStore store) {
        super(store.sail);

        this.store = store;
    }

    protected void startTransactionInternal() throws SailException {
        statementsAdded = false;
        statementsRemoved = false;
    }

    public void commitInternal() throws SailException {
        if (store.manualTransactions) {
            ((TransactionalGraph) store.graph).stopTransaction(TransactionalGraph.Conclusion.SUCCESS);
        }

        // Notify listeners only if this transaction actually changed something.
        if (statementsAdded || statementsRemoved) {
            DefaultSailChangedEvent e = new DefaultSailChangedEvent(store.sail);
            e.setStatementsAdded(statementsAdded);
            e.setStatementsRemoved(statementsRemoved);
            store.sail.notifySailChanged(e);
        }
    }

    public void rollbackInternal() throws SailException {
        if (store.manualTransactions) {
            ((TransactionalGraph) store.graph).stopTransaction(TransactionalGraph.Conclusion.FAILURE);
        }
    }

    public void closeInternal() throws SailException {
        // Roll back any uncommitted operations.
        if (store.manualTransactions) {
            ((TransactionalGraph) store.graph).stopTransaction(TransactionalGraph.Conclusion.FAILURE);
        }
    }

    public CloseableIteration<? extends BindingSet, QueryEvaluationException> evaluateInternal(final TupleExpr query, final Dataset dataset, final BindingSet bindings, final boolean includeInferred) throws SailException {
        try {
            // Delegate SPARQL/SeRQL evaluation to Sesame's generic evaluation strategy,
            // backed by this connection's getStatements().
            TripleSource tripleSource = new SailConnectionTripleSource(this, store.valueFactory, includeInferred);
            EvaluationStrategyImpl strategy = new EvaluationStrategyImpl(tripleSource, dataset);
            return strategy.evaluate(query, bindings);
        } catch (QueryEvaluationException e) {
            throw new SailException(e);
        }
    }

    public CloseableIteration<? extends Resource, SailException> getContextIDsInternal() throws SailException {
        throw new UnsupportedOperationException("the getContextIDs operation is not yet supported");
    }

    public CloseableIteration<? extends Statement, SailException> getStatementsInternal(final Resource subject, final URI predicate, final Value object, final boolean includeInferred, final Resource... contexts) throws SailException {
        //System.out.println("getting: " + subject + ", " + predicate + ", " + object + ", " + includeInferred + ", " + contexts); System.out.flush();

        // Build the matcher index from which parts of the pattern are bound.
        int index = 0;

        if (null != subject) {
            index |= 0x1;
        }

        if (null != predicate) {
            index |= 0x2;
        }

        if (null != object) {
            index |= 0x4;
        }

        if (0 == contexts.length) {
            // Wildcard context: a single iteration over the context-free matcher.
            return createIteration(store.matchers[index].match(subject, predicate, object, null));
        } else {
            Collection<CloseableIteration<Statement, SailException>> iterations = new LinkedList<CloseableIteration<Statement, SailException>>();

            // TODO: as an optimization, filter on multiple contexts simultaneously (when context is not used in the matcher), rather than trying each context consecutively.
            for (Resource context : contexts) {
                index |= 0x8;
                Matcher m = store.matchers[index];
                iterations.add(createIteration(m.match(subject, predicate, object, context)));
            }

            return new CompoundCloseableIteration<Statement, SailException>(iterations);
        }
    }

    public long sizeInternal(final Resource... contexts) throws SailException {
        if (0 == contexts.length) {
            // Count all statements via the fully-unbound matcher.
            return countIterator(store.matchers[0x0].match(null, null, null, null));
        } else {
            int count = 0;

            for (Resource context : contexts) {
                count += countIterator(store.matchers[0x8].match(null, null, null, context));
            }

            return count;
        }
    }

    // Exhausts the iterable and returns the number of elements seen.
    private int countIterator(final Iterable i) {
        Iterator iter = i.iterator();
        int count = 0;
        while (iter.hasNext()) {
            count++;
            iter.next();
        }
        return count;
    }

    public void addStatementInternal(final Resource subject, final URI predicate, final Value object, final Resource... contexts) throws SailException {
        addStatementInternal(false, subject, predicate, object, contexts);
    }

    private void addStatementInternal(final boolean inferred, final Resource subject, final URI predicate, final Value object, final Resource... contexts) throws SailException {
        //System.out.println("adding (" + inferred + "): " + subject + ", " + predicate + ", " + object + ", " + contexts); System.out.flush();

        // If a stable iteration is in progress, defer the write until it closes.
        if (!canWrite()) {
            WriteAction a = new WriteAction(ActionType.ADD);
            a.inferred = inferred;
            a.subject = subject;
            a.predicate = predicate;
            a.object = object;
            a.contexts = contexts;

            queueUpdate(a);
            return;
        }

        if (null == subject || null == predicate || null == object) {
            throw new IllegalArgumentException("null part-of-speech for to-be-added statement");
        }

        // In unique-statements mode, remove any existing duplicates (both asserted and
        // inferred variants) before adding.
        if (store.uniqueStatements) {
            if (0 == contexts.length) {
                removeStatementsInternal(inferred, subject, predicate, object, (Resource) null);
                if (!inferred) {
                    removeStatementsInternal(true, subject, predicate, object, (Resource) null);
                }
            } else {
                removeStatementsInternal(inferred, subject, predicate, object, contexts);
                if (!inferred) {
                    removeStatementsInternal(true, subject, predicate, object, contexts);
                }
            }
        }

        // One edge per (statement, context) pair; no contexts means the null context.
        for (Resource context : ((0 == contexts.length) ? NULL_CONTEXT_ARRAY : contexts)) {
            String c = null == context ? GraphSail.NULL_CONTEXT_NATIVE : store.resourceToNative(context);

            Vertex out = getOrCreateVertex(subject);
            Vertex in = getOrCreateVertex(object);
            Edge edge = store.graph.addEdge(null, out, in, predicate.stringValue());
            if (inferred) {
                edge.setProperty(GraphSail.INFERRED, inferred);
            }

            for (IndexingMatcher m : (Collection<IndexingMatcher>) store.indexers) {
                //System.out.println("\t\tindexing with: " + m);
                m.indexStatement(edge, subject, predicate, object, c);
            }

            if (hasConnectionListeners()) {
                Statement s = store.valueFactory.createStatement(subject, predicate, object, context);
                notifyStatementAdded(s);
            }
            //System.out.println("added (s: " + s + ", p: " + p + ", o: " + o + ", c: " + c + ")");
            //System.out.print("\t--> ");
            //BlueprintsSail.debugEdge(edge);
        }

        statementsAdded = true;
        //System.out.println("\tdone adding");
    }

    // Looks up the vertex for an RDF value, creating it if it does not yet exist.
    private Vertex getOrCreateVertex(final Value value) {
        Vertex v = store.findVertex(value);
        if (null == v) {
            v = store.addVertex(value);
        }
        return v;
    }

    public void removeStatementsInternal(final Resource subject, final URI predicate, final Value object, final Resource... contexts) throws SailException {
        removeStatementsInternal(false, subject, predicate, object, contexts);
    }

    private void removeStatementsInternal(final boolean inferred, final Resource subject, final URI predicate, final Value object, final Resource... contexts) throws SailException {
        //System.out.println("removing (" + inferred + "): " + subject + ", " + predicate + ", " + object + ", " + contexts); System.out.flush();

        // If a stable iteration is in progress, defer the write until it closes.
        if (!canWrite()) {
            WriteAction a = new WriteAction(ActionType.REMOVE);
            a.inferred = inferred;
            a.subject = subject;
            a.predicate = predicate;
            a.object = object;
            a.contexts = contexts;

            queueUpdate(a);
            return;
        }

        // Collect matches first, then delete, to avoid mutating the graph mid-iteration.
        Collection<Edge> edgesToRemove = new LinkedList<Edge>();

        int index = 0;

        if (null != subject) {
            index |= 0x1;
        }

        if (null != predicate) {
            index |= 0x2;
        }

        if (null != object) {
            index |= 0x4;
        }

        if (0 == contexts.length) {
            Iterable<Edge> i = store.matchers[index].match(subject, predicate, object, null);
            for (Edge anI : i) {
                edgesToRemove.add(anI);
            }
        } else {
            // TODO: as an optimization, filter on multiple contexts simultaneously (when context is not used in the matcher), rather than trying each context consecutively.
            for (Resource context : contexts) {
                index |= 0x8;

                //System.out.println("matcher: " + indexes.matchers[index]);
                Iterable<Edge> i = store.matchers[index].match(subject, predicate, object, context);
                for (Edge e : i) {
                    // Only remove edges whose inferred flag matches the requested kind:
                    // absent INFERRED property = asserted; INFERRED=true = inferred.
                    Boolean b = (Boolean) e.getProperty(GraphSail.INFERRED);
                    if ((!inferred && null == b) || (inferred && null != b && b)) {
                        edgesToRemove.add(e);
                    }
                }
            }
        }

        for (Edge e : edgesToRemove) {
            SimpleStatement s;

            // Materialize the statement before deletion so listeners can still see it.
            if (hasConnectionListeners()) {
                s = new SimpleStatement();
                fillStatement(s, e);
            } else {
                s = null;
            }

            //System.out.println("removing this edge: " + e);
            removeEdge(e);

            if (null != s) {
                notifyStatementRemoved(s);
            }
        }

        if (0 < edgesToRemove.size()) {
            statementsRemoved = true;
        }
        //System.out.println("\tdone removing");
    }

    public void clearInternal(final Resource... contexts) throws SailException {
        clearInternal(false, contexts);
    }

    private void clearInternal(final boolean inferred, final Resource... contexts) throws SailException {
        //System.out.println("clearing (" + inferred + "): " + contexts); System.out.flush();

        // If a stable iteration is in progress, defer the write until it closes.
        if (!canWrite()) {
            WriteAction a = new WriteAction(ActionType.CLEAR);
            a.inferred = inferred;
            a.contexts = contexts;

            queueUpdate(a);
            return;
        }

        if (0 == contexts.length) {
            deleteEdgesInIterator(inferred, store.matchers[0x0].match(null, null, null, null));
        } else {
            for (Resource context : contexts) {
                // Note: order of operands to the "or" is important here
                deleteEdgesInIterator(inferred, store.matchers[0x8].match(null, null, null, context));
            }
        }
    }

    // Deletes every edge in the iterator whose inferred flag matches the requested kind,
    // notifying listeners as it goes.
    private void deleteEdgesInIterator(final boolean inferred, final Iterable<Edge> i) {
        Iterator<Edge> iter = i.iterator();
        while (iter.hasNext()) {
            Edge e = iter.next();

            Boolean b = (Boolean) e.getProperty(GraphSail.INFERRED);
            if ((!inferred && null == b) || (inferred && null != b && b)) {
                SimpleStatement s;

                if (hasConnectionListeners()) {
                    s = new SimpleStatement();
                    fillStatement(s, e);
                } else {
                    s = null;
                }

                try {
                    iter.remove();
                } catch (UnsupportedOperationException x) {
                    // TODO: it so happens that Neo4jGraph, the only IndexableGraph implementation so far tested whose
                    // iterators don't support remove(), does *not* throw ConcurrentModificationExceptions when you
                    // delete an edge in an iterator currently being traversed.  So for now, just ignore the
                    // UnsupportedOperationException and proceed to delete the edge from the graph.
                }
                removeEdge(e);

                if (null != s) {
                    notifyStatementRemoved(s);
                }

                statementsRemoved = true;
            }
        }
    }

    // Removes the edge and garbage-collects its endpoint vertices if they become isolated.
    private void removeEdge(final Edge edge) {
        Vertex h = edge.getVertex(Direction.IN);
        Vertex t = edge.getVertex(Direction.OUT);
        store.graph.removeEdge(edge);
        if (!h.getEdges(Direction.IN).iterator().hasNext() && !h.getEdges(Direction.OUT).iterator().hasNext()) {
            try {
                store.graph.removeVertex(h);
            } catch (IllegalStateException ex) {
                // Just keep going.  This is a hack for Neo4j vertices which appear in more than
                // one to-be-deleted edge.
            }
        }
        if (!t.getEdges(Direction.OUT).iterator().hasNext() && !t.getEdges(Direction.IN).iterator().hasNext()) {
            try {
                store.graph.removeVertex(t);
            } catch (IllegalStateException ex) {
                // Just keep going.  This is a hack for Neo4j vertices which appear in more than
                // one to-be-deleted edge.
            }
        }
    }

    public CloseableIteration<? extends Namespace, SailException> getNamespacesInternal() throws SailException {
        // Namespaces are stored as properties on a dedicated vertex: key = prefix, value = URI.
        final Iterator<String> prefixes = store.namespaces.getPropertyKeys().iterator();

        return new CloseableIteration<Namespace, SailException>() {
            public void close() throws SailException {
                // Do nothing.
            }

            public boolean hasNext() throws SailException {
                return prefixes.hasNext();
            }

            public Namespace next() throws SailException {
                String prefix = prefixes.next();
                String uri = (String) store.namespaces.getProperty(prefix);
                return new NamespaceImpl(fromNativePrefixKey(prefix), uri);
            }

            public void remove() throws SailException {
                throw new UnsupportedOperationException();
            }
        };
    }

    public String getNamespaceInternal(final String prefix) throws SailException {
        return (String) store.namespaces.getProperty(toNativePrefixKey(prefix));
    }

    public void setNamespaceInternal(final String prefix, final String uri) throws SailException {
        store.namespaces.setProperty(toNativePrefixKey(prefix), uri);
    }

    public void removeNamespaceInternal(final String prefix) throws SailException {
        store.namespaces.removeProperty(toNativePrefixKey(prefix));
    }

    public void clearNamespacesInternal() throws SailException {
        throw new UnsupportedOperationException("the clearNamespaces operation is not yet supported");
    }

    // write lock //////////////////////////////////////////////////////////////

    // Counts open stable iterations; writes are buffered while it is nonzero.
    // NOTE(review): this is a plain counter, not a real semaphore — it provides no
    // cross-thread safety; it only defers writes issued on this same connection.
    private int writeSemaphore = 0;

    private boolean canWrite() {
        return 0 == writeSemaphore;
    }

    private void writeSemaphoreUp() {
        writeSemaphore++;
    }

    private void writeSemaphoreDown() throws SailException {
        writeSemaphore--;

        // Once the last stable iteration closes, replay any buffered writes.
        if (0 == writeSemaphore) {
            flushWrites();
        }
    }

    // Executes the action immediately if no iteration is open; otherwise buffers it.
    private void queueUpdate(final WriteAction a) throws SailException {
        if (0 == writeSemaphore) {
            a.execute();
        } else {
            writeBuffer.add(a);
        }
    }

    private void flushWrites() throws SailException {
        // NOTE(review): buffered actions are replayed with inferred=true regardless of
        // a.inferred (compare WriteAction.execute(), which honors the flag) — confirm
        // whether asserted statements queued here are intentionally replayed as inferred.
        for (WriteAction a : writeBuffer) {
            switch (a.type) {
                case ADD:
                    addStatementInternal(true, a.subject, a.predicate, a.object, a.contexts);
                    break;
                case REMOVE:
                    removeStatementsInternal(true, a.subject, a.predicate, a.object, a.contexts);
                    break;
                case CLEAR:
                    clearInternal(true, a.contexts);
                    break;
            }
        }

        writeBuffer.clear();
    }

    // inference ///////////////////////////////////////////////////////////////

    private enum ActionType {ADD, REMOVE, CLEAR}

    // A deferred add/remove/clear operation, buffered while stable iterations are open.
    private class WriteAction {
        public final ActionType type;

        public WriteAction(final ActionType type) {
            this.type = type;
        }

        public Resource subject;
        public URI predicate;
        public Value object;
        public Resource[] contexts;
        public boolean inferred = true;

        public void execute() throws SailException {
            switch (type) {
                case ADD:
                    addStatementInternal(inferred, subject, predicate, object, contexts);
                    break;
                case REMOVE:
                    removeStatementsInternal(inferred, subject, predicate, object, contexts);
                    break;
                case CLEAR:
                    clearInternal(inferred, contexts);
                    break;
            }
        }
    }

    @Override
    public boolean addInferredStatement(final Resource subject, final URI predicate, final Value object, final Resource... contexts) throws SailException {
        for (Resource context : (0 == contexts.length ? NULL_CONTEXT_ARRAY : contexts)) {
            boolean doAdd = true;

            // In unique-statements mode, skip the add if the statement already exists.
            if (store.uniqueStatements) {
                CloseableIteration<?, SailException> i = getStatementsInternal(subject, predicate, object, true, context);
                try {
                    if (i.hasNext()) {
                        doAdd = false;
                    }
                } finally {
                    i.close();
                }
            }

            if (doAdd) {
                addStatementInternal(true, subject, predicate, object, context);
            }
        }

        // Note: the meaning of the return value is not documented (in the Sesame 2.3.2 JavaDocs)
        return false;
    }

    @Override
    public boolean removeInferredStatement(final Resource subject, final URI predicate, final Value object, final Resource... contexts) throws SailException {
        removeStatementsInternal(true, subject, predicate, object, contexts);

        // Note: the meaning of the return value is not documented (in the Sesame 2.3.2 JavaDocs)
        return false;
    }

    @Override
    public void clearInferred(final Resource... contexts) throws SailException {
        clearInternal(true, contexts);
    }

    @Override
    public void flushUpdates() throws SailException {
        // No-op
    }

    // statement iteration /////////////////////////////////////////////////////

    // Volatile iterations reuse a single Statement object per element (cheaper);
    // stable iterations produce a fresh Statement each time and defer writes while open.
    private CloseableIteration<Statement, SailException> createIteration(final Iterable<Edge> iterator) {
        return store.volatileStatements
                ? new VolatileStatementIteration(iterator)
                : new StableStatementIteration(iterator);
    }

    private class StableStatementIteration implements CloseableIteration<Statement, SailException> {
        private final Iterable<Edge> iterator;
        private final Iterator<Edge> iter;
        private boolean closed = false;

        public StableStatementIteration(final Iterable<Edge> iterator) {
            // Defer writes on this connection until this iteration is closed.
            writeSemaphoreUp();

            this.iterator = iterator;
            iter = iterator.iterator();
        }

        public void close() throws SailException {
            if (!closed) {
                closed = true;

                writeSemaphoreDown();
            }
        }

        public boolean hasNext() throws SailException {
            // Note: this used to throw an IllegalStateException if the iteration had already been closed,
            // but such is not the behavior of Aduna's LookAheadIteration, which simply does not provide any more
            // elements if the iteration has already been closed.
            // The CloseableIteration API says nothing about what to expect from a closed iteration,
            // so the behavior of LookAheadIteration will be taken as normative.
            return !closed && iter.hasNext();
        }

        public Statement next() throws SailException {
            if (closed) {
                throw new IllegalStateException("already closed");
            }

            Edge e = iter.next();

            SimpleStatement s = new SimpleStatement();
            fillStatement(s, e);

            return s;
        }

        public void remove() throws SailException {
            throw new UnsupportedOperationException();
        }
    }

    // Populates a SimpleStatement from an edge: subject/object from the tail/head
    // vertices, predicate/context from edge properties.
    private void fillStatement(final SimpleStatement s, final Edge e) {
        s.subject = (Resource) toSesame(e.getVertex(Direction.OUT));
        s.predicate = (URI) toSesame(((String) e.getProperty(GraphSail.PREDICATE_PROP)));
        s.object = toSesame(e.getVertex(Direction.IN));
        s.context = (Resource) toSesame(((String) e.getProperty(GraphSail.CONTEXT_PROP)));
    }

    private class VolatileStatementIteration implements CloseableIteration<Statement, SailException> {
        // A single Statement instance is reused for every element of the iteration.
        private final SimpleStatement s = new SimpleStatement();
        private final Iterable<Edge> iterator;
        private final Iterator<Edge> iter;

        public VolatileStatementIteration(final Iterable<Edge> iterator) {
            this.iterator = iterator;
            iter = iterator.iterator();
        }

        public void close() throws SailException {
        }

        public boolean hasNext() throws SailException {
            return iter.hasNext();
        }

        public Statement next() throws SailException {
            Edge e = iter.next();
            fillStatement(s, e);
            return s;
        }

        public void remove() throws SailException {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * A POJO statement containing a subject, predicate, object and context.
     * The purpose of using a special Statement implementation (rather than using an existing ValueFactory) is to
     * guarantee that it does not contain anything which would interfere
     * with JDK optimization aimed at eliminating creation of short-lived (Statement) objects.
     * You can observe the effect of such interference by un-commenting the <code>finalize()</code> method below.
     */
    private class SimpleStatement implements Statement {
        private Resource subject;
        private URI predicate;
        private Value object;
        private Resource context;

        public Resource getSubject() {
            return subject;
        }

        public URI getPredicate() {
            return predicate;
        }

        public Value getObject() {
            return object;
        }

        public Resource getContext() {
            return context;
        }

        /*
        protected void finalize() throws Throwable {
            super.finalize();
        }
        //*/

        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append("(").append(subject).append(", ").append(predicate).append(", ").append(object);
            if (null != context) {
                sb.append(", ").append(context);
            }
            sb.append(")");
            return sb.toString();
        }
    }

    // value conversion ////////////////////////////////////////////////////////

    // The empty prefix cannot be used as a property key, so map it to a reserved key.
    private String toNativePrefixKey(final String prefix) {
        return 0 == prefix.length() ? DEFAULT_NAMESPACE_PREFIX_KEY : prefix;
    }

    private String fromNativePrefixKey(final String prefix) {
        return prefix.equals(DEFAULT_NAMESPACE_PREFIX_KEY) ? "" : prefix;
    }

    // Reconstructs a Sesame Value from a value vertex, based on its KIND property.
    private Value toSesame(final Vertex v) {
        String value = (String) v.getProperty(GraphSail.VALUE);
        String kind = (String) v.getProperty(GraphSail.KIND);
        if (kind.equals(GraphSail.URI)) {
            return store.valueFactory.createURI(value);
        } else if (kind.equals(GraphSail.LITERAL)) {
            String datatype = (String) v.getProperty(GraphSail.TYPE);
            String lang = (String) v.getProperty(GraphSail.LANG);
            return null != datatype
                    ? store.valueFactory.createLiteral(value, store.valueFactory.createURI(datatype))
                    : null != lang
                    ? store.valueFactory.createLiteral(value, lang)
                    : store.valueFactory.createLiteral(value);
        } else if (kind.equals(GraphSail.BNODE)) {
            return store.valueFactory.createBNode(value);
        } else {
            throw new IllegalStateException("unexpected resource kind: " + kind);
        }
    }

    // Reconstructs a Sesame Value from its native string encoding:
    // a one-character type prefix, a separator, then the payload.
    private Value toSesame(final String s) {
        int i;

        switch (s.charAt(0)) {
            case GraphSail.URI_PREFIX:
                return store.valueFactory.createURI(s.substring(2));
            case GraphSail.BLANK_NODE_PREFIX:
                return store.valueFactory.createBNode(s.substring(2));
            case GraphSail.PLAIN_LITERAL_PREFIX:
                return store.valueFactory.createLiteral(s.substring(2));
            case GraphSail.TYPED_LITERAL_PREFIX:
                i = s.indexOf(GraphSail.SEPARATOR, 2);
                return store.valueFactory.createLiteral(s.substring(i + 1), store.valueFactory.createURI(s.substring(2, i)));
            case GraphSail.LANGUAGE_TAG_LITERAL_PREFIX:
                i = s.indexOf(GraphSail.SEPARATOR, 2);
                return store.valueFactory.createLiteral(s.substring(i + 1), s.substring(2, i));
            case GraphSail.NULL_CONTEXT_PREFIX:
                // The encoded null context maps back to a null Resource.
                return null;
            default:
                throw new IllegalStateException();
        }
    }
}
package org.ccci.gto.android.common.db; import android.os.Parcel; import android.os.Parcelable; import android.util.Pair; import org.ccci.gto.android.common.compat.os.ParcelCompat; import org.ccci.gto.android.common.util.ArrayUtils; import java.util.Arrays; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import static org.ccci.gto.android.common.db.AbstractDao.bindValues; public abstract class Expression implements Parcelable { public static final Literal NULL = new Literal((String) null, true); static final String[] NO_ARGS = new String[0]; Expression() { } @NonNull protected abstract Pair<String, String[]> buildSql(@NonNull AbstractDao dao); @NonNull public Expression args(@NonNull final Object... args) { return args(bindValues(args)); } /** * returns the number of "dynamic" arguments in this expression. This may not be the same as the actual number of * arguments returned from buildSql * * @return */ protected int numOfArgs() { return 0; } @NonNull public Expression args(@NonNull final String... 
args) { // tail of Expression.args(String...) — the signature opens on an earlier line
    // A base expression carries no bind arguments, so supplying any is an error.
    if (args.length > 0) {
        throw new IllegalArgumentException("invalid number of arguments specified");
    }
    return this;
}

// -------------------------------------------------------------------------
// Fluent builders. Each wraps this expression and an operand in a Binary
// node; Number/String/Object overloads first lift the value to a constant
// Literal via constant(...).
// -------------------------------------------------------------------------

/** {@code this AND expression}. */
@NonNull
public final Binary and(@NonNull final Expression expression) {
    return binaryExpr(Binary.AND, expression);
}

/** {@code this == constant} (numeric constant). */
@NonNull
public final Binary eq(@NonNull final Number constant) {
    return eq(constant(constant));
}

/** {@code this == constant} (string constant). */
@NonNull
public final Binary eq(@NonNull final String constant) {
    return eq(constant(constant));
}

/** {@code this == constant} (arbitrary value, converted via bindValues). */
@NonNull
public final Binary eq(@NonNull final Object constant) {
    return eq(constant(constant));
}

/** {@code this == expression}. */
@NonNull
public final Binary eq(@NonNull final Expression expression) {
    return binaryExpr(Binary.EQ, expression);
}

/** {@code this < constant}. */
@NonNull
public final Binary lt(@NonNull final Number constant) {
    return lt(constant(constant));
}

/** {@code this < constant}. */
@NonNull
public final Binary lt(@NonNull final Object constant) {
    return lt(constant(constant));
}

/** {@code this < expression}. */
@NonNull
public final Binary lt(@NonNull final Expression expression) {
    return binaryExpr(Binary.LT, expression);
}

/** {@code this <= constant}. */
@NonNull
public final Binary lte(@NonNull final Number constant) {
    return lte(constant(constant));
}

/** {@code this <= constant}. */
@NonNull
public final Binary lte(@NonNull final Object constant) {
    return lte(constant(constant));
}

/** {@code this <= expression}. */
@NonNull
public final Binary lte(@NonNull final Expression expression) {
    return binaryExpr(Binary.LTE, expression);
}

/** {@code this > constant}. */
@NonNull
public final Binary gt(@NonNull final Number constant) {
    return gt(constant(constant));
}

/** {@code this > constant}. */
@NonNull
public final Binary gt(@NonNull final Object constant) {
    return gt(constant(constant));
}

/** {@code this > expression}. */
@NonNull
public final Binary gt(@NonNull final Expression expression) {
    return binaryExpr(Binary.GT, expression);
}

/** {@code this >= constant}. */
@NonNull
public final Binary gte(@NonNull final Number constant) {
    return gte(constant(constant));
}

/** {@code this >= constant}. */
@NonNull
public final Binary gte(@NonNull final Object constant) {
    return gte(constant(constant));
}

/** {@code this >= expression}. */
@NonNull
public final Binary gte(@NonNull final Expression expression) {
    return binaryExpr(Binary.GTE, expression);
}

/** {@code this IN (expressions...)} — this expression becomes operand 0. */
@NonNull
public final Binary in(@NonNull final Expression... expressions) {
    return new Binary(Binary.IN, ArrayUtils.merge(Expression.class, new Expression[] {this}, expressions));
}

/** {@code this NOT IN (expressions...)}. */
@NonNull
public final Binary notIn(@NonNull final Expression... expressions) {
    return new Binary(Binary.NOTIN, ArrayUtils.merge(Expression.class, new Expression[] {this}, expressions));
}

/** {@code this IS expression} (SQLite null-safe comparison). */
@NonNull
public final Binary is(@NonNull final Expression expression) {
    return binaryExpr(Binary.IS, expression);
}

/** {@code this IS NOT expression}. */
@NonNull
public final Binary isNot(@NonNull final Expression expression) {
    return binaryExpr(Binary.ISNOT, expression);
}

/** {@code this OR expression}. */
@NonNull
public final Binary or(@NonNull final Expression expression) {
    return binaryExpr(Binary.OR, expression);
}

/** {@code this != constant}. */
@NonNull
public final Binary ne(@NonNull final Number constant) {
    return ne(constant(constant));
}

/** {@code this != constant}. */
@NonNull
public final Binary ne(@NonNull final String constant) {
    return ne(constant(constant));
}

/** {@code this != constant}. */
@NonNull
public final Binary ne(@NonNull final Object constant) {
    return ne(constant(constant));
}

/** {@code this != expression}. */
@NonNull
public final Binary ne(@NonNull final Expression expression) {
    return binaryExpr(Binary.NE, expression);
}

/** {@code NOT (this)}; subclasses may simplify (see Binary/Unary overrides). */
@NonNull
public Expression not() {
    return new Unary(Unary.NOT, this);
}

/** Static convenience for {@link #not()}. */
@NonNull
public static Expression not(@NonNull final Expression expression) {
    return expression.not();
}

/** Builds a Binary node {@code this op expression}; overridden by Binary to chain AND/OR. */
@NonNull
protected Binary binaryExpr(@NonNull final String op, @NonNull final Expression expression) {
    return new Binary(op, this, expression);
}

/** Freezes this expression into a Raw expression by rendering its SQL and args. */
@NonNull
public Raw toRaw(@NonNull final AbstractDao dao) {
    final Pair<String, String[]> sql = buildSql(dao);
    return raw(sql.first, sql.second);
}

/** References an unqualified column by name. */
@NonNull
public static Field field(@NonNull final String name) {
    return new Field(null, name);
}

/**
 * References a table-qualified column.
 *
 * @deprecated Since v0.9.0, use {@link Table#field(String)} instead
 */
@NonNull
@Deprecated
public static Field field(@NonNull final Table<?> table, @NonNull final String name) {
    return new Field(table, name);
}

/**
 * References a column on the table mapped to {@code type}.
 *
 * @deprecated Since v0.9.0, use {@link Table#forClass(Class)} and {@link Table#field(String)} instead
 */
@NonNull
@Deprecated
public static Field field(@NonNull final Class<?> type, @NonNull final String name) {
    return Table.forClass(type).field(name);
}

// -------------------------------------------------------------------------
// Literal factories. bind(...) produces a '?' placeholder literal; constant(...)
// produces a literal inlined into the SQL text (no bind argument).
// -------------------------------------------------------------------------

/** An unbound '?' placeholder whose value is supplied later via args(...). */
@NonNull
public static Literal bind() {
    return new Literal((String) null, false);
}

/** A '?' placeholder pre-bound to {@code value} (converted via bindValues). */
@NonNull
public static Literal bind(@NonNull final Object value) {
    return new Literal(bindValues(value)[0], false);
}

/** A '?' placeholder pre-bound to a numeric value. */
@NonNull
public static Literal bind(@NonNull final Number value) {
    return new Literal(value, false);
}

/** A '?' placeholder pre-bound to a string value. */
@NonNull
public static Literal bind(@NonNull final String value) {
    return new Literal(value, false);
}

/** A constant literal (converted via bindValues). */
@NonNull
public static Literal constant(@NonNull final Object value) {
    return new Literal(bindValues(value)[0], true);
}

/** A constant numeric literal, inlined into the SQL. */
@NonNull
public static Literal constant(@NonNull final Number value) {
    return new Literal(value, true);
}

/** A constant string literal. */
@NonNull
public static Literal constant(@NonNull final String value) {
    return new Literal(value, true);
}

/** Constant literals for each value (converted via bindValues). */
@NonNull
public static Literal[] constants(@NonNull final Object... values) {
    return constants(bindValues(values));
}

/** Constant literals for each numeric value. */
@NonNull
public static Literal[] constants(@NonNull final Number... values) {
    final Literal[] constants = new Literal[values.length];
    for (int i = 0; i < values.length; i++) {
        constants[i] = constant(values[i]);
    }
    return constants;
}

/** Constant literals for each string value. */
@NonNull
public static Literal[] constants(@NonNull final String... values) {
    final Literal[] constants = new Literal[values.length];
    for (int i = 0; i < values.length; i++) {
        constants[i] = constant(values[i]);
    }
    return constants;
}

/** Raw SQL fragment with arbitrary bind values (converted via bindValues). */
@NonNull
public static Raw raw(@NonNull final String expr, @NonNull final Object... args) {
    return new Raw(expr, bindValues(args));
}

/** Raw SQL fragment with pre-converted string bind args. */
@NonNull
public static Raw raw(@NonNull final String expr, @Nullable final String... args) {
    return new Raw(expr, args);
}

// Parcelable plumbing for the base class: no special objects, no state of its own.
@Override
public int describeContents() {
    return 0;
}

@Override
public void writeToParcel(final Parcel out, final int flags) {
}

/**
 * A literal value: either a constant rendered into the SQL text, or a '?'
 * bind placeholder. Holds at most one of a string or numeric value.
 */
public static class Literal extends Expression {
    @Nullable
    private final String mStrValue;
    @Nullable
    private final Number mNumValue;
    // true => render inline in SQL; false => render as a '?' bind parameter
    private final boolean mConstant;
    // lazily built, cached SQL; transient because it is derivable state
    @Nullable
    private transient Pair<String, String[]> mSql;

    Literal(@Nullable final Number value, final boolean constant) {
        this(null, value, constant);
    }

    Literal(@Nullable final String value, final boolean constant) {
        this(value, null, constant);
    }

    private Literal(@Nullable final String strValue, @Nullable final Number numValue, final boolean constant) {
        mStrValue = strValue;
        mNumValue = numValue;
        mConstant = constant;
    }

    Literal(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
        mStrValue = in.readString();
        mNumValue = (Number) in.readValue(loader);
        mConstant = ParcelCompat.readBoolean(in);
    }

    @Override
    protected int numOfArgs() {
        // constants contribute no bind args; a bind placeholder contributes one
        return mConstant ? 0 : 1;
    }

    @NonNull
    @Override
    public Literal args(@NonNull final String... args) {
        if (args.length != (mConstant ? 0 : 1)) {
            throw new IllegalArgumentException("incorrect number of args specified");
        }
        // binding a value to a placeholder produces a new immutable Literal
        return mConstant ? this : new Literal(args[0], false);
    }

    @NonNull
    @Override
    protected Pair<String, String[]> buildSql(@NonNull final AbstractDao dao) {
        if (mSql == null) {
            // handle constants
            if (mConstant) {
                if (mNumValue != null) {
                    mSql = Pair.create(mNumValue.toString(), NO_ARGS);
                } else if (mStrValue != null) {
                    //TODO: how should we handle non-null constant string values?
                    // NOTE(review): falls through to the '?' branch below, so a
                    // string "constant" currently behaves like a bind value — confirm intended.
                } else {
                    mSql = Pair.create("NULL", NO_ARGS);
                }
            }
            // default if mSql is still null
            if (mSql == null) {
                mSql = Pair.create("?", new String[] {mNumValue != null ? mNumValue.toString() : mStrValue});
            }
        }
        return mSql;
    }

    @Override
    public void writeToParcel(final Parcel out, final int flags) {
        super.writeToParcel(out, flags);
        out.writeString(mStrValue);
        out.writeValue(mNumValue);
        ParcelCompat.writeBoolean(out, mConstant);
    }

    public static final Creator<Literal> CREATOR = new LiteralCreator();

    private static class LiteralCreator implements ClassLoaderCreator<Literal> {
        @Override
        public Literal createFromParcel(@NonNull final Parcel in) {
            return new Literal(in, null);
        }

        @Override
        public Literal[] newArray(final int size) {
            return new Literal[size];
        }

        @Override
        public Literal createFromParcel(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
            return new Literal(in, loader);
        }
    }
}

/**
 * A column reference, optionally qualified by a table. Also the entry point
 * for the SQL aggregate functions (COUNT/MAX/MIN/SUM).
 */
public static final class Field extends Expression {
    @Nullable
    private final Table<?> mTable;
    @NonNull
    private final String mName;
    // lazily built, cached SQL
    @Nullable
    private transient Pair<String, String[]> mSql;

    Field(@Nullable final Table<?> table, @NonNull final String name) {
        mTable = table;
        mName = name;
    }

    Field(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
        mTable = in.readParcelable(loader);
        mName = in.readString();
    }

    /** {@code COUNT(field)}. */
    @NonNull
    public Aggregate count() {
        return new Aggregate(Aggregate.COUNT, false, this);
    }

    /** {@code COUNT([DISTINCT] field)}. */
    @NonNull
    public Aggregate count(final boolean distinct) {
        return new Aggregate(Aggregate.COUNT, distinct, this);
    }

    /** {@code MAX(field)}. */
    @NonNull
    public Aggregate max() {
        return new Aggregate(Aggregate.MAX, false, this);
    }

    /** {@code MAX([DISTINCT] field)}. */
    @NonNull
    public Aggregate max(final boolean distinct) {
        return new Aggregate(Aggregate.MAX, distinct, this);
    }

    /** {@code MIN(field)}. */
    @NonNull
    public Aggregate min() {
        return new Aggregate(Aggregate.MIN, false, this);
    }

    /** {@code MIN([DISTINCT] field)}. */
    @NonNull
    public Aggregate min(final boolean distinct) {
        return new Aggregate(Aggregate.MIN, distinct, this);
    }

    /** {@code SUM(field)}. */
    @NonNull
    public Aggregate sum() {
        return new Aggregate(Aggregate.SUM, false, this);
    }

    /** {@code SUM([DISTINCT] field)}. */
    @NonNull
    public Aggregate sum(final boolean distinct) {
        return new Aggregate(Aggregate.SUM, distinct, this);
    }

    @NonNull
    @Override
    protected Pair<String, String[]> buildSql(@NonNull final AbstractDao dao) {
        // generate SQL for this field
        if (mSql == null) {
            final StringBuilder sql = new StringBuilder();
            if (mTable != null) {
                sql.append(mTable.sqlPrefix(dao));
            }
            sql.append(mName);
            mSql = Pair.create(sql.toString(), NO_ARGS);
        }
        return mSql;
    }

    @Override
    public void writeToParcel(final Parcel out, final int flags) {
        super.writeToParcel(out, flags);
        out.writeParcelable(mTable, 0);
        out.writeString(mName);
    }

    public static final Creator<Field> CREATOR = new FieldCreator();

    private static class FieldCreator implements ClassLoaderCreator<Field> {
        @Override
        public Field createFromParcel(@NonNull final Parcel in) {
            return new Field(in, null);
        }

        @Override
        public Field[] newArray(final int size) {
            return new Field[size];
        }

        @Override
        public Field createFromParcel(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
            return new Field(in, loader);
        }
    }
}

/**
 * A raw SQL fragment with its bind args. buildSql is a passthrough, and
 * toRaw is the identity.
 */
public static class Raw extends Expression {
    @NonNull
    private final String mExpr;
    @NonNull
    private final String[] mArgs;

    Raw(@NonNull final String expr, @Nullable final String... args) {
        mExpr = expr;
        // normalize null varargs to the shared empty array
        mArgs = args != null ? args : NO_ARGS;
    }

    Raw(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
        mExpr = in.readString();
        mArgs = in.createStringArray();
    }

    @Override
    protected int numOfArgs() {
        return mArgs.length;
    }

    @NonNull
    @Override
    public Raw args(@NonNull final Object... args) {
        return args(bindValues(args));
    }

    @NonNull
    @Override
    public Raw args(@NonNull final String... args) {
        // NOTE(review): unlike other subclasses, no length check against numOfArgs() — confirm intentional
        return new Raw(mExpr, args);
    }

    @NonNull
    @Override
    protected Pair<String, String[]> buildSql(@NonNull final AbstractDao dao) {
        return Pair.create(mExpr, mArgs);
    }

    @NonNull
    @Override
    public Raw toRaw(@NonNull AbstractDao dao) {
        return this;
    }

    @Override
    public void writeToParcel(final Parcel out, final int flags) {
        super.writeToParcel(out, flags);
        out.writeString(mExpr);
        out.writeStringArray(mArgs);
    }

    public static final Creator<Raw> CREATOR = new RawCreator();

    private static class RawCreator implements ClassLoaderCreator<Raw> {
        @Override
        public Raw createFromParcel(@NonNull final Parcel in) {
            return new Raw(in, null);
        }

        @Override
        public Raw[] newArray(final int size) {
            return new Raw[size];
        }

        @Override
        public Raw createFromParcel(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
            return new Raw(in, loader);
        }
    }
}

/**
 * An n-ary operator node. For AND/OR the operand list may be longer than two
 * (chained); IN/NOT IN treat operand 0 as the left-hand side and the rest as
 * the value list.
 */
public static class Binary extends Expression {
    static final String LT = "<";
    static final String LTE = "<=";
    static final String GT = ">";
    static final String GTE = ">=";
    static final String EQ = "==";
    static final String NE = "!=";
    static final String IS = "IS";
    static final String ISNOT = "IS NOT";
    static final String IN = "IN";
    static final String NOTIN = "NOT IN";
    static final String AND = "AND";
    static final String OR = "OR";

    @NonNull
    private final String mOp;
    @NonNull
    private final Expression[] mExprs;
    // cached sum of the operands' bind-arg counts
    private final int mNumOfArgs;
    // lazily built, cached SQL
    @Nullable
    private transient Pair<String, String[]> mSql;

    Binary(@NonNull final String op, @NonNull final Expression... exprs) {
        mOp = op;
        mExprs = exprs;
        mNumOfArgs = calcNumOfArgs();
    }

    Binary(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
        mOp = in.readString();
        final Parcelable[] exprs = in.readParcelableArray(loader);
        mExprs = new Expression[exprs.length];
        for (int i = 0; i < exprs.length; i++) {
            mExprs[i] = (Expression) exprs[i];
        }
        mNumOfArgs = calcNumOfArgs();
    }

    private int calcNumOfArgs() {
        int sum = 0;
        for (final Expression expr : mExprs) {
            sum += expr.numOfArgs();
        }
        return sum;
    }

    @NonNull
    @Override
    @SuppressWarnings("checkstyle:MissingSwitchDefault")
    protected Binary binaryExpr(@NonNull final String op, @NonNull final Expression expression) {
        // chain binary expressions together when possible
        // (a AND b).and(c) => AND(a, b, c) instead of AND(AND(a, b), c)
        switch (mOp) {
            case AND:
            case OR:
                if (mOp.equals(op)) {
                    return new Binary(mOp, ArrayUtils.merge(Expression.class, mExprs, new Expression[] {expression}));
                }
        }
        return super.binaryExpr(op, expression);
    }

    @NonNull
    @Override
    public Expression not() {
        // sometimes we can just change our own op for not()
        switch (mOp) {
            case EQ:
                return new Binary(NE, mExprs);
            case NE:
                return new Binary(EQ, mExprs);
            case IS:
                return new Binary(ISNOT, mExprs);
            case ISNOT:
                return new Binary(IS, mExprs);
            case IN:
                return new Binary(NOTIN, mExprs);
            case NOTIN:
                return new Binary(IN, mExprs);
            default:
                return super.not();
        }
    }

    @Override
    protected int numOfArgs() {
        return mNumOfArgs;
    }

    @NonNull
    @Override
    public Binary args(@NonNull final String... args) {
        if (args.length != mNumOfArgs) {
            throw new IllegalArgumentException("incorrect number of args specified");
        }
        // short-circuit if there are no args
        if (args.length == 0) {
            return this;
        }
        // distribute the flat args array across operands in order
        int pos = 0;
        final Expression[] exprs = new Expression[mExprs.length];
        for (int i = 0; i < mExprs.length; i++) {
            final int num = mExprs[i].numOfArgs();
            exprs[i] = num > 0 ? mExprs[i].args(Arrays.copyOfRange(args, pos, pos + num)) : mExprs[i];
            pos += num;
        }
        return new Binary(mOp, exprs);
    }

    @NonNull
    @Override
    protected Pair<String, String[]> buildSql(@NonNull final AbstractDao dao) {
        // generate SQL if it hasn't been generated yet
        if (mSql == null) {
            int i = 0;
            final StringBuilder sql = new StringBuilder();
            String[] args = NO_ARGS;
            sql.append('(');
            final boolean isIn = IN.equals(mOp) || NOTIN.equals(mOp);
            if (isIn) {
                // "{mExpr[0]} IN ("
                final Pair<String, String[]> resp = mExprs[0].buildSql(dao);
                sql.append(resp.first);
                args = ArrayUtils.merge(String.class, args, resp.second);
                sql.append(' ').append(mOp).append(" (");
                i++;
            }
            // "{mExpr[i]} {mOp} {mExpr[i+1]} ..." — or a comma-separated list inside IN(...)
            boolean first = true;
            for (; i < mExprs.length; i++) {
                final Expression expr = mExprs[i];
                if (!first) {
                    sql.append(' ').append(isIn ? ',' : mOp).append(' ');
                }
                final Pair<String, String[]> resp = expr.buildSql(dao);
                sql.append(resp.first);
                args = ArrayUtils.merge(String.class, args, resp.second);
                first = false;
            }
            if (isIn) {
                sql.append(')');
            }
            sql.append(')');
            mSql = Pair.create(sql.toString(), args);
        }
        return mSql;
    }

    @Override
    public void writeToParcel(final Parcel out, final int flags) {
        super.writeToParcel(out, flags);
        out.writeString(mOp);
        out.writeParcelableArray(mExprs, 0);
    }

    public static final Creator<Binary> CREATOR = new BinaryExpressionCreator();

    private static class BinaryExpressionCreator implements ClassLoaderCreator<Binary> {
        @Override
        public Binary createFromParcel(@NonNull final Parcel in) {
            return new Binary(in, null);
        }

        @Override
        public Binary[] newArray(final int size) {
            return new Binary[size];
        }

        @Override
        public Binary createFromParcel(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
            return new Binary(in, loader);
        }
    }
}

/**
 * A unary operator node; currently only NOT. {@code NOT(NOT(x)).not()}
 * simplifies back to x.
 */
public static class Unary extends Expression {
    static final String NOT = "NOT";

    @NonNull
    private final String mOp;
    @NonNull
    private final Expression mExpr;
    // lazily built, cached SQL
    @Nullable
    private transient Pair<String, String[]> mSql;

    Unary(@NonNull final String op, @NonNull final Expression expr) {
        mOp = op;
        mExpr = expr;
    }

    Unary(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
        mOp = in.readString();
        mExpr = in.readParcelable(loader);
    }

    @Override
    protected int numOfArgs() {
        return mExpr.numOfArgs();
    }

    @NonNull
    @Override
    public Expression args(@NonNull final String... args) {
        // NOTE(review): returns the inner expression with args applied, without
        // re-wrapping it in this Unary — the NOT is dropped. Confirm intentional.
        return mExpr.args(args);
    }

    @NonNull
    @Override
    public Expression not() {
        switch (mOp) {
            case NOT:
                // double negation cancels
                return mExpr;
            default:
                return super.not();
        }
    }

    @NonNull
    @Override
    protected Pair<String, String[]> buildSql(@NonNull final AbstractDao dao) {
        // generate SQL if it hasn't been generated yet
        if (mSql == null) {
            final StringBuilder sql = new StringBuilder(mOp).append(" (");
            String[] args = NO_ARGS;
            final Pair<String, String[]> resp = mExpr.buildSql(dao);
            sql.append(resp.first);
            args = ArrayUtils.merge(String.class, args, resp.second);
            sql.append(')');
            mSql = Pair.create(sql.toString(), args);
        }
        return mSql;
    }

    @Override
    public void writeToParcel(final Parcel out, final int flags) {
        super.writeToParcel(out, flags);
        out.writeString(mOp);
        out.writeParcelable(mExpr, 0);
    }

    public static final Creator<Unary> CREATOR = new UnaryExpressionCreator();

    private static class UnaryExpressionCreator implements ClassLoaderCreator<Unary> {
        @Override
        public Unary createFromParcel(@NonNull final Parcel in) {
            return new Unary(in, null);
        }

        @Override
        public Unary[] newArray(final int size) {
            return new Unary[size];
        }

        @Override
        public Unary createFromParcel(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
            return new Unary(in, loader);
        }
    }
}

/**
 * An SQL aggregate function applied to a field: COUNT/MAX/MIN/SUM with an
 * optional DISTINCT modifier.
 */
public static class Aggregate extends Expression {
    static final String COUNT = "COUNT";
    static final String MAX = "MAX";
    static final String MIN = "MIN";
    static final String SUM = "SUM";

    @NonNull
    private final String mOp;
    @NonNull
    private final Field mField;
    private final boolean mDistinct;
    // lazily built, cached SQL
    @Nullable
    private transient Pair<String, String[]> mSql;

    Aggregate(@NonNull final String op, final boolean distinct, @NonNull final Field field) {
        mOp = op;
        mField = field;
        mDistinct = distinct;
    }

    Aggregate(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
        mOp = in.readString();
        mField = in.readParcelable(loader);
        // NOTE(review): mDistinct is not written by writeToParcel below, so it is
        // hard-coded false here — the DISTINCT flag is lost across parceling. Confirm intentional.
        mDistinct = false;
    }

    @NonNull
    public Aggregate distinct(final boolean distinct) {
        return new Aggregate(mOp, distinct, mField);
    }

    @Override
    protected int numOfArgs() {
        return mField.numOfArgs();
    }

    @NonNull
    @Override
    public Expression args(@NonNull final String... args) {
        // NOTE(review): returns the field with args applied, without re-wrapping
        // it in this Aggregate — the aggregate function is dropped. Confirm intentional.
        return mField.args(args);
    }

    @NonNull
    @Override
    protected Pair<String, String[]> buildSql(@NonNull final AbstractDao dao) {
        // generate SQL if it hasn't been generated yet
        if (mSql == null) {
            final StringBuilder sql = new StringBuilder(mOp).append(" (");
            String[] args = NO_ARGS;
            final Pair<String, String[]> resp = mField.buildSql(dao);
            // {mOp} (DISTINCT {mExpr})
            if (mDistinct) {
                sql.append("DISTINCT ");
            }
            sql.append(resp.first);
            args = ArrayUtils.merge(String.class, args, resp.second);
            sql.append(')');
            mSql = Pair.create(sql.toString(), args);
        }
        return mSql;
    }

    @Override
    public void writeToParcel(final Parcel out, final int flags) {
        super.writeToParcel(out, flags);
        out.writeString(mOp);
        out.writeParcelable(mField, 0);
    }

    public static final Creator<Aggregate> CREATOR = new AggregateExpressionCreator();

    private static class AggregateExpressionCreator implements ClassLoaderCreator<Aggregate> {
        @Override
        public Aggregate createFromParcel(@NonNull final Parcel in) {
            return new Aggregate(in, null);
        }

        @Override
        public Aggregate[] newArray(final int size) {
            return new Aggregate[size];
        }

        @Override
        public Aggregate createFromParcel(@NonNull final Parcel in, @Nullable final ClassLoader loader) {
            return new Aggregate(in, loader);
        }
    }
}
}
/* * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.sun.tools.internal.xjc.reader; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.activation.MimeType; import com.sun.tools.internal.xjc.model.CElementPropertyInfo; import static com.sun.tools.internal.xjc.model.CElementPropertyInfo.CollectionMode.*; import com.sun.tools.internal.xjc.model.CReferencePropertyInfo; import com.sun.tools.internal.xjc.model.CTypeRef; import com.sun.tools.internal.xjc.model.Multiplicity; import com.sun.tools.internal.xjc.model.nav.NType; import com.sun.xml.internal.bind.v2.model.core.Element; import com.sun.xml.internal.bind.v2.model.core.ID; import java.math.BigInteger; /** * Set of {@link Ref}. 
* @author Kohsuke Kawaguchi
 */
public final class RawTypeSet {

    /** The references that constitute this type set. */
    public final Set<Ref> refs;

    /**
     * True if this type set can form references to types.
     */
    public final Mode canBeTypeRefs;

    /**
     * The occurrence of the whole references.
     */
    public final Multiplicity mul;

    // computed inside canBeTypeRefs()
    private CElementPropertyInfo.CollectionMode collectionMode;

    /**
     * Should be called from one of the raw type set builders.
     */
    public RawTypeSet( Set<Ref> refs, Multiplicity m ) {
        this.refs = refs;
        mul = m;
        canBeTypeRefs = canBeTypeRefs();
    }

    public CElementPropertyInfo.CollectionMode getCollectionMode() {
        return collectionMode;
    }

    /**
     * Returns true when at least one occurrence is required (min &gt; 0).
     */
    public boolean isRequired() {
        // signum() relies only on the sign, not on compareTo returning exactly 1
        // (the Comparable contract guarantees only the sign of the result).
        return mul.min.signum() > 0;
    }

    /**
     * Represents the possible binding option for this {@link RawTypeSet}.
     */
    public enum Mode {
        /**
         * This {@link RawTypeSet} can be either a reference property or
         * an element property, and XJC recommends element property.
         */
        SHOULD_BE_TYPEREF(0),
        /**
         * This {@link RawTypeSet} can be either a reference property or
         * an element property, and XJC recommends reference property.
         */
        CAN_BE_TYPEREF(1),
        /**
         * This {@link RawTypeSet} can only be bound to a reference property.
         */
        MUST_BE_REFERENCE(2);

        private final int rank;

        Mode(int rank) {
            this.rank = rank;
        }

        /**
         * Combines two modes pessimistically: the more restrictive
         * (higher-ranked) of the two wins.
         */
        Mode or(Mode that) {
            switch(Math.max(this.rank,that.rank)) {
            case 0:     return SHOULD_BE_TYPEREF;
            case 1:     return CAN_BE_TYPEREF;
            case 2:     return MUST_BE_REFERENCE;
            }
            throw new AssertionError();
        }
    }

    /**
     * Returns true if {@link #refs} can form refs of types.
     *
     * If there are multiple {@link Ref}s with the same type,
     * we cannot make them into type refs. Or if any of the {@link Ref}
     * says they cannot be in type refs, we cannot do that either.
     *
     * TODO: just checking if the refs are the same is not sufficient.
     * If two refs derive from each other, they cannot form a list of refs
     * (because of a possible ambiguity).
     */
    private Mode canBeTypeRefs() {
        Set<NType> types = new HashSet<NType>();

        collectionMode = mul.isAtMostOnce()?NOT_REPEATED:REPEATED_ELEMENT;

        // the way we compute this is that we start from the most optimistic value,
        // and then gradually degrade as we find something problematic.
        Mode mode = Mode.SHOULD_BE_TYPEREF;

        for( Ref r : refs ) {
            mode = mode.or(r.canBeType(this));
            if(mode== Mode.MUST_BE_REFERENCE)
                return mode;    // no need to continue the processing

            if(!types.add(r.toTypeRef(null).getTarget().getType()))
                return Mode.MUST_BE_REFERENCE;   // collision

            if(r.isListOfValues()) {
                // @XmlList is only representable when it is the sole, non-repeating ref
                if(refs.size()>1 || !mul.isAtMostOnce())
                    return Mode.MUST_BE_REFERENCE;   // restriction on @XmlList
                collectionMode = REPEATED_VALUE;
            }
        }
        return mode;
    }

    /**
     * Populates an element property with one {@link CTypeRef} per ref.
     * Must only be called when {@link #canBeTypeRefs} permits it.
     */
    public void addTo(CElementPropertyInfo prop) {
        assert canBeTypeRefs!= Mode.MUST_BE_REFERENCE;
        if(mul.isZero())
            return; // the property can't have any value

        List<CTypeRef> dst = prop.getTypes();
        for( Ref t : refs )
            dst.add(t.toTypeRef(prop));
    }

    /**
     * Populates a reference property with one element ref per ref.
     */
    public void addTo(CReferencePropertyInfo prop) {
        if(mul.isZero())
            return; // the property can't have any value
        for( Ref t : refs )
            t.toElementRef(prop);
    }

    /**
     * Returns the first non-NONE ID mode among the refs, or {@link ID#NONE}.
     */
    public ID id() {
        for( Ref t : refs ) {
            ID id = t.id();
            if(id!=ID.NONE)    return id;
        }
        return ID.NONE;
    }

    /**
     * Returns the first non-null expected MIME type among the refs, or null.
     */
    public MimeType getExpectedMimeType() {
        for( Ref t : refs ) {
            MimeType mt = t.getExpectedMimeType();
            if(mt!=null)    return mt;
        }
        return null;
    }

    /**
     * A reference to something.
     *
     * <p>
     * A {@link Ref} can be either turned into {@link CTypeRef} to form
     * an element property, or {@link Element} to form a reference property.
     */
    public static abstract class Ref {
        /**
         * @param ep
         *      the property to which the returned {@link CTypeRef} will be
         *      added to.
         */
        protected abstract CTypeRef toTypeRef(CElementPropertyInfo ep);

        protected abstract void toElementRef(CReferencePropertyInfo prop);

        /**
         * Can this {@link Ref} be a type ref?
         * @return false to veto.
         * @param parent
         */
        protected abstract Mode canBeType(RawTypeSet parent);

        protected abstract boolean isListOfValues();

        /**
         * When this {@link RawTypeSet} binds to a {@link CElementPropertyInfo},
         * this method is used to determine if the property is ID or not.
         */
        protected abstract ID id();

        /**
         * When this {@link RawTypeSet} binds to a {@link CElementPropertyInfo},
         * this method is used to determine if the property has an associated expected MIME type or not.
         */
        protected MimeType getExpectedMimeType() { return null; }
    }
}
// Copyright (c) 2011-present, Facebook, Inc.  All rights reserved.
//  This source code is licensed under both the GPLv2 (found in the
//  COPYING file in the root directory) and Apache 2.0 License
//  (found in the LICENSE.Apache file in the root directory).

package org.rocksdb;

/**
 * The logical mapping of tickers defined in rocksdb::Tickers.
 *
 * Java byte value mappings don't align 1:1 to the c++ values. c++ rocksdb::Tickers enumeration type
 * is uint32_t and java org.rocksdb.TickerType is byte, this causes mapping issues when
 * rocksdb::Tickers value is greater than 127 (0x7F) for jbyte jni interface as a greater range is
 * not available. Without breaking the interface in minor versions, value mappings for
 * org.rocksdb.TickerType leverage the full byte range [-128 (-0x80), (0x7F)]. Newer tickers added
 * should descend into negative values until TICKER_ENUM_MAX reaches -128 (-0x80).
 */
public enum TickerType {

    /**
     * total block cache misses
     *
     * REQUIRES: BLOCK_CACHE_MISS == BLOCK_CACHE_INDEX_MISS +
     *     BLOCK_CACHE_FILTER_MISS +
     *     BLOCK_CACHE_DATA_MISS;
     */
    BLOCK_CACHE_MISS((byte) 0x0),

    /**
     * total block cache hit
     *
     * REQUIRES: BLOCK_CACHE_HIT == BLOCK_CACHE_INDEX_HIT +
     *     BLOCK_CACHE_FILTER_HIT +
     *     BLOCK_CACHE_DATA_HIT;
     */
    BLOCK_CACHE_HIT((byte) 0x1),

    BLOCK_CACHE_ADD((byte) 0x2),

    /** # of failures when adding blocks to block cache. */
    BLOCK_CACHE_ADD_FAILURES((byte) 0x3),

    /** # of times cache miss when accessing index block from block cache. */
    BLOCK_CACHE_INDEX_MISS((byte) 0x4),

    /** # of times cache hit when accessing index block from block cache. */
    BLOCK_CACHE_INDEX_HIT((byte) 0x5),

    /** # of index blocks added to block cache. */
    BLOCK_CACHE_INDEX_ADD((byte) 0x6),

    /** # of bytes of index blocks inserted into cache */
    BLOCK_CACHE_INDEX_BYTES_INSERT((byte) 0x7),

    /** # of bytes of index block erased from cache */
    BLOCK_CACHE_INDEX_BYTES_EVICT((byte) 0x8),

    /** # of times cache miss when accessing filter block from block cache. */
    BLOCK_CACHE_FILTER_MISS((byte) 0x9),

    /** # of times cache hit when accessing filter block from block cache. */
    BLOCK_CACHE_FILTER_HIT((byte) 0xA),

    /** # of filter blocks added to block cache. */
    BLOCK_CACHE_FILTER_ADD((byte) 0xB),

    /** # of bytes of bloom filter blocks inserted into cache */
    BLOCK_CACHE_FILTER_BYTES_INSERT((byte) 0xC),

    /** # of bytes of bloom filter block erased from cache */
    BLOCK_CACHE_FILTER_BYTES_EVICT((byte) 0xD),

    /** # of times cache miss when accessing data block from block cache. */
    BLOCK_CACHE_DATA_MISS((byte) 0xE),

    /** # of times cache hit when accessing data block from block cache. */
    BLOCK_CACHE_DATA_HIT((byte) 0xF),

    /** # of data blocks added to block cache. */
    BLOCK_CACHE_DATA_ADD((byte) 0x10),

    /** # of bytes of data blocks inserted into cache */
    BLOCK_CACHE_DATA_BYTES_INSERT((byte) 0x11),

    /** # of bytes read from cache. */
    BLOCK_CACHE_BYTES_READ((byte) 0x12),

    /** # of bytes written into cache. */
    BLOCK_CACHE_BYTES_WRITE((byte) 0x13),

    /** # of times bloom filter has avoided file reads. */
    BLOOM_FILTER_USEFUL((byte) 0x14),

    /** # persistent cache hit */
    PERSISTENT_CACHE_HIT((byte) 0x15),

    /** # persistent cache miss */
    PERSISTENT_CACHE_MISS((byte) 0x16),

    /** # total simulation block cache hits */
    SIM_BLOCK_CACHE_HIT((byte) 0x17),

    /** # total simulation block cache misses */
    SIM_BLOCK_CACHE_MISS((byte) 0x18),

    /** # of memtable hits. */
    MEMTABLE_HIT((byte) 0x19),

    /** # of memtable misses. */
    MEMTABLE_MISS((byte) 0x1A),

    /** # of Get() queries served by L0 */
    GET_HIT_L0((byte) 0x1B),

    /** # of Get() queries served by L1 */
    GET_HIT_L1((byte) 0x1C),

    /** # of Get() queries served by L2 and up */
    GET_HIT_L2_AND_UP((byte) 0x1D),

    /**
     * COMPACTION_KEY_DROP_* count the reasons for key drop during compaction
     * There are 4 reasons currently.
     */

    /** key was written with a newer value. */
    COMPACTION_KEY_DROP_NEWER_ENTRY((byte) 0x1E),

    /**
     * Also includes keys dropped for range del.
     * The key is obsolete.
     */
    COMPACTION_KEY_DROP_OBSOLETE((byte) 0x1F),

    /** key was covered by a range tombstone. */
    COMPACTION_KEY_DROP_RANGE_DEL((byte) 0x20),

    /** User compaction function has dropped the key. */
    COMPACTION_KEY_DROP_USER((byte) 0x21),

    /** all keys in range were deleted. */
    COMPACTION_RANGE_DEL_DROP_OBSOLETE((byte) 0x22),

    /** Number of keys written to the database via the Put and Write call's. */
    NUMBER_KEYS_WRITTEN((byte) 0x23),

    /** Number of Keys read. */
    NUMBER_KEYS_READ((byte) 0x24),

    /** Number keys updated, if inplace update is enabled */
    NUMBER_KEYS_UPDATED((byte) 0x25),

    /**
     * The number of uncompressed bytes issued by DB::Put(), DB::Delete(),
     * DB::Merge(), and DB::Write().
     */
    BYTES_WRITTEN((byte) 0x26),

    /**
     * The number of uncompressed bytes read from DB::Get(). It could be
     * either from memtables, cache, or table files.
     *
     * For the number of logical bytes read from DB::MultiGet(),
     * please use {@link #NUMBER_MULTIGET_BYTES_READ}.
     */
    BYTES_READ((byte) 0x27),

    /** The number of calls to seek. */
    NUMBER_DB_SEEK((byte) 0x28),

    /** The number of calls to next. */
    NUMBER_DB_NEXT((byte) 0x29),

    /** The number of calls to prev. */
    NUMBER_DB_PREV((byte) 0x2A),

    /** The number of calls to seek that returned data. */
    NUMBER_DB_SEEK_FOUND((byte) 0x2B),

    /** The number of calls to next that returned data. */
    NUMBER_DB_NEXT_FOUND((byte) 0x2C),

    /** The number of calls to prev that returned data. */
    NUMBER_DB_PREV_FOUND((byte) 0x2D),

    /**
     * The number of uncompressed bytes read from an iterator.
     * Includes size of key and value.
     */
    ITER_BYTES_READ((byte) 0x2E),

    NO_FILE_CLOSES((byte) 0x2F),

    NO_FILE_OPENS((byte) 0x30),

    NO_FILE_ERRORS((byte) 0x31),

    /**
     * Time system had to wait to do LO-L1 compactions.
     *
     * @deprecated
     */
    @Deprecated
    STALL_L0_SLOWDOWN_MICROS((byte) 0x32),

    /**
     * Time system had to wait to move memtable to L1.
     *
     * @deprecated
     */
    @Deprecated
    STALL_MEMTABLE_COMPACTION_MICROS((byte) 0x33),

    /**
     * write throttle because of too many files in L0.
     *
     * @deprecated
     */
    @Deprecated
    STALL_L0_NUM_FILES_MICROS((byte) 0x34),

    /** Writer has to wait for compaction or flush to finish. */
    STALL_MICROS((byte) 0x35),

    /**
     * The wait time for db mutex.
     *
     * Disabled by default. To enable it set stats level to {@link StatsLevel#ALL}
     */
    DB_MUTEX_WAIT_MICROS((byte) 0x36),

    RATE_LIMIT_DELAY_MILLIS((byte) 0x37),

    /** Number of iterators created. */
    NO_ITERATORS((byte) 0x38),

    /** Number of MultiGet calls. */
    NUMBER_MULTIGET_CALLS((byte) 0x39),

    /** Number of MultiGet keys read. */
    NUMBER_MULTIGET_KEYS_READ((byte) 0x3A),

    /** Number of MultiGet bytes read. */
    NUMBER_MULTIGET_BYTES_READ((byte) 0x3B),

    /**
     * Number of deletes records that were not required to be
     * written to storage because key does not exist.
     */
    NUMBER_FILTERED_DELETES((byte) 0x3C),

    NUMBER_MERGE_FAILURES((byte) 0x3D),

    /**
     * Number of times bloom was checked before creating iterator on a
     * file, and the number of times the check was useful in avoiding
     * iterator creation (and thus likely IOPs).
     */
    BLOOM_FILTER_PREFIX_CHECKED((byte) 0x3E),

    BLOOM_FILTER_PREFIX_USEFUL((byte) 0x3F),

    /**
     * Number of times we had to reseek inside an iteration to skip
     * over large number of keys with same userkey.
     */
    NUMBER_OF_RESEEKS_IN_ITERATION((byte) 0x40),

    /**
     * Record the number of calls to {@link RocksDB#getUpdatesSince(long)}. Useful to keep track of
     * transaction log iterator refreshes.
     */
    GET_UPDATES_SINCE_CALLS((byte) 0x41),

    /** Miss in the compressed block cache. */
    BLOCK_CACHE_COMPRESSED_MISS((byte) 0x42),

    /** Hit in the compressed block cache. */
    BLOCK_CACHE_COMPRESSED_HIT((byte) 0x43),

    /** Number of blocks added to compressed block cache. */
    BLOCK_CACHE_COMPRESSED_ADD((byte) 0x44),

    /** Number of failures when adding blocks to compressed block cache. */
    BLOCK_CACHE_COMPRESSED_ADD_FAILURES((byte) 0x45),

    /** Number of times WAL sync is done. */
    WAL_FILE_SYNCED((byte) 0x46),

    /** Number of bytes written to WAL. */
    WAL_FILE_BYTES((byte) 0x47),

    /**
     * Writes can be processed by requesting thread or by the thread at the
     * head of the writers queue.
     */
    WRITE_DONE_BY_SELF((byte) 0x48),

    /** Equivalent to writes done for others. */
    WRITE_DONE_BY_OTHER((byte) 0x49),

    /** Number of writes ending up with timed-out. */
    WRITE_TIMEDOUT((byte) 0x4A),

    /** Number of Write calls that request WAL. */
    WRITE_WITH_WAL((byte) 0x4B),

    /** Bytes read during compaction. */
    COMPACT_READ_BYTES((byte) 0x4C),

    /** Bytes written during compaction. */
    COMPACT_WRITE_BYTES((byte) 0x4D),

    /** Bytes written during flush. */
    FLUSH_WRITE_BYTES((byte) 0x4E),

    /**
     * Number of table's properties loaded directly from file, without creating
     * table reader object.
     */
    NUMBER_DIRECT_LOAD_TABLE_PROPERTIES((byte) 0x4F),

    NUMBER_SUPERVERSION_ACQUIRES((byte) 0x50),

    NUMBER_SUPERVERSION_RELEASES((byte) 0x51),

    NUMBER_SUPERVERSION_CLEANUPS((byte) 0x52),

    /** # of compressions/decompressions executed */
    NUMBER_BLOCK_COMPRESSED((byte) 0x53),

    NUMBER_BLOCK_DECOMPRESSED((byte) 0x54),

    NUMBER_BLOCK_NOT_COMPRESSED((byte) 0x55),

    MERGE_OPERATION_TOTAL_TIME((byte) 0x56),

    FILTER_OPERATION_TOTAL_TIME((byte) 0x57),

    /** Row cache. */
    ROW_CACHE_HIT((byte) 0x58),

    ROW_CACHE_MISS((byte) 0x59),

    /**
     * Read amplification statistics.
     *
     * Read amplification can be calculated using this formula
     * (READ_AMP_TOTAL_READ_BYTES / READ_AMP_ESTIMATE_USEFUL_BYTES)
     *
     * REQUIRES: ReadOptions::read_amp_bytes_per_bit to be enabled
     */

    /** Estimate of total bytes actually used. */
    READ_AMP_ESTIMATE_USEFUL_BYTES((byte) 0x5A),

    /** Total size of loaded data blocks. */
    READ_AMP_TOTAL_READ_BYTES((byte) 0x5B),

    /** Number of refill intervals where rate limiter's bytes are fully consumed. */
    NUMBER_RATE_LIMITER_DRAINS((byte) 0x5C),

    /** Number of internal skipped during iteration */
    NUMBER_ITER_SKIP((byte) 0x5D),

    /** Number of MultiGet keys found (vs number requested) */
    NUMBER_MULTIGET_KEYS_FOUND((byte) 0x5E),

    // -0x01 to fixate the new value that incorrectly changed TICKER_ENUM_MAX
    /** Number of iterators created. */
    NO_ITERATOR_CREATED((byte) -0x01),

    /** Number of iterators deleted. */
    NO_ITERATOR_DELETED((byte) 0x60),

    /** Deletions obsoleted before bottom level due to file gap optimization. */
    COMPACTION_OPTIMIZED_DEL_DROP_OBSOLETE((byte) 0x61),

    /** If a compaction was cancelled in sfm to prevent ENOSPC */
    COMPACTION_CANCELLED((byte) 0x62),

    /** # of times bloom FullFilter has not avoided the reads. */
    BLOOM_FILTER_FULL_POSITIVE((byte) 0x63),

    /**
     * # of times bloom FullFilter has not avoided the reads and data actually
     * exist.
     */
    BLOOM_FILTER_FULL_TRUE_POSITIVE((byte) 0x64),

    /**
     * BlobDB specific stats
     * # of Put/PutTTL/PutUntil to BlobDB.
     */
    BLOB_DB_NUM_PUT((byte) 0x65),

    /** # of Write to BlobDB. */
    BLOB_DB_NUM_WRITE((byte) 0x66),

    /** # of Get to BlobDB. */
    BLOB_DB_NUM_GET((byte) 0x67),

    /** # of MultiGet to BlobDB. */
    BLOB_DB_NUM_MULTIGET((byte) 0x68),

    /** # of Seek/SeekToFirst/SeekToLast/SeekForPrev to BlobDB iterator. */
    BLOB_DB_NUM_SEEK((byte) 0x69),

    /** # of Next to BlobDB iterator. */
    BLOB_DB_NUM_NEXT((byte) 0x6A),

    /** # of Prev to BlobDB iterator. */
    BLOB_DB_NUM_PREV((byte) 0x6B),

    /** # of keys written to BlobDB. */
    BLOB_DB_NUM_KEYS_WRITTEN((byte) 0x6C),

    /** # of keys read from BlobDB. */
    BLOB_DB_NUM_KEYS_READ((byte) 0x6D),

    /** # of bytes (key + value) written to BlobDB. */
    BLOB_DB_BYTES_WRITTEN((byte) 0x6E),

    /** # of bytes (keys + value) read from BlobDB. */
    BLOB_DB_BYTES_READ((byte) 0x6F),

    /** # of keys written by BlobDB as non-TTL inlined value. */
    BLOB_DB_WRITE_INLINED((byte) 0x70),

    /** # of keys written by BlobDB as TTL inlined value. */
    BLOB_DB_WRITE_INLINED_TTL((byte) 0x71),

    /** # of keys written by BlobDB as non-TTL blob value. */
    BLOB_DB_WRITE_BLOB((byte) 0x72),

    /** # of keys written by BlobDB as TTL blob value. */
    BLOB_DB_WRITE_BLOB_TTL((byte) 0x73),

    /** # of bytes written to blob file. */
    BLOB_DB_BLOB_FILE_BYTES_WRITTEN((byte) 0x74),

    /** # of bytes read from blob file. */
    BLOB_DB_BLOB_FILE_BYTES_READ((byte) 0x75),

    /** # of times a blob files being synced. */
    BLOB_DB_BLOB_FILE_SYNCED((byte) 0x76),

    /**
     * # of blob index evicted from base DB by BlobDB compaction filter because
     * of expiration.
     */
    BLOB_DB_BLOB_INDEX_EXPIRED_COUNT((byte) 0x77),

    /**
     * Size of blob index evicted from base DB by BlobDB compaction filter
     * because of expiration.
     */
    BLOB_DB_BLOB_INDEX_EXPIRED_SIZE((byte) 0x78),

    /**
     * # of blob index evicted from base DB by BlobDB compaction filter because
     * of corresponding file deleted.
     */
    BLOB_DB_BLOB_INDEX_EVICTED_COUNT((byte) 0x79),

    /**
     * Size of blob index evicted from base DB by BlobDB compaction filter
     * because of corresponding file deleted.
     */
    BLOB_DB_BLOB_INDEX_EVICTED_SIZE((byte) 0x7A),

    /** # of blob files being garbage collected. */
    BLOB_DB_GC_NUM_FILES((byte) 0x7B),

    /** # of blob files generated by garbage collection. */
    BLOB_DB_GC_NUM_NEW_FILES((byte) 0x7C),

    /** # of BlobDB garbage collection failures. */
    BLOB_DB_GC_FAILURES((byte) 0x7D),

    /**
     * # of keys drop by BlobDB garbage collection because they had been
     * overwritten.
     */
    BLOB_DB_GC_NUM_KEYS_OVERWRITTEN((byte) 0x7E),

    /** # of keys drop by BlobDB garbage collection because of expiration. */
    BLOB_DB_GC_NUM_KEYS_EXPIRED((byte) 0x7F),

    /** # of keys relocated to new blob file by garbage collection. */
    BLOB_DB_GC_NUM_KEYS_RELOCATED((byte) -0x02),

    /**
     * # of bytes drop by BlobDB garbage collection because they had been
     * overwritten.
     */
    BLOB_DB_GC_BYTES_OVERWRITTEN((byte) -0x03),

    /** # of bytes drop by BlobDB garbage collection because of expiration. */
    BLOB_DB_GC_BYTES_EXPIRED((byte) -0x04),

    /** # of bytes relocated to new blob file by garbage collection. */
    BLOB_DB_GC_BYTES_RELOCATED((byte) -0x05),

    /** # of blob files evicted because of BlobDB is full. */
    BLOB_DB_FIFO_NUM_FILES_EVICTED((byte) -0x06),

    /** # of keys in the blob files evicted because of BlobDB is full. */
    BLOB_DB_FIFO_NUM_KEYS_EVICTED((byte) -0x07),

    /** # of bytes in the blob files evicted because of BlobDB is full. */
    BLOB_DB_FIFO_BYTES_EVICTED((byte) -0x08),

    /**
     * These counters indicate a performance issue in WritePrepared transactions.
     * We should not see them ticking much.
     * # of times prepare_mutex_ is acquired in the fast path.
     */
    TXN_PREPARE_MUTEX_OVERHEAD((byte) -0x09),

    /** # of times old_commit_map_mutex_ is acquired in the fast path. */
    TXN_OLD_COMMIT_MAP_MUTEX_OVERHEAD((byte) -0x0A),

    /** # of times we checked a batch for duplicate keys. */
    TXN_DUPLICATE_KEY_OVERHEAD((byte) -0x0B),

    /** # of times snapshot_mutex_ is acquired in the fast path. */
    TXN_SNAPSHOT_MUTEX_OVERHEAD((byte) -0x0C),

    TICKER_ENUM_MAX((byte) 0x5F);

    // Native enum mapping value; must stay in sync with the JNI layer.
    private final byte value;

    TickerType(final byte value) {
        this.value = value;
    }

    /**
     * Returns the byte value of the enumerations value.
     *
     * @deprecated Exposes internal value of native enum mappings.
     *     This method will be marked package private in the next major release.
     *
     * @return the internal representation
     */
    @Deprecated
    public byte getValue() {
        return value;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.rocketmq.common;

import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import org.slf4j.Logger;

/**
 * Aggregates the configuration of one or more plain config objects plus any
 * extra {@link Properties}, guards the merged view with a read-write lock,
 * and persists it to {@code storePath} whenever it is updated.
 */
public class Configuration {

    private final Logger log;

    private List<Object> configObjectList = new ArrayList<Object>(4);

    // Target file for persist(); either set directly via setStorePath(), or
    // read reflectively from storePathObject/storePathField when
    // storePathFromConfig is true (see setStorePathFromConfig).
    private String storePath;
    private boolean storePathFromConfig = false;
    private Object storePathObject;
    private Field storePathField;

    // Bumped on every successful update() so peers can detect config changes.
    private DataVersion dataVersion = new DataVersion();

    private ReadWriteLock readWriteLock = new ReentrantReadWriteLock();

    /**
     * All properties include configs in object and extend properties.
     */
    private Properties allConfigs = new Properties();

    public Configuration(Logger log) {
        this.log = log;
    }

    public Configuration(Logger log, Object... configObjects) {
        this.log = log;
        if (configObjects == null || configObjects.length == 0) {
            return;
        }
        for (Object configObject : configObjects) {
            registerConfig(configObject);
        }
    }

    public Configuration(Logger log, String storePath, Object... configObjects) {
        this(log, configObjects);
        this.storePath = storePath;
    }

    /**
     * Register a config object: its bean properties are merged into
     * {@link #allConfigs} and the object is remembered so later
     * {@link #update(Properties)} calls can write values back into it.
     *
     * @param configObject the object to register; its properties are extracted via MixAll
     * @return the current Configuration object
     */
    public Configuration registerConfig(Object configObject) {
        try {
            readWriteLock.writeLock().lockInterruptibly();
            try {
                Properties registerProps = MixAll.object2Properties(configObject);
                merge(registerProps, this.allConfigs);
                configObjectList.add(configObject);
            } finally {
                readWriteLock.writeLock().unlock();
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            log.error("registerConfig lock error", e);
        }
        return this;
    }

    /**
     * Register extra config properties, merged into {@link #allConfigs}.
     *
     * @param extProperties extra properties to merge; ignored when {@code null}
     * @return the current Configuration object
     */
    public Configuration registerConfig(Properties extProperties) {
        if (extProperties == null) {
            return this;
        }
        try {
            readWriteLock.writeLock().lockInterruptibly();
            try {
                merge(extProperties, this.allConfigs);
            } finally {
                readWriteLock.writeLock().unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            // Fixed: the placeholder was previously concatenated onto the message
            // ("... {}" + extProperties), so SLF4J never substituted the argument.
            log.error("register lock error. {}", extProperties);
        }
        return this;
    }

    /**
     * The store path will be gotten from the field of object.
     *
     * @param object    object holding the store-path field
     * @param fieldName name of the (non-static) field to read the path from
     * @throws java.lang.RuntimeException if the field of object is not exist.
     */
    public void setStorePathFromConfig(Object object, String fieldName) {
        assert object != null;

        try {
            readWriteLock.writeLock().lockInterruptibly();
            try {
                this.storePathFromConfig = true;
                this.storePathObject = object;
                // check
                this.storePathField = object.getClass().getDeclaredField(fieldName);
                assert this.storePathField != null
                    && !Modifier.isStatic(this.storePathField.getModifiers());
                this.storePathField.setAccessible(true);
            } catch (NoSuchFieldException e) {
                throw new RuntimeException(e);
            } finally {
                readWriteLock.writeLock().unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            log.error("setStorePathFromConfig lock error", e);
        }
    }

    /**
     * Resolve the effective store path: the reflective field value when
     * {@code storePathFromConfig} is set, otherwise the plain {@code storePath}.
     */
    private String getStorePath() {
        String realStorePath = null;
        try {
            readWriteLock.readLock().lockInterruptibly();
            try {
                realStorePath = this.storePath;

                if (this.storePathFromConfig) {
                    try {
                        realStorePath = (String) storePathField.get(this.storePathObject);
                    } catch (IllegalAccessException e) {
                        log.error("getStorePath error, ", e);
                    }
                }
            } finally {
                readWriteLock.readLock().unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            log.error("getStorePath lock error", e);
        }
        return realStorePath;
    }

    public void setStorePath(final String storePath) {
        this.storePath = storePath;
    }

    /**
     * Apply {@code properties} to the merged view and to every registered
     * config object, bump the data version, then persist to disk.
     * Only keys that already exist are updated (see {@link #mergeIfExist}).
     */
    public void update(Properties properties) {
        try {
            readWriteLock.writeLock().lockInterruptibly();
            try {
                // the property must be exist when update
                mergeIfExist(properties, this.allConfigs);

                for (Object configObject : configObjectList) {
                    // not allConfigs to update...
                    MixAll.properties2Object(properties, configObject);
                }

                this.dataVersion.nextVersion();
            } finally {
                readWriteLock.writeLock().unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            log.error("update lock error, {}", properties);
            return;
        }
        // persist() takes the read lock itself, so it must run outside the write lock.
        persist();
    }

    /** Write the current merged configuration to the store path. */
    public void persist() {
        try {
            readWriteLock.readLock().lockInterruptibly();
            try {
                String allConfigs = getAllConfigsInternal();
                MixAll.string2File(allConfigs, getStorePath());
            } catch (IOException e) {
                log.error("persist string2File error, ", e);
            } finally {
                readWriteLock.readLock().unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            log.error("persist lock error", e);
        }
    }

    /** @return the merged configuration rendered as a properties string, or {@code null} on interruption */
    public String getAllConfigsFormatString() {
        try {
            readWriteLock.readLock().lockInterruptibly();
            try {
                return getAllConfigsInternal();
            } finally {
                readWriteLock.readLock().unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            log.error("getAllConfigsFormatString lock error", e);
        }
        return null;
    }

    public String getDataVersionJson() {
        return this.dataVersion.toJson();
    }

    /**
     * @return the live merged {@link Properties}, or {@code null} on interruption.
     *     NOTE(review): this exposes the internal mutable map; callers must not modify it.
     */
    public Properties getAllConfigs() {
        try {
            readWriteLock.readLock().lockInterruptibly();
            try {
                return this.allConfigs;
            } finally {
                readWriteLock.readLock().unlock();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            log.error("getAllConfigs lock error", e);
        }
        return null;
    }

    /**
     * Re-extract properties from every registered config object (objects may
     * have been mutated since registration), merge them in, and render the
     * whole map as a string. Caller must hold a lock.
     */
    private String getAllConfigsInternal() {
        // reload from config object ?
        for (Object configObject : this.configObjectList) {
            Properties properties = MixAll.object2Properties(configObject);
            if (properties != null) {
                merge(properties, this.allConfigs);
            } else {
                log.warn("getAllConfigsInternal object2Properties is null, {}",
                    configObject.getClass());
            }
        }
        return MixAll.properties2String(this.allConfigs);
    }

    /** Copy every entry of {@code from} into {@code to}, logging replacements. */
    private void merge(Properties from, Properties to) {
        for (Object key : from.keySet()) {
            Object fromObj = from.get(key), toObj = to.get(key);
            if (toObj != null && !toObj.equals(fromObj)) {
                log.info("Replace, key: {}, value: {} -> {}", key, toObj, fromObj);
            }
            to.put(key, fromObj);
        }
    }

    /** Like {@link #merge}, but only overwrites keys that already exist in {@code to}. */
    private void mergeIfExist(Properties from, Properties to) {
        for (Object key : from.keySet()) {
            if (!to.containsKey(key)) {
                continue;
            }

            Object fromObj = from.get(key), toObj = to.get(key);
            if (toObj != null && !toObj.equals(fromObj)) {
                log.info("Replace, key: {}, value: {} -> {}", key, toObj, fromObj);
            }
            to.put(key, fromObj);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.dbcp;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

/**
 * Tracks db connection usage for recovering and reporting
 * abandoned db connections.
 *
 * The JDBC Connection, Statement, and ResultSet classes
 * extend this class.
 *
 * @author Glenn L. Nielsen
 * @version $Revision: 758745 $ $Date: 2009-03-26 18:02:20 +0100 (Do, 26 Mrz 2009) $
 */
public class AbandonedTrace {

    /**
     * Date format. SimpleDateFormat is NOT thread-safe and this single
     * instance is shared by every AbandonedTrace in the JVM, so all access
     * must synchronize on it (see {@link #printStackTrace()}).
     */
    private static final SimpleDateFormat format = new SimpleDateFormat
        ("'DBCP object created' yyyy-MM-dd HH:mm:ss " +
         "'by the following code was never closed:'");

    /** DBCP AbandonedConfig */
    private AbandonedConfig config = null;
    /** A stack trace of the code that created me (if in debug mode) */
    private volatile Exception createdBy;
    /** Time created */
    private volatile long createdTime;
    /** A list of objects created by children of this object; guarded by itself */
    private final List trace = new ArrayList();
    /** Last time this connection was used */
    private volatile long lastUsed = 0;

    /**
     * Create a new AbandonedTrace without config and
     * without doing abandoned tracing.
     */
    public AbandonedTrace() {
        init(null);
    }

    /**
     * Construct a new AbandonedTrace with no parent object.
     *
     * @param config AbandonedConfig
     */
    public AbandonedTrace(AbandonedConfig config) {
        this.config = config;
        init(null);
    }

    /**
     * Construct a new AbandonedTrace with a parent object.
     *
     * @param parent AbandonedTrace parent object
     */
    public AbandonedTrace(AbandonedTrace parent) {
        this.config = parent.getConfig();
        init(parent);
    }

    /**
     * Initialize abandoned tracing for this object: register with the parent
     * (if any) and, when logAbandoned is on, capture the creation stack trace.
     *
     * @param parent AbandonedTrace parent object
     */
    private void init(AbandonedTrace parent) {
        if (parent != null) {
            parent.addTrace(this);
        }
        if (config == null) {
            return;
        }
        if (config.getLogAbandoned()) {
            createdBy = new Exception();
            createdTime = System.currentTimeMillis();
        }
    }

    /**
     * Get the abandoned config for this object.
     *
     * @return AbandonedConfig for this object
     */
    protected AbandonedConfig getConfig() {
        return config;
    }

    /**
     * Get the last time this object was used in ms.
     *
     * @return long time in ms
     */
    protected long getLastUsed() {
        return lastUsed;
    }

    /**
     * Set the time this object was last used to the
     * current time in ms.
     */
    protected void setLastUsed() {
        lastUsed = System.currentTimeMillis();
    }

    /**
     * Set the time in ms this object was last used.
     *
     * @param time time in ms
     */
    protected void setLastUsed(long time) {
        lastUsed = time;
    }

    /**
     * If logAbandoned=true generate a stack trace
     * for this object then add this object to the parent
     * object trace list.
     */
    protected void setStackTrace() {
        if (config == null) {
            return;
        }
        if (config.getLogAbandoned()) {
            createdBy = new Exception();
            createdTime = System.currentTimeMillis();
        }
    }

    /**
     * Add an object to the list of objects being
     * traced.
     *
     * @param trace AbandonedTrace object to add
     */
    protected void addTrace(AbandonedTrace trace) {
        synchronized (this.trace) {
            this.trace.add(trace);
        }
        setLastUsed();
    }

    /**
     * Clear the list of objects being traced by this
     * object.
     */
    protected void clearTrace() {
        synchronized (this.trace) {
            this.trace.clear();
        }
    }

    /**
     * Get a list of objects being traced by this object.
     *
     * @return List of objects (a defensive copy of the internal list)
     */
    protected List getTrace() {
        synchronized (this.trace) {
            return new ArrayList(trace);
        }
    }

    /**
     * Prints a stack trace of the code that
     * created this object.
     */
    public void printStackTrace() {
        if (createdBy != null && config != null) {
            String header;
            // SimpleDateFormat is not thread-safe; serialize access to the
            // shared formatter so concurrent dumps cannot corrupt its state.
            synchronized (format) {
                header = format.format(new Date(createdTime));
            }
            config.getLogWriter().println(header);
            createdBy.printStackTrace(config.getLogWriter());
        }
        synchronized (this.trace) {
            Iterator it = this.trace.iterator();
            while (it.hasNext()) {
                AbandonedTrace at = (AbandonedTrace) it.next();
                at.printStackTrace();
            }
        }
    }

    /**
     * Remove a child object this object is tracing.
     *
     * @param trace AbandonedTrace object to remove
     */
    protected void removeTrace(AbandonedTrace trace) {
        synchronized (this.trace) {
            this.trace.remove(trace);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.graph;

import java.util.function.Predicate;

import org.apache.jena.shared.PrefixMapping;
import org.apache.jena.util.iterator.ExtendedIterator;
import org.apache.jena.util.iterator.NullIterator;

/**
 * An immutable RDF triple: a subject, predicate and object (all {@link Node}s)
 * expressing that the relationship named by the predicate holds between the
 * subject and the object. Triples are also used as patterns, in which case any
 * slot may be the wildcard {@code Node.ANY}.
 */
public class Triple {

    private final Node subj;
    private final Node pred;
    private final Node obj;

    public Triple(Node s, Node p, Node o) {
        subj = requireNode(s, "subject cannot be null");
        pred = requireNode(p, "predicate cannot be null");
        obj = requireNode(o, "object cannot be null");
    }

    // Constructor guard; keeps the historical exception type and messages.
    private static Node requireNode(Node node, String message) {
        if (node == null)
            throw new UnsupportedOperationException(message);
        return node;
    }

    /**
     * A triple-iterator with no elements.
     *
     * @deprecated Use {@link NullIterator#instance()}
     */
    @Deprecated
    public static final ExtendedIterator<Triple> None = NullIterator.instance();

    /**
     * Render as a human-readable string "subject @predicate object"
     * using the standard prefix mapping.
     */
    @Override
    public String toString() {
        return toString(PrefixMapping.Standard);
    }

    public String toString(PrefixMapping pm) {
        StringBuilder sb = new StringBuilder();
        sb.append(subj.toString(pm, true));
        sb.append(" @");
        sb.append(pred.toString(pm, true));
        sb.append(' ');
        sb.append(obj.toString(pm, true));
        return sb.toString();
    }

    /** @return the subject of the triple */
    public final Node getSubject() {
        return subj;
    }

    /** @return the predicate of the triple */
    public final Node getPredicate() {
        return pred;
    }

    /** @return the object of the triple */
    public final Node getObject() {
        return obj;
    }

    /** Return subject or null, not Node.ANY */
    public Node getMatchSubject() {
        return anyToNull(subj);
    }

    /** Return predicate or null, not Node.ANY */
    public Node getMatchPredicate() {
        return anyToNull(pred);
    }

    /** Return object or null, not Node.ANY */
    public Node getMatchObject() {
        return anyToNull(obj);
    }

    private static Node anyToNull(Node node) {
        if (Node.ANY.equals(node))
            return null;
        return node;
    }

    private static Node nullToAny(Node node) {
        if (node == null)
            return Node.ANY;
        return node;
    }

    /** True when no slot is a wildcard or variable. */
    public boolean isConcrete() {
        return subj.isConcrete() && pred.isConcrete() && obj.isConcrete();
    }

    /**
     * Answer true if {@code other} is a Triple with the same subject,
     * predicate, and object as this triple.
     */
    @Override
    public boolean equals(Object other) {
        if (!(other instanceof Triple))
            return false;
        Triple t = (Triple) other;
        return t.sameAs(subj, pred, obj);
    }

    /** Answer true iff this triple has subject s, predicate p, and object o. */
    public boolean sameAs(Node s, Node p, Node o) {
        return subj.equals(s) && pred.equals(p) && obj.equals(o);
    }

    /**
     * Does this triple, used as a pattern, match the other triple
     * (usually a ground triple)?
     */
    public boolean matches(Triple other) {
        return other.matchedBy(subj, pred, obj);
    }

    public boolean matches(Node s, Node p, Node o) {
        return subj.matches(s) && pred.matches(p) && obj.matches(o);
    }

    private boolean matchedBy(Node s, Node p, Node o) {
        return s.matches(subj) && p.matches(pred) && o.matches(obj);
    }

    public boolean subjectMatches(Node s) {
        return subj.matches(s);
    }

    public boolean predicateMatches(Node p) {
        return pred.matches(p);
    }

    public boolean objectMatches(Node o) {
        return obj.matches(o);
    }

    /**
     * The hash-code of a triple is the hash-codes of its components
     * munged together: see hashCode(S, P, O).
     */
    @Override
    public int hashCode() {
        return hashCode(subj, pred, obj);
    }

    /**
     * Return the munged hashCodes of the specified nodes: an exclusive-or of
     * the slightly-shifted component hashcodes, so (almost) all bits count
     * and order matters — (S P O) hashes differently from (O P S), etc.
     */
    public static int hashCode(Node s, Node p, Node o) {
        return (s.hashCode() >> 1) ^ p.hashCode() ^ (o.hashCode() << 1);
    }

    public static Triple create(Node s, Node p, Node o) {
        return new Triple(s, p, o);
    }

    /** Build a pattern triple, mapping null arguments to Node.ANY. */
    public static Triple createMatch(Node s, Node p, Node o) {
        return Triple.create(nullToAny(s), nullToAny(p), nullToAny(o));
    }

    /** A Triple that is wildcarded in all fields. */
    public static final Triple ANY = Triple.create(Node.ANY, Node.ANY, Node.ANY);

    /**
     * A Field is a selector from Triples; it allows selectors to be passed
     * around as if they were functions.
     */
    public static abstract class Field {

        /** Extract this field's node from the given triple. */
        public abstract Node getField(Triple t);

        /** A predicate accepting triples whose field matches {@code n}. */
        public abstract Predicate<Triple> filterOn(Node n);

        public final Predicate<Triple> filterOn(Triple t) {
            return filterOn(getField(t));
        }

        protected static final Predicate<Triple> anyTriple = t -> true;

        public static final Field fieldSubject = new Field() {
            @Override
            public Node getField(Triple t) {
                return t.subj;
            }

            @Override
            public Predicate<Triple> filterOn(final Node n) {
                if (!n.isConcrete())
                    return anyTriple;
                return x -> n.equals(x.subj);
            }
        };

        public static final Field fieldObject = new Field() {
            @Override
            public Node getField(Triple t) {
                return t.obj;
            }

            @Override
            public Predicate<Triple> filterOn(final Node n) {
                if (!n.isConcrete())
                    return anyTriple;
                // Objects compare by value (sameValueAs), unlike the
                // identity-style equals used for subjects and predicates.
                return x -> n.sameValueAs(x.obj);
            }
        };

        public static final Field fieldPredicate = new Field() {
            @Override
            public Node getField(Triple t) {
                return t.pred;
            }

            @Override
            public Predicate<Triple> filterOn(final Node n) {
                if (!n.isConcrete())
                    return anyTriple;
                return x -> n.equals(x.pred);
            }
        };
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.serviceconsumermanagement.v1; /** * ServiceConsumerManagement request. * * @since 1.3 */ @SuppressWarnings("javadoc") public abstract class ServiceConsumerManagementRequest<T> extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest<T> { /** * @param client Google client * @param method HTTP Method * @param uriTemplate URI template for the path relative to the base URL. If it starts with a "/" * the base path from the base URL will be stripped out. The URI template can also be a * full URL. URI template expansion is done using * {@link com.google.api.client.http.UriTemplate#expand(String, String, Object, boolean)} * @param content A POJO that can be serialized into JSON or {@code null} for none * @param responseClass response class to parse into */ public ServiceConsumerManagementRequest( ServiceConsumerManagement client, String method, String uriTemplate, Object content, Class<T> responseClass) { super( client, method, uriTemplate, content, responseClass); } /** V1 error format. */ @com.google.api.client.util.Key("$.xgafv") private java.lang.String $Xgafv; /** * V1 error format. */ public java.lang.String get$Xgafv() { return $Xgafv; } /** V1 error format. 
*/ public ServiceConsumerManagementRequest<T> set$Xgafv(java.lang.String $Xgafv) { this.$Xgafv = $Xgafv; return this; } /** OAuth access token. */ @com.google.api.client.util.Key("access_token") private java.lang.String accessToken; /** * OAuth access token. */ public java.lang.String getAccessToken() { return accessToken; } /** OAuth access token. */ public ServiceConsumerManagementRequest<T> setAccessToken(java.lang.String accessToken) { this.accessToken = accessToken; return this; } /** Data format for response. */ @com.google.api.client.util.Key private java.lang.String alt; /** * Data format for response. [default: json] */ public java.lang.String getAlt() { return alt; } /** Data format for response. */ public ServiceConsumerManagementRequest<T> setAlt(java.lang.String alt) { this.alt = alt; return this; } /** JSONP */ @com.google.api.client.util.Key private java.lang.String callback; /** * JSONP */ public java.lang.String getCallback() { return callback; } /** JSONP */ public ServiceConsumerManagementRequest<T> setCallback(java.lang.String callback) { this.callback = callback; return this; } /** Selector specifying which fields to include in a partial response. */ @com.google.api.client.util.Key private java.lang.String fields; /** * Selector specifying which fields to include in a partial response. */ public java.lang.String getFields() { return fields; } /** Selector specifying which fields to include in a partial response. */ public ServiceConsumerManagementRequest<T> setFields(java.lang.String fields) { this.fields = fields; return this; } /** * API key. Your API key identifies your project and provides you with API access, quota, and * reports. Required unless you provide an OAuth 2.0 token. */ @com.google.api.client.util.Key private java.lang.String key; /** * API key. Your API key identifies your project and provides you with API access, quota, and * reports. Required unless you provide an OAuth 2.0 token. 
*/ public java.lang.String getKey() { return key; } /** * API key. Your API key identifies your project and provides you with API access, quota, and * reports. Required unless you provide an OAuth 2.0 token. */ public ServiceConsumerManagementRequest<T> setKey(java.lang.String key) { this.key = key; return this; } /** OAuth 2.0 token for the current user. */ @com.google.api.client.util.Key("oauth_token") private java.lang.String oauthToken; /** * OAuth 2.0 token for the current user. */ public java.lang.String getOauthToken() { return oauthToken; } /** OAuth 2.0 token for the current user. */ public ServiceConsumerManagementRequest<T> setOauthToken(java.lang.String oauthToken) { this.oauthToken = oauthToken; return this; } /** Returns response with indentations and line breaks. */ @com.google.api.client.util.Key private java.lang.Boolean prettyPrint; /** * Returns response with indentations and line breaks. [default: true] */ public java.lang.Boolean getPrettyPrint() { return prettyPrint; } /** Returns response with indentations and line breaks. */ public ServiceConsumerManagementRequest<T> setPrettyPrint(java.lang.Boolean prettyPrint) { this.prettyPrint = prettyPrint; return this; } /** * Available to use for quota purposes for server-side applications. Can be any arbitrary string * assigned to a user, but should not exceed 40 characters. */ @com.google.api.client.util.Key private java.lang.String quotaUser; /** * Available to use for quota purposes for server-side applications. Can be any arbitrary string * assigned to a user, but should not exceed 40 characters. */ public java.lang.String getQuotaUser() { return quotaUser; } /** * Available to use for quota purposes for server-side applications. Can be any arbitrary string * assigned to a user, but should not exceed 40 characters. */ public ServiceConsumerManagementRequest<T> setQuotaUser(java.lang.String quotaUser) { this.quotaUser = quotaUser; return this; } /** Legacy upload protocol for media (e.g. 
"media", "multipart"). */ @com.google.api.client.util.Key private java.lang.String uploadType; /** * Legacy upload protocol for media (e.g. "media", "multipart"). */ public java.lang.String getUploadType() { return uploadType; } /** Legacy upload protocol for media (e.g. "media", "multipart"). */ public ServiceConsumerManagementRequest<T> setUploadType(java.lang.String uploadType) { this.uploadType = uploadType; return this; } /** Upload protocol for media (e.g. "raw", "multipart"). */ @com.google.api.client.util.Key("upload_protocol") private java.lang.String uploadProtocol; /** * Upload protocol for media (e.g. "raw", "multipart"). */ public java.lang.String getUploadProtocol() { return uploadProtocol; } /** Upload protocol for media (e.g. "raw", "multipart"). */ public ServiceConsumerManagementRequest<T> setUploadProtocol(java.lang.String uploadProtocol) { this.uploadProtocol = uploadProtocol; return this; } @Override public final ServiceConsumerManagement getAbstractGoogleClient() { return (ServiceConsumerManagement) super.getAbstractGoogleClient(); } @Override public ServiceConsumerManagementRequest<T> setDisableGZipContent(boolean disableGZipContent) { return (ServiceConsumerManagementRequest<T>) super.setDisableGZipContent(disableGZipContent); } @Override public ServiceConsumerManagementRequest<T> setRequestHeaders(com.google.api.client.http.HttpHeaders headers) { return (ServiceConsumerManagementRequest<T>) super.setRequestHeaders(headers); } @Override public ServiceConsumerManagementRequest<T> set(String parameterName, Object value) { return (ServiceConsumerManagementRequest<T>) super.set(parameterName, value); } }
package fr.x9nico.viacheckconnection.bungee;

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import net.md_5.bungee.api.plugin.Plugin;
import net.md_5.bungee.config.Configuration;
import net.md_5.bungee.config.ConfigurationProvider;
import net.md_5.bungee.config.YamlConfiguration;

import javax.net.ssl.HttpsURLConnection;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.GZIPOutputStream;

/**
 * bStats collects some data for plugin authors.
 *
 * Check out https://bStats.org/ to learn more about bStats!
 */
public class Metrics {

    static {
        // You can use the property to disable the check in your test environment
        if (System.getProperty("bstats.relocatecheck") == null || !System.getProperty("bstats.relocatecheck").equals("false")) {
            // Maven's Relocate is clever and changes strings, too. So we build the package
            // names from byte arrays so the shade plugin cannot rewrite them.
            final String defaultPackage = new String(
                    new byte[]{'o', 'r', 'g', '.', 'b', 's', 't', 'a', 't', 's', '.', 'b', 'u', 'n', 'g', 'e', 'e', 'c', 'o', 'r', 'd'});
            final String examplePackage = new String(new byte[]{'y', 'o', 'u', 'r', '.', 'p', 'a', 'c', 'k', 'a', 'g', 'e'});
            // We want to make sure nobody just copy & pastes the example and uses the wrong package names
            if (Metrics.class.getPackage().getName().equals(defaultPackage) || Metrics.class.getPackage().getName().equals(examplePackage)) {
                throw new IllegalStateException("bStats Metrics class has not been relocated correctly!");
            }
        }
    }

    // The version of this bStats class
    public static final int B_STATS_VERSION = 1;

    // The url to which the data is sent
    private static final String URL = "https://bStats.org/submitData/bungeecord";

    // The plugin
    private final Plugin plugin;

    // Is bStats enabled on this server?
    private boolean enabled;

    // The uuid of the server
    private String serverUUID;

    // Should failed requests be logged?
    private boolean logFailedRequests = false;

    // A list with all known metrics class objects including this one.
    // Static so every relocated bStats class in the same JVM shares one registry.
    private static final List<Object> knownMetricsInstances = new ArrayList<>();

    // A list with all custom charts
    private final List<CustomChart> charts = new ArrayList<>();

    /**
     * Creates a new Metrics instance and, if enabled by the shared bStats config,
     * registers it and starts the periodic submission task.
     *
     * @param plugin The plugin this metrics instance belongs to.
     */
    public Metrics(Plugin plugin) {
        this.plugin = plugin;
        try {
            loadConfig();
        } catch (IOException e) {
            // Failed to load configuration
            plugin.getLogger().log(Level.WARNING, "Failed to load bStats config!", e);
            return;
        }
        // We are not allowed to send data about this server :(
        if (!enabled) {
            return;
        }
        Class<?> usedMetricsClass = getFirstBStatsClass();
        if (usedMetricsClass == null) {
            // Failed to get first metrics class
            return;
        }
        if (usedMetricsClass == getClass()) {
            // We are the first! :)
            linkMetrics(this);
            startSubmitting();
        } else {
            // We aren't the first so we link to the first metrics class via reflection,
            // because the first class may be a differently-relocated copy of this one.
            try {
                usedMetricsClass.getMethod("linkMetrics", Object.class).invoke(null, this);
            } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
                if (logFailedRequests) {
                    plugin.getLogger().log(Level.WARNING, "Failed to link to first metrics class " + usedMetricsClass.getName() + "!", e);
                }
            }
        }
    }

    /**
     * Adds a custom chart.
     *
     * @param chart The chart to add. A null chart is logged and ignored.
     */
    public void addCustomChart(CustomChart chart) {
        if (chart == null) {
            // BUGFIX: the original logged the warning but then fell through and added
            // the null chart anyway, causing a NullPointerException later when the
            // charts are iterated in getPluginData(). Skip null charts instead.
            plugin.getLogger().log(Level.WARNING, "Chart cannot be null");
            return;
        }
        charts.add(chart);
    }

    /**
     * Links an other metrics class with this class.
     * This method is called using Reflection.
     *
     * @param metrics An object of the metrics class to link.
     */
    public static void linkMetrics(Object metrics) {
        knownMetricsInstances.add(metrics);
    }

    /**
     * Gets the plugin specific data.
     * This method is called using Reflection.
     *
     * @return The plugin specific data.
     */
    public JsonObject getPluginData() {
        JsonObject data = new JsonObject();

        String pluginName = plugin.getDescription().getName();
        String pluginVersion = plugin.getDescription().getVersion();

        data.addProperty("pluginName", pluginName);
        data.addProperty("pluginVersion", pluginVersion);

        JsonArray customCharts = new JsonArray();
        for (CustomChart customChart : charts) {
            // Add the data of the custom charts
            JsonObject chart = customChart.getRequestJsonObject(plugin.getLogger(), logFailedRequests);
            if (chart == null) {
                // If the chart is null, we skip it
                continue;
            }
            customCharts.add(chart);
        }
        data.add("customCharts", customCharts);

        return data;
    }

    /**
     * Schedules the periodic data submission.
     */
    private void startSubmitting() {
        plugin.getProxy().getScheduler().schedule(plugin, new Runnable() {
            @Override
            public void run() {
                // The data collection is async, as well as sending the data
                // Bungeecord does not have a main thread, everything is async
                submitData();
            }
        }, 2, 30, TimeUnit.MINUTES);
        // Submit the data every 30 minutes, first time after 2 minutes to give other plugins enough time to start
        // WARNING: Changing the frequency has no effect but your plugin WILL be blocked/deleted!
        // WARNING: Just don't do it!
    }

    /**
     * Gets the server specific data.
     *
     * @return The server specific data.
     */
    @SuppressWarnings("deprecation")
    private JsonObject getServerData() {
        // Minecraft specific data
        int playerAmount = plugin.getProxy().getOnlineCount();
        // bStats caps the reported player count at 500
        playerAmount = Math.min(playerAmount, 500);
        int onlineMode = plugin.getProxy().getConfig().isOnlineMode() ? 1 : 0;
        String bungeecordVersion = plugin.getProxy().getVersion();
        int managedServers = plugin.getProxy().getServers().size();

        // OS/Java specific data
        String javaVersion = System.getProperty("java.version");
        String osName = System.getProperty("os.name");
        String osArch = System.getProperty("os.arch");
        String osVersion = System.getProperty("os.version");
        int coreCount = Runtime.getRuntime().availableProcessors();

        JsonObject data = new JsonObject();

        data.addProperty("serverUUID", serverUUID);

        data.addProperty("playerAmount", playerAmount);
        data.addProperty("managedServers", managedServers);
        data.addProperty("onlineMode", onlineMode);
        data.addProperty("bungeecordVersion", bungeecordVersion);

        data.addProperty("javaVersion", javaVersion);
        data.addProperty("osName", osName);
        data.addProperty("osArch", osArch);
        data.addProperty("osVersion", osVersion);
        data.addProperty("coreCount", coreCount);

        return data;
    }

    /**
     * Collects the data and sends it afterwards.
     */
    private void submitData() {
        final JsonObject data = getServerData();

        final JsonArray pluginData = new JsonArray();
        // Search for all other bStats Metrics classes to get their plugin data
        for (Object metrics : knownMetricsInstances) {
            try {
                // Local renamed from "plugin" to avoid shadowing the plugin field
                // that is used for logging in the catch block below.
                Object pluginObject = metrics.getClass().getMethod("getPluginData").invoke(metrics);
                if (pluginObject instanceof JsonObject) {
                    pluginData.add((JsonObject) pluginObject);
                }
            } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException ignored) {
                // Best effort: a foreign metrics class without getPluginData is skipped.
            }
        }
        data.add("plugins", pluginData);

        try {
            // Send the data
            sendData(data);
        } catch (Exception e) {
            // Something went wrong! :(
            if (logFailedRequests) {
                plugin.getLogger().log(Level.WARNING, "Could not submit plugin stats!", e);
            }
        }
    }

    /**
     * Loads the bStats configuration.
     *
     * @throws IOException If something did not work :(
     */
    private void loadConfig() throws IOException {
        // The config lives in a shared "bStats" folder next to all plugin data folders
        Path configPath = plugin.getDataFolder().toPath().getParent().resolve("bStats");
        configPath.toFile().mkdirs();
        File configFile = new File(configPath.toFile(), "config.yml");
        if (!configFile.exists()) {
            writeFile(configFile,
                    "#bStats collects some data for plugin authors like how many servers are using their plugins.",
                    "#To honor their work, you should not disable it.",
                    "#This has nearly no effect on the server performance!",
                    "#Check out https://bStats.org/ to learn more :)",
                    "enabled: true",
                    "serverUuid: \"" + UUID.randomUUID().toString() + "\"",
                    "logFailedRequests: false");
        }

        Configuration configuration = ConfigurationProvider.getProvider(YamlConfiguration.class).load(configFile);

        // Load configuration
        enabled = configuration.getBoolean("enabled", true);
        serverUUID = configuration.getString("serverUuid");
        logFailedRequests = configuration.getBoolean("logFailedRequests", false);
    }

    /**
     * Gets the first bStat Metrics class.
     *
     * @return The first bStats metrics class, or {@code null} on failure.
     */
    private Class<?> getFirstBStatsClass() {
        Path configPath = plugin.getDataFolder().toPath().getParent().resolve("bStats");
        configPath.toFile().mkdirs();
        File tempFile = new File(configPath.toFile(), "temp.txt");

        try {
            String className = readFile(tempFile);
            if (className != null) {
                try {
                    // Let's check if a class with the given name exists.
                    return Class.forName(className);
                } catch (ClassNotFoundException ignored) {
                    // Stale entry from an unloaded plugin; we become the first class below.
                }
            }
            writeFile(tempFile, getClass().getName());
            return getClass();
        } catch (IOException e) {
            if (logFailedRequests) {
                plugin.getLogger().log(Level.WARNING, "Failed to get first bStats class!", e);
            }
            return null;
        }
    }

    /**
     * Reads the first line of the file.
     *
     * @param file The file to read. Cannot be null.
     * @return The first line of the file or <code>null</code> if the file does not exist or is empty.
     * @throws IOException If something did not work :(
     */
    private String readFile(File file) throws IOException {
        if (!file.exists()) {
            return null;
        }
        try (
                FileReader fileReader = new FileReader(file);
                BufferedReader bufferedReader = new BufferedReader(fileReader)
        ) {
            return bufferedReader.readLine();
        }
    }

    /**
     * Writes a String to a file. It also adds a note for the user,
     *
     * @param file The file to write to. Cannot be null.
     * @param lines The lines to write.
     * @throws IOException If something did not work :(
     */
    private void writeFile(File file, String... lines) throws IOException {
        if (!file.exists()) {
            file.createNewFile();
        }
        try (
                FileWriter fileWriter = new FileWriter(file);
                BufferedWriter bufferedWriter = new BufferedWriter(fileWriter)
        ) {
            for (String line : lines) {
                bufferedWriter.write(line);
                bufferedWriter.newLine();
            }
        }
    }

    /**
     * Sends the data to the bStats server.
     *
     * @param data The data to send.
     * @throws Exception If the request failed.
     */
    private static void sendData(JsonObject data) throws Exception {
        if (data == null) {
            throw new IllegalArgumentException("Data cannot be null");
        }
        HttpsURLConnection connection = (HttpsURLConnection) new URL(URL).openConnection();

        // Compress the data to save bandwidth
        byte[] compressedData = compress(data.toString());

        // Add headers
        connection.setRequestMethod("POST");
        connection.addRequestProperty("Accept", "application/Json");
        connection.addRequestProperty("Connection", "close");
        connection.addRequestProperty("Content-Encoding", "gzip"); // We gzip our request
        connection.addRequestProperty("Content-Length", String.valueOf(compressedData.length));
        connection.setRequestProperty("Content-Type", "application/Json"); // We send our data in Json format
        connection.setRequestProperty("User-Agent", "MC-Server/" + B_STATS_VERSION);

        // Send data. try-with-resources closes the stream even if the write throws
        // (the original leaked it on failure).
        connection.setDoOutput(true);
        try (DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream())) {
            outputStream.write(compressedData);
            outputStream.flush();
        }

        connection.getInputStream().close(); // We don't care about the response - Just send our data :)
    }

    /**
     * Gzips the given String.
     *
     * @param str The string to gzip.
     * @return The gzipped String, or {@code null} if {@code str} is null.
     * @throws IOException If the compression failed.
     */
    private static byte[] compress(final String str) throws IOException {
        if (str == null) {
            return null;
        }
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        // Explicit UTF-8 charset instead of the string name "UTF-8": same bytes,
        // but no checked UnsupportedEncodingException path and no charset lookup.
        try (GZIPOutputStream gzip = new GZIPOutputStream(outputStream)) {
            gzip.write(str.getBytes(StandardCharsets.UTF_8));
        }
        return outputStream.toByteArray();
    }

    /**
     * Represents a custom chart.
     */
    public static abstract class CustomChart {

        // The id of the chart
        private final String chartId;

        /**
         * Class constructor.
         *
         * @param chartId The id of the chart.
         */
        CustomChart(String chartId) {
            if (chartId == null || chartId.isEmpty()) {
                throw new IllegalArgumentException("ChartId cannot be null or empty!");
            }
            this.chartId = chartId;
        }

        private JsonObject getRequestJsonObject(Logger logger, boolean logFailedRequests) {
            JsonObject chart = new JsonObject();
            chart.addProperty("chartId", chartId);
            try {
                JsonObject data = getChartData();
                if (data == null) {
                    // If the data is null we don't send the chart.
                    return null;
                }
                chart.add("data", data);
            } catch (Throwable t) {
                if (logFailedRequests) {
                    logger.log(Level.WARNING, "Failed to get data for custom chart with id " + chartId, t);
                }
                return null;
            }
            return chart;
        }

        protected abstract JsonObject getChartData() throws Exception;

    }

    /**
     * Represents a custom simple pie.
     */
    public static class SimplePie extends CustomChart {

        private final Callable<String> callable;

        /**
         * Class constructor.
         *
         * @param chartId The id of the chart.
         * @param callable The callable which is used to request the chart data.
         */
        public SimplePie(String chartId, Callable<String> callable) {
            super(chartId);
            this.callable = callable;
        }

        @Override
        protected JsonObject getChartData() throws Exception {
            JsonObject data = new JsonObject();
            String value = callable.call();
            if (value == null || value.isEmpty()) {
                // Null = skip the chart
                return null;
            }
            data.addProperty("value", value);
            return data;
        }
    }

    /**
     * Represents a custom advanced pie.
     */
    public static class AdvancedPie extends CustomChart {

        private final Callable<Map<String, Integer>> callable;

        /**
         * Class constructor.
         *
         * @param chartId The id of the chart.
         * @param callable The callable which is used to request the chart data.
         */
        public AdvancedPie(String chartId, Callable<Map<String, Integer>> callable) {
            super(chartId);
            this.callable = callable;
        }

        @Override
        protected JsonObject getChartData() throws Exception {
            JsonObject data = new JsonObject();
            JsonObject values = new JsonObject();
            Map<String, Integer> map = callable.call();
            if (map == null || map.isEmpty()) {
                // Null = skip the chart
                return null;
            }
            boolean allSkipped = true;
            for (Map.Entry<String, Integer> entry : map.entrySet()) {
                if (entry.getValue() == 0) {
                    continue; // Skip this invalid
                }
                allSkipped = false;
                values.addProperty(entry.getKey(), entry.getValue());
            }
            if (allSkipped) {
                // Null = skip the chart
                return null;
            }
            data.add("values", values);
            return data;
        }
    }

    /**
     * Represents a custom drilldown pie.
     */
    public static class DrilldownPie extends CustomChart {

        private final Callable<Map<String, Map<String, Integer>>> callable;

        /**
         * Class constructor.
         *
         * @param chartId The id of the chart.
         * @param callable The callable which is used to request the chart data.
         */
        public DrilldownPie(String chartId, Callable<Map<String, Map<String, Integer>>> callable) {
            super(chartId);
            this.callable = callable;
        }

        @Override
        public JsonObject getChartData() throws Exception {
            JsonObject data = new JsonObject();
            JsonObject values = new JsonObject();
            Map<String, Map<String, Integer>> map = callable.call();
            if (map == null || map.isEmpty()) {
                // Null = skip the chart
                return null;
            }
            boolean reallyAllSkipped = true;
            for (Map.Entry<String, Map<String, Integer>> entryValues : map.entrySet()) {
                JsonObject value = new JsonObject();
                boolean allSkipped = true;
                for (Map.Entry<String, Integer> valueEntry : map.get(entryValues.getKey()).entrySet()) {
                    value.addProperty(valueEntry.getKey(), valueEntry.getValue());
                    allSkipped = false;
                }
                if (!allSkipped) {
                    reallyAllSkipped = false;
                    values.add(entryValues.getKey(), value);
                }
            }
            if (reallyAllSkipped) {
                // Null = skip the chart
                return null;
            }
            data.add("values", values);
            return data;
        }
    }

    /**
     * Represents a custom single line chart.
     */
    public static class SingleLineChart extends CustomChart {

        private final Callable<Integer> callable;

        /**
         * Class constructor.
         *
         * @param chartId The id of the chart.
         * @param callable The callable which is used to request the chart data.
         */
        public SingleLineChart(String chartId, Callable<Integer> callable) {
            super(chartId);
            this.callable = callable;
        }

        @Override
        protected JsonObject getChartData() throws Exception {
            JsonObject data = new JsonObject();
            int value = callable.call();
            if (value == 0) {
                // Null = skip the chart
                return null;
            }
            data.addProperty("value", value);
            return data;
        }
    }

    /**
     * Represents a custom multi line chart.
     */
    public static class MultiLineChart extends CustomChart {

        private final Callable<Map<String, Integer>> callable;

        /**
         * Class constructor.
         *
         * @param chartId The id of the chart.
         * @param callable The callable which is used to request the chart data.
         */
        public MultiLineChart(String chartId, Callable<Map<String, Integer>> callable) {
            super(chartId);
            this.callable = callable;
        }

        @Override
        protected JsonObject getChartData() throws Exception {
            JsonObject data = new JsonObject();
            JsonObject values = new JsonObject();
            Map<String, Integer> map = callable.call();
            if (map == null || map.isEmpty()) {
                // Null = skip the chart
                return null;
            }
            boolean allSkipped = true;
            for (Map.Entry<String, Integer> entry : map.entrySet()) {
                if (entry.getValue() == 0) {
                    continue; // Skip this invalid
                }
                allSkipped = false;
                values.addProperty(entry.getKey(), entry.getValue());
            }
            if (allSkipped) {
                // Null = skip the chart
                return null;
            }
            data.add("values", values);
            return data;
        }
    }

    /**
     * Represents a custom simple bar chart.
     */
    public static class SimpleBarChart extends CustomChart {

        private final Callable<Map<String, Integer>> callable;

        /**
         * Class constructor.
         *
         * @param chartId The id of the chart.
         * @param callable The callable which is used to request the chart data.
         */
        public SimpleBarChart(String chartId, Callable<Map<String, Integer>> callable) {
            super(chartId);
            this.callable = callable;
        }

        @Override
        protected JsonObject getChartData() throws Exception {
            JsonObject data = new JsonObject();
            JsonObject values = new JsonObject();
            Map<String, Integer> map = callable.call();
            if (map == null || map.isEmpty()) {
                // Null = skip the chart
                return null;
            }
            for (Map.Entry<String, Integer> entry : map.entrySet()) {
                JsonArray categoryValues = new JsonArray();
                categoryValues.add(new JsonPrimitive(entry.getValue()));
                values.add(entry.getKey(), categoryValues);
            }
            data.add("values", values);
            return data;
        }
    }

    /**
     * Represents a custom advanced bar chart.
     */
    public static class AdvancedBarChart extends CustomChart {

        private final Callable<Map<String, int[]>> callable;

        /**
         * Class constructor.
         *
         * @param chartId The id of the chart.
         * @param callable The callable which is used to request the chart data.
         */
        public AdvancedBarChart(String chartId, Callable<Map<String, int[]>> callable) {
            super(chartId);
            this.callable = callable;
        }

        @Override
        protected JsonObject getChartData() throws Exception {
            JsonObject data = new JsonObject();
            JsonObject values = new JsonObject();
            Map<String, int[]> map = callable.call();
            if (map == null || map.isEmpty()) {
                // Null = skip the chart
                return null;
            }
            boolean allSkipped = true;
            for (Map.Entry<String, int[]> entry : map.entrySet()) {
                if (entry.getValue().length == 0) {
                    continue; // Skip this invalid
                }
                allSkipped = false;
                JsonArray categoryValues = new JsonArray();
                for (int categoryValue : entry.getValue()) {
                    categoryValues.add(new JsonPrimitive(categoryValue));
                }
                values.add(entry.getKey(), categoryValues);
            }
            if (allSkipped) {
                // Null = skip the chart
                return null;
            }
            data.add("values", values);
            return data;
        }
    }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.08.09 at 09:22:32 PM IST // package com.pacificmetrics.ims.apip.qti.item; import java.util.ArrayList; import java.util.List; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlElementRefs; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlMixed; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.NormalizedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import org.w3.math.mathml.Math; import org.w3.xinclude.Include; /** * * The SimpleChoice complexType is the container for the information that is to be presented to a candidate as a possible answer to be selected i.e. in a multiple choice, true/false types of questions. * * * <p>Java class for SimpleChoice.Type complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="SimpleChoice.Type"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;choice maxOccurs="unbounded" minOccurs="0"> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}printedVariable"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}feedbackBlock"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}feedbackInline"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}templateInline"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}templateBlock"/> * &lt;element ref="{http://www.w3.org/1998/Math/MathML}math"/> * &lt;element ref="{http://www.w3.org/2001/XInclude}include"/> * &lt;choice> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}pre"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}h1"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}h2"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}h3"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}h4"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}h5"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}h6"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}p"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}address"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}dl"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}ol"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}ul"/> * &lt;element 
ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}br"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}hr"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}img"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}object"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}blockquote"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}em"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}a"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}code"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}span"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}sub"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}acronym"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}big"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}tt"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}kbd"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}q"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}i"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}dfn"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}abbr"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}strong"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}sup"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}var"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}small"/> * &lt;element 
ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}samp"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}b"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}cite"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}table"/> * &lt;element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}div"/> * &lt;/choice> * &lt;/choice> * &lt;/sequence> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}label.SimpleChoice.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}showHide.SimpleChoice.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}xmllang.SimpleChoice.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}class.SimpleChoice.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}id.SimpleChoice.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}templateIdentifier.SimpleChoice.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}identifier.SimpleChoice.Attr"/> * &lt;attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}fixed.SimpleChoice.Attr"/> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "SimpleChoice.Type", propOrder = { "content" }) @XmlRootElement(name = "simpleChoice") public class SimpleChoice { @XmlElementRefs({ @XmlElementRef(name = "samp", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "i", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = 
"templateBlock", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = TemplateBlock.class, required = false), @XmlElementRef(name = "hr", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Hr.class, required = false), @XmlElementRef(name = "big", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "em", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "templateInline", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = TemplateInline.class, required = false), @XmlElementRef(name = "tt", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "sub", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "code", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "feedbackBlock", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = FeedbackBlock.class, required = false), @XmlElementRef(name = "table", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Table.class, required = false), @XmlElementRef(name = "address", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "div", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Div.class, required = false), @XmlElementRef(name = "h3", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = 
"printedVariable", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = PrintedVariable.class, required = false), @XmlElementRef(name = "h1", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "math", namespace = "http://www.w3.org/1998/Math/MathML", type = Math.class, required = false), @XmlElementRef(name = "small", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "feedbackInline", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = FeedbackInline.class, required = false), @XmlElementRef(name = "b", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "cite", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "kbd", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "h5", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "abbr", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "dfn", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "ul", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "blockquote", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Blockquote.class, required = false), @XmlElementRef(name = "pre", namespace = 
"http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "q", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Q.class, required = false), @XmlElementRef(name = "h4", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "include", namespace = "http://www.w3.org/2001/XInclude", type = Include.class, required = false), @XmlElementRef(name = "br", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Br.class, required = false), @XmlElementRef(name = "a", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = A.class, required = false), @XmlElementRef(name = "object", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = com.pacificmetrics.ims.apip.qti.item.Object.class, required = false), @XmlElementRef(name = "p", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "span", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "img", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Img.class, required = false), @XmlElementRef(name = "ol", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "var", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "dl", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Dl.class, required = false), @XmlElementRef(name = "h6", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = 
JAXBElement.class, required = false), @XmlElementRef(name = "h2", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "sup", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "strong", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false), @XmlElementRef(name = "acronym", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false) }) @XmlMixed protected List<java.lang.Object> content; @XmlAttribute(name = "label") @XmlJavaTypeAdapter(NormalizedStringAdapter.class) @XmlSchemaType(name = "normalizedString") protected String label; @XmlAttribute(name = "showHide") protected String showHide; @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace") protected String lang; @XmlAttribute(name = "class") protected List<String> clazzs; @XmlAttribute(name = "id") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID protected String id; @XmlAttribute(name = "templateIdentifier") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String templateIdentifier; @XmlAttribute(name = "identifier", required = true) @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String identifier; @XmlAttribute(name = "fixed") protected Boolean fixed; /** * * The SimpleChoice complexType is the container for the information that is to be presented to a candidate as a possible answer to be selected i.e. in a multiple choice, true/false types of questions. * Gets the value of the content property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. 
* This is why there is not a <CODE>set</CODE> method for the content property. * * <p> * For example, to add a new item, do as follows: * <pre> * getContent().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link TemplateBlock } * {@link String } * {@link Hr } * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link TemplateInline } * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link FeedbackBlock } * {@link Table } * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link PrintedVariable } * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link Div } * {@link Math } * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link FeedbackInline } * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link OULType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link Blockquote } * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link Q } * {@link Include } * {@link Br } * {@link com.pacificmetrics.ims.apip.qti.item.Object } * {@link A } * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link Img } * {@link JAXBElement }{@code 
<}{@link OULType }{@code >} * {@link Dl } * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >} * * */ public List<java.lang.Object> getContent() { if (content == null) { content = new ArrayList<java.lang.Object>(); } return this.content; } /** * Gets the value of the label property. * * @return * possible object is * {@link String } * */ public String getLabel() { return label; } /** * Sets the value of the label property. * * @param value * allowed object is * {@link String } * */ public void setLabel(String value) { this.label = value; } /** * Gets the value of the showHide property. * * @return * possible object is * {@link String } * */ public String getShowHide() { if (showHide == null) { return "show"; } else { return showHide; } } /** * Sets the value of the showHide property. * * @param value * allowed object is * {@link String } * */ public void setShowHide(String value) { this.showHide = value; } /** * Gets the value of the lang property. * * @return * possible object is * {@link String } * */ public String getLang() { return lang; } /** * Sets the value of the lang property. * * @param value * allowed object is * {@link String } * */ public void setLang(String value) { this.lang = value; } /** * Gets the value of the clazzs property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the clazzs property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getClazzs().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getClazzs() { if (clazzs == null) { clazzs = new ArrayList<String>(); } return this.clazzs; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the templateIdentifier property. * * @return * possible object is * {@link String } * */ public String getTemplateIdentifier() { return templateIdentifier; } /** * Sets the value of the templateIdentifier property. * * @param value * allowed object is * {@link String } * */ public void setTemplateIdentifier(String value) { this.templateIdentifier = value; } /** * Gets the value of the identifier property. * * @return * possible object is * {@link String } * */ public String getIdentifier() { return identifier; } /** * Sets the value of the identifier property. * * @param value * allowed object is * {@link String } * */ public void setIdentifier(String value) { this.identifier = value; } /** * Gets the value of the fixed property. * * @return * possible object is * {@link Boolean } * */ public boolean isFixed() { if (fixed == null) { return false; } else { return fixed; } } /** * Sets the value of the fixed property. * * @param value * allowed object is * {@link Boolean } * */ public void setFixed(Boolean value) { this.fixed = value; } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.monitor.fluent;

import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.resourcemanager.monitor.fluent.models.LogSearchRuleResourceInner;
import com.azure.resourcemanager.monitor.models.LogSearchRuleResourcePatch;
import com.azure.resourcemanager.resources.fluentcore.collection.InnerSupportsDelete;
import com.azure.resourcemanager.resources.fluentcore.collection.InnerSupportsGet;
import com.azure.resourcemanager.resources.fluentcore.collection.InnerSupportsListing;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in ScheduledQueryRulesClient. */
public interface ScheduledQueryRulesClient
    extends InnerSupportsGet<LogSearchRuleResourceInner>,
        InnerSupportsListing<LogSearchRuleResourceInner>,
        InnerSupportsDelete<Void> {
    /**
     * Creates or updates a log search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param parameters The parameters of the rule to create or update.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Log Search Rule resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<LogSearchRuleResourceInner>> createOrUpdateWithResponseAsync(
        String resourceGroupName, String ruleName, LogSearchRuleResourceInner parameters);

    /**
     * Creates or updates a log search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param parameters The parameters of the rule to create or update.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Log Search Rule resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<LogSearchRuleResourceInner> createOrUpdateAsync(
        String resourceGroupName, String ruleName, LogSearchRuleResourceInner parameters);

    /**
     * Creates or updates a log search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param parameters The parameters of the rule to create or update.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Log Search Rule resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    LogSearchRuleResourceInner createOrUpdate(
        String resourceGroupName, String ruleName, LogSearchRuleResourceInner parameters);

    /**
     * Creates or updates a log search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param parameters The parameters of the rule to create or update.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Log Search Rule resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<LogSearchRuleResourceInner> createOrUpdateWithResponse(
        String resourceGroupName, String ruleName, LogSearchRuleResourceInner parameters, Context context);

    /**
     * Gets a Log Search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a Log Search rule.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<LogSearchRuleResourceInner>> getByResourceGroupWithResponseAsync(
        String resourceGroupName, String ruleName);

    /**
     * Gets a Log Search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a Log Search rule.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<LogSearchRuleResourceInner> getByResourceGroupAsync(String resourceGroupName, String ruleName);

    /**
     * Gets a Log Search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a Log Search rule.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    LogSearchRuleResourceInner getByResourceGroup(String resourceGroupName, String ruleName);

    /**
     * Gets a Log Search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a Log Search rule.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<LogSearchRuleResourceInner> getByResourceGroupWithResponse(
        String resourceGroupName, String ruleName, Context context);

    /**
     * Update log search Rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param parameters The parameters of the rule to update.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Log Search Rule resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<LogSearchRuleResourceInner>> updateWithResponseAsync(
        String resourceGroupName, String ruleName, LogSearchRuleResourcePatch parameters);

    /**
     * Update log search Rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param parameters The parameters of the rule to update.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Log Search Rule resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<LogSearchRuleResourceInner> updateAsync(
        String resourceGroupName, String ruleName, LogSearchRuleResourcePatch parameters);

    /**
     * Update log search Rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param parameters The parameters of the rule to update.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Log Search Rule resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    LogSearchRuleResourceInner update(String resourceGroupName, String ruleName, LogSearchRuleResourcePatch parameters);

    /**
     * Update log search Rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param parameters The parameters of the rule to update.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Log Search Rule resource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<LogSearchRuleResourceInner> updateWithResponse(
        String resourceGroupName, String ruleName, LogSearchRuleResourcePatch parameters, Context context);

    /**
     * Deletes a Log Search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Response<Void>> deleteWithResponseAsync(String resourceGroupName, String ruleName);

    /**
     * Deletes a Log Search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Mono<Void> deleteAsync(String resourceGroupName, String ruleName);

    /**
     * Deletes a Log Search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(String resourceGroupName, String ruleName);

    /**
     * Deletes a Log Search rule.
     *
     * @param resourceGroupName The name of the resource group.
     * @param ruleName The name of the rule.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    Response<Void> deleteWithResponse(String resourceGroupName, String ruleName, Context context);

    /**
     * List the Log Search rules within a subscription group.
     *
     * @param filter The filter to apply on the operation. For more information please see
     *     https://msdn.microsoft.com/en-us/library/azure/dn931934.aspx.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a collection of Log Search rule resources.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<LogSearchRuleResourceInner> listAsync(String filter);

    /**
     * List the Log Search rules within a subscription group.
     *
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a collection of Log Search rule resources.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<LogSearchRuleResourceInner> listAsync();

    /**
     * List the Log Search rules within a subscription group.
     *
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a collection of Log Search rule resources.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<LogSearchRuleResourceInner> list();

    /**
     * List the Log Search rules within a subscription group.
     *
     * @param filter The filter to apply on the operation. For more information please see
     *     https://msdn.microsoft.com/en-us/library/azure/dn931934.aspx.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a collection of Log Search rule resources.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<LogSearchRuleResourceInner> list(String filter, Context context);

    /**
     * List the Log Search rules within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param filter The filter to apply on the operation. For more information please see
     *     https://msdn.microsoft.com/en-us/library/azure/dn931934.aspx.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a collection of Log Search rule resources.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<LogSearchRuleResourceInner> listByResourceGroupAsync(String resourceGroupName, String filter);

    /**
     * List the Log Search rules within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a collection of Log Search rule resources.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedFlux<LogSearchRuleResourceInner> listByResourceGroupAsync(String resourceGroupName);

    /**
     * List the Log Search rules within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a collection of Log Search rule resources.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<LogSearchRuleResourceInner> listByResourceGroup(String resourceGroupName);

    /**
     * List the Log Search rules within a resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param filter The filter to apply on the operation. For more information please see
     *     https://msdn.microsoft.com/en-us/library/azure/dn931934.aspx.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a collection of Log Search rule resources.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    PagedIterable<LogSearchRuleResourceInner> listByResourceGroup(
        String resourceGroupName, String filter, Context context);
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.memory; import com.facebook.presto.ExceededCpuLimitException; import com.facebook.presto.execution.LocationFactory; import com.facebook.presto.execution.QueryExecution; import com.facebook.presto.execution.QueryIdGenerator; import com.facebook.presto.execution.QueryManagerConfig; import com.facebook.presto.metadata.InternalNodeManager; import com.facebook.presto.server.ServerConfig; import com.facebook.presto.spi.Node; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.QueryId; import com.facebook.presto.spi.memory.ClusterMemoryPoolManager; import com.facebook.presto.spi.memory.MemoryPoolId; import com.facebook.presto.spi.memory.MemoryPoolInfo; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.airlift.http.client.HttpClient; import io.airlift.json.JsonCodec; import io.airlift.log.Logger; import io.airlift.units.DataSize; import io.airlift.units.Duration; import org.weakref.jmx.JmxException; import org.weakref.jmx.MBeanExporter; import org.weakref.jmx.Managed; import org.weakref.jmx.ObjectNames; import javax.annotation.PreDestroy; import javax.annotation.concurrent.GuardedBy; import javax.inject.Inject; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import 
java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import static com.facebook.presto.ExceededMemoryLimitException.exceededGlobalLimit; import static com.facebook.presto.SystemSessionProperties.RESOURCE_OVERCOMMIT; import static com.facebook.presto.SystemSessionProperties.getQueryMaxCpuTime; import static com.facebook.presto.SystemSessionProperties.getQueryMaxMemory; import static com.facebook.presto.SystemSessionProperties.resourceOvercommit; import static com.facebook.presto.memory.LocalMemoryManager.GENERAL_POOL; import static com.facebook.presto.memory.LocalMemoryManager.RESERVED_POOL; import static com.facebook.presto.spi.NodeState.ACTIVE; import static com.facebook.presto.spi.NodeState.SHUTTING_DOWN; import static com.facebook.presto.spi.StandardErrorCode.CLUSTER_OUT_OF_MEMORY; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.facebook.presto.util.ImmutableCollectors.toImmutableSet; import static com.google.common.collect.Sets.difference; import static io.airlift.units.DataSize.succinctBytes; import static io.airlift.units.Duration.nanosSince; import static java.lang.String.format; import static java.util.Objects.requireNonNull; public class ClusterMemoryManager implements ClusterMemoryPoolManager { private static final Logger log = Logger.get(ClusterMemoryManager.class); private final ExecutorService listenerExecutor = Executors.newSingleThreadExecutor(); private final InternalNodeManager nodeManager; private final LocationFactory locationFactory; private final HttpClient httpClient; private final MBeanExporter exporter; private final JsonCodec<MemoryInfo> memoryInfoCodec; private final JsonCodec<MemoryPoolAssignmentsRequest> assignmentsRequestJsonCodec; private final DataSize maxQueryMemory; private final Duration maxQueryCpuTime; private final boolean 
enabled; private final boolean killOnOutOfMemory; private final Duration killOnOutOfMemoryDelay; private final String coordinatorId; private final AtomicLong memoryPoolAssignmentsVersion = new AtomicLong(); private final AtomicLong clusterMemoryUsageBytes = new AtomicLong(); private final AtomicLong clusterMemoryBytes = new AtomicLong(); private final AtomicLong queriesKilledDueToOutOfMemory = new AtomicLong(); private final Map<String, RemoteNodeMemory> nodes = new HashMap<>(); @GuardedBy("this") private final Map<MemoryPoolId, List<Consumer<MemoryPoolInfo>>> changeListeners = new HashMap<>(); @GuardedBy("this") private final Map<MemoryPoolId, ClusterMemoryPool> pools = new HashMap<>(); @GuardedBy("this") private long lastTimeNotOutOfMemory = System.nanoTime(); @GuardedBy("this") private QueryId lastKilledQuery; @Inject public ClusterMemoryManager( @ForMemoryManager HttpClient httpClient, InternalNodeManager nodeManager, LocationFactory locationFactory, MBeanExporter exporter, JsonCodec<MemoryInfo> memoryInfoCodec, JsonCodec<MemoryPoolAssignmentsRequest> assignmentsRequestJsonCodec, QueryIdGenerator queryIdGenerator, ServerConfig serverConfig, MemoryManagerConfig config, QueryManagerConfig queryManagerConfig) { requireNonNull(config, "config is null"); this.nodeManager = requireNonNull(nodeManager, "nodeManager is null"); this.locationFactory = requireNonNull(locationFactory, "locationFactory is null"); this.httpClient = requireNonNull(httpClient, "httpClient is null"); this.exporter = requireNonNull(exporter, "exporter is null"); this.memoryInfoCodec = requireNonNull(memoryInfoCodec, "memoryInfoCodec is null"); this.assignmentsRequestJsonCodec = requireNonNull(assignmentsRequestJsonCodec, "assignmentsRequestJsonCodec is null"); this.maxQueryMemory = config.getMaxQueryMemory(); this.maxQueryCpuTime = queryManagerConfig.getQueryMaxCpuTime(); this.coordinatorId = queryIdGenerator.getCoordinatorId(); this.enabled = serverConfig.isCoordinator(); 
this.killOnOutOfMemoryDelay = config.getKillOnOutOfMemoryDelay();
        this.killOnOutOfMemory = config.isKillOnOutOfMemory();
    }

    @Override
    public synchronized void addChangeListener(MemoryPoolId poolId, Consumer<MemoryPoolInfo> listener)
    {
        changeListeners.computeIfAbsent(poolId, id -> new ArrayList<>()).add(listener);
    }

    /**
     * Periodic pass over all running queries: enforces per-query memory limits,
     * optionally kills the biggest general-pool query when the cluster has been
     * out of memory for longer than the configured delay, refreshes pool state
     * and assignments on all visible nodes, and enforces per-query CPU limits.
     */
    public synchronized void process(Iterable<QueryExecution> queries)
    {
        if (!enabled) {
            return;
        }

        boolean outOfMemory = isClusterOutOfMemory();
        if (!outOfMemory) {
            lastTimeNotOutOfMemory = System.nanoTime();
        }

        boolean queryKilled = false;
        long totalBytes = 0;
        for (QueryExecution query : queries) {
            long bytes = query.getTotalMemoryReservation();
            DataSize sessionMaxQueryMemory = getQueryMaxMemory(query.getSession());
            // Effective limit is the smaller of the cluster-wide and session limits
            long queryMemoryLimit = Math.min(maxQueryMemory.toBytes(), sessionMaxQueryMemory.toBytes());
            totalBytes += bytes;
            if (resourceOvercommit(query.getSession()) && outOfMemory) {
                // If a query has requested resource overcommit, only kill it if the cluster has run out of memory
                DataSize memory = succinctBytes(bytes);
                query.fail(new PrestoException(CLUSTER_OUT_OF_MEMORY,
                        format("The cluster is out of memory and %s=true, so this query was killed. It was using %s of memory", RESOURCE_OVERCOMMIT, memory)));
                queryKilled = true;
            }
            if (!resourceOvercommit(query.getSession()) && bytes > queryMemoryLimit) {
                DataSize maxMemory = succinctBytes(queryMemoryLimit);
                query.fail(exceededGlobalLimit(maxMemory));
                queryKilled = true;
            }
        }
        clusterMemoryUsageBytes.set(totalBytes);

        if (killOnOutOfMemory) {
            boolean shouldKillQuery = nanosSince(lastTimeNotOutOfMemory).compareTo(killOnOutOfMemoryDelay) > 0 && outOfMemory;
            boolean lastKilledQueryIsGone = (lastKilledQuery == null);
            if (!lastKilledQueryIsGone) {
                ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
                if (generalPool != null) {
                    // BUG FIX: the previously killed query is "gone" only when it NO LONGER
                    // holds a reservation in the general pool. The original code omitted the
                    // negation, which both allowed a new kill on every pass while the last
                    // victim was still releasing memory, and blocked further kills once the
                    // victim had actually disappeared.
                    lastKilledQueryIsGone = !generalPool.getQueryMemoryReservations().containsKey(lastKilledQuery);
                }
            }
            // Only kill a new query once the previous victim has fully released its
            // memory, and never on a pass that already killed something above.
            if (shouldKillQuery && lastKilledQueryIsGone && !queryKilled) {
                // Kill the biggest query in the general pool
                QueryExecution biggestQuery = null;
                long maxMemory = -1;
                for (QueryExecution query : queries) {
                    long bytesUsed = query.getTotalMemoryReservation();
                    if (bytesUsed > maxMemory && query.getMemoryPool().getId().equals(GENERAL_POOL)) {
                        biggestQuery = query;
                        maxMemory = bytesUsed;
                    }
                }
                if (biggestQuery != null) {
                    biggestQuery.fail(new PrestoException(CLUSTER_OUT_OF_MEMORY, "The cluster is out of memory, and your query was killed. Please try again in a few minutes."));
                    queriesKilledDueToOutOfMemory.incrementAndGet();
                    lastKilledQuery = biggestQuery.getQueryId();
                }
            }
        }

        // Count running queries per pool so updatePools can report per-pool load
        Map<MemoryPoolId, Integer> countByPool = new HashMap<>();
        for (QueryExecution query : queries) {
            MemoryPoolId id = query.getMemoryPool().getId();
            countByPool.put(id, countByPool.getOrDefault(id, 0) + 1);
        }

        updatePools(countByPool);
        updateNodes(updateAssignments(queries));

        // check if CPU usage is over limit
        for (QueryExecution query : queries) {
            Duration cpuTime = query.getTotalCpuTime();
            Duration sessionLimit = getQueryMaxCpuTime(query.getSession());
            Duration limit = maxQueryCpuTime.compareTo(sessionLimit) < 0 ? maxQueryCpuTime : sessionLimit;
            if (cpuTime.compareTo(limit) > 0) {
                query.fail(new ExceededCpuLimitException(limit));
            }
        }
    }

    @VisibleForTesting
    synchronized Map<MemoryPoolId, ClusterMemoryPool> getPools()
    {
        return ImmutableMap.copyOf(pools);
    }

    /**
     * The cluster is considered out of memory when the reserved pool is occupied
     * and the general pool still has nodes with blocked queries.
     */
    private synchronized boolean isClusterOutOfMemory()
    {
        ClusterMemoryPool reservedPool = pools.get(RESERVED_POOL);
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        return reservedPool != null && generalPool != null && reservedPool.getAssignedQueries() > 0 && generalPool.getBlockedNodes() > 0;
    }

    /**
     * Builds the next versioned pool-assignment request. If the reserved pool is
     * free and the general pool is blocked, promotes the largest
     * non-overcommitted query into the reserved pool first.
     */
    private synchronized MemoryPoolAssignmentsRequest updateAssignments(Iterable<QueryExecution> queries)
    {
        ClusterMemoryPool reservedPool = pools.get(RESERVED_POOL);
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        long version = memoryPoolAssignmentsVersion.incrementAndGet();
        // Check that all previous assignments have propagated to the visible nodes. This doesn't account for temporary network issues,
        // and is more of a safety check than a guarantee
        if (reservedPool != null && generalPool != null && allAssignmentsHavePropagated(queries)) {
            if (reservedPool.getAssignedQueries() == 0 && generalPool.getBlockedNodes() > 0) {
                QueryExecution biggestQuery = null;
                long maxMemory = -1;
                for (QueryExecution queryExecution : queries) {
                    if (resourceOvercommit(queryExecution.getSession())) {
                        // Don't promote queries that requested resource overcommit to the reserved pool,
                        // since their memory usage is unbounded.
                        continue;
                    }
                    long bytesUsed = queryExecution.getTotalMemoryReservation();
                    if (bytesUsed > maxMemory) {
                        biggestQuery = queryExecution;
                        maxMemory = bytesUsed;
                    }
                }
                if (biggestQuery != null) {
                    biggestQuery.setMemoryPool(new VersionedMemoryPoolId(RESERVED_POOL, version));
                }
            }
        }

        ImmutableList.Builder<MemoryPoolAssignment> assignments = ImmutableList.builder();
        for (QueryExecution queryExecution : queries) {
            assignments.add(new MemoryPoolAssignment(queryExecution.getQueryId(), queryExecution.getMemoryPool().getId()));
        }
        return new MemoryPoolAssignmentsRequest(coordinatorId, version, assignments.build());
    }

    /**
     * True when the oldest assignment version any node has acknowledged is at
     * least as new as the newest pool version currently held by a query.
     */
    private boolean allAssignmentsHavePropagated(Iterable<QueryExecution> queries)
    {
        if (nodes.isEmpty()) {
            // Assignments can't have propagated, if there are no visible nodes.
            return false;
        }

        long newestAssignment = ImmutableList.copyOf(queries).stream()
                .map(QueryExecution::getMemoryPool)
                .mapToLong(VersionedMemoryPoolId::getVersion)
                .min()
                .orElse(-1);

        long mostOutOfDateNode = nodes.values().stream()
                .mapToLong(RemoteNodeMemory::getCurrentAssignmentVersion)
                .min()
                .orElse(Long.MAX_VALUE);

        return newestAssignment <= mostOutOfDateNode;
    }

    /**
     * Reconciles the tracked node set with the node manager's view (adding new
     * nodes, dropping dead ones) and pushes the assignment request to each node.
     */
    private void updateNodes(MemoryPoolAssignmentsRequest assignments)
    {
        ImmutableSet.Builder<Node> builder = ImmutableSet.builder();
        Set<Node> aliveNodes = builder
                .addAll(nodeManager.getNodes(ACTIVE))
                .addAll(nodeManager.getNodes(SHUTTING_DOWN))
                .build();

        ImmutableSet<String> aliveNodeIds = aliveNodes.stream()
                .map(Node::getNodeIdentifier)
                .collect(toImmutableSet());

        // Remove nodes that don't exist anymore
        // Make a copy to materialize the set difference
        Set<String> deadNodes = ImmutableSet.copyOf(difference(nodes.keySet(), aliveNodeIds));
        nodes.keySet().removeAll(deadNodes);

        // Add new nodes
        for (Node node : aliveNodes) {
            if (!nodes.containsKey(node.getNodeIdentifier())) {
                nodes.put(node.getNodeIdentifier(), new RemoteNodeMemory(httpClient, memoryInfoCodec, assignmentsRequestJsonCodec, locationFactory.createMemoryInfoLocation(node)));
            }
        }

        // Schedule refresh
        for (RemoteNodeMemory node : nodes.values()) {
            node.asyncRefresh(assignments);
        }
    }

    /**
     * Rebuilds the cluster-wide view of memory pools from the per-node memory
     * reports, unexports pools that no node reports anymore, exports newly seen
     * pools via JMX, and notifies registered change listeners.
     */
    private synchronized void updatePools(Map<MemoryPoolId, Integer> queryCounts)
    {
        // Update view of cluster memory and pools
        List<MemoryInfo> nodeMemoryInfos = nodes.values().stream()
                .map(RemoteNodeMemory::getInfo)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .collect(toImmutableList());

        long totalClusterMemory = nodeMemoryInfos.stream()
                .map(MemoryInfo::getTotalNodeMemory)
                .mapToLong(DataSize::toBytes)
                .sum();
        clusterMemoryBytes.set(totalClusterMemory);

        Set<MemoryPoolId> activePoolIds = nodeMemoryInfos.stream()
                .flatMap(info -> info.getPools().keySet().stream())
                .collect(toImmutableSet());

        // Make a copy to materialize the set difference
        Set<MemoryPoolId> removedPools = ImmutableSet.copyOf(difference(pools.keySet(), activePoolIds));
        for (MemoryPoolId removed : removedPools) {
            unexport(pools.get(removed));
            pools.remove(removed);
            if (changeListeners.containsKey(removed)) {
                for (Consumer<MemoryPoolInfo> listener : changeListeners.get(removed)) {
                    // Removed pools are reported to listeners as empty
                    listenerExecutor.execute(() -> listener.accept(new MemoryPoolInfo(0, 0, ImmutableMap.of())));
                }
            }
        }
        for (MemoryPoolId id : activePoolIds) {
            ClusterMemoryPool pool = pools.computeIfAbsent(id, poolId -> {
                ClusterMemoryPool newPool = new ClusterMemoryPool(poolId);
                String objectName = ObjectNames.builder(ClusterMemoryPool.class, newPool.getId().toString()).build();
                try {
                    exporter.export(objectName, newPool);
                }
                catch (JmxException e) {
                    log.error(e, "Error exporting memory pool %s", poolId);
                }
                return newPool;
            });
            pool.update(nodeMemoryInfos, queryCounts.getOrDefault(pool.getId(), 0));
            if (changeListeners.containsKey(id)) {
                MemoryPoolInfo info = pool.getInfo();
                for (Consumer<MemoryPoolInfo> listener : changeListeners.get(id)) {
                    listenerExecutor.execute(() -> listener.accept(info));
                }
            }
        }
    }

    @PreDestroy
    public synchronized void destroy()
    {
        try {
            for (ClusterMemoryPool pool : pools.values()) {
                unexport(pool);
            }
            pools.clear();
        }
        finally {
            // Always stop the listener executor, even if unexporting a pool throws
            listenerExecutor.shutdownNow();
        }
    }

    // Best-effort JMX unexport; failures are logged, not propagated.
    private void unexport(ClusterMemoryPool pool)
    {
        try {
            String objectName = ObjectNames.builder(ClusterMemoryPool.class, pool.getId().toString()).build();
            exporter.unexport(objectName);
        }
        catch (JmxException e) {
            log.error(e, "Failed to unexport pool %s", pool.getId());
        }
    }

    @Managed
    public long getClusterMemoryUsageBytes()
    {
        return clusterMemoryUsageBytes.get();
    }

    @Managed
    public long getClusterMemoryBytes()
    {
        return clusterMemoryBytes.get();
    }

    @Managed
    public long getQueriesKilledDueToOutOfMemory()
    {
        return queriesKilledDueToOutOfMemory.get();
    }
}
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2014 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.modules.photoreccarver;

import java.io.File;
import java.io.IOException;
import java.lang.ProcessBuilder.Redirect;
import java.nio.file.DirectoryStream;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import org.openide.modules.InstalledFileLocator;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.IngestServices;

/**
 * A file ingest module that runs the Unallocated Carver executable with unallocated space files as input.
 *
 * Per ingest job, the first module instance creates a timestamped output directory (plus a temp
 * subdirectory) and records it in {@code pathsByJob}; the last instance to shut down removes the
 * entry and deletes the temp directory. Windows-only: the PhotoRec executable is located via the
 * NetBeans InstalledFileLocator.
 */
final class PhotoRecCarverFileIngestModule implements FileIngestModule {

    private static final String PHOTOREC_DIRECTORY = "photorec_exec"; //NON-NLS
    private static final String PHOTOREC_EXECUTABLE = "photorec_win.exe"; //NON-NLS
    private static final String PHOTOREC_RESULTS_BASE = "results"; //NON-NLS
    // PhotoRec appends ".1" to the requested results folder name, so outputs land here.
    private static final String PHOTOREC_RESULTS_EXTENDED = "results.1"; //NON-NLS
    private static final String PHOTOREC_REPORT = "report.xml"; //NON-NLS
    private static final String LOG_FILE = "run_log.txt"; //NON-NLS
    private static final String TEMP_DIR_NAME = "temp"; // NON-NLS
    private static final Logger logger = Logger.getLogger(PhotoRecCarverFileIngestModule.class.getName());
    private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
    // Shared across all module instances: maps ingest job id -> output/temp dirs for that job.
    private static final Map<Long, WorkingPaths> pathsByJob = new ConcurrentHashMap<>();
    private IngestJobContext context;
    private Path rootOutputDirPath;
    private File executableFile;

    /**
     * @inheritDoc
     */
    @Override
    public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
        this.context = context;

        // If the global unallocated space processing setting and the module
        // process unallocated space only setting are not in sync, throw an
        // exception. Although the result would not be incorrect, it would be
        // unfortunate for the user to get an accidental no-op for this module.
        if (!this.context.processingUnallocatedSpace()) {
            throw new IngestModule.IngestModuleException(NbBundle.getMessage(this.getClass(), "unallocatedSpaceProcessingSettingsError.message"));
        }

        this.rootOutputDirPath = PhotoRecCarverFileIngestModule.createModuleOutputDirectoryForCase();

        Path execName = Paths.get(PHOTOREC_DIRECTORY, PHOTOREC_EXECUTABLE);
        executableFile = locateExecutable(execName.toString());

        // Only the first instance per job (refCounter goes to 1) creates the job directories.
        if (PhotoRecCarverFileIngestModule.refCounter.incrementAndGet(this.context.getJobId()) == 1) {
            try {
                // The first instance creates an output subdirectory with a date and time stamp
                DateFormat dateFormat = new SimpleDateFormat("MM-dd-yyyy-HH-mm-ss-SSSS"); // NON-NLS
                Date date = new Date();
                String folder = this.context.getDataSource().getId() + "_" + dateFormat.format(date);
                Path outputDirPath = Paths.get(this.rootOutputDirPath.toAbsolutePath().toString(), folder);
                Files.createDirectories(outputDirPath);

                // A temp subdirectory is also created as a location for writing unallocated space files to disk.
                Path tempDirPath = Paths.get(outputDirPath.toString(), PhotoRecCarverFileIngestModule.TEMP_DIR_NAME);
                Files.createDirectory(tempDirPath);

                // Save the directories for the current job.
                PhotoRecCarverFileIngestModule.pathsByJob.put(this.context.getJobId(), new WorkingPaths(outputDirPath, tempDirPath));
            } catch (SecurityException | IOException | UnsupportedOperationException ex) {
                throw new IngestModule.IngestModuleException(NbBundle.getMessage(this.getClass(), "cannotCreateOutputDir.message", ex.getLocalizedMessage()));
            }
        }
    }

    /**
     * Carves one unallocated-space file: writes it to the job temp dir, runs PhotoRec on it,
     * relocates the XML report, deletes PhotoRec's carved-data subdirectories, and parses the
     * report to add carved items to the ingest job. Non-unalloc files are ignored.
     */
    @Override
    public IngestModule.ProcessResult process(AbstractFile file) {
        // Skip everything except unallocated space files.
        if (file.getType() != TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
            return IngestModule.ProcessResult.OK;
        }

        Path tempFilePath = null;
        try {
            long id = getRootId(file);
            // make sure we have a valid systemID
            if (id == -1) {
                return ProcessResult.ERROR;
            }

            // Verify initialization succeeded.
            if (null == this.executableFile) {
                logger.log(Level.SEVERE, "PhotoRec carver called after failed start up"); // NON-NLS
                return IngestModule.ProcessResult.ERROR;
            }

            // Check that we have roughly enough disk space left to complete the operation
            // (the unalloc file is copied to disk and PhotoRec writes carved output of similar size).
            long freeDiskSpace = IngestServices.getInstance().getFreeDiskSpace();
            if ((file.getSize() * 2) > freeDiskSpace) {
                logger.log(Level.SEVERE, "PhotoRec error processing {0} with {1} Not enough space on primary disk to carve unallocated space.", // NON-NLS
                        new Object[]{file.getName(), PhotoRecCarverIngestModuleFactory.getModuleName()}); // NON-NLS
                return IngestModule.ProcessResult.ERROR;
            }

            // Write the file to disk.
            WorkingPaths paths = PhotoRecCarverFileIngestModule.pathsByJob.get(this.context.getJobId());
            tempFilePath = Paths.get(paths.getTempDirPath().toString(), file.getName());
            ContentUtils.writeToFile(file, tempFilePath.toFile());

            // Create a subdirectory for this file.
            Path outputDirPath = Paths.get(paths.getOutputDirPath().toString(), file.getName());
            Files.createDirectory(outputDirPath);
            File log = new File(Paths.get(outputDirPath.toString(), LOG_FILE).toString()); //NON-NLS

            // Scan the file with Unallocated Carver.
            // NOTE(review): arguments are wrapped in literal quote characters; ProcessBuilder
            // normally handles argument quoting itself — presumably required by photorec_win.exe's
            // command-line parsing. Confirm before changing.
            ProcessBuilder processAndSettings = new ProcessBuilder(
                    "\"" + executableFile + "\"",
                    "/d", // NON-NLS
                    "\"" + outputDirPath.toAbsolutePath() + File.separator + PHOTOREC_RESULTS_BASE + "\"",
                    "/cmd", // NON-NLS
                    "\"" + tempFilePath.toFile() + "\"",
                    "search"); // NON_NLS

            // Add environment variable to force PhotoRec to run with the same permissions Autopsy uses
            processAndSettings.environment().put("__COMPAT_LAYER", "RunAsInvoker"); //NON-NLS
            processAndSettings.redirectErrorStream(true);
            processAndSettings.redirectOutput(Redirect.appendTo(log));

            int exitValue = ExecUtil.execute(processAndSettings, new FileIngestModuleProcessTerminator(this.context));

            if (this.context.fileIngestIsCancelled() == true) {
                // if it was cancelled by the user, result is OK
                // cleanup the output path
                FileUtil.deleteDir(new File(outputDirPath.toString()));
                if (null != tempFilePath && Files.exists(tempFilePath)) {
                    tempFilePath.toFile().delete();
                }
                logger.log(Level.INFO, "PhotoRec cancelled by user"); // NON-NLS
                return IngestModule.ProcessResult.OK;
            }
            else if (0 != exitValue) {
                // if it failed or was cancelled by timeout, result is ERROR
                // cleanup the output path
                FileUtil.deleteDir(new File(outputDirPath.toString()));
                if (null != tempFilePath && Files.exists(tempFilePath)) {
                    tempFilePath.toFile().delete();
                }
                logger.log(Level.SEVERE, "PhotoRec carver returned error exit value = {0} when scanning {1}", // NON-NLS
                        new Object[]{exitValue, file.getName()}); // NON-NLS
                return IngestModule.ProcessResult.ERROR;
            }

            // Move carver log file to avoid placement into Autopsy results. PhotoRec appends ".1" to the folder name.
            // NOTE(review): renameTo's boolean result is ignored; if the rename fails, the parse
            // below operates on a nonexistent report file.
            java.io.File oldAuditFile = new java.io.File(Paths.get(outputDirPath.toString(), PHOTOREC_RESULTS_EXTENDED, PHOTOREC_REPORT).toString()); //NON-NLS
            java.io.File newAuditFile = new java.io.File(Paths.get(outputDirPath.toString(), PHOTOREC_REPORT).toString()); //NON-NLS
            oldAuditFile.renameTo(newAuditFile);

            // Delete PhotoRec's carved-data subdirectories; only the relocated report remains.
            Path pathToRemove = Paths.get(outputDirPath.toAbsolutePath().toString());
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(pathToRemove)) {
                for (Path entry : stream) {
                    if (Files.isDirectory(entry)) {
                        FileUtil.deleteDir(new File(entry.toString()));
                    }
                }
            }

            // Now that we've cleaned up the folders and data files, parse the xml output file to add carved items into the database
            PhotoRecCarverOutputParser parser = new PhotoRecCarverOutputParser(outputDirPath);
            List<LayoutFile> theList = parser.parse(newAuditFile, id, file);
            if (theList != null) { // if there were any results from carving, add the unallocated carving event to the reports list.
                context.addFilesToJob(new ArrayList<>(theList));
            }
        }
        catch (IOException ex) {
            logger.log(Level.SEVERE, "Error processing " + file.getName() + " with PhotoRec carver", ex); // NON-NLS
            return IngestModule.ProcessResult.ERROR;
        }
        finally {
            if (null != tempFilePath && Files.exists(tempFilePath)) {
                // Get rid of the unallocated space file.
                tempFilePath.toFile().delete();
            }
        }
        return IngestModule.ProcessResult.OK;
    }

    /**
     * @inheritDoc
     */
    @Override
    public void shutDown() {
        // Only the last instance for the job (refCounter reaches 0) performs cleanup.
        if (this.context != null && refCounter.decrementAndGet(this.context.getJobId()) == 0) {
            try {
                // The last instance of this module for an ingest job cleans out
                // the working paths map entry for the job and deletes the temp dir.
                WorkingPaths paths = PhotoRecCarverFileIngestModule.pathsByJob.remove(this.context.getJobId());
                FileUtil.deleteDir(new File(paths.getTempDirPath().toString()));
            } catch (SecurityException ex) {
                logger.log(Level.SEVERE, "Error shutting down PhotoRec carver module", ex); // NON-NLS
            }
        }
    }

    // Immutable pair of the per-job output directory and its temp subdirectory.
    private static final class WorkingPaths {

        private final Path outputDirPath;
        private final Path tempDirPath;

        WorkingPaths(Path outputDirPath, Path tempDirPath) {
            this.outputDirPath = outputDirPath;
            this.tempDirPath = tempDirPath;
        }

        Path getOutputDirPath() {
            return this.outputDirPath;
        }

        Path getTempDirPath() {
            return this.tempDirPath;
        }
    }

    /**
     * Creates the output directory for this module for the current case, if it does not already exist.
     *
     * @return The absolute path of the output directory.
     * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException
     */
    synchronized static Path createModuleOutputDirectoryForCase() throws IngestModule.IngestModuleException {
        Path path = Paths.get(Case.getCurrentCase().getModulesOutputDirAbsPath(), PhotoRecCarverIngestModuleFactory.getModuleName());
        try {
            Files.createDirectory(path);
        } catch (FileAlreadyExistsException ex) {
            // No worries.
        } catch (IOException | SecurityException | UnsupportedOperationException ex) {
            throw new IngestModule.IngestModuleException(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "cannotCreateOutputDir.message", ex.getLocalizedMessage()));
        }
        return path;
    }

    /**
     * Finds the root Volume or Image of the AbstractFile passed in.
     *
     * @param file The file we want to find the root parent for
     * @return The ID of the root parent Volume or Image, or -1 if none was found
     */
    private static long getRootId(AbstractFile file) {
        long id = -1;
        Content parent = null;
        try {
            // Walk up the parent chain until we hit a Volume or Image.
            parent = file.getParent();
            while (parent != null) {
                if (parent instanceof Volume || parent instanceof Image) {
                    id = parent.getId();
                    break;
                }
                parent = parent.getParent();
            }
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "PhotoRec carver exception while trying to get parent of AbstractFile.", ex); //NON-NLS
        }
        return id;
    }

    /**
     * Finds and returns the path to the executable, if able.
     *
     * @param executableToFindName The name of the executable to find
     * @return A File reference or throws an exception
     * @throws IngestModuleException if not on Windows, or the executable is missing or not runnable
     */
    public static File locateExecutable(String executableToFindName) throws IngestModule.IngestModuleException {
        // Must be running under a Windows operating system.
        if (!PlatformUtil.isWindowsOS()) {
            throw new IngestModule.IngestModuleException(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "unsupportedOS.message"));
        }

        File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, PhotoRecCarverFileIngestModule.class.getPackage().getName(), false);
        if (null == exeFile) {
            throw new IngestModule.IngestModuleException(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "missingExecutable.message"));
        }

        if (!exeFile.canExecute()) {
            throw new IngestModule.IngestModuleException(NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "cannotRunExecutable.message"));
        }

        return exeFile;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.pdx;

import static org.apache.geode.distributed.ConfigurationProperties.CONSERVE_SOCKETS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.Operation;
import org.apache.geode.cache.PartitionAttributesFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionFactory;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.distributed.internal.DistributionMessage;
import org.apache.geode.distributed.internal.DistributionMessageObserver;
import org.apache.geode.internal.AvailablePortHelper;
import org.apache.geode.internal.cache.DistributedPutAllOperation;
import org.apache.geode.internal.cache.DistributedRegion;
import org.apache.geode.internal.cache.EntryEventImpl;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.RegionEntry;
import org.apache.geode.internal.cache.tier.sockets.BaseCommand;
import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID;
import org.apache.geode.internal.cache.tier.sockets.command.Put70;
import org.apache.geode.internal.cache.versions.VMVersionTag;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.Invoke;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.NetworkUtils;
import org.apache.geode.test.dunit.SerializableCallable;
import org.apache.geode.test.dunit.SerializableRunnable;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.Wait;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
import org.apache.geode.test.junit.categories.SerializationTest;

/**
 * Distributed (DUnit) tests verifying that retried client operations (put / putAll)
 * recover the version tag assigned by the original attempt, so retries do not create
 * conflicting entry versions. VMs are deliberately disconnected mid-operation to
 * force the client retry paths.
 */
@Category({SerializationTest.class})
public class ClientsWithVersioningRetryDUnitTest extends JUnit4CacheTestCase {
  // list of expected exceptions to remove in tearDown2()
  static List<IgnoredException> expectedExceptions = new LinkedList<IgnoredException>();

  @Override
  public final void postSetUp() throws Exception {
    Invoke.invokeInEveryVM(new SerializableRunnable() {
      @Override
      public void run() {
        // Disable endpoint shuffling, so that the client will always connect
        // to the first server we give it.
        System.setProperty(DistributionConfig.GEMFIRE_PREFIX + "bridge.disableShufflingOfEndpoints",
            "true");
      }
    });
  }

  @Override
  public final void postTearDownCacheTestCase() throws Exception {
    // Restore endpoint shuffling and clear any exceptions registered by tests.
    Invoke.invokeInEveryVM(new SerializableRunnable() {
      @Override
      public void run() {
        System.setProperty(DistributionConfig.GEMFIRE_PREFIX + "bridge.disableShufflingOfEndpoints",
            "false");
      }
    });
    for (IgnoredException ex : expectedExceptions) {
      ex.remove();
    }
  }

  /**
   * Test that we can successfully retry a distributed put all and get the version information.
   * second failure in bug 44951
   */
  @Test
  public void testRetryPut() {
    Host host = Host.getHost(0);
    final VM vm0 = host.getVM(0);
    final VM vm1 = host.getVM(1);

    createServerRegion(vm0, RegionShortcut.REPLICATE);
    createServerRegion(vm1, RegionShortcut.REPLICATE);

    // create an event tag in vm0 and then replay that event in vm1
    final DistributedMember memberID =
        (DistributedMember) vm0.invoke(new SerializableCallable("get id") {
          @Override
          public Object call() {
            return ((DistributedRegion) getCache().getRegion("region")).getDistributionManager()
                .getDistributionManagerId();
          }
        });
    vm0.invoke(new SerializableCallable("create entry with fake event ID") {
      @Override
      public Object call() {
        DistributedRegion dr = (DistributedRegion) getCache().getRegion("region");
        // Record a synthetic event (region version 123, entry version 9) so a later
        // "retry" with the same event ID can recover this tag.
        VersionTag tag = new VMVersionTag();
        tag.setMemberID(dr.getVersionMember());
        tag.setRegionVersion(123);
        tag.setEntryVersion(9);
        tag.setVersionTimeStamp(System.currentTimeMillis());
        EventID eventID = new EventID(new byte[0], 1, 0);
        EntryEventImpl event = EntryEventImpl.create(dr, Operation.CREATE, "TestObject",
            "TestValue", null, false, memberID, true, eventID);
        event.setVersionTag(tag);
        event.setContext(new ClientProxyMembershipID(memberID));
        dr.recordEvent(event);
        event.release();
        return memberID;
      }
    });
    vm1.invoke(new SerializableRunnable("recover event tag in vm1 from vm0") {
      @Override
      public void run() {
        DistributedRegion dr = (DistributedRegion) getCache().getRegion("region");
        // Replay the same event ID; the server-side retry logic should find the
        // version tag recorded in vm0.
        EventID eventID = new EventID(new byte[0], 1, 0);
        EntryEventImpl event = EntryEventImpl.create(dr, Operation.CREATE, "TestObject",
            "TestValue", null, false, memberID, true, eventID);
        try {
          event.setContext(new ClientProxyMembershipID(memberID));
          boolean recovered =
              ((BaseCommand) Put70.getCommand()).recoverVersionTagForRetriedOperation(event);
          assertTrue("Expected to recover the version for this event ID", recovered);
          assertEquals("Expected the region version to be 123", 123,
              event.getVersionTag().getRegionVersion());
        } finally {
          event.release();
        }
      }
    });
    // bug #48205 - a retried op in PR nodes not owning the primary bucket
    // may already have a version assigned to it in another backup bucket
    vm1.invoke(new SerializableRunnable("recover posdup event tag in vm1 event tracker from vm0") {
      @Override
      public void run() {
        DistributedRegion dr = (DistributedRegion) getCache().getRegion("region");
        EventID eventID = new EventID(new byte[0], 1, 0);
        EntryEventImpl event = EntryEventImpl.create(dr, Operation.CREATE, "TestObject",
            "TestValue", null, false, memberID, true, eventID);
        event.setPossibleDuplicate(true);
        try {
          dr.hasSeenEvent(event);
          assertTrue("Expected to recover the version for the event ID",
              event.getVersionTag() != null);
        } finally {
          event.release();
        }
      }
    });
  }

  /**
   * Test that we can successfully retry a distributed put all and get the version information. bug
   * #45059
   */
  @Test
  public void testRetryPutAll() {
    Host host = Host.getHost(0);
    final VM vm0 = host.getVM(0);
    final VM vm1 = host.getVM(1);
    final VM vm2 = host.getVM(2);
    final VM vm3 = host.getVM(3);

    createServerRegion(vm0, RegionShortcut.PARTITION_REDUNDANT_PERSISTENT);
    vm0.invoke(new SerializableRunnable() {
      @Override
      public void run() {
        // Make sure the bucket 0 is primary in this member.
        Region region = getCache().getRegion("region");
        region.put(0, "value");

        // Add a listener to close vm1 when we send a distributed put all operation
        // this will cause a retry after we have applied the original put all to
        // the cache, causing a retry
        DistributionMessageObserver.setInstance(new DistributionMessageObserver() {
          @Override
          public void beforeSendMessage(ClusterDistributionManager dm,
              DistributionMessage message) {
            if (message instanceof DistributedPutAllOperation.PutAllMessage) {
              // Observer clears itself so the kill happens exactly once.
              DistributionMessageObserver.setInstance(null);
              disconnectFromDS(vm1);
            }
          }
        });
      }
    });

    int port1 = createServerRegion(vm1, RegionShortcut.PARTITION_REDUNDANT_PERSISTENT);
    int port2 = createServerRegion(vm2, RegionShortcut.PARTITION_REDUNDANT_PERSISTENT);
    createClientRegion(vm3, port1, port2);

    // This will be a put all to bucket 0
    // Here's the expected sequence
    // client->vm1 (accessor0)
    // vm1->vm0
    // vm0 will kill vm1
    // vm0->vm2
    // client will retry the putall
    vm3.invoke(new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        Region region = getCache().getRegion("region");
        Map map = new HashMap();
        map.put(0, "a");
        map.put(113, "b");
        region.putAll(map);
        // The retried putAll must not bump the entry version a second time:
        // one put in setup + one putAll = entry version 2.
        RegionEntry entry = ((LocalRegion) region).getRegionEntry(0);
        assertNotNull(entry);
        assertNotNull(entry.getVersionStamp());
        assertEquals(2, entry.getVersionStamp().getEntryVersion());
        return null;
      }
    });

    // Verify the observer was triggered
    vm0.invoke(new SerializableRunnable() {
      @Override
      public void run() {
        // if the observer was triggered, it would have cleared itself
        assertNull(DistributionMessageObserver.getInstance());
      }
    });

    // Make sure vm1 did in fact shut down
    vm1.invoke(new SerializableRunnable() {
      @Override
      public void run() {
        GemFireCacheImpl cache = (GemFireCacheImpl) basicGetCache();
        assertTrue(cache == null || cache.isClosed());
      }
    });
  }

  /**
   * Test that we can successfully retry a distributed putAll on an accessor and get the version
   * information. bug #48205
   */
  @Test
  public void testRetryPutAllInAccessor() {
    Host host = Host.getHost(0);
    final VM vm0 = host.getVM(0);
    final VM vm1 = host.getVM(1);
    final VM vm2 = host.getVM(2);
    final VM vm3 = host.getVM(3);

    LogWriterUtils.getLogWriter().info("creating region in vm0");
    createRegionInPeer(vm0, RegionShortcut.PARTITION_REDUNDANT_PERSISTENT);

    vm0.invoke(new SerializableRunnable() {
      @Override
      public void run() {
        // Make sure the bucket 0 is primary in this member.
        Region region = getCache().getRegion("region");
        region.put(0, "value");
      }
    });

    LogWriterUtils.getLogWriter().info("creating region in vm1");
    createRegionInPeer(vm1, RegionShortcut.PARTITION_REDUNDANT_PERSISTENT);
    LogWriterUtils.getLogWriter().info("creating region in vm2");
    createRegionInPeer(vm2, RegionShortcut.PARTITION_REDUNDANT_PERSISTENT);
    LogWriterUtils.getLogWriter().info("creating region in vm3");
    // vm3 is a pure accessor (PARTITION_PROXY holds no data).
    createRegionInPeer(vm3, RegionShortcut.PARTITION_PROXY);

    expectedExceptions.add(IgnoredException.addIgnoredException("RuntimeException", vm2));

    vm2.invoke(new SerializableRunnable("install message listener to ignore update") {
      @Override
      public void run() {
        // Add a listener to close vm2 when we send a distributed put all operation
        // this will cause a retry after we have applied the original put all to
        // the cache, causing a retry
        DistributionMessageObserver.setInstance(new DistributionMessageObserver() {
          @Override
          public void beforeProcessMessage(ClusterDistributionManager dm, DistributionMessage msg) {
            if (msg instanceof DistributedPutAllOperation.PutAllMessage) {
              DistributionMessageObserver.setInstance(null);
              Wait.pause(5000); // give vm1 time to process the message that we're ignoring
              disconnectFromDS(vm0);
              // no reply will be sent to vm0 due to this exception, but that's okay
              // because vm0 has been shut down
              throw new RuntimeException("test code is ignoring message: " + msg);
            }
          }
        });
      }
    });

    // This will be a put all to bucket 0
    // Here's the expected sequence
    // accessor->vm0 (primary)
    // vm0->vm1, vm2
    // vm2 will ignore the message & kill vm0
    // accessor->vm2 or vm1
    // version tag is recovered and put in the event & cache
    vm3.invoke(new SerializableCallable("perform putAll in accessor") {
      @Override
      public Object call() throws Exception {
        Region region = getCache().getRegion("region");
        Map map = new HashMap();
        map.put(0, "a");
        map.put(113, "b");
        region.putAll(map);
        return null;
      }
    });

    // verify that the version is correct
    vm1.invoke(new SerializableRunnable("verify vm1") {
      @Override
      public void run() {
        // if the observer was triggered, it would have cleared itself
        assertNull(DistributionMessageObserver.getInstance());

        Region region = getCache().getRegion("region");
        VersionTag tag = ((LocalRegion) region).getVersionTag(0);
        assertEquals(2, tag.getEntryVersion());
      }
    });

    // Verify the observer was triggered and the version is correct
    vm2.invoke(new SerializableRunnable("verify vm2") {
      @Override
      public void run() {
        // if the observer was triggered, it would have cleared itself
        assertNull(DistributionMessageObserver.getInstance());

        Region region = getCache().getRegion("region");
        VersionTag tag = ((LocalRegion) region).getVersionTag(0);
        assertEquals(2, tag.getEntryVersion());
      }
    });

    // Make sure vm1 did in fact shut down
    vm0.invoke(new SerializableRunnable() {
      @Override
      public void run() {
        GemFireCacheImpl cache = (GemFireCacheImpl) basicGetCache();
        assertTrue(cache == null || cache.isClosed());
      }
    });
  }

  // Disconnects the given VM from the distributed system (used to force client retries).
  private void disconnectFromDS(VM vm) {
    vm.invoke(new SerializableCallable("disconnecting vm " + vm) {
      @Override
      public Object call() throws Exception {
        disconnectFromDS();
        return null;
      }
    });
  }

  /**
   * Creates the "region" region in the given VM and starts a cache server.
   *
   * @return the cache server's port
   */
  private int createServerRegion(VM vm, final RegionShortcut shortcut) {
    SerializableCallable createRegion = new SerializableCallable("create server region") {
      @Override
      public Object call() throws Exception {
        RegionFactory<Object, Object> rf = getCache().createRegionFactory(shortcut);
        if (!shortcut.equals(RegionShortcut.REPLICATE)) {
          rf.setPartitionAttributes(
              new PartitionAttributesFactory().setRedundantCopies(2).create());
        }
        rf.create("region");

        CacheServer server = getCache().addCacheServer();
        int port = AvailablePortHelper.getRandomAvailableTCPPort();
        server.setPort(port);
        server.start();
        return port;
      }
    };
    return (Integer) vm.invoke(createRegion);
  }

  // Creates the "region" region in the given VM as a peer (no cache server).
  private void createRegionInPeer(VM vm, final RegionShortcut shortcut) {
    SerializableCallable createRegion = new SerializableCallable("create peer region") {
      @Override
      public Object call() throws Exception {
        RegionFactory<Object, Object> rf = getCache().createRegionFactory(shortcut);
        if (!shortcut.equals(RegionShortcut.REPLICATE)) {
          rf.setPartitionAttributes(
              new PartitionAttributesFactory().setRedundantCopies(2).create());
        }
        rf.create("region");
        return null;
      }
    };
    vm.invoke(createRegion);
  }

  @Override
  public Properties getDistributedSystemProperties() {
    Properties p = super.getDistributedSystemProperties();
    p.put(CONSERVE_SOCKETS, "false");
    return p;
  }

  // Helper (not currently referenced by any test in this class): creates a persistent
  // replicate region "testSimplePdx" backed by a disk store, optionally persisting the
  // PDX type registry, and starts a cache server.
  private int createServerRegionWithPersistence(VM vm, final boolean persistentPdxRegistry) {
    SerializableCallable createRegion = new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        CacheFactory cf = new CacheFactory();
        if (persistentPdxRegistry) {
          cf.setPdxPersistent(true).setPdxDiskStore("store");
        }
        //
        Cache cache = getCache(cf);
        cache.createDiskStoreFactory().setDiskDirs(getDiskDirs()).create("store");

        AttributesFactory af = new AttributesFactory();
        af.setScope(Scope.DISTRIBUTED_ACK);
        af.setDataPolicy(DataPolicy.PERSISTENT_REPLICATE);
        af.setDiskStoreName("store");
        createRootRegion("testSimplePdx", af.create());

        CacheServer server = getCache().addCacheServer();
        int port = AvailablePortHelper.getRandomAvailableTCPPort();
        server.setPort(port);
        server.start();
        return port;
      }
    };
    return (Integer) vm.invoke(createRegion);
  }

  // Helper (not currently referenced by any test in this class): creates an empty
  // (accessor) "testSimplePdx" region and starts a cache server.
  private int createServerAccessor(VM vm) {
    SerializableCallable createRegion = new SerializableCallable() {
      @Override
      public Object call() throws Exception {
        AttributesFactory af = new AttributesFactory();
        af.setScope(Scope.DISTRIBUTED_ACK);
        af.setDataPolicy(DataPolicy.EMPTY);
        createRootRegion("testSimplePdx", af.create());

        CacheServer server = getCache().addCacheServer();
        int port = AvailablePortHelper.getRandomAvailableTCPPort();
        server.setPort(port);
        server.start();
        return port;
      }
    };
    return (Integer) vm.invoke(createRegion);
  }

  // Creates a caching-proxy client region connected to the two given server ports.
  private void createClientRegion(final VM vm, final int port1, final int port2) {
    SerializableCallable createRegion = new SerializableCallable("create client region in " + vm) {
      @Override
      public Object call() throws Exception {
        ClientCacheFactory cf = new ClientCacheFactory();
        cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port1);
        cf.addPoolServer(NetworkUtils.getServerHostName(vm.getHost()), port2);
        cf.setPoolPRSingleHopEnabled(false);
        // Long read timeout so slow retry sequences don't fail the test spuriously.
        cf.setPoolReadTimeout(10 * 60 * 1000);
        ClientCache cache = getClientCache(cf);
        cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY).create("region");
        return null;
      }
    };
    vm.invoke(createRegion);
  }
}
package au.org.aurin.wif.controller.suitability;

import static au.org.aurin.wif.io.RestAPIConstants.HEADER_USER_ID_KEY;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;

import org.geotools.feature.FeatureCollection;
import org.geotools.filter.text.cql2.CQLException;
import org.opengis.feature.Feature;
import org.opengis.feature.type.FeatureType;
import org.opengis.geometry.MismatchedDimensionException;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.NoSuchAuthorityCodeException;
import org.opengis.referencing.operation.TransformException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Controller;
import org.springframework.validation.BindException;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.multipart.MultipartFile;

import com.vividsolutions.jts.io.ParseException;

import au.org.aurin.wif.controller.OWIURLs;
import au.org.aurin.wif.exception.config.InvalidEntityIdException;
import au.org.aurin.wif.exception.config.ParsingException;
import au.org.aurin.wif.exception.config.WifInvalidConfigException;
import au.org.aurin.wif.exception.io.DatabaseFailedException;
import au.org.aurin.wif.exception.validate.SuitabilityAnalysisFailedException;
import au.org.aurin.wif.exception.validate.WifInvalidInputException;
import au.org.aurin.wif.executors.svc.AsyncSuitabilityService;
import au.org.aurin.wif.impl.suitability.WMSOutcome;
import au.org.aurin.wif.model.reports.suitability.SuitabilityAnalysisReport;
import au.org.aurin.wif.model.suitability.SuitabilityScenario;
import au.org.aurin.wif.svc.WifKeys;
import au.org.aurin.wif.svc.report.ReportService;
import au.org.aurin.wif.svc.suitability.SuitabilityScenarioService;

/**
 * The Class SuitabilityScenarioController.
 *
 * REST controller for CRUD, analysis (synchronous and asynchronous), status
 * polling, reporting, duplication, and XLS-factor upload of suitability
 * scenarios, all scoped under a project id.
 *
 * NOTE(review): {@code scenariosPool} is a plain HashMap mutated from request
 * threads ({@code getWMSOutcomeAsync} puts, {@code getStatus} gets/removes) —
 * presumably this should be a ConcurrentHashMap; verify thread-safety
 * expectations before changing.
 */
@Controller
@RequestMapping(OWIURLs.PROJECT_SVC_URI)
public class SuitabilityScenarioController {

  /** The Constant LOGGER. */
  private static final Logger LOGGER = LoggerFactory
      .getLogger(SuitabilityScenarioController.class);

  /** The suitability scenario service. */
  @Resource
  private SuitabilityScenarioService suitabilityScenarioService;

  /** The async suitability service. */
  @Resource
  private AsyncSuitabilityService asyncSuitabilityService;

  /** The scenarios pool. Maps scenario id -> pending async analysis result. */
  private final HashMap<String, Future<Boolean>> scenariosPool = new HashMap<String, Future<Boolean>>();

  /** The report service. */
  @Autowired
  private ReportService reportService;

  /**
   * Sets the suitability scenario service.
   *
   * @param suitabilityScenarioService
   *          the new suitability scenario service
   */
  public void setSuitabilityScenarioService(
      final SuitabilityScenarioService suitabilityScenarioService) {
    this.suitabilityScenarioService = suitabilityScenarioService;
  }

  /**
   * Gets the suitability scenarios for project.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @return the suitability scenarios for project
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   */
  @RequestMapping(method = RequestMethod.GET, value = "/{projectId}/suitabilityScenarios", produces = "application/json")
  @ResponseStatus(HttpStatus.OK)
  public @ResponseBody List<SuitabilityScenario> getSuitabilityScenariosForProject(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId)
      throws WifInvalidInputException {
    LOGGER
        .info(
            "*******>> getSuitabilityScenariosForProject request for project id ={}",
            projectId);
    return suitabilityScenarioService.getSuitabilityScenarios(projectId);
  }

  /**
   * Gets the suitability scenario.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @return the suitability scenario
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   * @throws ParsingException
   *           the parsing exception
   */
  @RequestMapping(method = RequestMethod.GET, value = "/{projectId}/suitabilityScenarios/{id}", produces = "application/json")
  @ResponseStatus(HttpStatus.OK)
  public @ResponseBody SuitabilityScenario getSuitabilityScenario(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @PathVariable("id") final String id) throws WifInvalidInputException,
      WifInvalidConfigException, ParsingException {
    // NOTE(review): projectId is logged but the lookup is by scenario id only —
    // presumably membership of the scenario in the project is enforced by the
    // service; confirm.
    LOGGER.info("*******>> getSuitabilityScenario request for project id ={}",
        projectId);
    return suitabilityScenarioService.getSuitabilityScenario(id);
  }

  /**
   * Creates the suitability scenario.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param suitabilityScenario
   *          the suitability scenario
   * @param response
   *          the response
   * @return the suitability scenario
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws BindException
   *           the bind exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   * @throws ParsingException
   *           the parsing exception
   */
  @RequestMapping(method = RequestMethod.POST, value = "/{projectId}/suitabilityScenarios", consumes = "application/json", produces = "application/json")
  @ResponseStatus(HttpStatus.CREATED)
  public @ResponseBody SuitabilityScenario createSuitabilityScenario(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @RequestBody final SuitabilityScenario suitabilityScenario,
      final HttpServletResponse response) throws WifInvalidInputException,
      BindException, WifInvalidConfigException, ParsingException {
    LOGGER.info(
        "*******>> createSuitabilityScenario request for project id ={}",
        projectId);
    return suitabilityScenarioService.createSuitabilityScenario(
        suitabilityScenario, projectId);
  }

  /**
   * Update suitability scenario.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @param suitabilityScenario
   *          the suitability scenario
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws BindException
   *           the bind exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   */
  @RequestMapping(method = RequestMethod.PUT, value = "/{projectId}/suitabilityScenarios/{id}", consumes = "application/json")
  @ResponseStatus(HttpStatus.NO_CONTENT)
  public void updateSuitabilityScenario(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @PathVariable("id") final String id,
      @RequestBody final SuitabilityScenario suitabilityScenario)
      throws WifInvalidInputException, BindException, WifInvalidConfigException {
    // NOTE(review): the path variable "id" is not passed to the service — the
    // scenario's own id inside the request body presumably wins; confirm the
    // two cannot disagree.
    LOGGER.info(
        "*******>> updateSuitabilityScenario request for project id ={}",
        projectId);
    suitabilityScenarioService.updateSuitabilityScenario(suitabilityScenario,
        projectId);
  }

  /**
   * Delete suitability scenario.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   */
  @RequestMapping(method = RequestMethod.DELETE, value = "/{projectId}/suitabilityScenarios/{id}")
  @ResponseStatus(HttpStatus.NO_CONTENT)
  public void deleteSuitabilityScenario(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @PathVariable("id") final String id) throws WifInvalidInputException,
      WifInvalidConfigException {
    LOGGER.info(
        "*******>> deleteSuitabilityScenario request for project id ={}",
        projectId);
    suitabilityScenarioService.deleteSuitabilityScenario(id, projectId);
  }

  /**
   * Gets the wMS outcome. Runs the suitability analysis synchronously; the
   * request body supplies "areaAnalyzed" and "crsArea".
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @param getWmsOutcomeParams
   *          the get wms outcome params
   * @return the wMS outcome
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   * @throws MismatchedDimensionException
   *           the mismatched dimension exception
   * @throws NoSuchAuthorityCodeException
   *           the no such authority code exception
   * @throws FactoryException
   *           the factory exception
   * @throws TransformException
   *           the transform exception
   * @throws ParseException
   *           the parse exception
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   * @throws SuitabilityAnalysisFailedException
   *           the wif analysis failed exception
   * @throws CQLException
   *           the cQL exception
   * @throws ParsingException
   *           the parsing exception
   */
  @RequestMapping(method = RequestMethod.POST, value = "/{projectId}/suitabilityScenarios/{id}/wms", produces = "application/json")
  @ResponseStatus(HttpStatus.NO_CONTENT)
  public void getWMSOutcome(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @PathVariable("id") final String id,
      @RequestBody final Map<String, String> getWmsOutcomeParams)
      throws WifInvalidInputException, WifInvalidConfigException,
      MismatchedDimensionException, NoSuchAuthorityCodeException,
      FactoryException, TransformException, ParseException, IOException,
      SuitabilityAnalysisFailedException, CQLException, ParsingException {
    LOGGER.info("*******>> getWMSOutcome request for project id ={}",
        projectId);
    // NOTE(review): each catch below logs and rethrows a new exception of the
    // same type; the CQLException copy drops the original cause (its ctor takes
    // only a message).
    try {
      final String areaAnalyzed = getWmsOutcomeParams.get("areaAnalyzed");
      final String crsArea = getWmsOutcomeParams.get("crsArea");
      suitabilityScenarioService.getWMSOutcome(id, areaAnalyzed, crsArea);
    } catch (final WifInvalidConfigException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new WifInvalidConfigException(e.getMessage(), e);
    } catch (final WifInvalidInputException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new WifInvalidInputException(e.getMessage(), e);
    } catch (final MismatchedDimensionException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new MismatchedDimensionException(e.getMessage(), e);
    } catch (final CQLException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new CQLException(e.getMessage());
    }
  }

  /**
   * Gets the wMS outcome async. Kicks off the analysis on the async service and
   * records the Future in {@code scenariosPool} so {@link #getStatus} can poll
   * it.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @param getWmsOutcomeParams
   *          the get wms outcome params
   * @return the wMS outcome async
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   * @throws MismatchedDimensionException
   *           the mismatched dimension exception
   * @throws NoSuchAuthorityCodeException
   *           the no such authority code exception
   * @throws FactoryException
   *           the factory exception
   * @throws TransformException
   *           the transform exception
   * @throws ParseException
   *           the parse exception
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   * @throws SuitabilityAnalysisFailedException
   *           the wif analysis failed exception
   * @throws CQLException
   *           the cQL exception
   * @throws InterruptedException
   *           the interrupted exception
   * @throws ExecutionException
   *           the execution exception
   * @throws ParsingException
   *           the parsing exception
   */
  @RequestMapping(method = RequestMethod.POST, value = "/{projectId}/suitabilityScenarios/{id}/async/wms", produces = "application/json")
  @ResponseStatus(HttpStatus.OK)
  public void getWMSOutcomeAsync(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @PathVariable("id") final String id,
      @RequestBody final Map<String, String> getWmsOutcomeParams)
      throws WifInvalidInputException, WifInvalidConfigException,
      MismatchedDimensionException, NoSuchAuthorityCodeException,
      FactoryException, TransformException, ParseException, IOException,
      SuitabilityAnalysisFailedException, CQLException, InterruptedException,
      ExecutionException, ParsingException {
    LOGGER.info("*******>> getWMSOutcomeAsync request for project id ={}",
        projectId);
    try {
      final String areaAnalyzed = getWmsOutcomeParams.get("areaAnalyzed");
      final String crsArea = getWmsOutcomeParams.get("crsArea");
      LOGGER.info("areaAnalyzed ={}", areaAnalyzed);
      LOGGER.info("crsArea ={}", crsArea);
      final Future<Boolean> outcome = asyncSuitabilityService
          .doSuitabilityAnalysisWMSAsync(id, areaAnalyzed, crsArea);
      // Remember the pending result under the scenario id for status polling.
      scenariosPool.put(id, outcome);
    } catch (final WifInvalidConfigException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new WifInvalidConfigException(e.getMessage(), e);
    } catch (final WifInvalidInputException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new WifInvalidInputException(e.getMessage(), e);
    } catch (final MismatchedDimensionException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new MismatchedDimensionException(e.getMessage(), e);
    }
  }

  /**
   * Gets the outcome. Synchronous analysis returning the resulting feature
   * collection.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @param getOutcomeParams
   *          the get outcome params
   * @return the outcome
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   * @throws MismatchedDimensionException
   *           the mismatched dimension exception
   * @throws NoSuchAuthorityCodeException
   *           the no such authority code exception
   * @throws FactoryException
   *           the factory exception
   * @throws TransformException
   *           the transform exception
   * @throws ParseException
   *           the parse exception
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   * @throws SuitabilityAnalysisFailedException
   *           the wif analysis failed exception
   * @throws CQLException
   *           the cQL exception
   * @throws ParsingException
   *           the parsing exception
   * @throws DatabaseFailedException
   */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  @RequestMapping(method = RequestMethod.POST, value = "/{projectId}/suitabilityScenarios/{id}/outcome", produces = "application/json")
  @ResponseStatus(HttpStatus.OK)
  public @ResponseBody FeatureCollection<FeatureType, Feature> getOutcome(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @PathVariable("id") final String id,
      @RequestBody final Map<String, String> getOutcomeParams)
      throws WifInvalidInputException, WifInvalidConfigException,
      MismatchedDimensionException, NoSuchAuthorityCodeException,
      FactoryException, TransformException, ParseException, IOException,
      SuitabilityAnalysisFailedException, CQLException, ParsingException,
      DatabaseFailedException {
    LOGGER.info("*******>> getOutcome request for project id ={}", projectId);
    try {
      final String areaAnalyzed = getOutcomeParams.get("areaAnalyzed");
      final String crsArea = getOutcomeParams.get("crsArea");
      return (FeatureCollection) suitabilityScenarioService.getOutcome(id,
          areaAnalyzed, crsArea);
    } catch (final WifInvalidConfigException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new WifInvalidConfigException(e.getMessage(), e);
    } catch (final WifInvalidInputException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new WifInvalidInputException(e.getMessage(), e);
    } catch (final MismatchedDimensionException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new MismatchedDimensionException(e.getMessage(), e);
    } catch (final CQLException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      // NOTE(review): this copy drops the original cause.
      throw new CQLException(e.getMessage());
    }
  }

  /**
   * Gets the status of a (possibly asynchronous) suitability analysis for the
   * scenario. Returns a small map with a setup key and a status key whose value
   * is one of the WifKeys PROCESS_STATE_* constants.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @return the status
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   */
  @RequestMapping(method = RequestMethod.GET, value = "/{projectId}/suitabilityScenarios/{id}/status", produces = "application/json")
  @ResponseStatus(HttpStatus.OK)
  public @ResponseBody HashMap<String, String> getStatus(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @PathVariable("id") final String id) throws WifInvalidInputException,
      WifInvalidConfigException, SuitabilityAnalysisFailedException {
    LOGGER.debug("*******>> getScenarioStatus request for scenario id ={}", id);
    final HashMap<String, String> answer = new HashMap<String, String>(2);
    answer.put(WifKeys.SETUP_PROCESS_KEY,
        WifKeys.SUITABILITY_PROCESS_STATE_SETUP);
    String statusMessage = WifKeys.PROCESS_STATE_NA;
    final SuitabilityScenario suitabilityScenario = suitabilityScenarioService
        .getSuitabilityScenarioNoMapping(id);
    if (suitabilityScenario.getReady()) {
      // Scenario already marked ready: report success without consulting the pool.
      statusMessage = WifKeys.PROCESS_STATE_SUCCESS;
    } else {
      // NOTE(review): the WifInvalidInputException and
      // SuitabilityAnalysisFailedException thrown inside this try are captured
      // by the catch (Exception) below and silently discarded unless they are
      // InterruptedException — callers will see PROCESS_STATE_NA (or FAILED via
      // the early answer.put) instead of the intended error. Likely a bug;
      // confirm before relying on those throws reaching the client.
      try {
        final Future<Boolean> result = scenariosPool.get(id);
        if (result == null) {
          LOGGER.error("id not found in scenariosPool for {}", id);
          throw new WifInvalidInputException("id not found in scenariosPool");
        }
        if (result.isDone()) {
          try {
            // Future is complete — get() will not block here.
            final Boolean msg = result.get();
            LOGGER.info("process ended with result: {}", msg);
          } catch (final ExecutionException e) {
            statusMessage = WifKeys.PROCESS_STATE_FAILED;
            final String errorMessage = "suitability analysis asynchronous process failed";
            answer.put(WifKeys.STATUS_KEY, statusMessage);
            LOGGER.info("Status is = {}", answer.get(WifKeys.STATUS_KEY));
            LOGGER.error(errorMessage, e);
            scenariosPool.remove(id);
            throw new SuitabilityAnalysisFailedException(errorMessage, e);
          }
          statusMessage = WifKeys.PROCESS_STATE_SUCCESS;
          scenariosPool.remove(id);
        } else {
          statusMessage = WifKeys.PROCESS_STATE_RUNNING;
        }
      } catch (final Exception e) {
        // NOTE(review): InterruptedException is wrapped without re-interrupting
        // the current thread (Thread.currentThread().interrupt()); every other
        // exception type is swallowed here.
        if (e instanceof InterruptedException) {
          LOGGER.error("get status failed for {}", id);
          throw new InvalidEntityIdException("get status failed ", e);
        }
      }
    }
    answer.put(WifKeys.STATUS_KEY, statusMessage);
    LOGGER.debug("Status is ={}", answer.get(WifKeys.STATUS_KEY));
    return answer;
  }

  /**
   * Gets the wms.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @return the wms
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   * @throws ParsingException
   *           the parsing exception
   */
  @RequestMapping(method = RequestMethod.GET, value = "/{projectId}/suitabilityScenarios/{id}/wmsinfo", produces = "application/json")
  @ResponseStatus(HttpStatus.OK)
  public @ResponseBody WMSOutcome getWMS(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @PathVariable("id") final String id) throws WifInvalidInputException,
      WifInvalidConfigException, ParsingException {
    LOGGER.info("*******>> getWMS request for suitabilityScenario id ={}", id);
    try {
      return suitabilityScenarioService.getWMS(id);
    } catch (final WifInvalidConfigException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new WifInvalidConfigException(e.getMessage(), e);
    } catch (final WifInvalidInputException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new WifInvalidInputException(e.getMessage(), e);
    } catch (final ParsingException e) {
      LOGGER.error("getOutcome failed: {}", e.getMessage());
      throw new ParsingException(e.getMessage(), e);
    }
  }

  /**
   * Gets the suitability scenario report based on the latest analysis
   * configuration.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @return the suitability scenario report
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   *           the wif invalid config exception
   * @throws ParsingException
   *           the parsing exception
   */
  @RequestMapping(method = RequestMethod.GET, value = "/{projectId}/suitabilityScenarios/{id}/report", produces = "application/json")
  @ResponseStatus(HttpStatus.OK)
  public @ResponseBody SuitabilityAnalysisReport getSuitabilityScenarioReport(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId,
      @PathVariable("id") final String id) throws WifInvalidInputException,
      WifInvalidConfigException, ParsingException {
    LOGGER.info(
        "*******>> getSuitabilityScenarioReport request for scenario id ={}",
        id);
    final SuitabilityScenario suitabilityScenario = suitabilityScenarioService
        .getSuitabilityScenario(id);
    return reportService.getSuitabilityAnalysisReport(suitabilityScenario);
  }

  /**
   * Gets the suitabilityLUsScores
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @return String
   * @throws WifInvalidInputException
   *           the wif invalid input exception
   * @throws WifInvalidConfigException
   */
  @RequestMapping(method = RequestMethod.GET, value = "/{projectId}/suitabilityLUsScores", produces = "application/json")
  @ResponseStatus(HttpStatus.OK)
  public @ResponseBody List<String> getSuitabilityLUsScoresForProject(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectId)
      throws WifInvalidInputException, WifInvalidConfigException {
    LOGGER
        .info(
            "*******>> getSuitabilityLUsScoresForProject request for project id ={}",
            projectId);
    return reportService.getSuitabilityLUsScores(projectId);
  }

  /**
   * duplicates suitability scenario. Rejects the copy if another scenario in
   * the project already has the requested name.
   *
   * NOTE(review): failures are reported as strings inside a 200 response
   * ("Name already exists!", "Error!...") rather than as HTTP error statuses;
   * clients must inspect the body.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @param inparams
   *          the input name
   * @return List<String>
   * @throws Exception
   */
  @RequestMapping(method = RequestMethod.POST, value = "/{projectId}/suitabilityScenarios/{id}/duplicate", produces = "application/json")
  @ResponseStatus(HttpStatus.OK)
  public @ResponseBody List<String> duplicateScenario(
      @RequestHeader(HEADER_USER_ID_KEY) final String roleId,
      @PathVariable("projectId") final String projectID,
      @PathVariable("id") final String scenarioID,
      @RequestBody final Map<String, String> params) {
    LOGGER.info(
        "*******>> duplicate request for scenario id ={}, with a new name: {}",
        scenarioID, params.get("name"));
    final List<String> out = new ArrayList<String>();
    try {
      final List<SuitabilityScenario> listScenario = suitabilityScenarioService
          .getSuitabilityScenarios(projectID);
      // lsw stays true only if no existing scenario already uses the new name.
      Boolean lsw = true;
      for (final SuitabilityScenario st : listScenario) {
        if (st.getLabel().equals(params.get("name"))) {
          lsw = false;
        }
      }
      if (lsw == true) {
        out.add(suitabilityScenarioService.duplicateSuitabiliyScenario(
            projectID, scenarioID, params.get("name")));
      } else {
        out.add("Name already exists!");
      }
    } catch (final Exception e) {
      LOGGER.error("duplicateScenario failed: {}", e.getMessage());
      out.add("Error!" + e.getMessage());
    }
    return out;
  }

  /**
   * duplicates suitability scenario.
   *
   * NOTE(review): despite the copied javadoc above, this endpoint uploads an
   * XLS file of suitability factors for the scenario; errors are folded into
   * the returned JSON string rather than an HTTP error status.
   *
   * @param roleId
   *          the role id
   * @param projectId
   *          the project id
   * @param id
   *          the id
   * @param inparams
   *          the input name
   * @return List<String>
   * @throws Exception
   */
  @RequestMapping(method = RequestMethod.POST, value = "/{projectId}/suitabilityScenarios/{id}/uploadXlsFactors", produces = "text/html")
  @ResponseStatus(HttpStatus.OK)
  public @ResponseBody String uploadFactorXLS(
      @PathVariable("projectId") final String projectID,
      @PathVariable("id") final String scenarioID,
      @RequestBody final MultipartFile file) {
    final String filename = file.getOriginalFilename();
    LOGGER.info(
        "*******>> uploadXlsFactors request for scenario id ={}, with file name: {}",
        scenarioID, filename);
    String out = "";
    try {
      if (!file.isEmpty()) {
        out = suitabilityScenarioService.uploadXlsFactors(projectID,
            scenarioID, file.getInputStream());
      } else {
        out = "{\"result\" : \"" + "File is not correct." + "\"}";
      }
    } catch (final Exception e) {
      LOGGER.error("uploadXlsFactors failed: {}", e.getMessage());
      out = "Error: " + e.getMessage();
      // out="success";
      out = "{\"result\" : \"" + out + "\"}";
    }
    return out;
  }
}
/* * Copyright 2017 Steve McDuff * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ibm.issw.jdbc.wrappers; import java.io.InputStream; import java.sql.ResultSet; import java.sql.SQLException; import oracle.jdbc.OracleResultSet; import oracle.sql.ARRAY; import oracle.sql.BFILE; import oracle.sql.BLOB; import oracle.sql.CHAR; import oracle.sql.CLOB; import oracle.sql.CustomDatum; import oracle.sql.CustomDatumFactory; import oracle.sql.DATE; import oracle.sql.Datum; import oracle.sql.INTERVALDS; import oracle.sql.INTERVALYM; import oracle.sql.NUMBER; import oracle.sql.OPAQUE; import oracle.sql.ORAData; import oracle.sql.ORADataFactory; import oracle.sql.RAW; import oracle.sql.REF; import oracle.sql.ROWID; import oracle.sql.STRUCT; import oracle.sql.TIMESTAMP; import oracle.sql.TIMESTAMPLTZ; import oracle.sql.TIMESTAMPTZ; import com.ibm.issw.jdbc.profiler.JdbcEvent; @SuppressWarnings("deprecation") public class WrappedOracleCalculatedResultSet extends WrappedCalculatedResultSet implements OracleResultSet { private final OracleResultSet oracleRs; public WrappedOracleCalculatedResultSet(ResultSet resultSet, String reference, JdbcEvent event) { super(resultSet, reference, event); oracleRs = (OracleResultSet) resultSet; } @Override public ARRAY getARRAY(int paramInt) throws SQLException { return oracleRs.getARRAY(paramInt); } @Override public InputStream getAsciiStream(int paramInt) throws SQLException { return oracleRs.getAsciiStream(paramInt); } @Override public BFILE 
getBFILE(int paramInt) throws SQLException { return oracleRs.getBFILE(paramInt); } @Override public BFILE getBfile(int paramInt) throws SQLException { return oracleRs.getBfile(paramInt); } @Override public InputStream getBinaryStream(int paramInt) throws SQLException { return oracleRs.getBinaryStream(paramInt); } @Override public InputStream getBinaryStream(String paramString) throws SQLException { return oracleRs.getBinaryStream(paramString); } @Override public BLOB getBLOB(int paramInt) throws SQLException { return oracleRs.getBLOB(paramInt); } @Override public CHAR getCHAR(int paramInt) throws SQLException { return oracleRs.getCHAR(paramInt); } @Override public CLOB getCLOB(int paramInt) throws SQLException { return oracleRs.getCLOB(paramInt); } @Override public ResultSet getCursor(int paramInt) throws SQLException { return oracleRs.getCursor(paramInt); } @Override public CustomDatum getCustomDatum(int paramInt, CustomDatumFactory paramCustomDatumFactory) throws SQLException { return oracleRs.getCustomDatum(paramInt, paramCustomDatumFactory); } @Override public ORAData getORAData(int paramInt, ORADataFactory paramORADataFactory) throws SQLException { return oracleRs.getORAData(paramInt, paramORADataFactory); } @Override public DATE getDATE(int paramInt) throws SQLException { return oracleRs.getDATE(paramInt); } @Override public NUMBER getNUMBER(int paramInt) throws SQLException { return oracleRs.getNUMBER(paramInt); } @Override public OPAQUE getOPAQUE(int paramInt) throws SQLException { return oracleRs.getOPAQUE(paramInt); } @Override public Datum getOracleObject(int paramInt) throws SQLException { return oracleRs.getOracleObject(paramInt); } @Override public RAW getRAW(int paramInt) throws SQLException { return oracleRs.getRAW(paramInt); } @Override public REF getREF(int paramInt) throws SQLException { return oracleRs.getREF(paramInt); } @Override public ROWID getROWID(int paramInt) throws SQLException { return oracleRs.getROWID(paramInt); } @Override public 
STRUCT getSTRUCT(int paramInt) throws SQLException { return oracleRs.getSTRUCT(paramInt); } @Override public INTERVALYM getINTERVALYM(int paramInt) throws SQLException { return oracleRs.getINTERVALYM(paramInt); } @Override public INTERVALDS getINTERVALDS(int paramInt) throws SQLException { return oracleRs.getINTERVALDS(paramInt); } @Override public TIMESTAMP getTIMESTAMP(int paramInt) throws SQLException { return oracleRs.getTIMESTAMP(paramInt); } @Override public TIMESTAMPTZ getTIMESTAMPTZ(int paramInt) throws SQLException { return oracleRs.getTIMESTAMPTZ(paramInt); } @Override public TIMESTAMPLTZ getTIMESTAMPLTZ(int paramInt) throws SQLException { return oracleRs.getTIMESTAMPLTZ(paramInt); } @Override public InputStream getUnicodeStream(int paramInt) throws SQLException { return oracleRs.getUnicodeStream(paramInt); } @Override public InputStream getUnicodeStream(String paramString) throws SQLException { return oracleRs.getUnicodeStream(paramString); } @Override public ARRAY getARRAY(String paramString) throws SQLException { return oracleRs.getARRAY(paramString); } @Override public BFILE getBfile(String paramString) throws SQLException { return oracleRs.getBfile(paramString); } @Override public BFILE getBFILE(String paramString) throws SQLException { return oracleRs.getBFILE(paramString); } @Override public BLOB getBLOB(String paramString) throws SQLException { return oracleRs.getBLOB(paramString); } @Override public CHAR getCHAR(String paramString) throws SQLException { return oracleRs.getCHAR(paramString); } @Override public CLOB getCLOB(String paramString) throws SQLException { return oracleRs.getCLOB(paramString); } @Override public OPAQUE getOPAQUE(String paramString) throws SQLException { return oracleRs.getOPAQUE(paramString); } @Override public INTERVALYM getINTERVALYM(String paramString) throws SQLException { return oracleRs.getINTERVALYM(paramString); } @Override public INTERVALDS getINTERVALDS(String paramString) throws SQLException { return 
oracleRs.getINTERVALDS(paramString); } @Override public TIMESTAMP getTIMESTAMP(String paramString) throws SQLException { return oracleRs.getTIMESTAMP(paramString); } @Override public TIMESTAMPTZ getTIMESTAMPTZ(String paramString) throws SQLException { return oracleRs.getTIMESTAMPTZ(paramString); } @Override public TIMESTAMPLTZ getTIMESTAMPLTZ(String paramString) throws SQLException { return oracleRs.getTIMESTAMPLTZ(paramString); } @Override public ResultSet getCursor(String paramString) throws SQLException { return oracleRs.getCursor(paramString); } @Override public CustomDatum getCustomDatum(String paramString, CustomDatumFactory paramCustomDatumFactory) throws SQLException { return oracleRs.getCustomDatum(paramString, paramCustomDatumFactory); } @Override public ORAData getORAData(String paramString, ORADataFactory paramORADataFactory) throws SQLException { return oracleRs.getORAData(paramString, paramORADataFactory); } @Override public DATE getDATE(String paramString) throws SQLException { return oracleRs.getDATE(paramString); } @Override public NUMBER getNUMBER(String paramString) throws SQLException { return oracleRs.getNUMBER(paramString); } @Override public Datum getOracleObject(String paramString) throws SQLException { return oracleRs.getOracleObject(paramString); } @Override public RAW getRAW(String paramString) throws SQLException { return oracleRs.getRAW(paramString); } @Override public REF getREF(String paramString) throws SQLException { return oracleRs.getREF(paramString); } @Override public ROWID getROWID(String paramString) throws SQLException { return oracleRs.getROWID(paramString); } @Override public STRUCT getSTRUCT(String paramString) throws SQLException { return oracleRs.getSTRUCT(paramString); } @Override public void updateARRAY(int paramInt, ARRAY paramARRAY) throws SQLException { oracleRs.updateARRAY(paramInt, paramARRAY); } @Override public void updateARRAY(String paramString, ARRAY paramARRAY) throws SQLException { 
oracleRs.updateARRAY(paramString, paramARRAY); } @Override public void updateBfile(int paramInt, BFILE paramBFILE) throws SQLException { oracleRs.updateBfile(paramInt, paramBFILE); } @Override public void updateBFILE(int paramInt, BFILE paramBFILE) throws SQLException { oracleRs.updateBFILE(paramInt, paramBFILE); } @Override public void updateBfile(String paramString, BFILE paramBFILE) throws SQLException { oracleRs.updateBfile(paramString, paramBFILE); } @Override public void updateBFILE(String paramString, BFILE paramBFILE) throws SQLException { oracleRs.updateBFILE(paramString, paramBFILE); } @Override public void updateBLOB(int paramInt, BLOB paramBLOB) throws SQLException { oracleRs.updateBLOB(paramInt, paramBLOB); } @Override public void updateBLOB(String paramString, BLOB paramBLOB) throws SQLException { oracleRs.updateBLOB(paramString, paramBLOB); } @Override public void updateCHAR(int paramInt, CHAR paramCHAR) throws SQLException { oracleRs.updateCHAR(paramInt, paramCHAR); } @Override public void updateCHAR(String paramString, CHAR paramCHAR) throws SQLException { oracleRs.updateCHAR(paramString, paramCHAR); } @Override public void updateCLOB(int paramInt, CLOB paramCLOB) throws SQLException { oracleRs.updateCLOB(paramInt, paramCLOB); } @Override public void updateCLOB(String paramString, CLOB paramCLOB) throws SQLException { oracleRs.updateCLOB(paramString, paramCLOB); } @Override public void updateCustomDatum(int paramInt, CustomDatum paramCustomDatum) throws SQLException { oracleRs.updateCustomDatum(paramInt, paramCustomDatum); } @Override public void updateORAData(int paramInt, ORAData paramORAData) throws SQLException { oracleRs.updateORAData(paramInt, paramORAData); } @Override public void updateCustomDatum(String paramString, CustomDatum paramCustomDatum) throws SQLException { oracleRs.updateCustomDatum(paramString, paramCustomDatum); } @Override public void updateORAData(String paramString, ORAData paramORAData) throws SQLException { 
oracleRs.updateORAData(paramString, paramORAData); } @Override public void updateDATE(int paramInt, DATE paramDATE) throws SQLException { oracleRs.updateDATE(paramInt, paramDATE); } @Override public void updateDATE(String paramString, DATE paramDATE) throws SQLException { oracleRs.updateDATE(paramString, paramDATE); } @Override public void updateINTERVALYM(int paramInt, INTERVALYM paramINTERVALYM) throws SQLException { oracleRs.updateINTERVALYM(paramInt, paramINTERVALYM); } @Override public void updateINTERVALYM(String paramString, INTERVALYM paramINTERVALYM) throws SQLException { oracleRs.updateINTERVALYM(paramString, paramINTERVALYM); } @Override public void updateINTERVALDS(int paramInt, INTERVALDS paramINTERVALDS) throws SQLException { oracleRs.updateINTERVALDS(paramInt, paramINTERVALDS); } @Override public void updateINTERVALDS(String paramString, INTERVALDS paramINTERVALDS) throws SQLException { oracleRs.updateINTERVALDS(paramString, paramINTERVALDS); } @Override public void updateTIMESTAMP(int paramInt, TIMESTAMP paramTIMESTAMP) throws SQLException { oracleRs.updateTIMESTAMP(paramInt, paramTIMESTAMP); } @Override public void updateTIMESTAMP(String paramString, TIMESTAMP paramTIMESTAMP) throws SQLException { oracleRs.updateTIMESTAMP(paramString, paramTIMESTAMP); } @Override public void updateTIMESTAMPTZ(int paramInt, TIMESTAMPTZ paramTIMESTAMPTZ) throws SQLException { oracleRs.updateTIMESTAMPTZ(paramInt, paramTIMESTAMPTZ); } @Override public void updateTIMESTAMPTZ(String paramString, TIMESTAMPTZ paramTIMESTAMPTZ) throws SQLException { oracleRs.updateTIMESTAMPTZ(paramString, paramTIMESTAMPTZ); } @Override public void updateTIMESTAMPLTZ(int paramInt, TIMESTAMPLTZ paramTIMESTAMPLTZ) throws SQLException { oracleRs.updateTIMESTAMPLTZ(paramInt, paramTIMESTAMPLTZ); } @Override public void updateTIMESTAMPLTZ(String paramString, TIMESTAMPLTZ paramTIMESTAMPLTZ) throws SQLException { oracleRs.updateTIMESTAMPLTZ(paramString, paramTIMESTAMPLTZ); } @Override public void 
updateNUMBER(int paramInt, NUMBER paramNUMBER) throws SQLException { oracleRs.updateNUMBER(paramInt, paramNUMBER); } @Override public void updateNUMBER(String paramString, NUMBER paramNUMBER) throws SQLException { oracleRs.updateNUMBER(paramString, paramNUMBER); } @Override public void updateOracleObject(int paramInt, Datum paramDatum) throws SQLException { oracleRs.updateOracleObject(paramInt, paramDatum); } @Override public void updateOracleObject(String paramString, Datum paramDatum) throws SQLException { oracleRs.updateOracleObject(paramString, paramDatum); } @Override public void updateRAW(int paramInt, RAW paramRAW) throws SQLException { oracleRs.updateRAW(paramInt, paramRAW); } @Override public void updateRAW(String paramString, RAW paramRAW) throws SQLException { oracleRs.updateRAW(paramString, paramRAW); } @Override public void updateREF(int paramInt, REF paramREF) throws SQLException { oracleRs.updateREF(paramInt, paramREF); } @Override public void updateREF(String paramString, REF paramREF) throws SQLException { oracleRs.updateREF(paramString, paramREF); } @Override public void updateROWID(int paramInt, ROWID paramROWID) throws SQLException { oracleRs.updateROWID(paramInt, paramROWID); } @Override public void updateROWID(String paramString, ROWID paramROWID) throws SQLException { oracleRs.updateROWID(paramString, paramROWID); } @Override public void updateSTRUCT(int paramInt, STRUCT paramSTRUCT) throws SQLException { oracleRs.updateSTRUCT(paramInt, paramSTRUCT); } @Override public void updateSTRUCT(String paramString, STRUCT paramSTRUCT) throws SQLException { oracleRs.updateSTRUCT(paramString, paramSTRUCT); } }
/*******************************************************************************
 * Copyright (c) 2010 Haifeng Li
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package smile.clustering;

import java.util.ArrayList;
import java.util.Arrays;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// NOTE: smile.math.Math shadows java.lang.Math throughout this file; calls such
// as Math.dot/Math.normalize/Math.squaredDistance below are Smile's, not the JDK's.
import smile.math.Math;
import smile.sort.QuickSort;
import smile.stat.distribution.GaussianDistribution;

/**
 * G-Means clustering algorithm, an extended K-Means which tries to
 * automatically determine the number of clusters by normality test.
 * The G-means algorithm is based on a statistical test for the hypothesis
 * that a subset of data follows a Gaussian distribution. G-means runs
 * k-means with increasing k in a hierarchical fashion until the test accepts
 * the hypothesis that the data assigned to each k-means center are Gaussian.
 *
 * <h2>References</h2>
 * <ol>
 * <li>G. Hamerly and C. Elkan. Learning the k in k-means. NIPS, 2003.</li>
 * </ol>
 *
 * @see KMeans
 * @see XMeans
 *
 * @author Haifeng Li
 */
public class GMeans extends KMeans {

    private static final Logger logger = LoggerFactory.getLogger(GMeans.class);

    /**
     * Constructor. Clustering data with the number of clusters being
     * automatically determined by G-Means algorithm.
     *
     * Starts with a single cluster (the global centroid) and repeatedly
     * attempts to split each sufficiently large cluster in two; a split is
     * kept only when the 1-D projection of the cluster's points onto the
     * axis joining the two child centroids fails an Anderson-Darling
     * normality test (adjusted statistic above the 1.8692 critical value).
     * Inherited fields {@code k}, {@code size}, {@code y}, {@code centroids}
     * and {@code distortion} are (re)assigned on every round.
     *
     * @param data the input data of which each row is a sample.
     * @param kmax the maximum number of clusters.
     * @throws IllegalArgumentException if {@code kmax < 2}.
     */
    public GMeans(double[][] data, int kmax) {
        if (kmax < 2) {
            throw new IllegalArgumentException("Invalid parameter kmax = " + kmax);
        }

        int n = data.length;
        int d = data[0].length;

        // Round 0: one cluster whose centroid is the mean of all samples.
        k = 1;
        size = new int[k];
        size[0] = n;
        y = new int[n];
        centroids = new double[k][d];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < d; j++) {
                centroids[0][j] += data[i][j];
            }
        }
        for (int j = 0; j < d; j++) {
            centroids[0][j] /= n;
        }

        // Total distortion (sum of squared distances to the assigned centroid).
        distortion = 0.0;
        for (int i = 0; i < n; i++) {
            distortion += Math.squaredDistance(data[i], centroids[0]);
        }
        logger.info(String.format("G-Means distortion with %d clusters: %.5f", k, distortion));

        BBDTree bbd = new BBDTree(data);
        while (k < kmax) {
            ArrayList<double[]> centers = new ArrayList<double[]>();
            double[] score = new double[k];
            KMeans[] kmeans = new KMeans[k];

            for (int i = 0; i < k; i++) {
                // don't split too small cluster. anyway likelihood estimation
                // not accurate in this case.
                // (kmeans[i] stays null and score[i] stays 0, i.e. "keep as is".)
                if (size[i] < 25) {
                    logger.info("Cluster {} too small to split: {} samples", i, size[i]);
                    continue;
                }

                // Collect the members of cluster i.
                double[][] subset = new double[size[i]][];
                for (int j = 0, l = 0; j < n; j++) {
                    if (y[j] == i) {
                        subset[l++] = data[j];
                    }
                }

                // Tentative 2-way split of this cluster.
                kmeans[i] = new KMeans(subset, 2, 100, 4);

                // Project each member onto the axis connecting the two child
                // centroids, normalized by the axis' squared length.
                double[] v = new double[d];
                for (int j = 0; j < d; j++) {
                    v[j] = kmeans[i].centroids[0][j] - kmeans[i].centroids[1][j];
                }
                double vp = Math.dot(v, v);
                double[] x = new double[size[i]];
                for (int j = 0; j < x.length; j++) {
                    x[j] = Math.dot(subset[j], v) / vp;
                }

                // normalize to mean 0 and variance 1.
                Math.normalize(x);
                score[i] = AndersonDarling(x);
                logger.info(String.format("Cluster %3d\tAnderson-Darling adjusted test statistic: %3.4f", i, score[i]));
            }

            // Sorts score ascending in place; index[i] maps sorted slot i back
            // to the original cluster id.
            int[] index = QuickSort.sort(score);

            // Keep clusters that pass the normality test (statistic <= 1.8692).
            // NOTE(review): this loop reads score[index[i]] while the loop below
            // reads score[i]; since score[] has been sorted in place, the two
            // indexings are inconsistent and could select/skip the wrong
            // clusters — verify against upstream, which appears to use score[i]
            // here. TODO confirm.
            for (int i = 0; i < k; i++) {
                if (score[index[i]] <= 1.8692) {
                    centers.add(centroids[index[i]]);
                }
            }

            int m = centers.size();
            // Walk failing clusters from the largest statistic downwards and
            // replace each with its two children, as long as kmax allows.
            for (int i = k; --i >= 0;) {
                if (score[i] > 1.8692) {
                    if (centers.size() + i - m + 1 < kmax) {
                        logger.info("Split cluster {}", index[i]);
                        centers.add(kmeans[index[i]].centroids[0]);
                        centers.add(kmeans[index[i]].centroids[1]);
                    } else {
                        // Budget exhausted: keep the original centroid instead.
                        centers.add(centroids[index[i]]);
                    }
                }
            }

            // no more split.
            if (centers.size() == k) {
                break;
            }

            // Re-run (BBD-tree accelerated) k-means with the new center set
            // until distortion stops improving, at most 100 iterations.
            k = centers.size();
            double[][] sums = new double[k][d];
            size = new int[k];
            centroids = new double[k][];
            for (int i = 0; i < k; i++) {
                centroids[i] = centers.get(i);
            }

            distortion = Double.MAX_VALUE;
            for (int iter = 0; iter < 100; iter++) {
                double newDistortion = bbd.clustering(centroids, sums, size, y);
                for (int i = 0; i < k; i++) {
                    if (size[i] > 0) {
                        for (int j = 0; j < d; j++) {
                            centroids[i][j] = sums[i][j] / size[i];
                        }
                    }
                }

                if (distortion <= newDistortion) {
                    break;
                } else {
                    distortion = newDistortion;
                }
            }

            logger.info(String.format("G-Means distortion with %d clusters: %.5f\n", k, distortion));
        }
    }

    /**
     * Calculates the Anderson-Darling statistic for one-dimensional normality test.
     *
     * NOTE: sorts and overwrites the caller's array in place (the sole caller
     * passes a scratch array, so this is intentional here).
     *
     * @param x the samples to test if drawn from a Gaussian distribution.
     * @return the adjusted Anderson-Darling statistic A*; larger values mean
     *         stronger evidence against normality.
     */
    private static double AndersonDarling(double[] x) {
        int n = x.length;
        Arrays.sort(x);

        // Transform samples to standard-normal CDF values.
        for (int i = 0; i < n; i++) {
            x[i] = GaussianDistribution.getInstance().cdf(x[i]);
            // in case overflow when taking log later.
            if (x[i] == 0) x[i] = 0.0000001;
            if (x[i] == 1) x[i] = 0.9999999;
        }

        double A = 0.0;
        for (int i = 0; i < n; i++) {
            A -= (2*i+1) * (Math.log(x[i]) + Math.log(1-x[n-i-1]));
        }

        A = A / n - n;
        // Small-sample adjustment of the raw statistic.
        A *= (1 + 4.0/n - 25.0/(n*n));

        return A;
    }

    /**
     * Returns a human-readable summary: total distortion, then one line per
     * cluster with its size and percentage of all samples (one decimal place,
     * computed with integer arithmetic).
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("G-Means distortion: %.5f\n", distortion));
        sb.append(String.format("Clusters of %d data points of dimension %d:\n", y.length, centroids[0].length));
        for (int i = 0; i < k; i++) {
            // r is size in permille; r/10 and r%10 render e.g. "12.3%".
            int r = (int) Math.round(1000.0 * size[i] / y.length);
            sb.append(String.format("%3d\t%5d (%2d.%1d%%)\n", i, size[i], r / 10, r % 10));
        }
        return sb.toString();
    }
}
/*
 * Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
package com.sun.tools.internal.xjc.reader;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

import com.sun.codemodel.internal.JClass;
import com.sun.codemodel.internal.JCodeModel;
import com.sun.codemodel.internal.JDefinedClass;
import com.sun.codemodel.internal.JType;
import com.sun.tools.internal.xjc.ErrorReceiver;

import org.xml.sax.Locator;
import org.xml.sax.SAXParseException;

/**
 * Type-related utility methods.
 *
 * @author
 *     <a href="mailto:kohsuke.kawaguchi@sun.com">Kohsuke KAWAGUCHI</a>
 */
public class TypeUtil {

    /**
     * Computes the common base type of two types.
     *
     * Convenience overload that delegates to the varargs version.
     *
     * @param types
     *      set of {@link JType} objects.
     */
    public static JType getCommonBaseType( JCodeModel codeModel, Collection<? extends JType> types ) {
        return getCommonBaseType( codeModel, types.toArray(new JType[types.size()]) );
    }

    /**
     * Computes the common base type of types.
     *
     * TODO: this is a very interesting problem. Since one type has possibly
     * multiple base types, it's not an easy problem.
     * The current implementation is very naive.
     *
     * To make the result deterministic across different JVMs, we have to
     * use a Set whose ordering is deterministic.
     */
    public static JType getCommonBaseType(JCodeModel codeModel, JType... t) {
        // first, eliminate duplicates.
        // TreeSet ordered by fullName() keeps iteration order JVM-independent.
        Set<JType> uniqueTypes = new TreeSet<JType>(typeComparator);
        for (JType type : t)
            uniqueTypes.add(type);

        // if this yields only one type. return now.
        // this is the only case where we can return a primitive type
        // from this method
        if (uniqueTypes.size() == 1)
            return uniqueTypes.iterator().next();

        // assertion failed. nullType can be used only under a very special circumstance
        assert !uniqueTypes.isEmpty();

        // the null type doesn't need to be taken into account.
        // (>=2 types at this point, so at least one non-null type remains
        // and the intersection loop below always assigns 's'.)
        uniqueTypes.remove(codeModel.NULL);

        // box all the types and compute the intersection of all types
        Set<JClass> s = null;

        for (JType type : uniqueTypes) {
            JClass cls = type.boxify();

            if (s == null)
                s = getAssignableTypes(cls);
            else
                s.retainAll(getAssignableTypes(cls));
        }

        // any JClass can be cast to Object, so make sure it's always there
        s.add( codeModel.ref(Object.class));

        // refine 's' by removing "lower" types.
        // for example, if we have both java.lang.Object and
        // java.io.InputStream, then we don't want to use java.lang.Object.

        JClass[] raw = s.toArray(new JClass[s.size()]);
        s.clear();

        for (int i = 0; i < raw.length; i++) { // for each raw[i]
            int j;
            for (j = 0; j < raw.length; j++) { // see if raw[j] "includes" raw[i]
                if (i == j)
                    continue;

                if (raw[i].isAssignableFrom(raw[j]))
                    break; // raw[j] is derived from raw[i], hence j includes i.
            }

            if (j == raw.length)
                // no other type includes raw[i]. remember this value.
                s.add(raw[i]);
        }

        assert !s.isEmpty(); // since at least java.lang.Object has to be there

        // we now pick the candidate for the return type
        JClass result = pickOne(s);

        // finally, sometimes this method is used to compute the base type of types like
        // JAXBElement<A>, JAXBElement<B>, and JAXBElement<C>.
        // for those inputs, at this point result=JAXBElement.
        //
        // here, we'll try to figure out the parameterization
        // so that we can return JAXBElement<? extends D> instead of just "JAXBElement".
        if(result.isParameterized())
            return result;

        // for each uniqueType we store the list of base type parameterization
        List<List<JClass>> parameters = new ArrayList<List<JClass>>(uniqueTypes.size());
        int paramLen = -1;

        for (JType type : uniqueTypes) {
            JClass cls = type.boxify();
            JClass bp = cls.getBaseClass(result);
            // if there's no parameterization in the base type,
            // we won't do any better than <?>. Thus no point in trying to figure out the parameterization.
            // just return the base type.
            if(bp.equals(result))
                return result;

            assert bp.isParameterized();
            List<JClass> tp = bp.getTypeParameters();
            parameters.add(tp);

            assert paramLen==-1 || paramLen==tp.size();
            // since 'bp' always is a parameterized version of 'result', it should always
            // have the same number of parameters.
            paramLen = tp.size();
        }

        List<JClass> paramResult = new ArrayList<JClass>();
        List<JClass> argList = new ArrayList<JClass>(parameters.size());
        // for each type parameter compute the common base type
        for( int i=0; i<paramLen; i++ ) {
            argList.clear();
            for (List<JClass> list : parameters)
                argList.add(list.get(i));

            // compute the lower bound.
            // (recursive call; argList holds the i-th type argument of every input)
            JClass bound = (JClass)getCommonBaseType(codeModel,argList);
            boolean allSame = true;
            for (JClass a : argList)
                allSame &= a.equals(bound);
            if(!allSame)
                bound = bound.wildcard();

            paramResult.add(bound);
        }

        return result.narrow(paramResult);
    }

    /**
     * Picks one candidate from the refined intersection set, preferring a
     * user-defined (generated) class over library/runtime types.
     */
    private static JClass pickOne(Set<JClass> s) {
        // we may have more than one candidates at this point.
        // any user-defined generated types should have
        // precedence over system-defined existing types.
        //
        // so try to return such a type if any.
        for (JClass c : s)
            if (c instanceof JDefinedClass)
                return c;

        // we can do more if we like. for example,
        // we can avoid types in the RI runtime.
        // but for now, just return the first one.
        return s.iterator().next();
    }

    // Seeds a deterministically-ordered set and delegates to the recursive collector.
    private static Set<JClass> getAssignableTypes( JClass t ) {
        Set<JClass> r = new TreeSet<JClass>(typeComparator);
        getAssignableTypes(t,r);
        return r;
    }

    /**
     * Returns the set of all classes/interfaces that a given type
     * implements/extends, including itself.
     *
     * For example, if you pass java.io.FilterInputStream, then the returned
     * set will contain java.lang.Object, java.lang.InputStream, and
     * java.lang.FilterInputStream.
     */
    private static void getAssignableTypes( JClass t, Set<JClass> s ) {
        // 'add' returning false means we've already visited this type;
        // stop recursing to avoid redundant work on diamond hierarchies.
        if(!s.add(t))
            return;

        // add its raw type
        s.add(t.erasure());

        // if this type is added for the first time,
        // recursively process the super class.
        JClass _super = t._extends();
        if(_super!=null)
            getAssignableTypes(_super,s);

        // recursively process all implemented interfaces
        Iterator<JClass> itr = t._implements();
        while(itr.hasNext())
            getAssignableTypes(itr.next(),s);
    }

    /**
     * Obtains a {@link JType} object for the string representation
     * of a type.
     *
     * On failure reports a warning through the given handler and recovers
     * by treating the name as a class derived from Object.
     */
    public static JType getType( JCodeModel codeModel,
        String typeName, ErrorReceiver errorHandler, Locator errorSource ) {

        try {
            return codeModel.parseType(typeName);
        } catch( ClassNotFoundException ee ) {

            // make it a warning
            errorHandler.warning( new SAXParseException(
                Messages.ERR_CLASS_NOT_FOUND.format(typeName)
                ,errorSource));

            // recover by assuming that it's a class that derives from Object
            return codeModel.directClass(typeName);
        }
    }

    /**
     * Compares {@link JType} objects by their names.
     */
    private static final Comparator<JType> typeComparator = new Comparator<JType>() {
        public int compare(JType t1, JType t2) {
            return t1.fullName().compareTo(t2.fullName());
        }
    };
}
/* * * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://www.orientechnologies.com * */ package com.orientechnologies.orient.core.storage.impl.local.paginated.wal; import com.orientechnologies.common.concur.lock.OInterruptedException; import com.orientechnologies.common.directmemory.ODirectMemoryPointer; import com.orientechnologies.common.io.OFileUtils; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.serialization.types.OIntegerSerializer; import com.orientechnologies.common.serialization.types.OLongSerializer; import com.orientechnologies.common.util.OPair; import com.orientechnologies.orient.core.config.OGlobalConfiguration; import com.orientechnologies.orient.core.exception.OStorageException; import com.orientechnologies.orient.core.storage.impl.local.OFullCheckpointRequestListener; import com.orientechnologies.orient.core.storage.impl.local.OLowDiskSpaceInformation; import com.orientechnologies.orient.core.storage.impl.local.OLowDiskSpaceListener; import com.orientechnologies.orient.core.storage.impl.local.paginated.OLocalPaginatedStorage; import java.io.EOFException; import java.io.File; import java.io.FileNotFoundException; import java.io.FilenameFilter; import java.io.IOException; import java.io.RandomAccessFile; import java.lang.ref.WeakReference; import java.util.*; import 
java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Condition; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.CRC32; /** * @author Andrey Lomakin * @since 25.04.13 */ public class ODiskWriteAheadLog extends OAbstractWriteAheadLog { public static final String MASTER_RECORD_EXTENSION = ".wmr"; public static final String WAL_SEGMENT_EXTENSION = ".wal"; private static final long ONE_KB = 1024L; private final long freeSpaceLimit = OGlobalConfiguration.DISK_CACHE_FREE_SPACE_LIMIT .getValueAsLong() * 1024L * 1024L; private final long walSizeLimit = OGlobalConfiguration.WAL_MAX_SIZE .getValueAsLong() * 1024L * 1024L; private final List<LogSegment> logSegments = new ArrayList<LogSegment>(); private final int maxPagesCacheSize; private final int commitDelay; private final long maxSegmentSize; private final File walLocation; private final RandomAccessFile masterRecordLSNHolder; private final OLocalPaginatedStorage storage; private boolean useFirstMasterRecord = true; private long logSize; private File masterRecordFile; private OLogSequenceNumber firstMasterRecord; private OLogSequenceNumber secondMasterRecord; private volatile OLogSequenceNumber flushedLsn; private boolean segmentCreationFlag = false; private final Condition segmentCreationComplete = syncObject.newCondition(); private final Set<OOperationUnitId> activeOperations = new HashSet<OOperationUnitId>(); private final List<WeakReference<OLowDiskSpaceListener>> lowDiskSpaceListeners = Collections .synchronizedList(new ArrayList<WeakReference<OLowDiskSpaceListener>>()); private final List<WeakReference<OFullCheckpointRequestListener>> fullCheckpointListeners = Collections .synchronizedList(new 
ArrayList<WeakReference<OFullCheckpointRequestListener>>()); private final class LogSegment implements Comparable<LogSegment> { private final RandomAccessFile rndFile; private final File file; private final long order; private final int maxPagesCacheSize; private final ConcurrentLinkedQueue<OWALPage> pagesCache = new ConcurrentLinkedQueue<OWALPage>(); private final ScheduledExecutorService commitExecutor = Executors .newSingleThreadScheduledExecutor(new ThreadFactory() { @Override public Thread newThread(Runnable r) { Thread thread = new Thread(r); thread.setDaemon(true); thread.setName("OrientDB WAL Flush Task (" + storage.getName() + ")"); return thread; } }); private long filledUpTo; private boolean closed; private OWALPage currentPage; private long nextPositionToFlush; private OLogSequenceNumber last = null; private OLogSequenceNumber pendingLSNToFlush; private volatile boolean flushNewData = true; private WeakReference<OPair<OLogSequenceNumber, byte[]>> lastReadRecord = new WeakReference<OPair<OLogSequenceNumber, byte[]>>( null); private final class FlushTask implements Runnable { private FlushTask() { } @Override public void run() { try { commit(); } catch (Throwable e) { OLogManager.instance().error(this, "Error during WAL background flush", e); } } private void commit() throws IOException { if (pagesCache.isEmpty()) return; if (!flushNewData) return; flushNewData = false; final int maxSize = pagesCache.size(); ODirectMemoryPointer[] pagesToFlush = new ODirectMemoryPointer[maxSize]; long filePointer = nextPositionToFlush; int flushedPages = 0; OLogSequenceNumber lastLSNToFlush = null; Iterator<OWALPage> pageIterator = pagesCache.iterator(); while (flushedPages < maxSize) { final OWALPage page = pageIterator.next(); synchronized (page) { final int filledUpTo = page.getFilledUpTo(); int pos = OWALPage.RECORDS_OFFSET; while (pos < filledUpTo) { if (!page.mergeWithNextPage(pos)) { if (pos == OWALPage.RECORDS_OFFSET && pendingLSNToFlush != null) { lastLSNToFlush 
= pendingLSNToFlush; pendingLSNToFlush = null; } else lastLSNToFlush = new OLogSequenceNumber(order, filePointer + flushedPages * OWALPage.PAGE_SIZE + pos); } else if (pendingLSNToFlush == null) pendingLSNToFlush = new OLogSequenceNumber(order, filePointer + flushedPages * OWALPage.PAGE_SIZE + pos); pos += page.getSerializedRecordSize(pos); } ODirectMemoryPointer dataPointer; if (flushedPages == maxSize - 1) { dataPointer = new ODirectMemoryPointer(OWALPage.PAGE_SIZE); page.getPagePointer().moveData(0, dataPointer, 0, OWALPage.PAGE_SIZE); } else { dataPointer = page.getPagePointer(); } pagesToFlush[flushedPages] = dataPointer; } flushedPages++; } synchronized (rndFile) { rndFile.seek(filePointer); for (int i = 0; i < pagesToFlush.length; i++) { ODirectMemoryPointer dataPointer = pagesToFlush[i]; byte[] pageContent = dataPointer.get(0, OWALPage.PAGE_SIZE); if (i == pagesToFlush.length - 1) dataPointer.free(); flushPage(pageContent); filePointer += OWALPage.PAGE_SIZE; } if (OGlobalConfiguration.WAL_SYNC_ON_PAGE_FLUSH.getValueAsBoolean()) rndFile.getFD().sync(); } nextPositionToFlush = filePointer - OWALPage.PAGE_SIZE; if (lastLSNToFlush != null) flushedLsn = lastLSNToFlush; for (int i = 0; i < flushedPages - 1; i++) { OWALPage page = pagesCache.poll(); page.getPagePointer().free(); } assert !pagesCache.isEmpty(); final long freeSpace = walLocation.getFreeSpace(); if (freeSpace < freeSpaceLimit) { for (WeakReference<OLowDiskSpaceListener> listenerWeakReference : lowDiskSpaceListeners) { final OLowDiskSpaceListener lowDiskSpaceListener = listenerWeakReference.get(); if (lowDiskSpaceListener != null) lowDiskSpaceListener.lowDiskSpace(new OLowDiskSpaceInformation(freeSpace, freeSpaceLimit)); } } } private void flushPage(byte[] content) throws IOException { CRC32 crc32 = new CRC32(); crc32.update(content, OIntegerSerializer.INT_SIZE, OWALPage.PAGE_SIZE - OIntegerSerializer.INT_SIZE); OIntegerSerializer.INSTANCE.serializeNative((int) crc32.getValue(), content, 0); 
rndFile.write(content);
      }
    }

    // Opens (or creates) the segment file in read/write mode. The segment's
    // order is parsed out of the file name ("<storage>.<order>.wal").
    private LogSegment(File file, int maxPagesCacheSize) throws IOException {
      this.file = file;
      this.maxPagesCacheSize = maxPagesCacheSize;

      order = extractOrder(file.getName());
      closed = false;
      rndFile = new RandomAccessFile(file, "rw");
    }

    // Starts the periodic background flush task (no-op when commitDelay <= 0).
    public void startFlush() {
      if (commitDelay > 0)
        commitExecutor.scheduleAtFixedRate(new FlushTask(), commitDelay, commitDelay, TimeUnit.MILLISECONDS);
    }

    // Shuts down the background flush executor, optionally performing a final
    // synchronous flush first. Blocks up to WAL_SHUTDOWN_TIMEOUT ms.
    public void stopFlush(boolean flush) {
      if (flush)
        flush();

      if (!commitExecutor.isShutdown()) {
        commitExecutor.shutdown();
        try {
          if (!commitExecutor
              .awaitTermination(OGlobalConfiguration.WAL_SHUTDOWN_TIMEOUT.getValueAsInteger(), TimeUnit.MILLISECONDS))
            throw new OStorageException("WAL flush task for " + getPath() + " segment can not be stopped.");
        } catch (InterruptedException e) {
          // NOTE(review): interrupt status is not restored here — existing behavior kept.
          OLogManager.instance().error(this, "Can not shutdown background WAL commit thread.");
        }
      }
    }

    public long getOrder() {
      return order;
    }

    // Verifies the on-disk file and primes the page cache; 'last' then points
    // at the final logged byte of this segment.
    public void init() throws IOException {
      selfCheck();
      initPageCache();

      last = new OLogSequenceNumber(order, filledUpTo - 1);
    }

    // Segments are ordered by their numeric order (file-name suffix).
    @Override
    public int compareTo(LogSegment other) {
      final long otherOrder = other.order;

      if (order > otherOrder)
        return 1;
      else if (order < otherOrder)
        return -1;

      return 0;
    }

    public long filledUpTo() throws IOException {
      return filledUpTo;
    }

    // LSN of the first record in this segment, or null when the segment is
    // empty both in cache and on disk.
    public OLogSequenceNumber begin() throws IOException {
      if (!pagesCache.isEmpty())
        return new OLogSequenceNumber(order, OWALPage.RECORDS_OFFSET);

      if (rndFile.length() > 0)
        return new OLogSequenceNumber(order, OWALPage.RECORDS_OFFSET);

      return null;
    }

    public OLogSequenceNumber end() {
      return last;
    }

    // Closes the segment and removes its file, retrying deletion up to 10 times.
    public void delete(boolean flush) throws IOException {
      close(flush);

      boolean deleted = OFileUtils.delete(file);
      int retryCount = 0;

      while (!deleted) {
        deleted = OFileUtils.delete(file);
        retryCount++;

        if (retryCount > 10)
          throw new IOException("Can not delete file. Retry limit exceeded. (" + retryCount + ").");
      }
    }

    public String getPath() {
      return file.getAbsolutePath();
    }

    // Appends a record, splitting it into chunks across WAL pages when it does
    // not fit in the current page. Returns the LSN of the record's first chunk.
    public OLogSequenceNumber logRecord(byte[] record) throws IOException {
      flushNewData = true;
      int pageOffset = (int) (filledUpTo % OWALPage.PAGE_SIZE);
      long pageIndex = filledUpTo / OWALPage.PAGE_SIZE;

      if (pageOffset == 0 && pageIndex > 0)
        pageIndex--;

      int pos = 0;
      boolean firstChunk = true;

      OLogSequenceNumber lsn = null;

      while (pos < record.length) {
        if (currentPage == null) {
          ODirectMemoryPointer pointer = new ODirectMemoryPointer(OWALPage.PAGE_SIZE);
          currentPage = new OWALPage(pointer, true);
          pagesCache.add(currentPage);
          filledUpTo += OWALPage.RECORDS_OFFSET;
        }

        int freeSpace = currentPage.getFreeSpace();

        if (freeSpace < OWALPage.MIN_RECORD_SIZE) {
          // Current page cannot hold even a minimal record: start a new page.
          filledUpTo += freeSpace + OWALPage.RECORDS_OFFSET;
          ODirectMemoryPointer pointer = new ODirectMemoryPointer(OWALPage.PAGE_SIZE);
          currentPage = new OWALPage(pointer, true);
          pagesCache.add(currentPage);
          pageIndex++;

          freeSpace = currentPage.getFreeSpace();
        }

        final OWALPage walPage = currentPage;
        synchronized (walPage) {
          final int entrySize = OWALPage.calculateSerializedSize(record.length - pos);

          int addedChunkOffset;
          if (entrySize <= freeSpace) {
            // Remaining data fits entirely into the current page.
            if (pos == 0)
              addedChunkOffset = walPage.appendRecord(record, false, !firstChunk);
            else
              addedChunkOffset = walPage.appendRecord(Arrays.copyOfRange(record, pos, record.length), false, !firstChunk);

            pos = record.length;
          } else {
            // Fill the page with the largest chunk that fits; continue on the next page.
            int chunkSize = OWALPage.calculateRecordSize(freeSpace);
            if (chunkSize > record.length - pos)
              chunkSize = record.length - pos;

            addedChunkOffset = walPage.appendRecord(Arrays.copyOfRange(record, pos, pos + chunkSize), true, !firstChunk);
            pos += chunkSize;
          }

          if (firstChunk)
            lsn = new OLogSequenceNumber(order, pageIndex * OWALPage.PAGE_SIZE + addedChunkOffset);

          int spaceDiff = freeSpace - walPage.getFreeSpace();
          filledUpTo += spaceDiff;

          firstChunk = false;
        }
      }

      if (pagesCache.size() > maxPagesCacheSize) {
        OLogManager.instance().info(this, "Max cache limit is reached (%d vs. %d), sync flush is performed.",
            maxPagesCacheSize, pagesCache.size());
        flush();
      }

      last = lsn;
      return last;
    }

    // Reads one record, re-assembling chunks that span multiple pages. The
    // most recently read (LSN, record) pair is memoized via a weak reference.
    public byte[] readRecord(OLogSequenceNumber lsn) throws IOException {
      final OPair<OLogSequenceNumber, byte[]> lastRecord = lastReadRecord.get();
      if (lastRecord != null && lastRecord.getKey().equals(lsn))
        return lastRecord.getValue();

      assert lsn.getSegment() == order;
      if (lsn.getPosition() >= filledUpTo)
        return null;

      if (!pagesCache.isEmpty())
        flush();

      long pageIndex = lsn.getPosition() / OWALPage.PAGE_SIZE;

      byte[] record = null;
      int pageOffset = (int) (lsn.getPosition() % OWALPage.PAGE_SIZE);

      long pageCount = (filledUpTo + OWALPage.PAGE_SIZE - 1) / OWALPage.PAGE_SIZE;

      while (pageIndex < pageCount) {
        byte[] pageContent = new byte[OWALPage.PAGE_SIZE];
        synchronized (rndFile) {
          rndFile.seek(pageIndex * OWALPage.PAGE_SIZE);
          rndFile.readFully(pageContent);
        }

        if (!checkPageIntegrity(pageContent))
          throw new OWALPageBrokenException("WAL page with index " + pageIndex + " is broken.");

        ODirectMemoryPointer pointer = new ODirectMemoryPointer(pageContent);
        try {
          OWALPage page = new OWALPage(pointer, false);

          byte[] content = page.getRecord(pageOffset);
          if (record == null)
            record = content;
          else {
            // Concatenate this chunk onto the previously read chunks.
            byte[] oldRecord = record;

            record = new byte[record.length + content.length];
            System.arraycopy(oldRecord, 0, record, 0, oldRecord.length);
            System.arraycopy(content, 0, record, oldRecord.length, record.length - oldRecord.length);
          }

          if (page.mergeWithNextPage(pageOffset)) {
            // Record continues on the following page.
            pageOffset = OWALPage.RECORDS_OFFSET;
            pageIndex++;
            if (pageIndex >= pageCount)
              throw new OWALPageBrokenException("WAL page with index " + pageIndex + " is broken.");
          } else {
            if (page.getFreeSpace() >= OWALPage.MIN_RECORD_SIZE && pageIndex < pageCount - 1)
              throw new OWALPageBrokenException("WAL page with index " + pageIndex + " is broken.");
            break;
          }
        } finally {
          pointer.free();
        }
      }

      lastReadRecord = new WeakReference<OPair<OLogSequenceNumber, byte[]>>(new OPair<OLogSequenceNumber, byte[]>(lsn, record));
      return record;
    }

    // Computes the LSN immediately following the record at 'lsn', accounting
    // for page headers the record's chunks skip over. Null when at segment end.
    public OLogSequenceNumber getNextLSN(OLogSequenceNumber lsn) throws IOException {
      final byte[] record = readRecord(lsn);
      if (record == null)
        return null;

      long pos = lsn.getPosition();
      long pageIndex = pos / OWALPage.PAGE_SIZE;
      int pageOffset = (int) (pos - pageIndex * OWALPage.PAGE_SIZE);

      int restOfRecord = record.length;
      while (restOfRecord > 0) {
        int entrySize = OWALPage.calculateSerializedSize(restOfRecord);
        if (entrySize + pageOffset < OWALPage.PAGE_SIZE) {
          if (entrySize + pageOffset <= OWALPage.PAGE_SIZE - OWALPage.MIN_RECORD_SIZE)
            pos += entrySize;
          else
            pos += OWALPage.PAGE_SIZE - pageOffset + OWALPage.RECORDS_OFFSET;
          break;
        } else if (entrySize + pageOffset == OWALPage.PAGE_SIZE) {
          pos += entrySize + OWALPage.RECORDS_OFFSET;
          break;
        } else {
          int chunkSize = OWALPage.calculateRecordSize(OWALPage.PAGE_SIZE - pageOffset);
          restOfRecord -= chunkSize;

          pos += OWALPage.PAGE_SIZE - pageOffset + OWALPage.RECORDS_OFFSET;
          pageOffset = OWALPage.RECORDS_OFFSET;
        }
      }

      if (pos >= filledUpTo)
        return null;

      return new OLogSequenceNumber(order, pos);
    }

    // Closes the segment (optionally flushing first) and frees cached pages.
    public void close(boolean flush) throws IOException {
      if (!closed) {
        lastReadRecord.clear();

        stopFlush(flush);

        rndFile.close();

        closed = true;

        if (!pagesCache.isEmpty()) {
          for (OWALPage page : pagesCache)
            page.getPagePointer().free();
        }

        currentPage = null;
      }
    }

    // LSN of the last flushed byte, or null when nothing has hit disk yet.
    public OLogSequenceNumber readFlushedLSN() throws IOException {
      long pages = rndFile.length() / OWALPage.PAGE_SIZE;
      if (pages == 0)
        return null;

      return new OLogSequenceNumber(order, filledUpTo - 1);
    }

    // Synchronous flush: run the flush task on the commit executor and wait,
    // or run it inline once the executor has been shut down.
    public void flush() {
      if (!commitExecutor.isShutdown()) {
        try {
          commitExecutor.submit(new FlushTask()).get();
        } catch (InterruptedException e) {
          Thread.interrupted();
          throw new OStorageException("Thread was interrupted during flush", e);
        } catch (ExecutionException e) {
          // NOTE(review): the ExecutionException cause is dropped here — existing behavior kept.
          throw new OStorageException("Error during WAL segment " + getPath() + " flush.");
        }
      } else {
        new FlushTask().run();
      }
    }

    // Loads the last on-disk page into the cache. A corrupted last page is
    // abandoned and a fresh page is logically appended after it.
    private void initPageCache() throws IOException {
      synchronized (rndFile) {
        long pagesCount = rndFile.length() / OWALPage.PAGE_SIZE;
        if (pagesCount == 0)
          return;

        rndFile.seek((pagesCount - 1) * OWALPage.PAGE_SIZE);

        byte[] content = new byte[OWALPage.PAGE_SIZE];
        rndFile.readFully(content);

        if (checkPageIntegrity(content)) {
          ODirectMemoryPointer pointer = new ODirectMemoryPointer(content);
          currentPage = new OWALPage(pointer, false);
          filledUpTo = (pagesCount - 1) * OWALPage.PAGE_SIZE + currentPage.getFilledUpTo();
          nextPositionToFlush = (pagesCount - 1) * OWALPage.PAGE_SIZE;
        } else {
          ODirectMemoryPointer pointer = new ODirectMemoryPointer(OWALPage.PAGE_SIZE);
          currentPage = new OWALPage(pointer, true);
          filledUpTo = pagesCount * OWALPage.PAGE_SIZE + currentPage.getFilledUpTo();
          nextPositionToFlush = pagesCount * OWALPage.PAGE_SIZE;
        }

        pagesCache.add(currentPage);
      }
    }

    // Parses the numeric order from a "<name>.<order>.wal" file name.
    private long extractOrder(String name) {
      final Matcher matcher = Pattern.compile("^.*\\.(\\d+)\\.wal$").matcher(name);

      final boolean matches = matcher.find();
      assert matches;

      final String order = matcher.group(1);
      try {
        return Long.parseLong(order);
      } catch (NumberFormatException e) {
        // never happen
        throw new IllegalStateException(e);
      }
    }

    // A page is valid when its magic number matches and the CRC32 stored in
    // the page's first int equals the CRC32 of the rest of the page.
    private boolean checkPageIntegrity(byte[] content) {
      final long magicNumber = OLongSerializer.INSTANCE.deserializeNative(content, OWALPage.MAGIC_NUMBER_OFFSET);
      if (magicNumber != OWALPage.MAGIC_NUMBER)
        return false;

      final CRC32 crc32 = new CRC32();
      crc32.update(content, OIntegerSerializer.INT_SIZE, OWALPage.PAGE_SIZE - OIntegerSerializer.INT_SIZE);

      return ((int) crc32.getValue()) == OIntegerSerializer.INSTANCE.deserializeNative(content, 0);
    }

    // Truncates a partially written trailing page (e.g. left over after a crash).
    private void selfCheck() throws IOException {
      if (!pagesCache.isEmpty())
        throw new IllegalStateException("WAL cache is not empty, we can not verify WAL after it was started to be used");

      synchronized (rndFile) {
        long pagesCount = rndFile.length() / OWALPage.PAGE_SIZE;

        if (rndFile.length() % OWALPage.PAGE_SIZE > 0) {
          OLogManager.instance().error(this, "Last WAL page was written partially, auto fix.");

          rndFile.setLength(OWALPage.PAGE_SIZE * pagesCount);
        }
      }
    }
  }

  // Convenience constructor wired from global configuration values.
  public ODiskWriteAheadLog(OLocalPaginatedStorage storage) throws IOException {
    this(OGlobalConfiguration.WAL_CACHE_SIZE.getValueAsInteger(),
        OGlobalConfiguration.WAL_COMMIT_TIMEOUT.getValueAsInteger(),
        OGlobalConfiguration.WAL_MAX_SEGMENT_SIZE.getValueAsInteger() * ONE_KB * ONE_KB, storage);
  }

  public void addLowDiskSpaceListener(OLowDiskSpaceListener listener) {
    lowDiskSpaceListeners.add(new WeakReference<OLowDiskSpaceListener>(listener));
  }

  // Removes the given listener and, as a side effect, prunes cleared weak references.
  public void removeLowDiskSpaceListener(OLowDiskSpaceListener listener) {
    List<WeakReference<OLowDiskSpaceListener>> itemsToRemove = new ArrayList<WeakReference<OLowDiskSpaceListener>>();

    for (WeakReference<OLowDiskSpaceListener> ref : lowDiskSpaceListeners) {
      final OLowDiskSpaceListener lowDiskSpaceListener = ref.get();

      if (lowDiskSpaceListener == null || lowDiskSpaceListener.equals(listener))
        itemsToRemove.add(ref);
    }

    for (WeakReference<OLowDiskSpaceListener> ref : itemsToRemove)
      lowDiskSpaceListeners.remove(ref);
  }

  public void addFullCheckpointListener(OFullCheckpointRequestListener listener) {
    fullCheckpointListeners.add(new WeakReference<OFullCheckpointRequestListener>(listener));
  }

  // Removes the given listener and, as a side effect, prunes cleared weak references.
  public void removeFullCheckpointListener(OFullCheckpointRequestListener listener) {
    List<WeakReference<OFullCheckpointRequestListener>> itemsToRemove = new ArrayList<WeakReference<OFullCheckpointRequestListener>>();

    for (WeakReference<OFullCheckpointRequestListener> ref : fullCheckpointListeners) {
      final OFullCheckpointRequestListener fullCheckpointRequestListener = ref.get();

      if (fullCheckpointRequestListener == null || fullCheckpointRequestListener.equals(listener))
        itemsToRemove.add(ref);
    }

    for (WeakReference<OFullCheckpointRequestListener> ref : itemsToRemove)
      fullCheckpointListeners.remove(ref);
  }

  // Opens (or creates) the WAL: discovers existing segment files, initializes
  // them in order, and restores the master-record checkpoints.
  public ODiskWriteAheadLog(int maxPagesCacheSize, int commitDelay, long maxSegmentSize, OLocalPaginatedStorage storage)
      throws IOException {
    this.maxPagesCacheSize = maxPagesCacheSize;
    this.commitDelay = commitDelay;
    this.maxSegmentSize = maxSegmentSize;
    this.storage = storage;

    try {
      this.walLocation = new File(calculateWalPath(this.storage));

      File[] walFiles = this.walLocation.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
          return validateName(name);
        }
      });

      if (walFiles == null)
        throw new IllegalStateException(
            "Location passed in WAL does not exist, or IO error was happened. DB can not work in durable mode in such case.");

      if (walFiles.length == 0) {
        // Fresh WAL: start with a single empty segment.
        LogSegment logSegment = new LogSegment(new File(this.walLocation, getSegmentName(0)), maxPagesCacheSize);
        logSegment.init();
        logSegment.startFlush();

        logSegments.add(logSegment);

        logSize = 0;

        flushedLsn = null;
      } else {
        // Existing WAL: load every segment; only the newest one accepts writes.
        logSize = 0;

        for (File walFile : walFiles) {
          LogSegment logSegment = new LogSegment(walFile, maxPagesCacheSize);
          logSegment.init();

          logSegments.add(logSegment);
          logSize += logSegment.filledUpTo();
        }

        Collections.sort(logSegments);

        logSegments.get(logSegments.size() - 1).startFlush();

        flushedLsn = readFlushedLSN();
      }

      masterRecordFile = new File(walLocation, this.storage.getName() + MASTER_RECORD_EXTENSION);
      masterRecordLSNHolder = new RandomAccessFile(masterRecordFile, "rws");

      if (masterRecordLSNHolder.length() > 0) {
        firstMasterRecord = readMasterRecord(this.storage.getName(), 0);
        secondMasterRecord = readMasterRecord(this.storage.getName(), 1);

        if (firstMasterRecord == null) {
          useFirstMasterRecord = true;
          lastCheckpoint = secondMasterRecord;
        } else if (secondMasterRecord == null) {
          useFirstMasterRecord = false;
          lastCheckpoint = firstMasterRecord;
        } else {
          // Both records valid: the newer one is the checkpoint; overwrite the older next.
          if (firstMasterRecord.compareTo(secondMasterRecord) >= 0) {
            lastCheckpoint = firstMasterRecord;
            useFirstMasterRecord = false;
          } else {
            lastCheckpoint = secondMasterRecord;
            useFirstMasterRecord = true;
          }
        }
      }

      fixMasterRecords();

    } catch (FileNotFoundException e) {
      // never happened
      OLogManager.instance().error(this, "Error during file initialization for storage %s", e, this.storage.getName());
      throw new IllegalStateException("Error during file initialization for storage " + this.storage.getName(), e);
    }
  }

  // WAL directory: explicit WAL_LOCATION setting, falling back to the storage path.
  private static String calculateWalPath(OLocalPaginatedStorage storage) {
    String walPath = OGlobalConfiguration.WAL_LOCATION.getValueAsString();
    if (walPath == null)
      walPath = storage.getStoragePath();

    return walPath;
  }

  // True for file names matching "<name>.<number>.wal".
  public static boolean validateName(String name) {
    if (!name.toLowerCase().endsWith(".wal"))
      return false;

    int walOrderStartIndex = name.indexOf('.');
    if (walOrderStartIndex == name.length() - 4)
      return false;

    int walOrderEndIndex = name.indexOf('.', walOrderStartIndex + 1);
    String walOrder = name.substring(walOrderStartIndex + 1, walOrderEndIndex);
    try {
      Integer.parseInt(walOrder);
    } catch (NumberFormatException e) {
      return false;
    }

    return true;
  }

  public File getWalLocation() {
    return walLocation;
  }

  // First LSN of the whole log, or null when the log is empty.
  public OLogSequenceNumber begin() throws IOException {
    syncObject.lock();
    try {
      checkForClose();

      LogSegment first = logSegments.get(0);
      if (first.filledUpTo() == 0)
        return null;

      return first.begin();
    } finally {
      syncObject.unlock();
    }
  }

  // Last LSN of the whole log, skipping trailing empty segments; null when empty.
  public OLogSequenceNumber end() throws IOException {
    syncObject.lock();
    try {
      checkForClose();

      int lastIndex = logSegments.size() - 1;
      LogSegment last = logSegments.get(lastIndex);

      while (last.filledUpTo == 0) {
        lastIndex--;
        if (lastIndex >= 0)
          last = logSegments.get(lastIndex);
        else
          return null;
      }

      return last.end();
    } finally {
      syncObject.unlock();
    }
  }

  // Flushes the active (last) segment.
  public void flush() {
    syncObject.lock();
    try {
      checkForClose();

      LogSegment last = logSegments.get(logSegments.size() - 1);
      last.flush();
    } finally {
      syncObject.unlock();
    }
  }

  // Logs the start of an atomic operation and registers it as active.
  @Override
  public OLogSequenceNumber logAtomicOperationStartRecord(boolean isRollbackSupported, OOperationUnitId unitId)
      throws IOException {
    syncObject.lock();
    try {
      checkForClose();

      final OLogSequenceNumber lsn = log(new OAtomicUnitStartRecord(isRollbackSupported, unitId));
      activeOperations.add(unitId);
      return lsn;
    } finally {
      syncObject.unlock();
    }
  }

  // Logs the end of an atomic operation and removes it from the active set.
  @Override
  public OLogSequenceNumber logAtomicOperationEndRecord(OOperationUnitId operationUnitId, boolean rollback,
      OLogSequenceNumber startLsn) throws IOException {
    syncObject.lock();
    try {
      checkForClose();

      final OLogSequenceNumber lsn = log(new OAtomicUnitEndRecord(operationUnitId, rollback, startLsn));
      activeOperations.remove(operationUnitId);
      return lsn;
    } finally {
      syncObject.unlock();
    }
  }

  // Appends a record to the log. Handles master-record checkpoints, segment
  // roll-over when the active segment exceeds maxSegmentSize, and checkpoint
  // requests once the total log size passes walSizeLimit.
  public OLogSequenceNumber log(OWALRecord record) throws IOException {
    syncObject.lock();
    try {
      checkForClose();

      // While a new segment is being created, records from operations that are
      // not already active must wait so they land in the new segment.
      if (segmentCreationFlag && record instanceof OOperationUnitRecord
          && !activeOperations.contains(((OOperationUnitRecord) record).getOperationUnitId())) {
        while (segmentCreationFlag) {
          try {
            segmentCreationComplete.await();
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new OInterruptedException(e);
          }
        }
      }

      final byte[] serializedForm = OWALRecordsFactory.INSTANCE.toStream(record);

      LogSegment last = logSegments.get(logSegments.size() - 1);
      long lastSize = last.filledUpTo();

      final OLogSequenceNumber lsn = last.logRecord(serializedForm);
      record.setLsn(lsn);

      if (record.isUpdateMasterRecord()) {
        // Alternate between the two master-record slots on each checkpoint.
        lastCheckpoint = lsn;
        if (useFirstMasterRecord) {
          firstMasterRecord = lsn;
          writeMasterRecord(0, firstMasterRecord);
          useFirstMasterRecord = false;
        } else {
          secondMasterRecord = lsn;
          writeMasterRecord(1, secondMasterRecord);
          useFirstMasterRecord = true;
        }
      }

      final long sizeDiff = last.filledUpTo() - lastSize;
      logSize += sizeDiff;

      if (last.filledUpTo() >= maxSegmentSize) {
        segmentCreationFlag = true;

        // Only roll over when no atomic operation would be split across segments.
        if (record instanceof OAtomicUnitEndRecord && activeOperations.size() == 1
            || (!(record instanceof OOperationUnitRecord) && activeOperations.isEmpty())) {
          last.stopFlush(true);

          last = new LogSegment(new File(walLocation, getSegmentName(last.getOrder() + 1)), maxPagesCacheSize);
          last.init();
          last.startFlush();

          logSegments.add(last);

          segmentCreationFlag = false;
          segmentCreationComplete.signalAll();
        }
      }

      if (logSize > walSizeLimit && logSegments.size() > 1) {
        for (WeakReference<OFullCheckpointRequestListener> listenerWeakReference : fullCheckpointListeners) {
          final OFullCheckpointRequestListener listener = listenerWeakReference.get();
          if (listener != null)
            listener.requestCheckpoint();
        }
      }

      return lsn;
    } finally {
      syncObject.unlock();
    }
  }

  public long size() {
    syncObject.lock();
    try {
      return logSize;
    } finally {
      syncObject.unlock();
    }
  }

  public List<String> getWalFiles() {
    final ArrayList<String> result = new ArrayList<String>();
    syncObject.lock();
    try {
      for (LogSegment segment : logSegments) {
        result.add(segment.getPath());
      }
    } finally {
      syncObject.unlock();
    }

    return result;
  }

  public String getWMRFile() {
    syncObject.lock();
    try {
      return masterRecordFile.getAbsolutePath();
    } finally {
      syncObject.unlock();
    }
  }

  // Deletes every segment except the first one (iterating newest to oldest).
  public void truncate() throws IOException {
    syncObject.lock();
    try {
      if (logSegments.size() < 2)
        return;

      ListIterator<LogSegment> iterator = logSegments.listIterator(logSegments.size() - 1);
      while (iterator.hasPrevious()) {
        final LogSegment logSegment = iterator.previous();
        logSegment.delete(false);
        iterator.remove();
      }

      recalculateLogSize();
    } finally {
      syncObject.unlock();
    }
  }

  public void close() throws IOException {
    close(true);
  }

  public void close(boolean flush) throws IOException {
    syncObject.lock();
    try {
      if (closed)
        return;

      closed = true;

      for (LogSegment logSegment : logSegments)
        logSegment.close(flush);

      masterRecordLSNHolder.close();
    } finally {
      syncObject.unlock();
    }
  }

  public void delete() throws IOException {
    delete(false);
  }

  // Closes the log and removes all segment files plus the master-record file.
  public void delete(boolean flush) throws IOException {
    syncObject.lock();
    try {
      close(flush);

      for (LogSegment logSegment : logSegments)
        logSegment.delete(false);

      boolean deleted = OFileUtils.delete(masterRecordFile);
      int retryCount = 0;

      while (!deleted) {
        deleted = OFileUtils.delete(masterRecordFile);
        retryCount++;

        if (retryCount > 10)
          throw new IOException("Can not delete file. Retry limit exceeded. (" + retryCount + ").");
      }
    } finally {
      syncObject.unlock();
    }
  }

  // Reads and deserializes the record at the given LSN; null when out of range.
  public OWALRecord read(OLogSequenceNumber lsn) throws IOException {
    syncObject.lock();
    try {
      checkForClose();

      long segment = lsn.getSegment();
      int index = (int) (segment - logSegments.get(0).getOrder());

      if (index < 0 || index >= logSegments.size())
        return null;

      LogSegment logSegment = logSegments.get(index);
      byte[] recordEntry = logSegment.readRecord(lsn);
      if (recordEntry == null)
        return null;

      final OWALRecord record = OWALRecordsFactory.INSTANCE.fromStream(recordEntry);
      record.setLsn(lsn);

      return record;
    } finally {
      syncObject.unlock();
    }
  }

  // LSN following 'lsn', crossing into the next non-empty segment when needed.
  public OLogSequenceNumber next(OLogSequenceNumber lsn) throws IOException {
    syncObject.lock();
    try {
      checkForClose();

      long order = lsn.getSegment();
      int index = (int) (order - logSegments.get(0).getOrder());

      if (index < 0 || index >= logSegments.size())
        return null;

      LogSegment logSegment = logSegments.get(index);
      OLogSequenceNumber nextLSN = logSegment.getNextLSN(lsn);

      if (nextLSN == null) {
        index++;
        if (index >= logSegments.size())
          return null;

        LogSegment nextSegment = logSegments.get(index);
        if (nextSegment.filledUpTo() == 0)
          return null;

        nextLSN = nextSegment.begin();
      }

      return nextLSN;
    } finally {
      syncObject.unlock();
    }
  }

  public OLogSequenceNumber getFlushedLSN() {
    return flushedLsn;
  }

  // Removes whole segments whose records all precede 'lsn' (the active segment
  // is always kept), then repairs the master records.
  public void cutTill(OLogSequenceNumber lsn) throws IOException {
    syncObject.lock();
    try {
      checkForClose();

      flush();

      int lastTruncateIndex = -1;

      for (int i = 0; i < logSegments.size() - 1; i++) {
        final LogSegment logSegment = logSegments.get(i);

        if (logSegment.end().compareTo(lsn) < 0)
          lastTruncateIndex = i;
        else
          break;
      }

      for (int i = 0; i <= lastTruncateIndex; i++) {
        final LogSegment logSegment = removeHeadSegmentFromList();
        if (logSegment != null)
          logSegment.delete(false);
      }

      recalculateLogSize();
      fixMasterRecords();
    } finally {
      syncObject.unlock();
    }
  }

  // Pops the oldest segment, but never the last remaining one.
  private LogSegment removeHeadSegmentFromList() {
    if (logSegments.size() < 2)
      return null;

    return logSegments.remove(0);
  }

  private void recalculateLogSize() throws IOException {
    logSize = 0;
    for (LogSegment segment : logSegments)
      logSize += segment.filledUpTo();
  }

  // Invalidates master records that point outside the current log and re-syncs
  // the two slots so both hold a valid checkpoint (or the holder is emptied).
  private void fixMasterRecords() throws IOException {
    if (firstMasterRecord != null) {
      int index = (int) (firstMasterRecord.getSegment() - logSegments.get(0).getOrder());
      if (logSegments.size() <= index || index < 0) {
        firstMasterRecord = null;
      } else {
        LogSegment firstMasterRecordSegment = logSegments.get(index);
        if (firstMasterRecordSegment.filledUpTo() <= firstMasterRecord.getPosition())
          firstMasterRecord = null;
      }
    }

    if (secondMasterRecord != null) {
      int index = (int) (secondMasterRecord.getSegment() - logSegments.get(0).getOrder());
      if (logSegments.size() <= index || index < 0) {
        secondMasterRecord = null;
      } else {
        LogSegment secondMasterRecordSegment = logSegments.get(index);
        if (secondMasterRecordSegment.filledUpTo() <= secondMasterRecord.getPosition())
          secondMasterRecord = null;
      }
    }

    if (firstMasterRecord != null && secondMasterRecord != null)
      return;

    if (firstMasterRecord == null && secondMasterRecord == null) {
      masterRecordLSNHolder.setLength(0);
      masterRecordLSNHolder.getFD().sync();
      lastCheckpoint = null;
    } else {
      if (secondMasterRecord == null)
        secondMasterRecord = firstMasterRecord;
      else
        firstMasterRecord = secondMasterRecord;

      lastCheckpoint = firstMasterRecord;

      writeMasterRecord(0, firstMasterRecord);
      writeMasterRecord(1, secondMasterRecord);
    }
  }

  // Reads one master record slot: [int CRC32][long segment][long position].
  // Returns null on CRC mismatch or truncated file.
  private OLogSequenceNumber readMasterRecord(String storageName, int index) throws IOException {
    final CRC32 crc32 = new CRC32();
    try {
      masterRecordLSNHolder.seek(index * (OIntegerSerializer.INT_SIZE + 2 * OLongSerializer.LONG_SIZE));

      int firstCRC = masterRecordLSNHolder.readInt();
      final long segment = masterRecordLSNHolder.readLong();
      final long position = masterRecordLSNHolder.readLong();

      byte[] serializedLSN = new byte[2 * OLongSerializer.LONG_SIZE];
      OLongSerializer.INSTANCE.serializeLiteral(segment, serializedLSN, 0);
      OLongSerializer.INSTANCE.serializeLiteral(position, serializedLSN, OLongSerializer.LONG_SIZE);
      crc32.update(serializedLSN);

      if (firstCRC != ((int) crc32.getValue())) {
        OLogManager.instance().error(this, "Can not restore %d WAL master record for storage %s crc check is failed",
            index, storageName);
        return null;
      }

      return new OLogSequenceNumber(segment, position);
    } catch (EOFException eofException) {
      OLogManager.instance().debug(this, "Can not restore %d WAL master record for storage %s", index, storageName);
      return null;
    }
  }

  // Writes one master record slot in the same [CRC][segment][position] layout.
  private void writeMasterRecord(int index, OLogSequenceNumber masterRecord) throws IOException {
    masterRecordLSNHolder.seek(index * (OIntegerSerializer.INT_SIZE + 2 * OLongSerializer.LONG_SIZE));
    final CRC32 crc32 = new CRC32();

    final byte[] serializedLSN = new byte[2 * OLongSerializer.LONG_SIZE];
    OLongSerializer.INSTANCE.serializeLiteral(masterRecord.getSegment(), serializedLSN, 0);
    OLongSerializer.INSTANCE.serializeLiteral(masterRecord.getPosition(), serializedLSN, OLongSerializer.LONG_SIZE);
    crc32.update(serializedLSN);

    masterRecordLSNHolder.writeInt((int) crc32.getValue());
    masterRecordLSNHolder.writeLong(masterRecord.getSegment());
    masterRecordLSNHolder.writeLong(masterRecord.getPosition());
  }

  private String getSegmentName(long order) {
    return storage.getName() + "." + order + WAL_SEGMENT_EXTENSION;
  }

  // Flushed LSN of the newest segment that has any data on disk.
  private OLogSequenceNumber readFlushedLSN() throws IOException {
    int segment = logSegments.size() - 1;
    while (segment >= 0) {
      LogSegment logSegment = logSegments.get(segment);
      OLogSequenceNumber flushedLSN = logSegment.readFlushedLSN();

      if (flushedLSN == null)
        segment--;
      else
        return flushedLSN;
    }

    return null;
  }
}
/*
 * Copyright 2020-2020 Equinix, Inc
 * Copyright 2014-2020 The Billing Project, LLC
 *
 * The Billing Project licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.plugin.stripe;

import java.util.HashMap;
import java.util.Map;

import javax.annotation.Nullable;

import com.stripe.model.BankAccount;
import com.stripe.model.Charge;
import com.stripe.model.PaymentIntent;
import com.stripe.model.PaymentMethod;
import com.stripe.model.PaymentMethod.Card;
import com.stripe.model.PaymentSource;
import com.stripe.model.SetupIntent;
import com.stripe.model.SetupIntent.PaymentMethodOptions;
import com.stripe.model.Source;
import com.stripe.model.Source.AchDebit;
import com.stripe.model.Token;
import com.stripe.model.checkout.Session;

// Stripe .toJson() is definitively not GDPR-friendly...
// Helpers that flatten Stripe model objects into plugin-property maps,
// copying only an explicit allow-list of fields.
public abstract class StripePluginProperties {

    // Flattens a PaymentSource (legacy Card, Source or BankAccount) into a map.
    // Throws UnsupportedOperationException for any other PaymentSource subtype.
    public static Map<String, Object> toAdditionalDataMap(final PaymentSource stripePaymentSource) {
        final Map<String, Object> additionalDataMap = new HashMap<String, Object>();

        if (stripePaymentSource instanceof com.stripe.model.Card) {
            final com.stripe.model.Card card = (com.stripe.model.Card) stripePaymentSource;
            additionalDataMap.put("card_brand", card.getBrand());
            additionalDataMap.put("card_address_line1_check", card.getAddressLine1Check());
            additionalDataMap.put("card_address_postal_code_check", card.getAddressZipCheck());
            additionalDataMap.put("card_cvc_check", card.getCvcCheck());
            additionalDataMap.put("card_country", card.getCountry());
            additionalDataMap.put("card_description", card.getName());
            additionalDataMap.put("card_exp_month", card.getExpMonth());
            additionalDataMap.put("card_exp_year", card.getExpYear());
            additionalDataMap.put("card_fingerprint", card.getFingerprint());
            additionalDataMap.put("card_funding", card.getFunding());
            additionalDataMap.put("card_last4", card.getLast4());
        } else if (stripePaymentSource instanceof Source) {
            final Source stripeSource = (Source) stripePaymentSource;

            final Source.Card card = stripeSource.getCard();
            if (card != null) {
                additionalDataMap.put("card_brand", card.getBrand());
                additionalDataMap.put("card_address_line1_check", card.getAddressLine1Check());
                additionalDataMap.put("card_address_postal_code_check", card.getAddressZipCheck());
                additionalDataMap.put("card_cvc_check", card.getCvcCheck());
                additionalDataMap.put("card_country", card.getCountry());
                additionalDataMap.put("card_description", card.getName());
                additionalDataMap.put("card_exp_month", card.getExpMonth());
                additionalDataMap.put("card_exp_year", card.getExpYear());
                additionalDataMap.put("card_fingerprint", card.getFingerprint());
                additionalDataMap.put("card_funding", card.getFunding());
                additionalDataMap.put("card_last4", card.getLast4());
                additionalDataMap.put("card_three_d_secure_usage_support", card.getThreeDSecure());
            }

            final AchDebit achDebit = stripeSource.getAchDebit();
            if (achDebit != null) {
                additionalDataMap.put("ach_debit_bank_name", achDebit.getBankName());
                additionalDataMap.put("ach_debit_country", achDebit.getCountry());
                additionalDataMap.put("ach_debit_fingerprint", achDebit.getFingerprint());
                additionalDataMap.put("ach_debit_last4", achDebit.getLast4());
                additionalDataMap.put("ach_debit_routing_number", achDebit.getRoutingNumber());
                additionalDataMap.put("ach_debit_type", achDebit.getType());
            }

            final Source.SepaDebit sepaDebit = stripeSource.getSepaDebit();
            if (sepaDebit != null) {
                additionalDataMap.put("sepa_debit_bank_code", sepaDebit.getBankCode());
                additionalDataMap.put("sepa_debit_branch_code", sepaDebit.getBranchCode());
                additionalDataMap.put("sepa_debit_country", sepaDebit.getCountry());
                additionalDataMap.put("sepa_debit_fingerprint", sepaDebit.getFingerprint());
                additionalDataMap.put("sepa_debit_last4", sepaDebit.getLast4());
                additionalDataMap.put("sepa_debit_mandate_reference", sepaDebit.getMandateReference());
                additionalDataMap.put("sepa_debit_mandate_url", sepaDebit.getMandateUrl());
            }

            additionalDataMap.put("created", stripeSource.getCreated());
            additionalDataMap.put("customer_id", stripeSource.getCustomer());
            additionalDataMap.put("id", stripeSource.getId());
            additionalDataMap.put("livemode", stripeSource.getLivemode());
            additionalDataMap.put("metadata", stripeSource.getMetadata());
            additionalDataMap.put("object", stripeSource.getObject());
            additionalDataMap.put("type", stripeSource.getType());
        } else if (stripePaymentSource instanceof BankAccount) {
            final BankAccount stripeBankAccount = (BankAccount) stripePaymentSource;
            additionalDataMap.put("account_holder_type", stripeBankAccount.getAccountHolderType());
            additionalDataMap.put("bank_name", stripeBankAccount.getBankName());
            additionalDataMap.put("country", stripeBankAccount.getCountry());
            additionalDataMap.put("currency", stripeBankAccount.getCurrency());
            additionalDataMap.put("fingerprint", stripeBankAccount.getFingerprint());
            additionalDataMap.put("last4", stripeBankAccount.getLast4());
            additionalDataMap.put("routing_number", stripeBankAccount.getRoutingNumber());
            additionalDataMap.put("status", stripeBankAccount.getStatus());
            additionalDataMap.put("customer_id", stripeBankAccount.getCustomer());
            additionalDataMap.put("id", stripeBankAccount.getId());
            additionalDataMap.put("metadata", stripeBankAccount.getMetadata());
            additionalDataMap.put("object", stripeBankAccount.getObject());
        } else {
            throw new UnsupportedOperationException("Not yet supported: " + stripePaymentSource);
        }

        return additionalDataMap;
    }

    // Delegates to the card or bank-account mapper depending on the token's payload.
    public static Map<String, Object> toAdditionalDataMap(final Token token) {
        if (token.getCard() != null) {
            return toAdditionalDataMap(token.getCard());
        } else if (token.getBankAccount() != null) {
            return toAdditionalDataMap(token.getBankAccount());
        } else {
            throw new UnsupportedOperationException("Not yet supported: " + token);
        }
    }

    // Flattens a modern PaymentMethod (card and/or SEPA debit details).
    public static Map<String, Object> toAdditionalDataMap(final PaymentMethod stripePaymentMethod) {
        final Map<String, Object> additionalDataMap = new HashMap<String, Object>();

        final Card card = stripePaymentMethod.getCard();
        if (card != null) {
            additionalDataMap.put("card_brand", card.getBrand());
            if (card.getChecks() != null) {
                additionalDataMap.put("card_address_line1_check", card.getChecks().getAddressLine1Check());
                additionalDataMap.put("card_address_postal_code_check", card.getChecks().getAddressPostalCodeCheck());
                additionalDataMap.put("card_cvc_check", card.getChecks().getCvcCheck());
            }
            additionalDataMap.put("card_country", card.getCountry());
            additionalDataMap.put("card_description", card.getDescription());
            additionalDataMap.put("card_exp_month", card.getExpMonth());
            additionalDataMap.put("card_exp_year", card.getExpYear());
            additionalDataMap.put("card_fingerprint", card.getFingerprint());
            additionalDataMap.put("card_funding", card.getFunding());
            additionalDataMap.put("card_iin", card.getIin());
            additionalDataMap.put("card_issuer", card.getIssuer());
            additionalDataMap.put("card_last4", card.getLast4());
            if (card.getThreeDSecureUsage() != null) {
                additionalDataMap.put("card_three_d_secure_usage_support", card.getThreeDSecureUsage().getSupported());
            }
            if (card.getWallet() != null) {
                additionalDataMap.put("card_wallet_type", card.getWallet().getType());
            }
        }

        final PaymentMethod.SepaDebit sepaDebit = stripePaymentMethod.getSepaDebit();
        if (sepaDebit != null) {
            additionalDataMap.put("sepa_debit_bank_code", sepaDebit.getBankCode());
            additionalDataMap.put("sepa_debit_branch_code", sepaDebit.getBranchCode());
            additionalDataMap.put("sepa_debit_country", sepaDebit.getCountry());
            additionalDataMap.put("sepa_debit_fingerprint", sepaDebit.getFingerprint());
            additionalDataMap.put("sepa_debit_last4", sepaDebit.getLast4());
        }

        additionalDataMap.put("created", stripePaymentMethod.getCreated());
        additionalDataMap.put("customer_id", stripePaymentMethod.getCustomer());
        additionalDataMap.put("id", stripePaymentMethod.getId());
        additionalDataMap.put("livemode", stripePaymentMethod.getLivemode());
        additionalDataMap.put("metadata", stripePaymentMethod.getMetadata());
        additionalDataMap.put("object", stripePaymentMethod.getObject());
        additionalDataMap.put("type", stripePaymentMethod.getType());

        return additionalDataMap;
    }

    // Flattens a PaymentIntent, including the state of its most recent charge.
    public static Map<String, Object> toAdditionalDataMap(final PaymentIntent stripePaymentIntent) {
        final Map<String, Object> additionalDataMap = new HashMap<String, Object>();

        additionalDataMap.put("amount", stripePaymentIntent.getAmount());
        additionalDataMap.put("amount_capturable", stripePaymentIntent.getAmountCapturable());
        additionalDataMap.put("amount_received", stripePaymentIntent.getAmountReceived());
        additionalDataMap.put("application", stripePaymentIntent.getApplication());
        additionalDataMap.put("application_fee_amount", stripePaymentIntent.getApplicationFeeAmount());
        additionalDataMap.put("canceled_at", stripePaymentIntent.getCanceledAt());
        additionalDataMap.put("cancellation_reason", stripePaymentIntent.getCancellationReason());
        additionalDataMap.put("capture_method", stripePaymentIntent.getCaptureMethod());
        if (stripePaymentIntent.getCharges() != null) {
            // Find the newest charge (autoPagingIterable may issue extra API calls).
            Charge lastCharge = null;
            for (final Charge charge : stripePaymentIntent.getCharges().autoPagingIterable()) {
                if (lastCharge == null || lastCharge.getCreated() < charge.getCreated()) {
                    lastCharge = charge;
                }
            }
            if (lastCharge != null) {
                // Keep the state for the last charge (maps to our payment transaction)
                additionalDataMap.put("last_charge_amount", lastCharge.getAmount());
                additionalDataMap.put("last_charge_authorization_code", lastCharge.getAuthorizationCode());
                additionalDataMap.put("last_charge_balance_transaction_id", lastCharge.getBalanceTransaction());
                additionalDataMap.put("last_charge_created", lastCharge.getCreated());
                additionalDataMap.put("last_charge_currency", lastCharge.getCurrency());
                additionalDataMap.put("last_charge_description", lastCharge.getDescription());
                additionalDataMap.put("last_charge_failure_code", lastCharge.getFailureCode());
                additionalDataMap.put("last_charge_failure_message", lastCharge.getFailureMessage());
                additionalDataMap.put("last_charge_id", lastCharge.getId());
                additionalDataMap.put("last_charge_metadata", lastCharge.getMetadata());
                additionalDataMap.put("last_charge_object", lastCharge.getObject());
                additionalDataMap.put("last_charge_outcome", lastCharge.getOutcome());
                additionalDataMap.put("last_charge_paid", lastCharge.getPaid());
                additionalDataMap.put("last_charge_payment_method_id", lastCharge.getPaymentMethod());
                if (lastCharge.getPaymentMethodDetails() != null) {
                    additionalDataMap.put("last_charge_payment_method_type", lastCharge.getPaymentMethodDetails().getType());
                }
                additionalDataMap.put("last_charge_statement_descriptor", lastCharge.getStatementDescriptor());
                additionalDataMap.put("last_charge_status", lastCharge.getStatus());
            }
        }
        additionalDataMap.put("confirmation_method", stripePaymentIntent.getConfirmationMethod());
        additionalDataMap.put("created", stripePaymentIntent.getCreated());
        additionalDataMap.put("currency", stripePaymentIntent.getCurrency());
        additionalDataMap.put("customer_id", stripePaymentIntent.getCustomer());
        additionalDataMap.put("description", stripePaymentIntent.getDescription());
        additionalDataMap.put("id", stripePaymentIntent.getId());
        additionalDataMap.put("invoice_id", stripePaymentIntent.getInvoice());
        additionalDataMap.put("last_payment_error", stripePaymentIntent.getLastPaymentError());
        additionalDataMap.put("livemode", stripePaymentIntent.getLivemode());
        additionalDataMap.put("metadata", stripePaymentIntent.getMetadata());
        additionalDataMap.put("next_action", stripePaymentIntent.getNextAction());
        additionalDataMap.put("object", stripePaymentIntent.getObject());
        additionalDataMap.put("on_behalf_of", stripePaymentIntent.getOnBehalfOf());
        additionalDataMap.put("payment_method_id", stripePaymentIntent.getPaymentMethod());
        additionalDataMap.put("payment_method_types", stripePaymentIntent.getPaymentMethodTypes());
        additionalDataMap.put("review_id", stripePaymentIntent.getReview());
        additionalDataMap.put("statement_descriptor", stripePaymentIntent.getStatementDescriptor());
        additionalDataMap.put("status", stripePaymentIntent.getStatus());
        additionalDataMap.put("transfer_group", stripePaymentIntent.getTransferGroup());

        return additionalDataMap;
    }

    // Flattens a SetupIntent, including its card payment-method options.
    public static Map<String, Object> toAdditionalDataMap(final SetupIntent stripeSetupIntent) {
        final Map<String, Object> additionalDataMap = new HashMap<String, Object>();

        additionalDataMap.put("application", stripeSetupIntent.getApplication());
        additionalDataMap.put("cancellation_reason", stripeSetupIntent.getCancellationReason());
        additionalDataMap.put("created", stripeSetupIntent.getCreated());
        additionalDataMap.put("customer_id", stripeSetupIntent.getCustomer());
        additionalDataMap.put("description", stripeSetupIntent.getDescription());
        additionalDataMap.put("id", stripeSetupIntent.getId());
        additionalDataMap.put("last_setup_error", stripeSetupIntent.getLastSetupError());
        additionalDataMap.put("latest_attempt", stripeSetupIntent.getLatestAttempt());
        additionalDataMap.put("livemode", stripeSetupIntent.getLivemode());
        additionalDataMap.put("mandate", stripeSetupIntent.getMandate());
        additionalDataMap.put("metadata", stripeSetupIntent.getMetadata());
        additionalDataMap.put("next_action", stripeSetupIntent.getNextAction());
        additionalDataMap.put("object", stripeSetupIntent.getObject());
        additionalDataMap.put("on_behalf_of", stripeSetupIntent.getOnBehalfOf());
        additionalDataMap.put("payment_method_id", stripeSetupIntent.getPaymentMethod());
        final PaymentMethodOptions paymentMethodOptions = stripeSetupIntent.getPaymentMethodOptions();
        if (paymentMethodOptions != null ) {
            final SetupIntent.PaymentMethodOptions.Card card = paymentMethodOptions.getCard();
            if (card != null) {
                additionalDataMap.put("payment_method_options_card_request_three_d_secure", card.getRequestThreeDSecure());
            }
            // paymentMethodOptions also contains "sepa_debit" which contains "mandate_options" that currently has
            // no properties, so it is ignored here (https://stripe.com/docs/api/setup_intents/object)
        }
        additionalDataMap.put("payment_method_types", stripeSetupIntent.getPaymentMethodTypes());
        additionalDataMap.put("single_use_mandate_id", stripeSetupIntent.getSingleUseMandate());
        additionalDataMap.put("status", stripeSetupIntent.getStatus());
        additionalDataMap.put("usage", stripeSetupIntent.getUsage());

        return additionalDataMap;
    }

    // Flattens a Checkout Session; continues beyond this chunk.
    public static Map<String, Object> toAdditionalDataMap(final Session session, @Nullable final String pk) {
        final Map<String, Object> additionalDataMap = new HashMap<String, Object>();

        additionalDataMap.put("billing_address_collection", session.getBillingAddressCollection());
        additionalDataMap.put("cancel_url", session.getCancelUrl());
        additionalDataMap.put("client_reference_id", session.getClientReferenceId());
additionalDataMap.put("customer_id", session.getCustomer()); additionalDataMap.put("line_items", session.getLineItems()); additionalDataMap.put("id", session.getId()); additionalDataMap.put("livemode", session.getLivemode()); additionalDataMap.put("locale", session.getLocale()); additionalDataMap.put("object", session.getObject()); additionalDataMap.put("payment_intent_id", session.getPaymentIntent()); additionalDataMap.put("payment_method_types", session.getPaymentMethodTypes()); additionalDataMap.put("setup_intent_id", session.getSetupIntent()); additionalDataMap.put("subscription_id", session.getSubscription()); additionalDataMap.put("success_url", session.getSuccessUrl()); if (pk != null) { additionalDataMap.put("publishable_key", pk); } return additionalDataMap; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.syncope.common.lib.types;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;

/**
 * Catalog of IdRepo entitlement identifiers.
 *
 * <p>Each {@code public static final String} constant below names one entitlement;
 * by convention each constant's value is identical to its field name. The full set
 * of assignable entitlements is exposed via {@link #values()}, which excludes the
 * two pseudo-entitlements {@link #ANONYMOUS} and {@link #MUST_CHANGE_PASSWORD}.
 */
public final class IdRepoEntitlement {

    // Pseudo-entitlements: granted implicitly by the authentication layer,
    // never assignable to roles (removed from VALUES below).
    public static final String ANONYMOUS = "ANONYMOUS";

    public static final String MUST_CHANGE_PASSWORD = "MUST_CHANGE_PASSWORD";

    public static final String DOMAIN_CREATE = "DOMAIN_CREATE";

    public static final String DOMAIN_READ = "DOMAIN_READ";

    public static final String DOMAIN_UPDATE = "DOMAIN_UPDATE";

    public static final String DOMAIN_DELETE = "DOMAIN_DELETE";

    public static final String REALM_LIST = "REALM_LIST";

    public static final String REALM_CREATE = "REALM_CREATE";

    public static final String REALM_UPDATE = "REALM_UPDATE";

    public static final String REALM_DELETE = "REALM_DELETE";

    public static final String ANYTYPECLASS_LIST = "ANYTYPECLASS_LIST";

    public static final String ANYTYPECLASS_CREATE = "ANYTYPECLASS_CREATE";

    public static final String ANYTYPECLASS_READ = "ANYTYPECLASS_READ";

    public static final String ANYTYPECLASS_UPDATE = "ANYTYPECLASS_UPDATE";

    public static final String ANYTYPECLASS_DELETE = "ANYTYPECLASS_DELETE";

    public static final String ANYTYPE_LIST = "ANYTYPE_LIST";

    public static final String ANYTYPE_CREATE = "ANYTYPE_CREATE";

    public static final String ANYTYPE_READ = "ANYTYPE_READ";

    public static final String ANYTYPE_UPDATE = "ANYTYPE_UPDATE";

    public static final String ANYTYPE_DELETE = "ANYTYPE_DELETE";

    public static final String RELATIONSHIPTYPE_LIST = "RELATIONSHIPTYPE_LIST";

    public static final String RELATIONSHIPTYPE_CREATE = "RELATIONSHIPTYPE_CREATE";

    public static final String RELATIONSHIPTYPE_READ = "RELATIONSHIPTYPE_READ";

    public static final String RELATIONSHIPTYPE_UPDATE = "RELATIONSHIPTYPE_UPDATE";

    public static final String RELATIONSHIPTYPE_DELETE = "RELATIONSHIPTYPE_DELETE";

    public static final String ROLE_LIST = "ROLE_LIST";

    public static final String ROLE_CREATE = "ROLE_CREATE";

    public static final String ROLE_READ = "ROLE_READ";

    public static final String ROLE_UPDATE = "ROLE_UPDATE";

    public static final String ROLE_DELETE = "ROLE_DELETE";

    public static final String APPLICATION_LIST = "APPLICATION_LIST";

    public static final String APPLICATION_CREATE = "APPLICATION_CREATE";

    public static final String APPLICATION_READ = "APPLICATION_READ";

    public static final String APPLICATION_UPDATE = "APPLICATION_UPDATE";

    public static final String APPLICATION_DELETE = "APPLICATION_DELETE";

    public static final String DYNREALM_CREATE = "DYNREALM_CREATE";

    public static final String DYNREALM_READ = "DYNREALM_READ";

    public static final String DYNREALM_UPDATE = "DYNREALM_UPDATE";

    public static final String DYNREALM_DELETE = "DYNREALM_DELETE";

    public static final String SCHEMA_CREATE = "SCHEMA_CREATE";

    public static final String SCHEMA_UPDATE = "SCHEMA_UPDATE";

    public static final String SCHEMA_DELETE = "SCHEMA_DELETE";

    public static final String USER_SEARCH = "USER_SEARCH";

    public static final String USER_CREATE = "USER_CREATE";

    public static final String USER_READ = "USER_READ";

    public static final String USER_UPDATE = "USER_UPDATE";

    public static final String USER_DELETE = "USER_DELETE";

    public static final String GROUP_CREATE = "GROUP_CREATE";

    public static final String GROUP_SEARCH = "GROUP_SEARCH";

    public static final String GROUP_READ = "GROUP_READ";

    public static final String GROUP_UPDATE = "GROUP_UPDATE";

    public static final String GROUP_DELETE = "GROUP_DELETE";

    public static final String KEYMASTER = "KEYMASTER";

    public static final String TASK_LIST = "TASK_LIST";

    public static final String TASK_CREATE = "TASK_CREATE";

    public static final String TASK_READ = "TASK_READ";

    public static final String TASK_UPDATE = "TASK_UPDATE";

    public static final String TASK_DELETE = "TASK_DELETE";

    public static final String TASK_EXECUTE = "TASK_EXECUTE";

    public static final String POLICY_LIST = "POLICY_LIST";

    public static final String POLICY_CREATE = "POLICY_CREATE";

    public static final String POLICY_READ = "POLICY_READ";

    public static final String POLICY_UPDATE = "POLICY_UPDATE";

    public static final String POLICY_DELETE = "POLICY_DELETE";

    public static final String MAIL_TEMPLATE_LIST = "MAIL_TEMPLATE_LIST";

    public static final String MAIL_TEMPLATE_CREATE = "MAIL_TEMPLATE_CREATE";

    public static final String MAIL_TEMPLATE_READ = "MAIL_TEMPLATE_READ";

    public static final String MAIL_TEMPLATE_UPDATE = "MAIL_TEMPLATE_UPDATE";

    public static final String MAIL_TEMPLATE_DELETE = "MAIL_TEMPLATE_DELETE";

    public static final String NOTIFICATION_LIST = "NOTIFICATION_LIST";

    public static final String NOTIFICATION_CREATE = "NOTIFICATION_CREATE";

    public static final String NOTIFICATION_READ = "NOTIFICATION_READ";

    public static final String NOTIFICATION_UPDATE = "NOTIFICATION_UPDATE";

    public static final String NOTIFICATION_DELETE = "NOTIFICATION_DELETE";

    public static final String NOTIFICATION_EXECUTE = "NOTIFICATION_EXECUTE";

    public static final String REPORT_TEMPLATE_LIST = "REPORT_TEMPLATE_LIST";

    public static final String REPORT_TEMPLATE_CREATE = "REPORT_TEMPLATE_CREATE";

    public static final String REPORT_TEMPLATE_READ = "REPORT_TEMPLATE_READ";

    public static final String REPORT_TEMPLATE_UPDATE = "REPORT_TEMPLATE_UPDATE";

    public static final String REPORT_TEMPLATE_DELETE = "REPORT_TEMPLATE_DELETE";

    public static final String REPORT_LIST = "REPORT_LIST";

    public static final String REPORT_READ = "REPORT_READ";

    public static final String REPORT_CREATE = "REPORT_CREATE";

    public static final String REPORT_UPDATE = "REPORT_UPDATE";

    public static final String REPORT_DELETE = "REPORT_DELETE";

    public static final String REPORT_EXECUTE = "REPORT_EXECUTE";

    public static final String AUDIT_SEARCH = "AUDIT_SEARCH";

    public static final String AUDIT_LIST = "AUDIT_LIST";

    public static final String AUDIT_READ = "AUDIT_READ";

    public static final String AUDIT_CREATE = "AUDIT_CREATE";

    public static final String AUDIT_UPDATE = "AUDIT_UPDATE";

    public static final String AUDIT_DELETE = "AUDIT_DELETE";

    public static final String SECURITY_QUESTION_CREATE = "SECURITY_QUESTION_CREATE";

    public static final String SECURITY_QUESTION_READ = "SECURITY_QUESTION_READ";

    public static final String SECURITY_QUESTION_UPDATE = "SECURITY_QUESTION_UPDATE";

    public static final String SECURITY_QUESTION_DELETE = "SECURITY_QUESTION_DELETE";

    public static final String ACCESS_TOKEN_LIST = "ACCESS_TOKEN_LIST";

    public static final String ACCESS_TOKEN_DELETE = "ACCESS_TOKEN_DELETE";

    public static final String IMPLEMENTATION_LIST = "IMPLEMENTATION_LIST";

    public static final String IMPLEMENTATION_READ = "IMPLEMENTATION_READ";

    public static final String IMPLEMENTATION_CREATE = "IMPLEMENTATION_CREATE";

    public static final String IMPLEMENTATION_UPDATE = "IMPLEMENTATION_UPDATE";

    public static final String IMPLEMENTATION_DELETE = "IMPLEMENTATION_DELETE";

    public static final String DELEGATION_LIST = "DELEGATION_LIST";

    public static final String DELEGATION_CREATE = "DELEGATION_CREATE";

    public static final String DELEGATION_READ = "DELEGATION_READ";

    public static final String DELEGATION_UPDATE = "DELEGATION_UPDATE";

    public static final String DELEGATION_DELETE = "DELEGATION_DELETE";

    public static final String LOGGER_LIST = "LOGGER_LIST";

    public static final String LOGGER_UPDATE = "LOGGER_UPDATE";

    // Immutable, sorted snapshot of all assignable entitlements, built once at class load.
    private static final Set<String> VALUES;

    static {
        Set<String> values = new TreeSet<>();
        // Collect the NAMES of all static String fields declared above.
        for (Field field : IdRepoEntitlement.class.getDeclaredFields()) {
            if (Modifier.isStatic(field.getModifiers()) && String.class.equals(field.getType())) {
                values.add(field.getName());
            }
        }
        // NOTE(review): the set holds field names, while remove() is passed the
        // constants' VALUES — this only works because every constant's value equals
        // its field name. Keep that convention when adding new entitlements.
        values.remove(ANONYMOUS);
        values.remove(MUST_CHANGE_PASSWORD);
        VALUES = Collections.unmodifiableSet(values);
    }

    /**
     * @return the unmodifiable, alphabetically sorted set of assignable entitlements
     *         (excludes {@link #ANONYMOUS} and {@link #MUST_CHANGE_PASSWORD})
     */
    public static Set<String> values() {
        return VALUES;
    }

    private IdRepoEntitlement() {
        // private constructor for static utility class
    }
}
/*******************************************************************************
 * Copyright (c) 2002 - 2006 IBM Corporation.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/
package com.ibm.wala.ipa.callgraph;

import com.ibm.wala.analysis.reflection.ReflectionContextInterpreter;
import com.ibm.wala.analysis.reflection.ReflectionContextSelector;
import com.ibm.wala.ipa.callgraph.impl.ExplicitCallGraph;
import com.ibm.wala.ipa.callgraph.propagation.ReflectionHandler;
import com.ibm.wala.ssa.SSAOptions;

/**
 * Basic interface for options that control call graph generation.
 *
 * TODO: This class should be refactored into an abstract base class and language-specific subclasses.
 */
public class AnalysisOptions {

  /**
   * An object that represents the analysis scope
   */
  private AnalysisScope analysisScope;

  /**
   * An object that identifies the entrypoints for the call graph
   */
  private Iterable<? extends Entrypoint> entrypoints;

  /**
   * Policy that determines types allocated at new statements.
   */
  private ClassTargetSelector classTargetSelector;

  /**
   * Policy that determines methods called at call sites.
   */
  private MethodTargetSelector methodTargetSelector;

  /**
   * A tuning parameter; how many new equations must be added before doing a new topological sort?
   */
  private int minEquationsForTopSort = 100;

  /**
   * A tuning parameter; by what percentage must the number of equations grow before we perform a topological sort?
   */
  private double topologicalGrowthFactor = 0.5;

  /**
   * A tuning parameter: how many evaluations are allowed to take place between topological re-orderings. The idea is that many
   * evaluations may be a sign of a bad ordering, even when few new equations are being added.
   */
  private int maxEvalBetweenTopo = 1000000000;

  /**
   * options for handling reflection during call graph construction
   */
  public static enum ReflectionOptions {
    // Each constant is (name, numFlowToCastIterations, ignoreMethodInvoke,
    // ignoreStringConstants, applicationClassesOnly) — keep the argument
    // order in sync with the constructor below.
    FULL("full", Integer.MAX_VALUE, false, false, false),
    APPLICATION_GET_METHOD("application_get_method", Integer.MAX_VALUE, false, false, true),
    NO_FLOW_TO_CASTS("no_flow_to_casts", 0, false, false, false),
    NO_FLOW_TO_CASTS_APPLICATION_GET_METHOD("no_flow_to_casts_application_get_method", 0, false, false, true),
    NO_METHOD_INVOKE("no_method_invoke", Integer.MAX_VALUE, true, false, false),
    NO_FLOW_TO_CASTS_NO_METHOD_INVOKE("no_flow_to_casts_no_method_invoke", 0, true, false, false),
    ONE_FLOW_TO_CASTS_NO_METHOD_INVOKE("one_flow_to_casts_no_method_invoke", 1, true, false, false),
    ONE_FLOW_TO_CASTS_APPLICATION_GET_METHOD("one_flow_to_casts_application_get_method", 1, false, false, true),
    NO_STRING_CONSTANTS("no_string_constants", Integer.MAX_VALUE, false, true, false),
    NONE("none", 0, true, true, true);

    private final String name;

    /**
     * how many times should flows from newInstance() calls to casts be analyzed?
     */
    private final int numFlowToCastIterations;

    /**
     * should calls to Method.invoke() be ignored?
     */
    private final boolean ignoreMethodInvoke;

    /**
     * should get method calls be modeled only for application classes?
     */
    private final boolean applicationClassesOnly;

    /**
     * should calls to reflective methods with String constant arguments be ignored?
     */
    private final boolean ignoreStringConstants;

    // NOTE(review): the third boolean parameter is named "ignoreInterpretCalls"
    // but is assigned to the ignoreStringConstants field — consider renaming the
    // parameter for clarity.
    private ReflectionOptions(String name, int numFlowToCastIterations, boolean ignoreMethodInvoke, boolean ignoreInterpretCalls,
        boolean applicationClassesOnly) {
      this.name = name;
      this.numFlowToCastIterations = numFlowToCastIterations;
      this.ignoreMethodInvoke = ignoreMethodInvoke;
      this.ignoreStringConstants = ignoreInterpretCalls;
      this.applicationClassesOnly = applicationClassesOnly;
    }

    public String getName() {
      return name;
    }

    public int getNumFlowToCastIterations() {
      return numFlowToCastIterations;
    }

    public boolean isIgnoreMethodInvoke() {
      return ignoreMethodInvoke;
    }

    public boolean isIgnoreStringConstants() {
      return ignoreStringConstants;
    }

    public boolean isApplicationClassesOnly() {
      return applicationClassesOnly;
    }
  }

  /**
   * Should call graph construction attempt to handle reflection via detection of flows to casts, analysis of string constant
   * parameters to reflective methods, etc.?
   *
   * @see ReflectionHandler
   * @see ReflectionContextInterpreter
   * @see ReflectionContextSelector
   */
  private ReflectionOptions reflectionOptions = ReflectionOptions.FULL;

  /**
   * Should call graph construction handle possible invocations of static initializer methods?
   */
  private boolean handleStaticInit = true;

  /**
   * Options governing SSA construction
   */
  private SSAOptions ssaOptions = new SSAOptions();

  /**
   * Use distinct instance keys for distinct string constants?
   *
   * TODO: Probably, this option should moved somewhere into the creation of instance keys. However, those factories are created
   * within the various builders right now, and this is the most convenient place for an engine user to set an option which the
   * creation of instance keys later picks up.
   */
  private boolean useConstantSpecificKeys = false;

  /**
   * Should analysis of lexical scoping consider call stacks?
   *
   * TODO: this option does not apply to all languages. We could have a separation into core engine options and language-specific
   * options.
   *
   * (be careful with multithreaded languages, as threading can break the stack discipline this option may assume)
   */
  private boolean useStacksForLexicalScoping = false;

  /**
   * Should global variables be considered lexically-scoped from the root node?
   *
   * TODO: this option does not apply to all languages. We could have a separation into core engine options and language-specific
   * options.
   *
   * (be careful with multithreaded languages, as threading can break the stack discipline this option may assume)
   */
  private boolean useLexicalScopingForGlobals = false;

  /**
   * Should analysis try to understand the results of string constants flowing to a + operator? Note that this option does not apply
   * to Java bytecode analysis, since the + operators have been compiled away for that. It is used for the Java CAst front end.
   */
  private boolean traceStringConstants = false;

  /**
   * This numerical value indicates the maximum number of nodes that any {@link CallGraph} build with this {@link AnalysisOptions}
   * object is allowed to have. During {@link CallGraph} construction, once <code>maxNumberOfNodes</code> {@link CGNode} objects
   * have been added to the {@link CallGraph}, no more {@link CGNode} objects will be added. By default,
   * <code>maxNumberOfNodes</code> is set to <code>-1</code>, which indicates that no restrictions are in place. See also
   * {@link ExplicitCallGraph}.
   */
  private long maxNumberOfNodes = -1;

  // SJF: I'm not sure these factories and caches belong here.
  // TODO: figure out how to clean this up.
  public AnalysisOptions() {
  }

  public AnalysisOptions(AnalysisScope scope, Iterable<? extends Entrypoint> e) {
    this.analysisScope = scope;
    this.entrypoints = e;
  }

  public AnalysisScope getAnalysisScope() {
    return analysisScope;
  }

  public void setAnalysisScope(AnalysisScope analysisScope) {
    this.analysisScope = analysisScope;
  }

  /**
   * TODO: this really should go away. The entrypoints don't belong here.
   */
  public Iterable<? extends Entrypoint> getEntrypoints() {
    return entrypoints;
  }

  public void setEntrypoints(Iterable<? extends Entrypoint> entrypoints) {
    this.entrypoints = entrypoints;
  }

  public long getMaxNumberOfNodes() {
    return maxNumberOfNodes;
  }

  public void setMaxNumberOfNodes(long maxNumberOfNodes) {
    this.maxNumberOfNodes = maxNumberOfNodes;
  }

  /**
   * @return Policy that determines methods called at call sites.
   */
  public MethodTargetSelector getMethodTargetSelector() {
    return methodTargetSelector;
  }

  /**
   * @return Policy that determines types allocated at new statements.
   */
  public ClassTargetSelector getClassTargetSelector() {
    return classTargetSelector;
  }

  /**
   * install a method target selector
   *
   * @param x an object which controls the policy for selecting the target at a call site
   */
  public void setSelector(MethodTargetSelector x) {
    methodTargetSelector = x;
  }

  /**
   * install a class target selector
   *
   * @param x an object which controls the policy for selecting the allocated object at a new site
   */
  public void setSelector(ClassTargetSelector x) {
    classTargetSelector = x;
  }

  /**
   * @return the minimum number of equations that the pointer analysis system must contain before the solver will try to
   *         topologically sort
   */
  public int getMinEquationsForTopSort() {
    return minEquationsForTopSort;
  }

  /**
   * @param i the minimum number of equations that the pointer analysis system must contain before the solver will try to
   *          topologically sort
   */
  public void setMinEquationsForTopSort(int i) {
    minEquationsForTopSort = i;
  }

  /**
   * @return the maximum number of evaluations that the pointer analysis solver will perform before topologically resorting the
   *         system
   */
  public int getMaxEvalBetweenTopo() {
    return maxEvalBetweenTopo;
  }

  /**
   * @return a fraction x s.t. the solver will resort the system when it grows by a factor of x
   */
  public double getTopologicalGrowthFactor() {
    return topologicalGrowthFactor;
  }

  /**
   * @param i the maximum number of evaluations that the pointer analysis solver will perform before topologically resorting the
   *          system
   */
  public void setMaxEvalBetweenTopo(int i) {
    maxEvalBetweenTopo = i;
  }

  /**
   * @param d a fraction x s.t. the solver will resort the system when it grows by a factor of x
   */
  public void setTopologicalGrowthFactor(double d) {
    topologicalGrowthFactor = d;
  }

  /**
   * @return options governing SSA construction
   */
  public SSAOptions getSSAOptions() {
    return ssaOptions;
  }

  /**
   * @param ssaOptions options governing SSA construction
   */
  public void setSSAOptions(SSAOptions ssaOptions) {
    this.ssaOptions = ssaOptions;
  }

  /**
   * Use distinct instance keys for distinct string constants?
   */
  public boolean getUseConstantSpecificKeys() {
    return useConstantSpecificKeys;
  }

  /**
   * Use distinct instance keys for distinct string constants?
   */
  public void setUseConstantSpecificKeys(boolean useConstantSpecificKeys) {
    this.useConstantSpecificKeys = useConstantSpecificKeys;
  }

  /**
   * Should analysis of lexical scoping consider call stacks?
   */
  public boolean getUseStacksForLexicalScoping() {
    return useStacksForLexicalScoping;
  }

  /**
   * Should analysis of lexical scoping consider call stacks?
   */
  public void setUseStacksForLexicalScoping(boolean v) {
    useStacksForLexicalScoping = v;
  }

  /**
   * Should global variables be considered lexically-scoped from the root node?
   */
  public boolean getUseLexicalScopingForGlobals() {
    return useLexicalScopingForGlobals;
  }

  /**
   * Should global variables be considered lexically-scoped from the root node?
   */
  public void setUseLexicalScopingForGlobals(boolean v) {
    useLexicalScopingForGlobals = v;
  }

  /**
   * Should analysis try to understand the results of string constants flowing to a + operator? Note that this option does not apply
   * to Java bytecode analysis, since the + operators have been compiled away for that. It is used for the Java CAst front end.
   */
  public void setTraceStringConstants(boolean v) {
    traceStringConstants = v;
  }

  /**
   * Should analysis try to understand the results of string constants flowing to a + operator? Note that this option does not apply
   * to Java bytecode analysis, since the + operators have been compiled away for that. It is used for the Java CAst front end.
   */
  public boolean getTraceStringConstants() {
    return traceStringConstants;
  }

  /**
   * Should call graph construction attempt to handle reflection via detection of flows to casts, analysis of string constant
   * parameters to reflective methods, etc.?
   *
   * @see ReflectionHandler
   * @see ReflectionContextInterpreter
   * @see ReflectionContextSelector
   */
  public ReflectionOptions getReflectionOptions() {
    return reflectionOptions;
  }

  /**
   * Should call graph construction attempt to handle reflection via detection of flows to casts, analysis of string constant
   * parameters to reflective methods, etc.?
   *
   * @see ReflectionHandler
   * @see ReflectionContextInterpreter
   * @see ReflectionContextSelector
   */
  public void setReflectionOptions(ReflectionOptions reflectionOptions) {
    this.reflectionOptions = reflectionOptions;
  }

  /**
   * Should call graph construction handle possible invocations of static initializer methods?
   */
  public boolean getHandleStaticInit() {
    return handleStaticInit;
  }

  /**
   * Should call graph construction handle possible invocations of static initializer methods?
   */
  public void setHandleStaticInit(boolean handleStaticInit) {
    this.handleStaticInit = handleStaticInit;
  }
}
/*
 * $Id$
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.struts2.components;

import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionInvocation;
import com.opensymphony.xwork2.config.entities.ActionConfig;
import com.opensymphony.xwork2.inject.Inject;
import com.opensymphony.xwork2.util.ValueStack;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.struts2.StrutsException;
import org.apache.struts2.dispatcher.mapper.ActionMapper;
import org.apache.struts2.dispatcher.mapper.ActionMapping;
import org.apache.struts2.views.util.UrlHelper;

import java.io.IOException;
import java.io.Writer;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Implementation of the {@link UrlRenderer} interface that creates URLs suitable in a servlet environment.
 */
public class ServletUrlRenderer implements UrlRenderer {
    /**
     * Provide a logging instance.
     */
    private static final Logger LOG = LogManager.getLogger(ServletUrlRenderer.class);

    // Collaborators injected by the container via the @Inject setters below.
    private ActionMapper actionMapper;
    private UrlHelper urlHelper;

    @Inject
    public void setActionMapper(ActionMapper mapper) {
        this.actionMapper = mapper;
    }

    @Inject
    public void setUrlHelper(UrlHelper urlHelper) {
        this.urlHelper = urlHelper;
    }

    /**
     * {@inheritDoc}
     *
     * Builds the URL for the component and either writes it to {@code writer}
     * or, when a var is set, stores it in the value stack and request scope.
     */
    public void renderUrl(Writer writer, UrlProvider urlComponent) {
        // Default to the request's scheme; an explicit component scheme is first
        // evaluated against the value stack, falling back to its literal value.
        String scheme = urlComponent.getHttpServletRequest().getScheme();

        if (urlComponent.getScheme() != null) {
            ValueStack vs = ActionContext.getContext().getValueStack();
            scheme = vs.findString(urlComponent.getScheme());

            if (scheme == null) {
                scheme = urlComponent.getScheme();
            }
        }

        String result;
        ActionInvocation ai = (ActionInvocation) ActionContext.getContext().get(ActionContext.ACTION_INVOCATION);
        // Three cases: explicit action attribute, no value/action (default to the
        // current invocation), or an explicit value/URL.
        if (urlComponent.getValue() == null && urlComponent.getAction() != null) {
            result = urlComponent.determineActionURL(urlComponent.getAction(), urlComponent.getNamespace(), urlComponent.getMethod(), urlComponent.getHttpServletRequest(), urlComponent.getHttpServletResponse(), urlComponent.getParameters(), scheme, urlComponent.isIncludeContext(), urlComponent.isEncode(), urlComponent.isForceAddSchemeHostAndPort(), urlComponent.isEscapeAmp());
        } else if (urlComponent.getValue() == null && urlComponent.getAction() == null && ai != null) {
            // both are null, we will default to the current action

            final String action = ai.getProxy().getActionName();
            final String namespace = ai.getProxy().getNamespace();
            // Component method wins when set; otherwise use the proxy's method only
            // when it was explicitly specified (else this yields the component's
            // null method).
            final String method = urlComponent.getMethod() != null || !ai.getProxy().isMethodSpecified() ? urlComponent.getMethod() : ai.getProxy().getMethod();
            result = urlComponent.determineActionURL(action, namespace, method, urlComponent.getHttpServletRequest(), urlComponent.getHttpServletResponse(), urlComponent.getParameters(), scheme, urlComponent.isIncludeContext(), urlComponent.isEncode(), urlComponent.isForceAddSchemeHostAndPort(), urlComponent.isEscapeAmp());
        } else {
            String _value = urlComponent.getValue();

            // We don't include the request parameters cause they would have been
            // prioritised before this [in start(Writer) method]
            if (_value != null && _value.indexOf("?") > 0) {
                _value = _value.substring(0, _value.indexOf("?"));
            }
            result = urlHelper.buildUrl(_value, urlComponent.getHttpServletRequest(), urlComponent.getHttpServletResponse(), urlComponent.getParameters(), scheme, urlComponent.isIncludeContext(), urlComponent.isEncode(), urlComponent.isForceAddSchemeHostAndPort(), urlComponent.isEscapeAmp());
        }
        String anchor = urlComponent.getAnchor();
        if (StringUtils.isNotEmpty(anchor)) {
            result += '#' + urlComponent.findString(anchor);
        }

        if (urlComponent.isPutInContext()) {
            String var = urlComponent.getVar();
            if (StringUtils.isNotEmpty(var)) {
                urlComponent.putInContext(result);

                // add to the request scope as well
                // NOTE(review): an earlier comment also claimed page scope, but only
                // the request attribute is set here.
                urlComponent.getHttpServletRequest().setAttribute(var, result);
            } else {
                try {
                    writer.write(result);
                } catch (IOException e) {
                    throw new StrutsException("IOError: " + e.getMessage(), e);
                }
            }
        } else {
            try {
                writer.write(result);
            } catch (IOException e) {
                throw new StrutsException("IOError: " + e.getMessage(), e);
            }
        }
    }

    /**
     * {@inheritDoc}
     *
     * Resolves the form's target action (explicit attribute, current invocation,
     * or the raw request URI), builds the submit URL and exposes action/namespace/
     * name/id parameters on the form component.
     */
    public void renderFormUrl(Form formComponent) {
        String namespace = formComponent.determineNamespace(formComponent.namespace, formComponent.getStack(),
                formComponent.request);
        String action;

        ValueStack vs = ActionContext.getContext().getValueStack();
        String scheme = vs.findString("scheme");

        if (formComponent.action != null) {
            action = formComponent.findString(formComponent.action);
        } else {
            // no action supplied? ok, then default to the current request
            // (action or general URL)
            ActionInvocation ai = (ActionInvocation) formComponent.getStack().getContext().get(
                    ActionContext.ACTION_INVOCATION);
            if (ai != null) {
                action = ai.getProxy().getActionName();
                namespace = ai.getProxy().getNamespace();
            } else {
                // hmm, ok, we need to just assume the current URL cut down
                String uri = formComponent.request.getRequestURI();
                action = uri.substring(uri.lastIndexOf('/'));
            }
        }

        // Strip any query string off the action and keep its parameters separately.
        // NOTE(review): raw Map — could be Map<String, Object>, left as-is here.
        Map actionParams = null;
        if (action != null && action.indexOf("?") > 0) {
            String queryString = action.substring(action.indexOf("?") + 1);
            actionParams = urlHelper.parseQueryString(queryString, false);
            action = action.substring(0, action.indexOf("?"));
        }

        ActionMapping nameMapping = actionMapper.getMappingFromActionName(action);
        String actionName = nameMapping.getName();
        String actionMethod = nameMapping.getMethod();

        final ActionConfig actionConfig = formComponent.configuration.getRuntimeConfiguration().getActionConfig(
                namespace, actionName);
        if (actionConfig != null) {

            ActionMapping mapping = new ActionMapping(actionName, namespace, actionMethod, formComponent.parameters);
            String result = urlHelper.buildUrl(formComponent.actionMapper.getUriFromActionMapping(mapping),
                    formComponent.request, formComponent.response, actionParams, scheme, formComponent.includeContext, true, false, false);
            formComponent.addParameter("action", result);

            // let's try to get the actual action class and name
            // this can be used for getting the list of validators
            formComponent.addParameter("actionName", actionName);
            try {
                Class clazz = formComponent.objectFactory.getClassInstance(actionConfig.getClassName());
                formComponent.addParameter("actionClass", clazz);
            } catch (ClassNotFoundException e) {
                // this is OK, we'll just move on
            }

            formComponent.addParameter("namespace", namespace);

            // if the name isn't specified, use the action name
            if (formComponent.name == null) {
                formComponent.addParameter("name", actionName);
            }

            // if the id isn't specified, use the action name
            if (formComponent.getId() == null && actionName != null) {
                formComponent.addParameter("id", formComponent.escape(actionName));
            }
        } else if (action != null) {
            // Since we can't find an action alias in the configuration, we just
            // assume the action attribute supplied is the path to be used as
            // the URI this form is submitting to.

            // Warn user that the specified namespace/action combo
            // was not found in the configuration.
            if (namespace != null && LOG.isWarnEnabled()) {
                LOG.warn("No configuration found for the specified action: '{}' in namespace: '{}'. Form action defaulting to 'action' attribute's literal value.", actionName, namespace);
            }

            String result = urlHelper.buildUrl(action, formComponent.request, formComponent.response, null, scheme, formComponent.includeContext, true);
            formComponent.addParameter("action", result);

            // namespace: cut out anything between the start and the last /
            int slash = result.lastIndexOf('/');
            if (slash != -1) {
                formComponent.addParameter("namespace", result.substring(0, slash));
            } else {
                formComponent.addParameter("namespace", "");
            }

            // name/id: cut out anything between / and . should be the id and
            // name
            String id = formComponent.getId();
            if (id == null) {
                slash = result.lastIndexOf('/');
                int dot = result.indexOf('.', slash);
                if (dot != -1) {
                    id = result.substring(slash + 1, dot);
                } else {
                    id = result.substring(slash + 1);
                }
                formComponent.addParameter("id", formComponent.escape(id));
            }
        }

        // WW-1284
        // evaluate if client-side js is to be enabled. (if validation
        // interceptor does allow validation eg. method is not filtered out)
        formComponent.evaluateClientSideJsEnablement(actionName, namespace, actionMethod);
    }

    /**
     * Pre-render hook: evaluates the component's value expression and merges
     * request/GET/extra parameters into the component according to its
     * includeParams setting (NONE, ALL or GET).
     */
    public void beforeRenderUrl(UrlProvider urlComponent) {
        if (urlComponent.getValue() != null) {
            urlComponent.setValue(urlComponent.findString(urlComponent.getValue()));
        }

        // no explicit url set so attach params from current url, do
        // this at start so body params can override any of these they wish.
        try {
            // ww-1266
            String includeParams = (urlComponent.getUrlIncludeParams() != null ? urlComponent.getUrlIncludeParams().toLowerCase() : UrlProvider.GET);

            if (urlComponent.getIncludeParams() != null) {
                includeParams = urlComponent.findString(urlComponent.getIncludeParams());
            }

            if (UrlProvider.NONE.equalsIgnoreCase(includeParams)) {
                mergeRequestParameters(urlComponent.getValue(), urlComponent.getParameters(), Collections.<String, Object>emptyMap());
            } else if (UrlProvider.ALL.equalsIgnoreCase(includeParams)) {
                mergeRequestParameters(urlComponent.getValue(), urlComponent.getParameters(), urlComponent.getHttpServletRequest().getParameterMap());

                // for ALL also include GET parameters
                includeGetParameters(urlComponent);
                includeExtraParameters(urlComponent);
            } else if (UrlProvider.GET.equalsIgnoreCase(includeParams) || (includeParams == null && urlComponent.getValue() == null && urlComponent.getAction() == null)) {
                includeGetParameters(urlComponent);
                includeExtraParameters(urlComponent);
            } else if (includeParams != null) {
                LOG.warn("Unknown value for includeParams parameter to URL tag: {}", includeParams);
            }
        } catch (Exception e) {
            LOG.warn("Unable to put request parameters ({}) into parameter map.", urlComponent.getHttpServletRequest().getQueryString(), e);
        }
    }

    // Merges parameters supplied by the component's ExtraParameterProvider, if any.
    private void includeExtraParameters(UrlProvider urlComponent) {
        if (urlComponent.getExtraParameterProvider() != null) {
            mergeRequestParameters(urlComponent.getValue(), urlComponent.getParameters(), urlComponent.getExtraParameterProvider().getExtraParameters());
        }
    }

    private void
includeGetParameters(UrlProvider urlComponent) { String query = extractQueryString(urlComponent); mergeRequestParameters(urlComponent.getValue(), urlComponent.getParameters(), urlHelper.parseQueryString(query, false)); } private String extractQueryString(UrlProvider urlComponent) { // Parse the query string to make sure that the parameters come from the query, and not some posted data String query = urlComponent.getHttpServletRequest().getQueryString(); if (query == null) { query = (String) urlComponent.getHttpServletRequest().getAttribute("javax.servlet.forward.query_string"); } if (query != null) { // Remove possible #foobar suffix int idx = query.lastIndexOf('#'); if (idx != -1) { query = query.substring(0, idx); } } return query; } /** * Merge request parameters into current parameters. If a parameter is * already present, than the request parameter in the current request and value atrribute * will not override its value. * * The priority is as follows:- * <ul> * <li>parameter from the current request (least priority)</li> * <li>parameter form the value attribute (more priority)</li> * <li>parameter from the param tag (most priority)</li> * </ul> * * @param value the value attribute (url to be generated by this component) * @param parameters component parameters * @param contextParameters request parameters */ protected void mergeRequestParameters(String value, Map<String, Object> parameters, Map<String, Object> contextParameters) { Map<String, Object> mergedParams = new LinkedHashMap<>(contextParameters); // Merge contextParameters (from current request) with parameters specified in value attribute // eg. value="someAction.action?id=someId&venue=someVenue" // where the parameters specified in value attribute takes priority. 
if (StringUtils.contains(value, "?")) { String queryString = value.substring(value.indexOf("?") + 1); mergedParams = urlHelper.parseQueryString(queryString, false); for (Map.Entry<String, Object> entry : contextParameters.entrySet()) { if (!mergedParams.containsKey(entry.getKey())) { mergedParams.put(entry.getKey(), entry.getValue()); } } } // Merge parameters specified in value attribute // eg. value="someAction.action?id=someId&venue=someVenue" // with parameters specified though param tag // eg. <param name="id" value="%{'someId'}" /> // where parameters specified through param tag takes priority. for (Map.Entry<String, Object> entry : mergedParams.entrySet()) { if (!parameters.containsKey(entry.getKey())) { parameters.put(entry.getKey(), entry.getValue()); } } } }
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.profiler.context;

import com.navercorp.pinpoint.bootstrap.context.AsyncTraceId;
import com.navercorp.pinpoint.bootstrap.context.Trace;
import com.navercorp.pinpoint.bootstrap.context.TraceContext;
import com.navercorp.pinpoint.bootstrap.context.TraceId;
import com.navercorp.pinpoint.bootstrap.sampler.Sampler;
import com.navercorp.pinpoint.exception.PinpointException;
import com.navercorp.pinpoint.profiler.context.storage.AsyncStorage;
import com.navercorp.pinpoint.profiler.context.storage.Storage;
import com.navercorp.pinpoint.profiler.context.storage.StorageFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * {@link TraceFactory} that binds at most one {@link Trace} per thread via a
 * thread-local binder. Exactly one Trace (sampled or disabled) may be bound at
 * a time; binding a second one raises a {@link PinpointException}.
 *
 * @author emeroad
 * @author Taejin Koo
 */
public class ThreadLocalTraceFactory implements TraceFactory {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // Holds the Trace bound to the current thread, if any.
    private final Binder<Trace> threadLocalBinder = new ThreadLocalBinder<Trace>();

    private final TraceContext traceContext;
    private final StorageFactory storageFactory;
    private final Sampler sampler;
    private final IdGenerator idGenerator;

    /**
     * @throws NullPointerException if any collaborator is null
     */
    public ThreadLocalTraceFactory(TraceContext traceContext, StorageFactory storageFactory,
                                   Sampler sampler, IdGenerator idGenerator) {
        if (traceContext == null) {
            throw new NullPointerException("traceContext must not be null");
        }
        if (storageFactory == null) {
            throw new NullPointerException("storageFactory must not be null");
        }
        if (sampler == null) {
            throw new NullPointerException("sampler must not be null");
        }
        if (idGenerator == null) {
            throw new NullPointerException("idGenerator must not be null");
        }
        this.traceContext = traceContext;
        this.storageFactory = storageFactory;
        this.sampler = sampler;
        this.idGenerator = idGenerator;
    }

    /**
     * Return Trace object AFTER validating whether it can be sampled or not.
     *
     * @return the bound Trace if it is sampled; null if none is bound or it is not sampled
     */
    @Override
    public Trace currentTraceObject() {
        final Trace trace = threadLocalBinder.get();
        if (trace == null) {
            return null;
        }
        if (trace.canSampled()) {
            return trace;
        }
        return null;
    }

    /**
     * Return Trace object without validating the sampling state.
     *
     * @return the bound Trace, or null if none is bound
     */
    @Override
    public Trace currentRpcTraceObject() {
        final Trace trace = threadLocalBinder.get();
        if (trace == null) {
            return null;
        }
        return trace;
    }

    // Returns whatever is bound to the current thread, possibly null.
    @Override
    public Trace currentRawTraceObject() {
        return threadLocalBinder.get();
    }

    // Binds a non-recording (disabled) trace; used when sampling rejected the request.
    @Override
    public Trace disableSampling() {
        checkBeforeTraceObject();
        final Trace metricTrace = new DisableTrace(this.idGenerator.nextContinuedDisabledId());
        bind(metricTrace);

        return metricTrace;
    }

    // continue to trace the request that has been determined to be sampled on previous nodes
    @Override
    public Trace continueTraceObject(final TraceId traceId) {
        checkBeforeTraceObject();

        // TODO need to modify how to bind a datasender
        // always set true because the decision of sampling has been made on previous nodes
        // TODO need to consider as a target to sample in case Trace object has a sampling flag (true) marked on previous node.
        final boolean sampling = true;
        final DefaultTrace trace = new DefaultTrace(traceContext, traceId, this.idGenerator.nextContinuedTransactionId(), sampling);

        final Storage storage = storageFactory.createStorage();
        trace.setStorage(storage);
        bind(trace);
        return trace;
    }

    // Rebinds an existing Trace (e.g. handed over from another context) to this thread.
    @Override
    public Trace continueTraceObject(Trace trace) {
        checkBeforeTraceObject();
        bind(trace);
        return trace;
    }

    /**
     * Fails fast if a Trace is already bound to the current thread; a second
     * bind would silently drop the first trace.
     */
    private void checkBeforeTraceObject() {
        final Trace old = this.threadLocalBinder.get();
        if (old != null) {
            final PinpointException exception = new PinpointException("already Trace Object exist.");
            if (logger.isWarnEnabled()) {
                logger.warn("beforeTrace:{}", old, exception);
            }
            throw exception;
        }
    }

    // Starts a brand-new trace on this node; the sampler decides whether it records.
    @Override
    public Trace newTraceObject() {
        checkBeforeTraceObject();
        // TODO need to modify how to inject a datasender
        final boolean sampling = sampler.isSampling();
        if (sampling) {
            final DefaultTrace trace = new DefaultTrace(traceContext, idGenerator.nextTransactionId(), sampling);
            final Storage storage = storageFactory.createStorage();
            trace.setStorage(storage);
            bind(trace);
            return trace;
        } else {
            final DisableTrace disableTrace = new DisableTrace(this.idGenerator.nextDisabledId());
            bind(disableTrace);
            return disableTrace;
        }
    }

    private void bind(Trace trace) {
        threadLocalBinder.set(trace);

//        // TODO traceChain example
//        Trace traceChain = new TraceChain(trace);
//        threadLocalBinder.set(traceChain);
//
//        // MetricTraceFactory
//        final Trace delegatedTrace = this.delegate.newTraceObject();
//        if (delegatedTrace instanceof TraceChain) {
//            TraceChain chain = (TraceChain)delegatedTrace;
//            TraceWrap metricTrace = new MetricTraceWrap();
//            // add metricTraceWrap to traceChain
//            chain.addFirst(metricTrace);
//            return chain;
//        } else {
//            logger.warn("error???");
//            return delegatedTrace;
//        }
    }

    // Unbinds and returns the Trace currently bound to this thread (null if none).
    @Override
    public Trace removeTraceObject() {
        return this.threadLocalBinder.remove();
    }

    /**
     * Continues an asynchronous trace: wraps a new DefaultTrace (keyed by the
     * parent trace id, with an untracked transaction id) in async storage and
     * binds an {@link AsyncTrace} to this thread.
     *
     * NOTE(review): always sampled (sampling decision was made when the parent
     * trace started) — confirm against AsyncTraceId semantics.
     */
    public Trace continueAsyncTraceObject(AsyncTraceId traceId, int asyncId, long startTime) {
        checkBeforeTraceObject();

        final TraceId parentTraceId = traceId.getParentTraceId();
        final boolean sampling = true;
        final DefaultTrace trace = new DefaultTrace(traceContext, parentTraceId, IdGenerator.UNTRACKED_ID, sampling);
        final Storage storage = storageFactory.createStorage();
        trace.setStorage(new AsyncStorage(storage));

        final AsyncTrace asyncTrace = new AsyncTrace(trace, asyncId, traceId.nextAsyncSequence(), startTime);
        bind(asyncTrace);

        return asyncTrace;
    }

}
/*
 * Licensed under MIT (https://github.com/ligoj/ligoj/blob/master/LICENSE)
 */
package org.ligoj.app.plugin.prov.catalog;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;

import org.apache.commons.lang3.StringUtils;
import org.ligoj.app.model.Node;
import org.ligoj.app.plugin.prov.ProvResource;
import org.ligoj.app.plugin.prov.model.ProvContainerPrice;
import org.ligoj.app.plugin.prov.model.ProvContainerType;
import org.ligoj.app.plugin.prov.model.ProvDatabasePrice;
import org.ligoj.app.plugin.prov.model.ProvDatabaseType;
import org.ligoj.app.plugin.prov.model.ProvFunctionPrice;
import org.ligoj.app.plugin.prov.model.ProvFunctionType;
import org.ligoj.app.plugin.prov.model.ProvInstancePrice;
import org.ligoj.app.plugin.prov.model.ProvInstancePriceTerm;
import org.ligoj.app.plugin.prov.model.ProvInstanceType;
import org.ligoj.app.plugin.prov.model.ProvLocation;
import org.ligoj.app.plugin.prov.model.ProvStoragePrice;
import org.ligoj.app.plugin.prov.model.ProvStorageType;
import org.ligoj.app.plugin.prov.model.ProvSupportPrice;
import org.ligoj.app.plugin.prov.model.ProvSupportType;

import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;

/**
 * Base context used to perform catalog update.
 */
@NoArgsConstructor
public abstract class AbstractUpdateContext {

	/**
	 * The related provider {@link Node}.
	 */
	@Getter
	@Setter
	protected Node node;

	/**
	 * When <code>true</code>, all cost attributes are updated.
	 */
	@Getter
	@Setter
	private boolean force;

	/**
	 * Mapping from API region identifier to region definition.
	 */
	@Getter
	private Map<String, ProvLocation> mapRegionById = new HashMap<>();

	/**
	 * The previously installed instance types. Key is the instance code.
	 */
	@Getter
	@Setter
	protected Map<String, ProvInstanceType> instanceTypes = new ConcurrentHashMap<>();

	/**
	 * The previously installed container types. Key is the container code.
	 */
	@Getter
	@Setter
	protected Map<String, ProvContainerType> containerTypes = new ConcurrentHashMap<>();

	/**
	 * The previously installed function types. Key is the function code.
	 */
	@Getter
	@Setter
	protected Map<String, ProvFunctionType> functionTypes = new ConcurrentHashMap<>();

	/**
	 * The previously installed support types. Key is the support name.
	 */
	@Getter
	@Setter
	private Map<String, ProvSupportType> supportTypes = new HashMap<>();

	/**
	 * The previously installed database types. Key is the database code.
	 */
	@Getter
	@Setter
	protected Map<String, ProvDatabaseType> databaseTypes = new ConcurrentHashMap<>();

	/**
	 * The previously installed price term's codes.
	 */
	@Getter
	@Setter
	protected Map<String, ProvInstancePriceTerm> priceTerms = new ConcurrentHashMap<>();

	/**
	 * The previous installed instance prices. Key is the code.
	 */
	@Getter
	private Map<String, ProvInstancePrice> previous = new HashMap<>();

	/**
	 * The read catalog price codes: codes having been read from the catalog and persisted.
	 */
	@Getter
	private final Set<String> prices = new HashSet<>();

	/**
	 * The previous installed Database prices. Key is the code.
	 */
	@Getter
	private Map<String, ProvDatabasePrice> previousDatabase = new HashMap<>();

	/**
	 * The previous installed container prices. Key is the code.
	 */
	@Getter
	private Map<String, ProvContainerPrice> previousContainer = new HashMap<>();

	/**
	 * The previous installed function prices. Key is the code.
	 */
	@Getter
	private Map<String, ProvFunctionPrice> previousFunction = new HashMap<>();

	/**
	 * The previous installed storage prices. Key is the code.
	 */
	@Getter
	@Setter
	private Map<String, ProvStoragePrice> previousStorage = new HashMap<>();

	/**
	 * The previous installed support prices. Key is the name.
	 */
	@Getter
	@Setter
	private Map<String, ProvSupportPrice> previousSupport = new HashMap<>();

	/**
	 * The available regions. Key is the name.
	 */
	@Getter
	@Setter
	private Map<String, ProvLocation> regions = new HashMap<>();

	/**
	 * The merged type's codes.
	 */
	@Getter
	private Set<String> mergedTypes = Collections.synchronizedSet(new HashSet<>());

	/**
	 * The merged term's codes.
	 */
	@Getter
	private Set<String> mergedTerms = Collections.synchronizedSet(new HashSet<>());

	/**
	 * The merged location's codes.
	 */
	@Getter
	private Set<String> mergedLocations = Collections.synchronizedSet(new HashSet<>());

	/**
	 * The accepted and existing storage type. Key is the code.
	 */
	@Getter
	@Setter
	private Map<String, ProvStorageType> storageTypes = new ConcurrentHashMap<>();

	/**
	 * Valid OS pattern.
	 */
	@Getter
	@Setter
	private Pattern validOs;

	/**
	 * Valid instance type pattern.
	 */
	@Getter
	@Setter
	private Pattern validInstanceType;

	/**
	 * Valid database type pattern.
	 */
	@Getter
	@Setter
	private Pattern validDatabaseType;

	/**
	 * Valid container type pattern.
	 */
	@Getter
	@Setter
	private Pattern validContainerType;

	/**
	 * Valid database engine pattern.
	 */
	@Getter
	@Setter
	private Pattern validDatabaseEngine;

	/**
	 * Valid instance region pattern.
	 */
	@Getter
	@Setter
	private Pattern validRegion;

	/**
	 * Base URL of catalog.
	 */
	@Getter
	@Setter
	private String baseUrl;

	/**
	 * Hours per month.
	 */
	@Getter
	@Setter
	private double hoursMonth = (double) ProvResource.DEFAULT_HOURS_MONTH;

	/**
	 * Copy constructor sharing the mutable lookup structures of the parent
	 * context so nested/parallel updates see the same merged state.
	 *
	 * @param parent The parent context providing the shared state.
	 */
	protected AbstractUpdateContext(AbstractUpdateContext parent) {
		this();
		setForce(parent.isForce());
		setHoursMonth(parent.getHoursMonth());
		setNode(parent.getNode());
		// Shared references (not copies): mutations are visible to the parent.
		this.regions = parent.regions;
		this.baseUrl = parent.baseUrl;
		this.mergedTypes = parent.mergedTypes;
		this.mergedTerms = parent.mergedTerms;
		this.mergedLocations = parent.mergedLocations;
		this.storageTypes = parent.storageTypes;
		this.priceTerms = parent.priceTerms;
		this.validDatabaseEngine = parent.validDatabaseEngine;
		this.validDatabaseType = parent.validDatabaseType;
		this.validContainerType = parent.validContainerType;
		this.validInstanceType = parent.validInstanceType;
		this.validRegion = parent.validRegion;
		this.validOs = parent.validOs;
		this.mapRegionById = parent.getMapRegionById();
	}

	/**
	 * Set the previous instance prices and reset the other price families and the read price codes.
	 *
	 * @param previous The previous instance prices. Key is the code.
	 */
	public void setPrevious(final Map<String, ProvInstancePrice> previous) {
		this.previous = previous;
		this.previousDatabase.clear();
		this.previousContainer.clear();
		this.previousFunction.clear();
		this.prices.clear();
	}

	/**
	 * Set the previous database prices and reset the other price families and the read price codes.
	 *
	 * @param previous The previous database prices. Key is the code.
	 */
	public void setPreviousDatabase(final Map<String, ProvDatabasePrice> previous) {
		this.previous.clear();
		this.previousDatabase = previous;
		this.previousContainer.clear();
		this.previousFunction.clear();
		this.prices.clear();
	}

	/**
	 * Set the previous container prices and reset the other price families and the read price codes.
	 *
	 * @param previous The previous container prices. Key is the code.
	 */
	public void setPreviousContainer(final Map<String, ProvContainerPrice> previous) {
		this.previousContainer = previous;
		this.previousDatabase.clear();
		this.previousFunction.clear();
		this.previous.clear();
		this.prices.clear();
	}

	/**
	 * Set the previous function prices and reset the other price families and the read price codes.
	 *
	 * @param previous The previous function prices. Key is the code.
	 */
	public void setPreviousFunction(final Map<String, ProvFunctionPrice> previous) {
		this.previousFunction = previous;
		this.previousContainer.clear();
		this.previousDatabase.clear();
		this.previous.clear();
		this.prices.clear();
	}

	/**
	 * Release pointers: clear every cached type, price and merge-tracking structure.
	 */
	public void cleanup() {
		this.prices.clear();
		this.mergedTypes.clear();
		this.mergedTerms.clear();
		this.mergedLocations.clear();
		this.storageTypes.clear();
		this.supportTypes.clear();
		this.previous.clear();
		this.databaseTypes.clear();
		this.containerTypes.clear();
		this.functionTypes.clear();
		this.instanceTypes.clear();
		this.previousStorage.clear();
		this.previousDatabase.clear();
		this.previousContainer.clear();
		this.previousFunction.clear();
		this.previousSupport.clear();
		this.priceTerms.clear();
	}

	/**
	 * Return the full URL based on the base URL of this context.
	 *
	 * @param relative URL. <code>/</code> is prepended if missing.
	 * @return The full URL based on the base URL of this context.
	 */
	public String getUrl(final String relative) {
		return baseUrl + StringUtils.prependIfMissing(relative, "/");
	}
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices.cluster; import com.carrotsearch.hppc.IntOpenHashSet; import com.carrotsearch.hppc.ObjectContainer; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.collect.Lists; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction; import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.compress.CompressedString; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.IndexShardAlreadyExistsException; import org.elasticsearch.index.IndexShardMissingException; import org.elasticsearch.index.aliases.IndexAlias; import org.elasticsearch.index.aliases.IndexAliasesService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.gateway.IndexShardGatewayRecoveryException; import org.elasticsearch.index.gateway.IndexShardGatewayService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.service.IndexService; import org.elasticsearch.index.settings.IndexSettingsService; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.service.IndexShard; import org.elasticsearch.index.shard.service.InternalIndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.*; import org.elasticsearch.threadpool.ThreadPool; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; import static com.google.common.collect.Maps.newHashMap; import static org.elasticsearch.ExceptionsHelper.detailedMessage; /** * */ public class IndicesClusterStateService extends AbstractLifecycleComponent<IndicesClusterStateService> implements ClusterStateListener { private final IndicesService indicesService; private final ClusterService clusterService; private final ThreadPool threadPool; private final RecoveryTarget recoveryTarget; private final ShardStateAction shardStateAction; private final NodeIndexDeletedAction nodeIndexDeletedAction; private final NodeMappingRefreshAction 
nodeMappingRefreshAction; // a map of mappings type we have seen per index due to cluster state // we need this so we won't remove types automatically created as part of the indexing process private final ConcurrentMap<Tuple<String, String>, Boolean> seenMappings = ConcurrentCollections.newConcurrentMap(); // a list of shards that failed during recovery // we keep track of these shards in order to prevent repeated recovery of these shards on each cluster state update private final ConcurrentMap<ShardId, FailedShard> failedShards = ConcurrentCollections.newConcurrentMap(); static class FailedShard { public final long version; public final long timestamp; FailedShard(long version) { this.version = version; this.timestamp = System.currentTimeMillis(); } } private final Object mutex = new Object(); private final FailedEngineHandler failedEngineHandler = new FailedEngineHandler(); private final boolean sendRefreshMapping; private final AtomicLong recoveryIdGenerator = new AtomicLong(); @Inject public IndicesClusterStateService(Settings settings, IndicesService indicesService, ClusterService clusterService, ThreadPool threadPool, RecoveryTarget recoveryTarget, ShardStateAction shardStateAction, NodeIndexDeletedAction nodeIndexDeletedAction, NodeMappingRefreshAction nodeMappingRefreshAction) { super(settings); this.indicesService = indicesService; this.clusterService = clusterService; this.threadPool = threadPool; this.recoveryTarget = recoveryTarget; this.shardStateAction = shardStateAction; this.nodeIndexDeletedAction = nodeIndexDeletedAction; this.nodeMappingRefreshAction = nodeMappingRefreshAction; this.sendRefreshMapping = componentSettings.getAsBoolean("send_refresh_mapping", true); } @Override protected void doStart() throws ElasticsearchException { clusterService.addFirst(this); } @Override protected void doStop() throws ElasticsearchException { clusterService.remove(this); } @Override protected void doClose() throws ElasticsearchException { } @Override public 
void clusterChanged(final ClusterChangedEvent event) { if (!indicesService.changesAllowed()) { return; } if (!lifecycle.started()) { return; } synchronized (mutex) { // we need to clean the shards and indices we have on this node, since we // are going to recover them again once state persistence is disabled (no master / not recovered) // TODO: this feels a bit hacky here, a block disables state persistence, and then we clean the allocated shards, maybe another flag in blocks? if (event.state().blocks().disableStatePersistence()) { for (final String index : indicesService.indices()) { IndexService indexService = indicesService.indexService(index); for (Integer shardId : indexService.shardIds()) { logger.debug("[{}][{}] removing shard (disabled block persistence)", index, shardId); try { indexService.removeShard(shardId, "removing shard (disabled block persistence)"); } catch (Throwable e) { logger.warn("[{}] failed to remove shard (disabled block persistence)", e, index); } } removeIndex(index, "cleaning index (disabled block persistence)"); } return; } cleanFailedShards(event); cleanMismatchedIndexUUIDs(event); applyNewIndices(event); applyMappings(event); applyAliases(event); applyNewOrUpdatedShards(event); applyDeletedIndices(event); applyDeletedShards(event); applyCleanedIndices(event); applySettings(event); sendIndexLifecycleEvents(event); } } private void sendIndexLifecycleEvents(final ClusterChangedEvent event) { String localNodeId = event.state().nodes().localNodeId(); assert localNodeId != null; for (String index : event.indicesDeleted()) { try { nodeIndexDeletedAction.nodeIndexDeleted(event.state(), index, localNodeId); } catch (Throwable e) { logger.debug("failed to send to master index {} deleted event", e, index); } } } private void cleanMismatchedIndexUUIDs(final ClusterChangedEvent event) { for (IndexService indexService : indicesService) { IndexMetaData indexMetaData = event.state().metaData().index(indexService.index().name()); if (indexMetaData == 
null) { // got deleted on us, will be deleted later continue; } if (!indexMetaData.isSameUUID(indexService.indexUUID())) { logger.debug("[{}] mismatch on index UUIDs between cluster state and local state, cleaning the index so it will be recreated", indexMetaData.index()); removeIndex(indexMetaData.index(), "mismatch on index UUIDs between cluster state and local state, cleaning the index so it will be recreated"); } } } private void applyCleanedIndices(final ClusterChangedEvent event) { // handle closed indices, since they are not allocated on a node once they are closed // so applyDeletedIndices might not take them into account for (final String index : indicesService.indices()) { IndexMetaData indexMetaData = event.state().metaData().index(index); if (indexMetaData != null && indexMetaData.state() == IndexMetaData.State.CLOSE) { IndexService indexService = indicesService.indexService(index); for (Integer shardId : indexService.shardIds()) { logger.debug("[{}][{}] removing shard (index is closed)", index, shardId); try { indexService.removeShard(shardId, "removing shard (index is closed)"); } catch (Throwable e) { logger.warn("[{}] failed to remove shard (index is closed)", e, index); } } } } for (final String index : indicesService.indices()) { if (indicesService.indexService(index).shardIds().isEmpty()) { if (logger.isDebugEnabled()) { logger.debug("[{}] cleaning index (no shards allocated)", index); } // clean the index removeIndex(index, "removing index (no shards allocated)"); } } } private void applyDeletedIndices(final ClusterChangedEvent event) { for (final String index : indicesService.indices()) { if (!event.state().metaData().hasIndex(index)) { if (logger.isDebugEnabled()) { logger.debug("[{}] cleaning index, no longer part of the metadata", index); } removeIndex(index, "index no longer part of the metadata"); } } } private void applyDeletedShards(final ClusterChangedEvent event) { RoutingNodes.RoutingNodeIterator routingNode = 
event.state().readOnlyRoutingNodes().routingNodeIter(event.state().nodes().localNodeId());
        if (routingNode == null) {
            return;
        }
        // collect, per local index, the shard ids that are still routed to this node
        IntOpenHashSet newShardIds = new IntOpenHashSet();
        for (IndexService indexService : indicesService) {
            String index = indexService.index().name();
            IndexMetaData indexMetaData = event.state().metaData().index(index);
            if (indexMetaData == null) {
                continue;
            }
            // now, go over and delete shards that needs to get deleted
            newShardIds.clear();
            for (MutableShardRouting shard : routingNode) {
                if (shard.index().equals(index)) {
                    newShardIds.add(shard.id());
                }
            }
            // any locally existing shard id not present in the routing for this node gets removed
            for (Integer existingShardId : indexService.shardIds()) {
                if (!newShardIds.contains(existingShardId)) {
                    if (indexMetaData.state() == IndexMetaData.State.CLOSE) {
                        if (logger.isDebugEnabled()) {
                            logger.debug("[{}][{}] removing shard (index is closed)", index, existingShardId);
                        }
                        indexService.removeShard(existingShardId, "removing shard (index is closed)");
                    } else {
                        // we can just remove the shard, without cleaning it locally, since we will clean it
                        // when all shards are allocated in the IndicesStore
                        if (logger.isDebugEnabled()) {
                            logger.debug("[{}][{}] removing shard (not allocated)", index, existingShardId);
                        }
                        indexService.removeShard(existingShardId, "removing shard (not allocated)");
                    }
                }
            }
        }
    }

    /**
     * Creates index services for any index that has shards routed to this node
     * but is not yet known to the local {@code indicesService}.
     */
    private void applyNewIndices(final ClusterChangedEvent event) {
        // we only create indices for shards that are allocated
        RoutingNodes.RoutingNodeIterator routingNode = event.state().readOnlyRoutingNodes().routingNodeIter(event.state().nodes().localNodeId());
        if (routingNode == null) {
            return;
        }
        for (MutableShardRouting shard : routingNode) {
            if (!indicesService.hasIndex(shard.index())) {
                final IndexMetaData indexMetaData = event.state().metaData().index(shard.index());
                if (logger.isDebugEnabled()) {
                    logger.debug("[{}] creating index", indexMetaData.index());
                }
                indicesService.createIndex(indexMetaData.index(), indexMetaData.settings(), event.state().nodes().localNode().id());
            }
        }
    }

    /**
     * Pushes refreshed index-level settings to locally existing indices whose
     * metadata changed in this cluster state update.
     */
    private void applySettings(ClusterChangedEvent event) {
        if (!event.metaDataChanged()) {
            return;
        }
        for (IndexMetaData indexMetaData : event.state().metaData()) {
            if (!indicesService.hasIndex(indexMetaData.index())) {
                // we only create / update here
                continue;
            }
            // if the index meta data didn't change, no need check for refreshed settings
            if (!event.indexMetaDataChanged(indexMetaData)) {
                continue;
            }
            String index = indexMetaData.index();
            IndexService indexService = indicesService.indexServiceSafe(index);
            IndexSettingsService indexSettingsService = indexService.injector().getInstance(IndexSettingsService.class);
            indexSettingsService.refreshSettings(indexMetaData.settings());
        }
    }

    /**
     * Adds or updates mappings on locally existing indices from the cluster state
     * metadata, collects types whose locally parsed source diverges (optionally
     * asking the master to refresh them), and removes mappings that are no longer
     * present in the metadata.
     */
    private void applyMappings(ClusterChangedEvent event) {
        // go over and update mappings
        for (IndexMetaData indexMetaData : event.state().metaData()) {
            if (!indicesService.hasIndex(indexMetaData.index())) {
                // we only create / update here
                continue;
            }
            List<String> typesToRefresh = null;
            String index = indexMetaData.index();
            IndexService indexService = indicesService.indexService(index);
            if (indexService == null) {
                // got deleted on us, ignore (closing the node)
                // NOTE(review): this returns and aborts processing of all remaining indices,
                // rather than continuing with the next one — presumably intentional on close
                return;
            }
            MapperService mapperService = indexService.mapperService();
            // first, go over and update the _default_ mapping (if exists)
            if (indexMetaData.mappings().containsKey(MapperService.DEFAULT_MAPPING)) {
                processMapping(index, mapperService, MapperService.DEFAULT_MAPPING, indexMetaData.mapping(MapperService.DEFAULT_MAPPING).source());
            }
            // go over and add the relevant mappings (or update them)
            for (ObjectCursor<MappingMetaData> cursor : indexMetaData.mappings().values()) {
                MappingMetaData mappingMd = cursor.value;
                String mappingType = mappingMd.type();
                CompressedString mappingSource = mappingMd.source();
                if (mappingType.equals(MapperService.DEFAULT_MAPPING)) {
                    // we processed _default_ first
                    continue;
                }
                boolean requireRefresh = processMapping(index, mapperService, mappingType, mappingSource);
                if (requireRefresh) {
                    if (typesToRefresh == null) {
                        typesToRefresh = Lists.newArrayList();
                    }
                    typesToRefresh.add(mappingType);
                }
            }
            if (typesToRefresh != null) {
                if (sendRefreshMapping) {
                    nodeMappingRefreshAction.nodeMappingRefresh(event.state(),
                            new NodeMappingRefreshAction.NodeMappingRefreshRequest(index, indexMetaData.uuid(),
                                    typesToRefresh.toArray(new String[typesToRefresh.size()]), event.state().nodes().localNodeId())
                    );
                }
            }
            // go over and remove mappings
            for (DocumentMapper documentMapper : mapperService) {
                if (seenMappings.containsKey(new Tuple<>(index, documentMapper.type())) && !indexMetaData.mappings().containsKey(documentMapper.type())) {
                    // we have it in our mappings, but not in the metadata, and we have seen it in the cluster state, remove it
                    mapperService.remove(documentMapper.type());
                    seenMappings.remove(new Tuple<>(index, documentMapper.type()));
                }
            }
        }
    }

    /**
     * Merges a single mapping from the cluster state into the local {@link MapperService}.
     *
     * @return {@code true} when the locally parsed mapping source differs from the one
     *         in the cluster state, i.e. the master should refresh its version
     */
    private boolean processMapping(String index, MapperService mapperService, String mappingType, CompressedString mappingSource) {
        if (!seenMappings.containsKey(new Tuple<>(index, mappingType))) {
            seenMappings.put(new Tuple<>(index, mappingType), true);
        }

        // refresh mapping can happen for 2 reasons. The first is less urgent, and happens when the mapping on this
        // node is ahead of what there is in the cluster state (yet an update-mapping has been sent to it already,
        // it just hasn't been processed yet and published). Eventually, the mappings will converge, and the refresh
        // mapping sent is more of a safe keeping (assuming the update mapping failed to reach the master, ...)
        // the second case is where the parsing/merging of the mapping from the metadata doesn't result in the same
        // mapping, in this case, we send to the master to refresh its own version of the mappings (to conform with the
        // merge version of it, which it does when refreshing the mappings), and warn log it.
        boolean requiresRefresh = false;
        try {
            if (!mapperService.hasMapping(mappingType)) {
                if (logger.isDebugEnabled()) {
                    logger.debug("[{}] adding mapping [{}], source [{}]", index, mappingType, mappingSource.string());
                }
                // we don't apply default, since it has been applied when the mappings were parsed initially
                mapperService.merge(mappingType, mappingSource, false);
                if (!mapperService.documentMapper(mappingType).mappingSource().equals(mappingSource)) {
                    logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index, mappingType, mappingSource, mapperService.documentMapper(mappingType).mappingSource());
                    requiresRefresh = true;
                }
            } else {
                DocumentMapper existingMapper = mapperService.documentMapper(mappingType);
                if (!mappingSource.equals(existingMapper.mappingSource())) {
                    // mapping changed, update it
                    if (logger.isDebugEnabled()) {
                        logger.debug("[{}] updating mapping [{}], source [{}]", index, mappingType, mappingSource.string());
                    }
                    // we don't apply default, since it has been applied when the mappings were parsed initially
                    mapperService.merge(mappingType, mappingSource, false);
                    if (!mapperService.documentMapper(mappingType).mappingSource().equals(mappingSource)) {
                        requiresRefresh = true;
                        logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index, mappingType, mappingSource, mapperService.documentMapper(mappingType).mappingSource());
                    }
                }
            }
        } catch (Throwable e) {
            logger.warn("[{}] failed to add mapping [{}], source [{}]", e, index, mappingType, mappingSource);
        }
        return requiresRefresh;
    }

    /** @return {@code true} when either alias metadata or the routing table changed between the two states. */
    private boolean aliasesChanged(ClusterChangedEvent event) {
        return !event.state().metaData().aliases().equals(event.previousState().metaData().aliases()) ||
                !event.state().routingTable().equals(event.previousState().routingTable());
    }

    /**
     * Syncs aliases of locally existing indices with the cluster state metadata:
     * creates/updates changed aliases and removes those no longer in the metadata.
     */
    private void applyAliases(ClusterChangedEvent event) {
        // check if aliases changed
        if (aliasesChanged(event)) {
            // go over and update aliases
            for (IndexMetaData indexMetaData : event.state().metaData()) {
                if (!indicesService.hasIndex(indexMetaData.index())) {
                    // we only create / update here
                    continue;
                }
                String index = indexMetaData.index();
                IndexService indexService = indicesService.indexService(index);
                IndexAliasesService indexAliasesService = indexService.aliasesService();
                processAliases(index, indexMetaData.aliases().values(), indexAliasesService);
                // go over and remove aliases
                for (IndexAlias indexAlias : indexAliasesService) {
                    if (!indexMetaData.aliases().containsKey(indexAlias.alias())) {
                        // we have it in our aliases, but not in the metadata, remove it
                        indexAliasesService.remove(indexAlias.alias());
                    }
                }
            }
        }
    }

    /** Creates missing aliases and updates aliases whose filter changed, then registers them in bulk. */
    private void processAliases(String index, ObjectContainer<AliasMetaData> aliases, IndexAliasesService indexAliasesService) {
        HashMap<String, IndexAlias> newAliases = newHashMap();
        for (ObjectCursor<AliasMetaData> cursor : aliases) {
            AliasMetaData aliasMd = cursor.value;
            String alias = aliasMd.alias();
            CompressedString filter = aliasMd.filter();
            try {
                if (!indexAliasesService.hasAlias(alias)) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("[{}] adding alias [{}], filter [{}]", index, alias, filter);
                    }
                    newAliases.put(alias, indexAliasesService.create(alias, filter));
                } else {
                    // only re-create the alias when its filter actually changed (including null <-> non-null)
                    if ((filter == null && indexAliasesService.alias(alias).filter() != null) ||
                            (filter != null && !filter.equals(indexAliasesService.alias(alias).filter()))) {
                        if (logger.isDebugEnabled()) {
                            logger.debug("[{}] updating alias [{}], filter [{}]", index, alias, filter);
                        }
                        newAliases.put(alias, indexAliasesService.create(alias, filter));
                    }
                }
            } catch (Throwable e) {
                logger.warn("[{}] failed to add alias [{}], filter [{}]", e, index, alias, filter);
            }
        }
        indexAliasesService.addAll(newAliases);
    }

    /**
     * Creates missing shards and applies routing changes for shards assigned to this
     * node; shards the master believes started but which do not exist locally are
     * reported back as failed.
     */
    private void applyNewOrUpdatedShards(final ClusterChangedEvent event) throws ElasticsearchException {
        if (!indicesService.changesAllowed()) {
            return;
        }

        RoutingTable routingTable = event.state().routingTable();
        RoutingNodes.RoutingNodeIterator routingNode =
event.state().readOnlyRoutingNodes().routingNodeIter(event.state().nodes().localNodeId());
        if (routingNode == null) {
            failedShards.clear();
            return;
        }
        DiscoveryNodes nodes = event.state().nodes();
        for (final ShardRouting shardRouting : routingNode) {
            final IndexService indexService = indicesService.indexService(shardRouting.index());
            if (indexService == null) {
                // got deleted on us, ignore
                continue;
            }
            final IndexMetaData indexMetaData = event.state().metaData().index(shardRouting.index());
            if (indexMetaData == null) {
                // the index got deleted on the metadata, we will clean it later in the apply deleted method call
                continue;
            }

            final int shardId = shardRouting.id();

            if (!indexService.hasShard(shardId) && shardRouting.started()) {
                if (!failedShards.containsKey(shardRouting.shardId())) {
                    // the master thinks we are started, but we don't have this shard at all, mark it as failed
                    logger.warn("[{}][{}] master [{}] marked shard as started, but shard has not been created, mark shard as failed", shardRouting.index(), shardId, nodes.masterNode());
                    failedShards.put(shardRouting.shardId(), new FailedShard(shardRouting.version()));
                    if (nodes.masterNode() != null) {
                        shardStateAction.shardFailed(shardRouting, indexMetaData.getUUID(),
                                "master " + nodes.masterNode() + " marked shard as started, but shard has not been created, mark shard as failed",
                                nodes.masterNode()
                        );
                    }
                }
                continue;
            }

            if (indexService.hasShard(shardId)) {
                InternalIndexShard indexShard = (InternalIndexShard) indexService.shard(shardId);
                ShardRouting currentRoutingEntry = indexShard.routingEntry();
                // if the current and global routing are initializing, but are still not the same, its a different "shard" being allocated
                // for example: a shard that recovers from one node and now needs to recover to another node,
                // or a replica allocated and then allocating a primary because the primary failed on another node
                if (currentRoutingEntry.initializing() && shardRouting.initializing() && !currentRoutingEntry.equals(shardRouting)) {
                    logger.debug("[{}][{}] removing shard (different instance of it allocated on this node, current [{}], global [{}])", shardRouting.index(), shardRouting.id(), currentRoutingEntry, shardRouting);
                    // cancel recovery just in case we are in recovery (its fine if we are not in recovery, it will be a noop).
                    recoveryTarget.cancelRecovery(indexShard);
                    indexService.removeShard(shardRouting.id(), "removing shard (different instance of it allocated on this node)");
                }
            }

            if (indexService.hasShard(shardId)) {
                InternalIndexShard indexShard = (InternalIndexShard) indexService.shard(shardId);
                // keep the local routing entry in sync with the cluster state and notify the gateway service
                if (!shardRouting.equals(indexShard.routingEntry())) {
                    indexShard.routingEntry(shardRouting);
                    indexService.shardInjector(shardId).getInstance(IndexShardGatewayService.class).routingStateChanged();
                }
            }

            if (shardRouting.initializing()) {
                applyInitializingShard(routingTable, nodes, indexMetaData, routingTable.index(shardRouting.index()).shard(shardRouting.id()), shardRouting);
            }
        }
    }

    /**
     * Drops entries from {@code failedShards} that no longer match an assigned shard
     * on this node (different version) or whose entry has timed out.
     */
    private void cleanFailedShards(final ClusterChangedEvent event) {
        RoutingTable routingTable = event.state().routingTable();
        RoutingNodes.RoutingNodeIterator routingNode = event.state().readOnlyRoutingNodes().routingNodeIter(event.state().nodes().localNodeId());
        if (routingNode == null) {
            failedShards.clear();
            return;
        }
        DiscoveryNodes nodes = event.state().nodes();
        long now = System.currentTimeMillis();
        String localNodeId = nodes.localNodeId();
        Iterator<Map.Entry<ShardId, FailedShard>> iterator = failedShards.entrySet().iterator();
        shards:
        while (iterator.hasNext()) {
            Map.Entry<ShardId, FailedShard> entry = iterator.next();
            FailedShard failedShard = entry.getValue();
            IndexRoutingTable indexRoutingTable = routingTable.index(entry.getKey().getIndex());
            if (indexRoutingTable != null) {
                IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(entry.getKey().id());
                if (shardRoutingTable != null) {
                    for (ShardRouting shardRouting : shardRoutingTable.assignedShards()) {
                        if (localNodeId.equals(shardRouting.currentNodeId())) {
                            // we have a timeout here just to make sure we don't have dangled failed shards for some reason
                            // its just another safely layer
                            if (shardRouting.version() == failedShard.version && ((now - failedShard.timestamp) < TimeValue.timeValueMinutes(60).millis())) {
                                // It's the same failed shard - keep it if it hasn't timed out
                                continue shards;
                            } else {
                                // Different version or expired, remove it
                                break;
                            }
                        }
                    }
                }
            }
            iterator.remove();
        }
    }

    /**
     * Drives recovery of a shard the master marked as initializing: resolves the
     * recovery source (peer node, or local gateway when none), creates the shard if
     * missing, and kicks off the recovery. Re-sends shard-started if the shard is
     * already past recovery.
     */
    private void applyInitializingShard(final RoutingTable routingTable, final DiscoveryNodes nodes, final IndexMetaData indexMetaData, final IndexShardRoutingTable indexShardRouting, final ShardRouting shardRouting) throws ElasticsearchException {
        final IndexService indexService = indicesService.indexService(shardRouting.index());
        if (indexService == null) {
            // got deleted on us, ignore
            return;
        }
        final int shardId = shardRouting.id();

        if (indexService.hasShard(shardId)) {
            IndexShard indexShard = indexService.shardSafe(shardId);
            if (indexShard.state() == IndexShardState.STARTED || indexShard.state() == IndexShardState.POST_RECOVERY) {
                // the master thinks we are initializing, but we are already started or on POST_RECOVERY and waiting
                // for master to confirm a shard started message (either master failover, or a cluster event before
                // we managed to tell the master we started), mark us as started
                if (logger.isTraceEnabled()) {
                    logger.trace("{} master marked shard as initializing, but shard has state [{}], resending shard started to {}",
                            indexShard.shardId(), indexShard.state(), nodes.masterNode());
                }
                if (nodes.masterNode() != null) {
                    shardStateAction.shardStarted(shardRouting, indexMetaData.getUUID(),
                            "master " + nodes.masterNode() + " marked shard as initializing, but shard state is [" + indexShard.state() + "], mark shard as started",
                            nodes.masterNode());
                }
                return;
            } else {
                if (indexShard.ignoreRecoveryAttempt()) {
                    logger.trace("ignoring recovery instruction for an existing shard {} (shard state: [{}])", indexShard.shardId(), indexShard.state());
                    return;
                }
            }
        }

        // figure out where to recover from (node or disk, in which case sourceNode is null)
        DiscoveryNode sourceNode = null;
        if (!shardRouting.primary()) {
            IndexShardRoutingTable shardRoutingTable = routingTable.index(shardRouting.index()).shard(shardRouting.id());
            for (ShardRouting entry : shardRoutingTable) {
                if (entry.primary() && entry.started()) {
                    // only recover from started primary, if we can't find one, we will do it next round
                    sourceNode = nodes.get(entry.currentNodeId());
                    if (sourceNode == null) {
                        logger.trace("can't recover replica because primary shard {} is assigned to an unknown node. ignoring.", entry);
                        return;
                    }
                    break;
                }
            }
            if (sourceNode == null) {
                logger.trace("can't recover replica for {} because a primary shard can not be found. ignoring.", shardRouting.shardId());
                return;
            }
        } else if (shardRouting.relocatingNodeId() != null) {
            sourceNode = nodes.get(shardRouting.relocatingNodeId());
            if (sourceNode == null) {
                logger.trace("can't recover from remote primary shard {} because it is assigned to an unknown node [{}]. ignoring.", shardRouting.shardId(), shardRouting.relocatingNodeId());
                return;
            }
        }

        // if there is no shard, create it
        if (!indexService.hasShard(shardId)) {
            if (failedShards.containsKey(shardRouting.shardId())) {
                // already tried to create this shard but it failed - ignore
                logger.trace("[{}][{}] not initializing, this shards failed to recover on this node before, waiting for reassignment", shardRouting.index(), shardRouting.id());
                return;
            }
            try {
                if (logger.isDebugEnabled()) {
                    logger.debug("[{}][{}] creating shard", shardRouting.index(), shardId);
                }
                InternalIndexShard indexShard = (InternalIndexShard) indexService.createShard(shardId);
                indexShard.routingEntry(shardRouting);
                indexShard.engine().addFailedEngineListener(failedEngineHandler);
            } catch (IndexShardAlreadyExistsException e) {
                // ignore this, the method call can happen several times
            } catch (Throwable e) {
                // creation failed: best-effort local cleanup, remember the failure and tell the master
                logger.warn("[{}][{}] failed to create shard", e, shardRouting.index(), shardRouting.id());
                try {
                    indexService.removeShard(shardId, "failed to create [" + ExceptionsHelper.detailedMessage(e) + "]");
                } catch (IndexShardMissingException e1) {
                    // ignore
                } catch (Throwable e1) {
                    logger.warn("[{}][{}] failed to remove shard after failed creation", e1, shardRouting.index(), shardRouting.id());
                }
                failedShards.put(shardRouting.shardId(), new FailedShard(shardRouting.version()));
                if (nodes.masterNode() != null) {
                    shardStateAction.shardFailed(shardRouting, indexMetaData.getUUID(),
                            "Failed to create shard, message [" + detailedMessage(e) + "]",
                            nodes.masterNode()
                    );
                } else {
                    logger.debug("can't send shard failed for {} as there is no current master", shardRouting.shardId());
                }
                return;
            }
        }
        final InternalIndexShard indexShard = (InternalIndexShard) indexService.shardSafe(shardId);

        if (indexShard.ignoreRecoveryAttempt()) {
            // we are already recovering (we can get to this state since the cluster event can happen several
            // times while we recover)
            logger.trace("ignoring recovery instruction for shard {} (shard state: [{}])", indexShard.shardId(), indexShard.state());
            return;
        }

        if (sourceNode != null) {
            try {
                // we don't mark this one as relocated at the end.
                // For primaries: requests in any case are routed to both when its relocating and that way we handle
                // the edge case where its mark as relocated, and we might need to roll it back...
                // For replicas: we are recovering a backup from a primary
                RecoveryState.Type type = shardRouting.primary() ? RecoveryState.Type.RELOCATION : RecoveryState.Type.REPLICA;
                final StartRecoveryRequest request = new StartRecoveryRequest(indexShard.shardId(), sourceNode, nodes.localNode(), false,
                        indexShard.store().list(), type, recoveryIdGenerator.incrementAndGet());
                recoveryTarget.startRecovery(request, indexShard, new PeerRecoveryListener(request, shardRouting, indexService, indexMetaData));
            } catch (Throwable e) {
                handleRecoveryFailure(indexService, indexMetaData, shardRouting, true, e);
            }
        } else {
            // we are the first primary, recover from the gateway
            // if its post api allocation, the index should exists
            boolean indexShouldExists = indexShardRouting.primaryAllocatedPostApi();
            IndexShardGatewayService shardGatewayService = indexService.shardInjector(shardId).getInstance(IndexShardGatewayService.class);
            shardGatewayService.recover(indexShouldExists, new IndexShardGatewayService.RecoveryListener() {
                @Override
                public void onRecoveryDone() {
                    shardStateAction.shardStarted(shardRouting, indexMetaData.getUUID(), "after recovery from gateway");
                }

                @Override
                public void onIgnoreRecovery(String reason) {
                }

                @Override
                public void onRecoveryFailed(IndexShardGatewayRecoveryException e) {
                    handleRecoveryFailure(indexService, indexMetaData, shardRouting, true, e);
                }
            });
        }
    }

    /**
     * Listener for peer (node-to-node) recoveries: reports shard-started on success,
     * schedules retries, removes the shard on ignored recovery, and routes failures
     * to {@code handleRecoveryFailure}.
     */
    private class PeerRecoveryListener implements RecoveryTarget.RecoveryListener {

        private final StartRecoveryRequest request;
        private final ShardRouting shardRouting;
        private final IndexService indexService;
        private final IndexMetaData indexMetaData;

        private
PeerRecoveryListener(StartRecoveryRequest request, ShardRouting shardRouting, IndexService indexService, IndexMetaData indexMetaData) {
            this.request = request;
            this.shardRouting = shardRouting;
            this.indexService = indexService;
            this.indexMetaData = indexMetaData;
        }

        @Override
        public void onRecoveryDone() {
            shardStateAction.shardStarted(shardRouting, indexMetaData.getUUID(), "after recovery (replica) from node [" + request.sourceNode() + "]");
        }

        @Override
        public void onRetryRecovery(TimeValue retryAfter, RecoveryStatus recoveryStatus) {
            recoveryTarget.retryRecovery(request, retryAfter, recoveryStatus, PeerRecoveryListener.this);
        }

        @Override
        public void onIgnoreRecovery(boolean removeShard, String reason) {
            if (!removeShard) {
                return;
            }
            // shard removal is guarded by the service-wide mutex
            synchronized (mutex) {
                if (indexService.hasShard(shardRouting.shardId().id())) {
                    if (logger.isDebugEnabled()) {
                        logger.debug("[{}][{}] removing shard on ignored recovery, reason [{}]", shardRouting.index(), shardRouting.shardId().id(), reason);
                    }
                    try {
                        indexService.removeShard(shardRouting.shardId().id(), "ignore recovery: " + reason);
                    } catch (IndexShardMissingException e) {
                        // the node got closed on us, ignore it
                    } catch (Throwable e1) {
                        logger.warn("[{}][{}] failed to delete shard after ignore recovery", e1, indexService.index().name(), shardRouting.shardId().id());
                    }
                }
            }
        }

        @Override
        public void onRecoveryFailure(RecoveryFailedException e, boolean sendShardFailure) {
            handleRecoveryFailure(indexService, indexMetaData, shardRouting, sendShardFailure, e);
        }
    }

    /**
     * Common failure path for shard recovery: removes the local shard (best effort)
     * and, when requested, records the failure and notifies the master.
     */
    private void handleRecoveryFailure(IndexService indexService, IndexMetaData indexMetaData, ShardRouting shardRouting, boolean sendShardFailure, Throwable failure) {
        logger.warn("[{}][{}] failed to start shard", failure, indexService.index().name(), shardRouting.shardId().id());
        synchronized (mutex) {
            if (indexService.hasShard(shardRouting.shardId().id())) {
                try {
                    indexService.removeShard(shardRouting.shardId().id(), "recovery failure [" + ExceptionsHelper.detailedMessage(failure) + "]");
                } catch (IndexShardMissingException e) {
                    // the node got closed on us, ignore it
                } catch (Throwable e1) {
                    logger.warn("[{}][{}] failed to delete shard after failed startup", e1, indexService.index().name(), shardRouting.shardId().id());
                }
            }
            if (sendShardFailure) {
                try {
                    failedShards.put(shardRouting.shardId(), new FailedShard(shardRouting.version()));
                    shardStateAction.shardFailed(shardRouting, indexMetaData.getUUID(), "Failed to start shard, message [" + detailedMessage(failure) + "]");
                } catch (Throwable e1) {
                    logger.warn("[{}][{}] failed to mark shard as failed after a failed start", e1, indexService.index().name(), shardRouting.id());
                }
            }
        }
    }

    /**
     * Removes an index service and forgets all mappings seen for that index.
     */
    private void removeIndex(String index, String reason) {
        try {
            indicesService.removeIndex(index, reason);
        } catch (Throwable e) {
            logger.warn("failed to clean index ({})", e, reason);
        }
        // clear seen mappings as well
        // NOTE(review): removal while iterating keySet() assumes seenMappings is a
        // concurrent map — confirm against the field declaration
        for (Tuple<String, String> tuple : seenMappings.keySet()) {
            if (tuple.v1().equals(index)) {
                seenMappings.remove(tuple);
            }
        }
    }

    /**
     * Reacts to engine failures by removing the failed shard (on a generic thread)
     * and reporting the failure to the master.
     */
    private class FailedEngineHandler implements Engine.FailedEngineListener {
        @Override
        public void onFailedEngine(final ShardId shardId, final String reason, final @Nullable Throwable failure) {
            ShardRouting shardRouting = null;
            final IndexService indexService = indicesService.indexService(shardId.index().name());
            if (indexService != null) {
                IndexShard indexShard = indexService.shard(shardId.id());
                if (indexShard != null) {
                    shardRouting = indexShard.routingEntry();
                }
            }
            if (shardRouting == null) {
                logger.warn("[{}][{}] engine failed, but can't find index shard. failure reason: [{}]", shardId.index().name(), shardId.id(), reason);
                return;
            }
            final ShardRouting fShardRouting = shardRouting;
            final String indexUUID = indexService.indexUUID(); // we know indexService is not null here.
            final String failureMessage = "engine failure, message [" + reason + "]" +
                    (failure == null ? "" : "[" + detailedMessage(failure) + "]");
            // do the actual removal/notification off the cluster state thread
            threadPool.generic().execute(new Runnable() {
                @Override
                public void run() {
                    synchronized (mutex) {
                        if (indexService.hasShard(shardId.id())) {
                            try {
                                indexService.removeShard(shardId.id(), failureMessage);
                            } catch (IndexShardMissingException e) {
                                // the node got closed on us, ignore it
                            } catch (Throwable e1) {
                                logger.warn("[{}][{}] failed to delete shard after failed engine ([{}])", e1, indexService.index().name(), shardId.id(), reason);
                            }
                        }
                        try {
                            failedShards.put(fShardRouting.shardId(), new FailedShard(fShardRouting.version()));
                            shardStateAction.shardFailed(fShardRouting, indexUUID, failureMessage);
                        } catch (Throwable e1) {
                            logger.warn("[{}][{}] failed to mark shard as failed after a failed engine ([{}])", e1, indexService.index().name(), shardId.id(), reason);
                        }
                    }
                }
            });
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.mongodb.gridfs; import com.mongodb.DB; import com.mongodb.DBCollection; import com.mongodb.Mongo; import com.mongodb.MongoClient; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; import com.mongodb.gridfs.GridFS; import org.apache.camel.Consumer; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; import org.apache.camel.support.CamelContextHelper; import org.apache.camel.support.DefaultEndpoint; /** * Component for working with MongoDB GridFS. 
 */
@UriEndpoint(firstVersion = "2.18.0", scheme = "mongodb-gridfs", title = "MongoDB GridFS", syntax = "mongodb-gridfs:connectionBean", label = "database,nosql")
public class GridFsEndpoint extends DefaultEndpoint {

    // Exchange header names used by the GridFS producer/consumer.
    public static final String GRIDFS_OPERATION = "gridfs.operation";
    public static final String GRIDFS_METADATA = "gridfs.metadata";
    public static final String GRIDFS_CHUNKSIZE = "gridfs.chunksize";
    public static final String GRIDFS_FILE_ID_PRODUCED = "gridfs.fileid";

    // URI-configurable options
    @UriPath
    @Metadata(required = true)
    private String connectionBean;
    @UriParam
    @Metadata(required = true)
    private String database;
    @UriParam(defaultValue = GridFS.DEFAULT_BUCKET)
    private String bucket;
    @UriParam(enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY,SAFE")
    private WriteConcern writeConcern;
    @UriParam
    private WriteConcern writeConcernRef;
    @UriParam
    private ReadPreference readPreference;
    @UriParam(label = "producer")
    private String operation;
    @UriParam(label = "consumer")
    private String query;
    @UriParam(label = "consumer", defaultValue = "1000")
    private long initialDelay = 1000;
    @UriParam(label = "consumer", defaultValue = "500")
    private long delay = 500;
    @UriParam(label = "consumer", defaultValue = "TimeStamp")
    private QueryStrategy queryStrategy = QueryStrategy.TimeStamp;
    @UriParam(label = "consumer", defaultValue = "camel-timestamps")
    private String persistentTSCollection = "camel-timestamps";
    @UriParam(label = "consumer", defaultValue = "camel-timestamp")
    private String persistentTSObject = "camel-timestamp";
    @UriParam(label = "consumer", defaultValue = "camel-processed")
    private String fileAttributeName = "camel-processed";

    // Live connection state, resolved in doStart()/initializeConnection().
    private Mongo mongoConnection;
    private DB db;
    private GridFS gridFs;
    private DBCollection filesCollection;

    public GridFsEndpoint(String uri, GridFsComponent component) {
        super(uri, component);
    }

    @Override
    public Producer createProducer() throws Exception {
        initializeConnection();
        return new GridFsProducer(this);
    }

    @Override
    public Consumer
createConsumer(Processor processor) throws Exception {
        initializeConnection();
        return new GridFsConsumer(this, processor);
    }

    /**
     * Resolves the configured database and GridFS bucket from the Mongo connection.
     *
     * @throws IllegalStateException when no database is configured or it cannot be resolved
     */
    public void initializeConnection() throws Exception {
        log.info("Initialize GridFS endpoint: {}", this);
        if (database == null) {
            throw new IllegalStateException("Missing required endpoint configuration: database");
        }
        db = mongoConnection.getDB(database);
        if (db == null) {
            throw new IllegalStateException("Could not initialize GridFsComponent. Database " + database + " does not exist.");
        }
        // anonymous subclass with an instance initializer: captures the protected
        // files collection that the GridFS constructor resolves
        gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket) {
            {
                filesCollection = getFilesCollection();
            }
        };
    }

    @Override
    protected void doStart() throws Exception {
        // writeConcern and writeConcernRef are mutually exclusive
        if (writeConcern != null && writeConcernRef != null) {
            String msg = "Cannot set both writeConcern and writeConcernRef at the same time. Respective values: " + writeConcern
                    + ", " + writeConcernRef + ". Aborting initialization.";
            throw new IllegalArgumentException(msg);
        }

        mongoConnection = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, MongoClient.class);
        log.debug("Resolved the connection with the name {} as {}", connectionBean, mongoConnection);
        setWriteReadOptionsOnConnection();
        super.doStart();
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();
        if (mongoConnection != null) {
            log.debug("Closing connection");
            mongoConnection.close();
        }
    }

    private void setWriteReadOptionsOnConnection() {
        // Set the WriteConcern
        if (writeConcern != null) {
            mongoConnection.setWriteConcern(writeConcern);
        } else if (writeConcernRef != null) {
            mongoConnection.setWriteConcern(writeConcernRef);
        }

        // Set the ReadPreference
        if (readPreference != null) {
            mongoConnection.setReadPreference(readPreference);
        }
    }

    // ======= Getters and setters ===============================================

    public String getConnectionBean() {
        return connectionBean;
    }

    /**
     * Name of {@link com.mongodb.Mongo} to use.
*/ public void setConnectionBean(String connectionBean) { this.connectionBean = connectionBean; } public Mongo getMongoConnection() { return mongoConnection; } /** * Sets the Mongo instance that represents the backing connection * * @param mongoConnection the connection to the database */ public void setMongoConnection(Mongo mongoConnection) { this.mongoConnection = mongoConnection; } public DB getDB() { return db; } public String getDatabase() { return database; } /** * Sets the name of the MongoDB database to target * * @param database name of the MongoDB database */ public void setDatabase(String database) { this.database = database; } /** * Sets the name of the GridFS bucket within the database. Default is "fs". * * @param database name of the MongoDB database */ public String getBucket() { return bucket; } public void setBucket(String bucket) { this.bucket = bucket; } public String getQuery() { return query; } /** * Additional query parameters (in JSON) that are used to configure the query used for finding * files in the GridFsConsumer * @param query */ public void setQuery(String query) { this.query = query; } public long getDelay() { return delay; } /** * Sets the delay between polls within the Consumer. Default is 500ms * @param delay */ public void setDelay(long delay) { this.delay = delay; } public long getInitialDelay() { return initialDelay; } /** * Sets the initialDelay before the consumer will start polling. Default is 1000ms * @param initialDelay */ public void setInitialDelay(long initialDelay) { this.initialDelay = delay; } /** * Sets the QueryStrategy that is used for polling for new files. Default is Timestamp * @see QueryStrategy * @param s */ public void setQueryStrategy(String s) { queryStrategy = QueryStrategy.valueOf(s); } public QueryStrategy getQueryStrategy() { return queryStrategy; } /** * If the QueryType uses a persistent timestamp, this sets the name of the collection within * the DB to store the timestamp. 
     * @param s collection name used to persist the timestamp
     */
    public void setPersistentTSCollection(String s) {
        persistentTSCollection = s;
    }

    public String getPersistentTSCollection() {
        return persistentTSCollection;
    }

    /**
     * If the QueryType uses a persistent timestamp, this is the ID of the object in the collection
     * to store the timestamp.
     *
     * @param id object id used to persist the timestamp
     */
    public void setPersistentTSObject(String id) {
        persistentTSObject = id;
    }

    public String getPersistentTSObject() {
        return persistentTSObject;
    }

    /**
     * If the QueryType uses a FileAttribute, this sets the name of the attribute that is used. Default is "camel-processed".
     *
     * @param f attribute name used to mark processed files
     */
    public void setFileAttributeName(String f) {
        fileAttributeName = f;
    }

    public String getFileAttributeName() {
        return fileAttributeName;
    }

    /**
     * Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
     * Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.
     *
     * @param writeConcern the standard name of the WriteConcern
     * @see <a href="http://api.mongodb.org/java/current/com/mongodb/WriteConcern.html#valueOf(java.lang.String)">possible options</a>
     */
    public void setWriteConcern(String writeConcern) {
        this.writeConcern = WriteConcern.valueOf(writeConcern);
    }

    public WriteConcern getWriteConcern() {
        return writeConcern;
    }

    /**
     * Set the {@link WriteConcern} for write operations on MongoDB, passing in the bean ref to a custom WriteConcern which exists in the Registry.
     * You can also use standard WriteConcerns by passing in their key. See the {@link #setWriteConcern(String) setWriteConcern} method.
     *
     * @param writeConcernRef the name of the bean in the registry that represents the WriteConcern to use
     */
    public void setWriteConcernRef(String writeConcernRef) {
        WriteConcern wc = this.getCamelContext().getRegistry().lookupByNameAndType(writeConcernRef, WriteConcern.class);
        if (wc == null) {
            String msg = "Camel MongoDB component could not find the WriteConcern in the Registry. Verify that the " +
                    "provided bean name (" + writeConcernRef + ") is correct. Aborting initialization.";
            throw new IllegalArgumentException(msg);
        }

        this.writeConcernRef = wc;
    }

    public WriteConcern getWriteConcernRef() {
        return writeConcernRef;
    }

    /**
     * Sets a MongoDB {@link ReadPreference} on the Mongo connection. Read preferences set directly on the connection will be
     * overridden by this setting.
     * <p/>
     * The {@link com.mongodb.ReadPreference#valueOf(String)} utility method is used to resolve the passed {@code readPreference}
     * value. Some examples for the possible values are {@code nearest}, {@code primary} or {@code secondary} etc.
     *
     * @param readPreference the name of the read preference to set
     */
    public void setReadPreference(String readPreference) {
        this.readPreference = ReadPreference.valueOf(readPreference);
    }

    public ReadPreference getReadPreference() {
        return readPreference;
    }

    /**
     * Sets the operation this endpoint will execute against GridRS.
     */
    public void setOperation(String operation) {
        this.operation = operation;
    }

    public String getOperation() {
        return operation;
    }

    public GridFS getGridFs() {
        return gridFs;
    }

    public void setGridFs(GridFS gridFs) {
        this.gridFs = gridFs;
    }

    public DBCollection getFilesCollection() {
        return filesCollection;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.util.ipc.shmem;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.net.URL;
import java.nio.channels.FileLock;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.jar.JarFile;
import java.util.zip.ZipEntry;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.internal.util.typedef.internal.LT;
import org.apache.ignite.internal.util.typedef.internal.U;

import static org.apache.ignite.internal.IgniteVersionUtils.VER_STR;

/**
 * Shared memory native loader.
 * <p>
 * Loads the {@code igniteshmem} JNI library, trying in order: {@code java.library.path},
 * classpath resources (platform-specific, OS-specific, then generic paths), and finally
 * the shmem jar found under {@code IGNITE_HOME/libs}. Extraction to a per-user temp
 * directory is serialized across processes via a file lock.
 */
@SuppressWarnings("ErrorNotRethrown")
public class IpcSharedMemoryNativeLoader {
    /** Library name base. */
    private static final String LIB_NAME_BASE = "igniteshmem";

    /** Library jar name base. */
    private static final String JAR_NAME_BASE = "shmem";

    /** Library name (versioned, so different Ignite versions don't clash in the temp dir). */
    static final String LIB_NAME = LIB_NAME_BASE + "-" + VER_STR;

    /** Loaded flag (volatile for the double-checked locking in {@link #load(IgniteLogger)}). */
    private static volatile boolean loaded;

    /**
     * @return Operating system name to resolve path to library.
     */
    private static String os() {
        String name = System.getProperty("os.name").toLowerCase().trim();

        if (name.startsWith("win"))
            throw new IllegalStateException("IPC shared memory native loader should not be called on windows.");

        if (name.startsWith("linux"))
            return "linux";

        if (name.startsWith("mac os x"))
            return "osx";

        // Fall back to a sanitized OS name for other platforms.
        return name.replaceAll("\\W+", "_");
    }

    /**
     * @return Platform (OS name plus JVM bit model, e.g. {@code linux64}).
     */
    private static String platform() {
        return os() + bitModel();
    }

    /**
     * @return Bit model of the running JVM (32/64), or {@code -1} if it cannot be determined.
     */
    private static int bitModel() {
        String prop = System.getProperty("sun.arch.data.model");

        if (prop == null)
            prop = System.getProperty("com.ibm.vm.bitmode"); // IBM JVMs use a different property.

        if (prop != null)
            return Integer.parseInt(prop);

        // We don't know.
        return -1;
    }

    /**
     * Loads the native library once per JVM; subsequent calls are no-ops.
     *
     * @param log Logger, if available. If null, warnings will be printed out to console.
     * @throws IgniteCheckedException If failed.
     */
    public static void load(IgniteLogger log) throws IgniteCheckedException {
        if (loaded)
            return;

        synchronized (IpcSharedMemoryNativeLoader.class) {
            if (loaded)
                return;

            doLoad(log);

            loaded = true;
        }
    }

    /**
     * Performs the actual load, accumulating every failed attempt so the final
     * exception explains all fallbacks that were tried.
     *
     * @param log Logger, if available.
     * @throws IgniteCheckedException If failed.
     */
    private static void doLoad(IgniteLogger log) throws IgniteCheckedException {
        assert Thread.holdsLock(IpcSharedMemoryNativeLoader.class);

        Collection<Throwable> errs = new ArrayList<>();

        try {
            // Load native library (the library directory should be in java.library.path).
            System.loadLibrary(LIB_NAME);

            return;
        }
        catch (UnsatisfiedLinkError e) {
            errs.add(e);
        }

        File tmpDir = getUserSpecificTempDir();

        File lockFile = new File(tmpDir, "igniteshmem.lock");

        // Obtain lock on file to prevent concurrent extracts.
        try (RandomAccessFile randomAccessFile = new RandomAccessFile(lockFile, "rws");
             FileLock ignored = randomAccessFile.getChannel().lock()) {
            if (extractAndLoad(errs, tmpDir, platformSpecificResourcePath()))
                return;

            if (extractAndLoad(errs, tmpDir, osSpecificResourcePath()))
                return;

            if (extractAndLoad(errs, tmpDir, resourcePath()))
                return;

            try {
                if (log != null)
                    LT.warn(log, "Failed to load 'igniteshmem' library from classpath. Will try to load it from IGNITE_HOME.");

                String igniteHome = X.resolveIgniteHome();

                File shmemJar = findShmemJar(errs, igniteHome);

                if (shmemJar != null) {
                    try (JarFile jar = new JarFile(shmemJar, false, JarFile.OPEN_READ)) {
                        if (extractAndLoad(errs, jar, tmpDir, platformSpecificResourcePath()))
                            return;

                        if (extractAndLoad(errs, jar, tmpDir, osSpecificResourcePath()))
                            return;

                        if (extractAndLoad(errs, jar, tmpDir, resourcePath()))
                            return;
                    }
                }
            }
            catch (IgniteCheckedException ignore) {
                // No-op: IGNITE_HOME resolution is a best-effort fallback.
            }

            // Failed to find the library.
            assert !errs.isEmpty();

            throw new IgniteCheckedException("Failed to load native IPC library: " + errs);
        }
        catch (IOException e) {
            throw new IgniteCheckedException("Failed to obtain file lock: " + lockFile, e);
        }
    }

    /**
     * Tries to find shmem jar in IGNITE_HOME/libs folder.
     *
     * @param errs Collection of errors to add readable exception to.
     * @param igniteHome Resolver IGNITE_HOME variable.
     * @return File, if found.
     */
    private static File findShmemJar(Collection<Throwable> errs, String igniteHome) {
        File libs = new File(igniteHome, "libs");

        if (!libs.exists() || libs.isFile()) {
            errs.add(new IllegalStateException("Failed to find libs folder in resolved IGNITE_HOME: " + igniteHome));

            return null;
        }

        // listFiles() returns null on I/O error - guard against NPE.
        File[] files = libs.listFiles();

        if (files != null) {
            for (File lib : files) {
                if (lib.getName().endsWith(".jar") && lib.getName().contains(JAR_NAME_BASE))
                    return lib;
            }
        }

        errs.add(new IllegalStateException("Failed to find shmem jar in resolved IGNITE_HOME: " + igniteHome));

        return null;
    }

    /**
     * Gets temporary directory unique for each OS user.
     * The directory guaranteed to exist, though may not be empty.
     *
     * @return Per-user temporary directory.
     * @throws IgniteCheckedException If the directory could not be created.
     */
    private static File getUserSpecificTempDir() throws IgniteCheckedException {
        String tmp = System.getProperty("java.io.tmpdir");

        String userName = System.getProperty("user.name");

        File tmpDir = new File(tmp, userName);

        if (!tmpDir.exists())
            //noinspection ResultOfMethodCallIgnored
            tmpDir.mkdirs();

        if (!(tmpDir.exists() && tmpDir.isDirectory()))
            throw new IgniteCheckedException("Failed to create temporary directory [dir=" + tmpDir + ']');

        return tmpDir;
    }

    /**
     * @return OS resource path.
     */
    private static String osSpecificResourcePath() {
        return "META-INF/native/" + os() + "/" + mapLibraryName(LIB_NAME_BASE);
    }

    /**
     * @return Platform resource path.
     */
    private static String platformSpecificResourcePath() {
        return "META-INF/native/" + platform() + "/" + mapLibraryName(LIB_NAME_BASE);
    }

    /**
     * @return Resource path.
     */
    private static String resourcePath() {
        return "META-INF/native/" + mapLibraryName(LIB_NAME_BASE);
    }

    /**
     * Maps library name to file name, normalizing the legacy Mac OS {@code .jnilib}
     * suffix to {@code .dylib}.
     *
     * @param name Library base name.
     * @return Platform-specific library file name.
     */
    private static String mapLibraryName(String name) {
        String libName = System.mapLibraryName(name);

        if (U.isMacOs() && libName.endsWith(".jnilib"))
            return libName.substring(0, libName.length() - "jnilib".length()) + "dylib";

        return libName;
    }

    /**
     * Extracts the library from a classpath resource and loads it.
     *
     * @param errs Errors collection.
     * @param tmpDir Directory to extract into.
     * @param rsrcPath Path.
     * @return {@code True} if library was found and loaded.
     */
    private static boolean extractAndLoad(Collection<Throwable> errs, File tmpDir, String rsrcPath) {
        ClassLoader clsLdr = U.detectClassLoader(IpcSharedMemoryNativeLoader.class);

        URL rsrc = clsLdr.getResource(rsrcPath);

        if (rsrc != null)
            return extract(errs, rsrc, new File(tmpDir, mapLibraryName(LIB_NAME)));
        else {
            errs.add(new IllegalStateException("Failed to find resource with specified class loader " +
                "[rsrc=" + rsrcPath + ", clsLdr=" + clsLdr + ']'));

            return false;
        }
    }

    /**
     * Extracts the library from a jar entry and loads it.
     *
     * @param errs Errors collection.
     * @param jar Jar file to read from.
     * @param tmpDir Directory to extract into.
     * @param rsrcPath Path.
     * @return {@code True} if library was found and loaded.
     */
    private static boolean extractAndLoad(Collection<Throwable> errs, JarFile jar, File tmpDir, String rsrcPath) {
        ZipEntry rsrc = jar.getEntry(rsrcPath);

        if (rsrc != null)
            return extract(errs, rsrc, jar, new File(tmpDir, mapLibraryName(LIB_NAME)));
        else {
            errs.add(new IllegalStateException("Failed to find resource within specified jar file " +
                "[rsrc=" + rsrcPath + ", jar=" + jar.getName() + ']'));

            return false;
        }
    }

    /**
     * Copies the resource to the target file (skipping the copy when the MD5 sums
     * already match), makes it executable, and loads it.
     *
     * @param errs Errors collection.
     * @param src Source.
     * @param target Target.
     * @return {@code True} if resource was found and loaded.
     */
    private static boolean extract(Collection<Throwable> errs, URL src, File target) {
        FileOutputStream os = null;
        InputStream is = null;

        try {
            if (!target.exists() || !haveEqualMD5(target, src.openStream())) {
                is = src.openStream();

                if (is != null) {
                    os = new FileOutputStream(target);

                    int read;
                    byte[] buf = new byte[4096];

                    while ((read = is.read(buf)) != -1)
                        os.write(buf, 0, read);
                }
            }

            // chmod 775.
            if (!U.isWindows())
                Runtime.getRuntime().exec(new String[] {"chmod", "775", target.getCanonicalPath()}).waitFor();

            System.load(target.getPath());

            return true;
        }
        catch (InterruptedException e) {
            // Restore the interrupt status so callers can still observe the interruption.
            Thread.currentThread().interrupt();

            errs.add(e);
        }
        catch (IOException | UnsatisfiedLinkError | NoSuchAlgorithmException e) {
            errs.add(e);
        }
        finally {
            U.closeQuiet(os);
            U.closeQuiet(is);
        }

        return false;
    }

    /**
     * Copies the jar entry to the target file (skipping the copy when the MD5 sums
     * already match), makes it executable, and loads it.
     *
     * @param errs Errors collection.
     * @param src Source.
     * @param jar Jar file containing the entry.
     * @param target Target.
     * @return {@code True} if resource was found and loaded.
     */
    private static boolean extract(Collection<Throwable> errs, ZipEntry src, JarFile jar, File target) {
        FileOutputStream os = null;
        InputStream is = null;

        try {
            if (!target.exists() || !haveEqualMD5(target, jar.getInputStream(src))) {
                is = jar.getInputStream(src);

                if (is != null) {
                    os = new FileOutputStream(target);

                    int read;
                    byte[] buf = new byte[4096];

                    while ((read = is.read(buf)) != -1)
                        os.write(buf, 0, read);
                }
            }

            // chmod 775.
            if (!U.isWindows())
                Runtime.getRuntime().exec(new String[] {"chmod", "775", target.getCanonicalPath()}).waitFor();

            System.load(target.getPath());

            return true;
        }
        catch (InterruptedException e) {
            // Restore the interrupt status so callers can still observe the interruption.
            Thread.currentThread().interrupt();

            errs.add(e);
        }
        catch (IOException | UnsatisfiedLinkError | NoSuchAlgorithmException e) {
            errs.add(e);
        }
        finally {
            U.closeQuiet(os);
            U.closeQuiet(is);
        }

        return false;
    }

    /**
     * @param target Target.
     * @param srcIS Source input stream (always closed by this method).
     * @return {@code True} if target md5-sum equal to source md5-sum.
     * @throws NoSuchAlgorithmException If md5 algorithm was not found.
     * @throws IOException If an I/O exception occurs.
     */
    private static boolean haveEqualMD5(File target, InputStream srcIS) throws NoSuchAlgorithmException, IOException {
        try {
            try (InputStream targetIS = new FileInputStream(target)) {
                String targetMD5 = U.calculateMD5(targetIS);
                String srcMD5 = U.calculateMD5(srcIS);

                return targetMD5.equals(srcMD5);
            }
        }
        finally {
            srcIS.close();
        }
    }
}
/******************************************************************************* * Copyright (c) 2000, 2011 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation * Mateusz Matela <mateusz.matela@gmail.com> - [code manipulation] [dcr] toString() builder wizard - https://bugs.eclipse.org/bugs/show_bug.cgi?id=26070 *******************************************************************************/ package org.eclipse.jdt.internal.ui.actions; import org.eclipse.osgi.util.NLS; public final class ActionMessages extends NLS { private static final String BUNDLE_NAME= "org.eclipse.jdt.internal.ui.actions.ActionMessages";//$NON-NLS-1$ private ActionMessages() { // Do not instantiate } public static String ActionUtil_warning_derived_dontShowAgain; public static String ActionUtil_warning_derived_message; public static String ActionUtil_warning_derived_title; public static String AddDelegateMethodsAction_not_in_source_file; public static String AddDelegateMethodsAction_template_link_message; public static String AddDelegateMethodsAction_template_link_tooltip; public static String AddGetterSetterAction_allow_setters_for_finals_description; public static String AddGetterSetterAction_error_not_in_source_file; public static String AddGetterSetterAction_template_link_description; public static String AddGetterSetterAction_template_link_tooltip; public static String AddUnimplementedConstructorsAction_template_link_message; public static String AddUnimplementedConstructorsAction_template_link_tooltip; public static String GenerateConstructorUsingFieldsSelectionDialog_template_link_message; public static String GenerateConstructorUsingFieldsSelectionDialog_template_link_tooltip; public static String 
CopyQualifiedNameAction_ActionName; public static String CopyQualifiedNameAction_ErrorDescription; public static String CopyQualifiedNameAction_ErrorTitle; public static String CopyQualifiedNameAction_InfoDialogTitel; public static String CopyQualifiedNameAction_NoElementToQualify; public static String CopyQualifiedNameAction_ToolTipText; public static String FindBreakContinueTargetOccurrencesAction_label; public static String FindBreakContinueTargetOccurrencesAction_tooltip; public static String FindNLSProblemsAction_Description; public static String FindNLSProblemsAction_ErrorDialogTitle; public static String FindNLSProblemsAction_Name; public static String FindNLSProblemsAction_NoPropertieFilesFoundErrorDescription; public static String FindNLSProblemsAction_ToolTip; public static String GenerateNewConstructorUsingFieldsAction_error_not_a_source_file; public static String IntroduceParameterObjectAction_action_description; public static String IntroduceParameterObjectAction_action_text; public static String IntroduceParameterObjectAction_action_tooltip; public static String IntroduceParameterObjectAction_can_not_run_refactoring_message; public static String IntroduceParameterObjectAction_exceptiondialog_title; public static String IntroduceParameterObjectAction_unexpected_exception; public static String OccurrencesSearchMenuAction_break_continue_target_label; public static String OccurrencesSearchMenuAction_implementing_methods_label; public static String OccurrencesSearchMenuAction_method_exits_label; public static String OccurrencesSearchMenuAction_no_entries_available; public static String OccurrencesSearchMenuAction_occurrences_in_file_label; public static String OccurrencesSearchMenuAction_throwing_exception_label; public static String OpenAction_error_problem_opening_editor; public static String OpenAction_multistatus_message; public static String OpenViewActionGroup_showInAction_label; public static String OpenWithMenu_label; public static String 
RefactorMenu_label; public static String SourceMenu_label; public static String BuildPath_label; public static String BuildAction_label; public static String SelectionConverter_codeResolve_failed; public static String OpenAction_label; public static String OpenAction_tooltip; public static String OpenAction_description; public static String OpenAction_declaration_label; public static String OpenAction_select_element; public static String OpenAction_error_title; public static String OpenAction_error_message; public static String OpenAction_error_messageBadSelection; public static String OpenSuperImplementationAction_label; public static String OpenSuperImplementationAction_tooltip; public static String OpenSuperImplementationAction_description; public static String OpenSuperImplementationAction_error_title; public static String OpenSuperImplementationAction_error_message; public static String OpenSuperImplementationAction_not_applicable; public static String OpenSuperImplementationAction_no_super_implementation; public static String OpenImplementationAction_label; public static String OpenImplementationAction_tooltip; public static String OpenImplementationAction_description; public static String OpenImplementationAction_error_title; public static String OpenImplementationAction_not_applicable; public static String OpenTypeHierarchyAction_label; public static String OpenTypeHierarchyAction_tooltip; public static String OpenTypeHierarchyAction_description; public static String OpenTypeHierarchyAction_dialog_title; public static String OpenTypeHierarchyAction_messages_title; public static String OpenTypeHierarchyAction_messages_no_java_elements; public static String OpenTypeHierarchyAction_messages_no_valid_java_element; public static String ShowInPackageViewAction_label; public static String ShowInPackageViewAction_description; public static String ShowInPackageViewAction_tooltip; public static String ShowInPackageViewAction_dialog_title; public static String 
ShowInPackageViewAction_error_message; public static String ShowInNavigatorView_label; public static String ShowInNavigatorView_dialog_title; public static String ShowInNavigatorView_dialog_message; public static String ShowInNavigatorView_error_activation_failed; public static String OverrideMethodsAction_label; public static String OverrideMethodsAction_description; public static String OverrideMethodsAction_tooltip; public static String OverrideMethodsAction_error_actionfailed; public static String OverrideMethodsAction_error_title; public static String OverrideMethodsAction_error_nothing_found; public static String OverrideMethodsAction_not_applicable; public static String OverrideMethodsAction_interface_not_applicable; public static String OverrideMethodsAction_annotation_not_applicable; public static String CleanUpAction_label; public static String CleanUpAction_labelWizard; public static String CleanUpAction_tooltip; public static String CleanUpAction_description; public static String CleanUpAction_MultiStateErrorTitle; public static String CleanUpAction_UnexpectedErrorMessage; public static String CleanUpAction_CUNotOnBuildpathMessage; public static String CleanUpAction_EmptySelection_description; public static String CleanUpAction_actionName; public static String AddGetterSetterAction_no_primary_type_title; public static String AddGetterSetterAction_no_primary_type_message; public static String AddGetterSetterAction_label; public static String AddGetterSetterAction_description; public static String AddGetterSetterAction_tooltip; public static String AddGetterSetterAction_error_duplicate_methods_singular; public static String AddGetterSetterAction_error_duplicate_methods_plural; public static String AddGetterSetterAction_error_title; public static String AddGetterSetterAction_error_actionfailed; public static String AddGetterSetterAction_not_applicable; public static String AddGetterSetterAction_interface_not_applicable; public static String 
AddGetterSetterAction_annotation_not_applicable; public static String AddGetterSetterAction_QueryDialog_title; public static String AddGetterSetterAction_SkipExistingDialog_message; public static String AddGetterSetterAction_SkipExistingDialog_skip_label; public static String AddGetterSetterAction_SkipExistingDialog_replace_label; public static String AddGetterSetterAction_SkipExistingDialog_skipAll_label; public static String AddGetterSetterAction_dialog_label; public static String AddGetterSetterAction_methods_selected; public static String AddGettSetterAction_typeContainsNoFields_message; public static String GenerateHashCodeEqualsAction_error_caption; public static String GenerateMethodAbstractAction_error_not_applicable; public static String GenerateMethodAbstractAction_error_removed_type; public static String GenerateMethodAbstractAction_error_cannot_create; public static String GenerateHashCodeEqualsAction_label; public static String GenerateHashCodeEqualsAction_description; public static String GenerateHashCodeEqualsAction_tooltip; public static String GenerateMethodAbstractAction_annotation_not_applicable; public static String GenerateMethodAbstractAction_interface_not_applicable; public static String GenerateMethodAbstractAction_enum_not_applicable; public static String GenerateMethodAbstractAction_anonymous_type_not_applicable; public static String GenerateHashCodeEqualsAction_no_nonstatic_fields_error; public static String GenerateHashCodeEqualsAction_transient_field_included_error; public static String GenerateHashCodeEqualsAction_type_does_not_implement_hashCode_equals_error; public static String GenerateHashCodeEqualsAction_interface_does_not_declare_hashCode_equals_error; public static String GenerateMethodAbstractAction_final_method_in_superclass_error; public static String GenerateMethodAbstractAction_already_has_this_method_error; public static String GenerateMethodAbstractAction_super_class; public static String 
GenerateHashCodeEqualsAction_field_type; public static String GenerateHashCodeEqualsAction_equals; public static String GenerateHashCodeEqualsAction_hashCode; public static String GenerateHashCodeEqualsAction_hashcode_or_equals; public static String GenerateHashCodeEqualsAction_equals_and_hashCode; public static String GetterSetterTreeSelectionDialog_select_getters; public static String GetterSetterTreeSelectionDialog_select_setters; public static String GetterSetterTreeSelectionDialog_alpha_pair_sort; public static String GetterSetterTreeSelectionDialog_alpha_method_sort; public static String GetterSetterTreeSelectionDialog_sort_label; public static String SourceActionDialog_enterAt_label; public static String SourceActionDialog_modifier_group; public static String SourceActionDialog_modifier_public; public static String SourceActionDialog_modifier_protected; public static String SourceActionDialog_modifier_default; public static String SourceActionDialog_modifier_private; public static String SourceActionDialog_modifier_synchronized; public static String SourceActionDialog_modifier_final; public static String SourceActionDialog_first; public static String SourceActionDialog_last; public static String SourceActionDialog_after; public static String SourceActionDialog_createMethodComment; public static String SourceActionDialog_no_entries; public static String SourceActionDialog_createConstructorComment; public static String AddUnimplementedConstructorsAction_label; public static String AddUnimplementedConstructorsAction_description; public static String AddUnimplementedConstructorsAction_tooltip; public static String AddUnimplementedConstructorsAction_error_title; public static String AddUnimplementedConstructorsAction_not_applicable; public static String AddUnimplementedConstructorsAction_interface_not_applicable; public static String AddUnimplementedConstructorsAction_enum_not_applicable; public static String 
AddUnimplementedConstructorsAction_annotation_not_applicable; public static String AddUnimplementedConstructorsAction_methods_selected; public static String AddUnimplementedConstructorsAction_error_nothing_found; public static String AddUnimplementedConstructorsAction_dialog_title; public static String AddUnimplementedConstructorsAction_dialog_label; public static String AddUnimplementedConstructorsDialog_omit_super; public static String GenerateConstructorUsingFieldsAction_label; public static String GenerateConstructorUsingFieldsAction_description; public static String GenerateConstructorUsingFieldsAction_tooltip; public static String GenerateConstructorUsingFieldsAction_error_title; public static String GenerateConstructorUsingFieldsAction_not_applicable; public static String GenerateConstructorUsingFieldsAction_fields_selected; public static String GenerateConstructorUsingFieldsAction_error_duplicate_constructor; public static String GenerateConstructorUsingFieldsAction_error_nothing_found; public static String GenerateConstructorUsingFieldsAction_dialog_title; public static String GenerateConstructorUsingFieldsAction_dialog_label; public static String GenerateConstructorUsingFieldsAction_interface_not_applicable; public static String GenerateConstructorUsingFieldsAction_enum_not_applicable; public static String GenerateConstructorUsingFieldsAction_annotation_not_applicable; public static String GenerateConstructorUsingFieldsAction_typeContainsNoFields_message; public static String GenerateConstructorUsingFieldsAction_error_actionfailed; public static String GenerateConstructorUsingFieldsSelectionDialog_up_button; public static String GenerateConstructorUsingFieldsSelectionDialog_down_button; public static String GenerateConstructorUsingFieldsSelectionDialog_sort_constructor_choices_label; public static String GenerateConstructorUsingFieldsSelectionDialog_omit_super; public static String GenerateConstructorUsingFieldsAction_error_anonymous_class; public static 
String AddJavaDocStubAction_label; public static String AddJavaDocStubAction_description; public static String AddJavaDocStubAction_tooltip; public static String AddJavaDocStubsAction_error_dialogTitle; public static String AddJavaDocStubsAction_error_actionFailed; public static String AddJavaDocStubsAction_not_applicable; public static String ExternalizeStringsAction_label; public static String ExternalizeStringsAction_dialog_title; public static String FindStringsToExternalizeAction_label; public static String FindStringsToExternalizeAction_dialog_title; public static String FindStringsToExternalizeAction_error_message; public static String FindStringsToExternalizeAction_error_cannotBeParsed; public static String FindStringsToExternalizeAction_foundStrings; public static String FindStringsToExternalizeAction_noStrings; public static String FindStringsToExternalizeAction_non_externalized_singular; public static String FindStringsToExternalizeAction_non_externalized_plural; public static String FindStringsToExternalizeAction_button_label; public static String FindStringsToExternalizeAction_find_strings; public static String OpenExternalJavadocAction_label; public static String OpenExternalJavadocAction_description; public static String OpenExternalJavadocAction_tooltip; public static String OpenAttachedJavadocAction_label; public static String OpenAttachedJavadocAction_description; public static String OpenAttachedJavadocAction_tooltip; public static String OpenAttachedJavadocAction_select_element; public static String OpenAttachedJavadocAction_libraries_no_location; public static String OpenAttachedJavadocAction_source_no_location; public static String OpenAttachedJavadocAction_opening_failed; public static String OpenAttachedJavadocAction_dialog_title; public static String OpenAttachedJavadocAction_code_resolve_failed; public static String SelfEncapsulateFieldAction_label; public static String SelfEncapsulateFieldAction_dialog_title; public static String 
SelfEncapsulateFieldAction_dialog_unavailable; public static String SelfEncapsulateFieldAction_dialog_cannot_perform; public static String OrganizeImportsAction_label; public static String OrganizeImportsAction_tooltip; public static String OrganizeImportsAction_description; public static String OrganizeImportsAction_multi_error_parse; public static String OrganizeImportsAction_multi_error_unresolvable; public static String OrganizeImportsAction_selectiondialog_title; public static String OrganizeImportsAction_selectiondialog_message; public static String OrganizeImportsAction_error_title; public static String OrganizeImportsAction_error_message; public static String OrganizeImportsAction_single_error_parse; /** * DO NOT REMOVE, used in a product, see https://bugs.eclipse.org/296836 . * @deprecated As of 3.6, replaced by {@link #OrganizeImportsAction_summary_added_singular} and {@link #OrganizeImportsAction_summary_added_plural} */ public static String OrganizeImportsAction_summary_added; /** * DO NOT REMOVE, used in a product, see https://bugs.eclipse.org/296836 . 
* @deprecated As of 3.6, replaced by {@link #OrganizeImportsAction_summary_removed_singular} and {@link #OrganizeImportsAction_summary_removed_plural} */ public static String OrganizeImportsAction_summary_removed; public static String OrganizeImportsAction_summary_added_singular; public static String OrganizeImportsAction_summary_added_plural; public static String OrganizeImportsAction_summary_removed_singular; public static String OrganizeImportsAction_summary_removed_plural; public static String OrganizeImportsAction_EmptySelection_description; public static String OrganizeImportsAction_EmptySelection_title; public static String FormatAllAction_label; public static String FormatAllAction_tooltip; public static String FormatAllAction_description; public static String SortMembersAction_label; public static String SortMembersAction_tooltip; public static String SortMembersAction_description; public static String SortMembersAction_not_applicable; public static String SortMembersAction_containsmarkers; public static String SortMembersAction_dialog_title; public static String MemberFilterActionGroup_hide_fields_label; public static String MemberFilterActionGroup_hide_fields_tooltip; public static String MemberFilterActionGroup_hide_fields_description; public static String MemberFilterActionGroup_hide_static_label; public static String MemberFilterActionGroup_hide_static_tooltip; public static String MemberFilterActionGroup_hide_static_description; public static String MemberFilterActionGroup_hide_nonpublic_label; public static String MemberFilterActionGroup_hide_nonpublic_tooltip; public static String MemberFilterActionGroup_hide_nonpublic_description; public static String MemberFilterActionGroup_hide_localtypes_label; public static String MemberFilterActionGroup_hide_localtypes_tooltip; public static String MemberFilterActionGroup_hide_localtypes_description; public static String NewWizardsActionGroup_new; public static String OpenProjectAction_dialog_title; public 
static String OpenProjectAction_dialog_message; public static String OpenProjectAction_error_message; public static String OpenJavaPerspectiveAction_dialog_title; public static String OpenJavaPerspectiveAction_error_open_failed; public static String OpenJavaBrowsingPerspectiveAction_dialog_title; public static String OpenJavaBrowsingPerspectiveAction_error_open_failed; public static String OpenTypeInHierarchyAction_label; public static String OpenTypeInHierarchyAction_description; public static String OpenTypeInHierarchyAction_tooltip; public static String OpenTypeInHierarchyAction_dialogMessage; public static String OpenTypeInHierarchyAction_dialogTitle; public static String RefreshAction_label; public static String RefreshAction_toolTip; public static String RefreshAction_progressMessage; public static String RefreshAction_error_workbenchaction_message; public static String RefreshAction_refresh_operation_label; public static String ModifyParameterAction_problem_title; public static String ModifyParameterAction_problem_message; public static String MultiSortMembersAction_noElementsToSortDialog_message; public static String MultiSortMembersAction_noElementsToSortDialog_title; public static String ActionUtil_notOnBuildPath_title; public static String ActionUtil_notOnBuildPath_message; public static String ActionUtil_notOnBuildPath_resource_message; public static String ActionUtil_not_possible; public static String ActionUtil_no_linked; public static String SelectAllAction_label; public static String SelectAllAction_tooltip; public static String AddToClasspathAction_label; public static String AddToClasspathAction_toolTip; public static String AddToClasspathAction_progressMessage; public static String AddToClasspathAction_error_title; public static String AddToClasspathAction_error_message; public static String RemoveFromClasspathAction_Remove; public static String RemoveFromClasspathAction_tooltip; public static String RemoveFromClasspathAction_Removing; public 
static String RemoveFromClasspathAction_exception_dialog_title; public static String RemoveFromClasspathAction_Problems_occurred; public static String AddDelegateMethodsAction_error_title; public static String AddDelegateMethodsAction_error_actionfailed; public static String AddDelegateMethodsAction_label; public static String AddDelegateMethodsAction_description; public static String AddDelegateMethodsAction_tooltip; public static String AddDelegateMethodsAction_not_applicable; public static String AddDelegateMethodsAction_annotation_not_applicable; public static String AddDelegateMethodsAction_interface_not_applicable; public static String AddDelegateMethodsAction_duplicate_methods_singular; public static String AddDelegateMethodsAction_duplicate_methods_plural; public static String AddDelegateMethodsAction_title; public static String AddDelegateMethodsAction_message; public static String AddDelegateMethodsAction_selectioninfo_more; public static String SurroundWithTemplateMenuAction_ConfigureTemplatesActionName; public static String SurroundWithTemplateMenuAction_NoneApplicable; public static String SurroundWithTemplateMenuAction_SurroundWithTemplateSubMenuName; public static String SurroundWithTemplateMenuAction_SurroundWithTryCatchActionName; public static String SurroundWithTemplateMenuAction_SurroundWithTryMultiCatchActionName; public static String ToggleLinkingAction_label; public static String ToggleLinkingAction_tooltip; public static String ToggleLinkingAction_description; public static String ConfigureContainerAction_error_title; public static String ConfigureContainerAction_error_creationfailed_message; public static String ConfigureContainerAction_error_applyingfailed_message; public static String FindExceptionOccurrences_text; public static String FindExceptionOccurrences_toolTip; public static String FindImplementOccurrencesAction_text; public static String FindImplementOccurrencesAction_toolTip; public static String 
FindMethodExitOccurrencesAction_label; public static String FindMethodExitOccurrencesAction_tooltip; public static String CategoryFilterActionGroup_JavaCategoryFilter_title; public static String CategoryFilterActionGroup_SelectAllCategories; public static String CategoryFilterActionGroup_DeselectAllCategories; public static String CategoryFilterActionGroup_SelectCategoriesDescription; public static String CategoryFilterActionGroup_ShowCategoriesActionDescription; public static String CategoryFilterActionGroup_ShowCategoriesToolTip; public static String CategoryFilterActionGroup_ShowCategoriesLabel; public static String CategoryFilterActionGroup_ShowUncategorizedMembers; static { NLS.initializeMessages(BUNDLE_NAME, ActionMessages.class); } public static String OpenNewAnnotationWizardAction_text; public static String OpenNewAnnotationWizardAction_description; public static String OpenNewAnnotationWizardAction_tooltip; public static String OpenNewClassWizardAction_text; public static String OpenNewClassWizardAction_description; public static String OpenNewClassWizardAction_tooltip; public static String OpenNewEnumWizardAction_text; public static String OpenNewEnumWizardAction_description; public static String OpenNewEnumWizardAction_tooltip; public static String OpenNewInterfaceWizardAction_text; public static String OpenNewInterfaceWizardAction_description; public static String OpenNewInterfaceWizardAction_tooltip; public static String OpenNewJavaProjectWizardAction_text; public static String OpenNewJavaProjectWizardAction_description; public static String OpenNewJavaProjectWizardAction_tooltip; public static String OpenNewPackageWizardAction_text; public static String OpenNewPackageWizardAction_description; public static String OpenNewPackageWizardAction_tooltip; public static String OpenNewSourceFolderWizardAction_text; public static String OpenNewSourceFolderWizardAction_text2; public static String OpenNewSourceFolderWizardAction_description; public static String 
OpenNewSourceFolderWizardAction_tooltip; public static String GenerateBuildPathActionGroup_update_jar_text; public static String GenerateBuildPathActionGroup_update_jar_description; public static String GenerateBuildPathActionGroup_update_jar_tooltip; public static String CollapsAllAction_label; public static String CollapsAllAction_tooltip; public static String CollapsAllAction_description; public static String GenerateToStringAction_label; public static String GenerateToStringAction_description; public static String GenerateToStringAction_tooltip; public static String GenerateToStringAction_tostring; public static String GenerateToStringAction_error_caption; }
package com.thesaka.ralarm.activity; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.ServiceConnection; import android.content.res.Configuration; import android.graphics.Color; import android.os.Bundle; import android.os.IBinder; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.NumberPicker; import android.widget.TextView; import com.thesaka.ralarm.R; import com.thesaka.ralarm.service.AlarmLocalService; import com.thesaka.ralarm.service.LocalAlarmBinder; import com.thesaka.ralarm.util.Logger; import com.thesaka.ralarm.view.CustomNumberPicker; public class AlarmActivity extends Activity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Logger.debug(AlarmActivity.class, "onCreate()"); initUI(); } @Override protected void onStart() { super.onStart(); Logger.debug(AlarmActivity.class, "onStart()"); if(mService == null){ startAndBindService(); } } @Override protected void onResume() { super.onResume(); Logger.debug(AlarmActivity.class, "onResume()"); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); Logger.debug(AlarmActivity.class, "onConfigurationChanged()"); } @Override protected void onPause() { super.onPause(); Logger.debug(AlarmActivity.class, "onPause()"); } @Override protected void onStop() { super.onStop(); Logger.debug(AlarmActivity.class, "onStop()"); if(mService != null){ unbindService(mServiceConnection); mService = null; } try { unregisterForLocalServiceBroadcast(); } catch (Exception e) {} } @Override protected void onDestroy() { super.onDestroy(); Logger.debug(AlarmActivity.class, "onDestroy()"); } private void initUI(){ setContentView(R.layout.main_activity); mStartBtn = (Button)findViewById(R.id.startBtn); mStopBtn = 
(Button)findViewById(R.id.stopBtn); mStartBtn.setOnClickListener(mStartBtnListener); mStopBtn.setOnClickListener(mStopBtnListener); mNumberPicker = (CustomNumberPicker)findViewById(R.id.numberPicker); mNumberPicker.setDescendantFocusability(NumberPicker.FOCUS_BLOCK_DESCENDANTS); mNumberPicker.setMaxValue(60); mNumberPicker.setMinValue(1); mNumberPicker.setWrapSelectorWheel(true); } private void restoreUI(){ if(mService != null){ if(mService.isAlarmRunning()){ setEnabledBtn(mStartBtn, false); setEnabledBtn(mStopBtn, true); setAlarmInterval(mService.getAlarmInterval()); mNumberPicker.setEnabled(false); }else{ setEnabledBtn(mStartBtn, true); setEnabledBtn(mStopBtn, false); } } } private void startAndBindService(){ Logger.debug(AlarmActivity.class, "Starting and Binding with AlarmLocalService"); Intent intent = new Intent(); intent.setClass(AlarmActivity.this, AlarmLocalService.class); startService(intent); bindService(intent, mServiceConnection, Context.BIND_AUTO_CREATE | Context.BIND_ABOVE_CLIENT); } private void registerLocalServiceBroadcast() { if(!mLocalReceiverRegistered){ Logger.debug(AlarmActivity.class, "Registering AlarmLocalServ broadcast rx"); mService.registerLocalServiceBroadcast(mReceiver); mLocalReceiverRegistered = true; } } private void unregisterForLocalServiceBroadcast() { Logger.debug(AlarmActivity.class, "Un-Registering AlarmLocalServ broadcast rx"); mService.unregisterLocalServiceBroadcast(); mLocalReceiverRegistered = false; } private boolean handleIntent(Intent intent){ boolean handleResult = true; String action = intent.getAction(); if(null == action){ Logger.error(AlarmActivity.class,"Invalid intent action"); return false; } if(AlarmLocalService.ACTION_ALARM_SET.equals(action)){ Logger.debug(AlarmActivity.class, "Received Alarm set broadcast"); setEnabledBtn(mStartBtn, false); setEnabledBtn(mStopBtn, true); mNumberPicker.setEnabled(false); }else if(AlarmLocalService.ACTION_ALARM_CANCEL.equals(action)){ Logger.debug(AlarmActivity.class, 
"Received Alarm cancelled broadcast"); setEnabledBtn(mStartBtn, true); setEnabledBtn(mStopBtn, false); mNumberPicker.setEnabled(true); }else if(AlarmLocalService.ACTION_ALARM_RING.equals(action)){ Logger.debug(AlarmActivity.class, "Received Alarm ring broadcast"); }else if(AlarmLocalService.ACTION_ALARM_TRIGGERED.equals(action)){ Logger.debug(AlarmActivity.class, "Received Alarm triggered broadcast"); } return handleResult; } private void setEnabledBtn(Button button, boolean enabled){ if(enabled){ button.setEnabled(true); button.setTextColor(Color.BLACK); }else{ button.setEnabled(false); button.setTextColor(Color.GRAY); } } private OnClickListener mStartBtnListener = new OnClickListener() { @Override public void onClick(View v) { if(mService != null) mService.setAlarmAfterXmins(getAlarmInterval()); else startAndBindService(); } }; private OnClickListener mStopBtnListener = new OnClickListener() { @Override public void onClick(View v) { if(mService != null) mService.cancelAlarm(); else startAndBindService(); } }; private int getAlarmInterval(){ return mNumberPicker.getValue(); } private void setAlarmInterval(int val){ mNumberPicker.setValue(val); } private ServiceConnection mServiceConnection = new ServiceConnection() { @Override public void onServiceDisconnected(ComponentName name) { Logger.debug(AlarmActivity.class, "ServiceConnection:onServiceDisconnected()"); mService = null; unregisterForLocalServiceBroadcast(); restoreUI(); } @Override public void onServiceConnected(ComponentName name, IBinder service) { Logger.debug(AlarmActivity.class, "ServiceConnection:onServiceConnected()"); mService = ((LocalAlarmBinder)service).getService(); registerLocalServiceBroadcast(); restoreUI(); } }; private BroadcastReceiver mReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { Logger.debug(AlarmActivity.class, "Received broadcast message"); if(null == intent){ Logger.error(AlarmActivity.class,"mLocalServiceReceiver.onReceive() 
invalid intent"); return; } if(AlarmLocalService.ACTION_LOCAL_SERVICE_BROADCAST.equals(intent.getAction())){ Logger.debug(AlarmActivity.class, "Received local service broadcast message"); Intent localServiceIntent = intent.getParcelableExtra(AlarmLocalService.EXTRA_LOCAL_SERVICE_INTENT); if(null != localServiceIntent){ handleIntent(localServiceIntent); }else{ Logger.error(AlarmActivity.class,"mLocalServiceReceiver.onReceive() missing local service intent"); } } } }; private Button mStartBtn; private Button mStopBtn; private CustomNumberPicker mNumberPicker; private AlarmLocalService mService; private boolean mLocalReceiverRegistered; }
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ui.picker;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.ui.ColorPicker;
import com.intellij.ui.ColorUtil;
import com.intellij.ui.Gray;
import com.intellij.ui.mac.foundation.Foundation;
import com.intellij.ui.mac.foundation.FoundationLibrary;
import com.intellij.ui.mac.foundation.ID;
import com.intellij.ui.mac.foundation.MacUtil;
import com.intellij.util.BitUtil;
import com.intellij.util.ui.UIUtil;
import com.sun.jna.Native;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.imageio.ImageIO;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.geom.Ellipse2D;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.nio.ByteBuffer;

/**
 * macOS screen-color "pipette" for the IDE color picker: shows a round
 * magnifier dialog that follows the mouse, renders a zoomed PIXELS x PIXELS
 * capture of the screen around the cursor, and reports the color under the
 * cursor to the {@link ColorListener}.
 *
 * <p>Screen capture goes through the Objective-C bridge
 * ({@link #captureScreen}) rather than {@link Robot#createScreenCapture}
 * so the capture can exclude the pipette's own (below-window) dialog.
 */
public class MacColorPipette extends ColorPipetteBase {
  private static final Logger LOG = Logger.getInstance(MacColorPipette.class);

  // Magnifier geometry: a PIXELS x PIXELS screen patch, each screen pixel
  // blown up to a ZOOM x ZOOM cell, giving a SIZE x SIZE magnifier.
  private static final int PIXELS = 17;
  private static final int ZOOM = 10;
  private static final int SIZE = PIXELS * ZOOM;

  // Fully transparent dialog background so only the round magnifier shows.
  @SuppressWarnings("UseJBColor")
  private final Color myTransparentColor = new Color(0, true);
  // Reused capture bounds; repositioned around the cursor on every paint.
  private final Rectangle myCaptureRect = new Rectangle(0, 0, PIXELS, PIXELS);

  public MacColorPipette(@NotNull ColorPicker picker, @NotNull ColorListener listener) {
    super(picker, listener);
  }

  /**
   * Lazily builds the magnifier dialog on top of the base-class dialog:
   * adds arrow-key nudging of the mouse cursor (Shift = 10px steps) and a
   * custom-painted label that draws the zoomed capture, grid, center-pixel
   * marker, RGB readout and border on each repaint.
   */
  @NotNull
  @Override
  protected Dialog getOrCreatePickerDialog() {
    Dialog pickerDialog = getPickerDialog();
    if (pickerDialog == null) {
      pickerDialog = super.getOrCreatePickerDialog();
      pickerDialog.addKeyListener(new KeyAdapter() {
        @Override
        public void keyPressed(KeyEvent event) {
          super.keyPressed(event);
          // Shift accelerates cursor nudging from 1px to 10px per key press.
          int diff = BitUtil.isSet(event.getModifiers(), Event.SHIFT_MASK) ? 10 : 1;
          Point location = updateLocation();
          if (location != null) {
            switch (event.getKeyCode()) {
              case KeyEvent.VK_DOWN:
                myRobot.mouseMove(location.x, location.y + diff);
                break;
              case KeyEvent.VK_UP:
                myRobot.mouseMove(location.x, location.y - diff);
                break;
              case KeyEvent.VK_LEFT:
                myRobot.mouseMove(location.x - diff, location.y);
                break;
              case KeyEvent.VK_RIGHT:
                myRobot.mouseMove(location.x + diff, location.y);
                break;
            }
            // Re-sync the dialog position with the moved cursor.
            updateLocation();
          }
        }
      });
      final JLabel label = new JLabel() {
        @Override
        public void paint(Graphics g) {
          applyRenderingHints(g);
          Dialog pickerDialog = getPickerDialog();
          if (pickerDialog != null && pickerDialog.isShowing()) {
            Point mouseLoc = updateLocation();
            if (mouseLoc == null) return;
            final Color newColor = myRobot.getPixelColor(mouseLoc.x, mouseLoc.y);
            Graphics2D graphics2d = ((Graphics2D)g);
            Point offset = new Point(10, 10);
            //final int pixels = UIUtil.isRetina(graphics2d) ? PIXELS / 2 + 1 : PIXELS;
            // Center the capture rectangle on the cursor.
            int left = PIXELS / 2 + 1;
            myCaptureRect.setBounds(mouseLoc.x - left, mouseLoc.y - left, PIXELS, PIXELS);
            BufferedImage captureScreen = captureScreen(pickerDialog, myCaptureRect);
            // Wipe to full transparency, then draw the zoomed capture clipped
            // to a circle. Composite order matters: Clear -> Src -> SrcOver.
            graphics2d.setComposite(AlphaComposite.Clear);
            graphics2d.fillRect(0, 0, getWidth(), getHeight());
            graphics2d.setComposite(AlphaComposite.Src);
            graphics2d.clip(new Ellipse2D.Double(offset.x, offset.y, SIZE, SIZE));
            graphics2d.drawImage(captureScreen, offset.x, offset.y, SIZE, SIZE, this);
            // paint magnifier
            graphics2d.setComposite(AlphaComposite.SrcOver);
            drawPixelGrid(graphics2d, offset);
            drawCenterPixel(graphics2d, offset, newColor);
            drawCurrentColorRectangle(graphics2d, offset, newColor);
            // Reset the clip so the border circle isn't clipped by itself.
            graphics2d.setClip(0, 0, getWidth(), getHeight());
            drawMagnifierBorder(newColor, graphics2d, offset);
            // Continuous repaint keeps the magnifier tracking the cursor.
            pickerDialog.repaint();
            if (!newColor.equals(getColor())) {
              setColor(newColor);
              notifyListener(newColor, 300);
            }
          }
        }
      };
      pickerDialog.add(label);
      // 10px margin (see `offset`) on every side of the SIZE x SIZE magnifier.
      pickerDialog.setSize(SIZE + 20, SIZE + 20);
      pickerDialog.setBackground(myTransparentColor);
    }
    return pickerDialog;
  }

  /** Enables nearest-neighbor scaling (crisp zoomed pixels) plus antialiasing. */
  private static void applyRenderingHints(@NotNull Graphics graphics) {
    UIUtil.applyRenderingHints(graphics);
    if (graphics instanceof Graphics2D) {
      ((Graphics2D)graphics).setRenderingHint(RenderingHints.KEY_INTERPOLATION,
                                              RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR);
      ((Graphics2D)graphics).setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                                              RenderingHints.VALUE_ANTIALIAS_ON);
      ((Graphics2D)graphics).setRenderingHint(RenderingHints.KEY_STROKE_CONTROL,
                                              RenderingHints.VALUE_STROKE_PURE);
    }
  }

  /** Draws the translucent rounded readout strip with the current R, G, B values. */
  private static void drawCurrentColorRectangle(@NotNull Graphics2D graphics,
                                                @NotNull Point offset,
                                                @NotNull Color currentColor) {
    graphics.setColor(Gray._0.withAlpha(150));
    graphics.fillRoundRect(SIZE / 4 + offset.x, SIZE * 3 / 4 + offset.y, SIZE / 2, SIZE / 8, 10, 10);
    graphics.setColor(Gray._255);
    graphics.drawString(" " + currentColor.getRed(), offset.x + SIZE / 4, offset.y + SIZE * 3 / 4 + 15);
    graphics.drawString(" " + currentColor.getGreen(), offset.x + SIZE / 4 + SIZE / 2 / 3, offset.y + SIZE * 3 / 4 + 15);
    graphics.drawString(" " + currentColor.getBlue(), offset.x + SIZE / 4 + SIZE / 3, offset.y + SIZE * 3 / 4 + 15);
  }

  /** Outlines the center (picked) pixel, in a contrast color vs. the pixel itself. */
  private static void drawCenterPixel(@NotNull Graphics2D graphics,
                                      @NotNull Point offset,
                                      @NotNull Color currentColor) {
    graphics.setColor(ColorUtil.isDark(currentColor) ? Gray._255.withAlpha(150) : Gray._0.withAlpha(150));
    graphics.drawRect((SIZE - ZOOM) / 2 + offset.x, (SIZE - ZOOM) / 2 + offset.y, ZOOM, ZOOM);
  }

  /** Draws the faint grid separating the zoomed pixel cells. */
  private static void drawPixelGrid(@NotNull Graphics2D graphics, @NotNull Point offset) {
    graphics.setColor(Gray._0.withAlpha(10));
    for (int i = 0; i < PIXELS; i++) {
      int cellOffset = i * ZOOM;
      graphics.drawLine(cellOffset + offset.x, offset.y, cellOffset + offset.x, SIZE + offset.y);
      graphics.drawLine(offset.x, cellOffset + offset.y, SIZE + offset.x, cellOffset + offset.y);
    }
  }

  /** Draws the thick circular border, tinted darker than the picked color. */
  private static void drawMagnifierBorder(@NotNull Color currentColor,
                                          @NotNull Graphics2D graphics,
                                          @NotNull Point offset) {
    graphics.setColor(currentColor.darker());
    graphics.setStroke(new BasicStroke(5));
    graphics.draw(new Ellipse2D.Double(offset.x, offset.y, SIZE, SIZE));
  }

  /** Available iff a Robot exists and a 1x1 probe capture via the bridge succeeds. */
  @Override
  public boolean isAvailable() {
    return myRobot != null && captureScreen(null, new Rectangle(0, 0, 1, 1)) != null;
  }

  /**
   * Captures a screen rectangle via CoreGraphics/AppKit, optionally only the
   * windows below {@code belowWindow} (so the pipette dialog itself is not
   * captured). Returns {@code null} on any failure.
   *
   * <p>Memory: the NSAutoreleasePool drains bridge temporaries in the finally
   * block; the explicitly alloc'd NSImage is released once its TIFF bytes
   * have been copied into the Java heap.
   */
  @Nullable
  private static BufferedImage captureScreen(@Nullable Window belowWindow, @NotNull Rectangle rect) {
    ID pool = Foundation.invoke("NSAutoreleasePool", "new");
    try {
      ID windowId = belowWindow != null ? MacUtil.findWindowFromJavaWindow(belowWindow) : null;
      Foundation.NSRect nsRect = new Foundation.NSRect(rect.x, rect.y, rect.width, rect.height);
      ID cgWindowId = windowId != null ? Foundation.invoke(windowId, "windowNumber") : ID.NIL;
      // NOTE(review): cgWindowId can never be null here (it is either an
      // invoke result or ID.NIL), so the kCGWindowListOptionAll branch looks
      // unreachable — confirm whether this was meant to test windowId.
      int windowListOptions = cgWindowId != null
                              ? FoundationLibrary.kCGWindowListOptionOnScreenBelowWindow
                              : FoundationLibrary.kCGWindowListOptionAll;
      int windowImageOptions = FoundationLibrary.kCGWindowImageNominalResolution;
      ID cgImageRef = Foundation.cgWindowListCreateImage(nsRect, windowListOptions, cgWindowId, windowImageOptions);
      ID bitmapRep = Foundation.invoke(Foundation.invoke("NSBitmapImageRep", "alloc"), "initWithCGImage:", cgImageRef);
      ID nsImage = Foundation.invoke(Foundation.invoke("NSImage", "alloc"), "init");
      Foundation.invoke(nsImage, "addRepresentation:", bitmapRep);
      // Round-trip through TIFF bytes to get the pixels into a BufferedImage.
      ID data = Foundation.invoke(nsImage, "TIFFRepresentation");
      ID bytes = Foundation.invoke(data, "bytes");
      ID length = Foundation.invoke(data, "length");
      ByteBuffer byteBuffer = Native.getDirectByteBuffer(bytes.longValue(), length.longValue());
      Foundation.invoke(nsImage, "release");
      byte[] b = new byte[byteBuffer.remaining()];
      byteBuffer.get(b);
      return ImageIO.read(new ByteArrayInputStream(b));
    }
    catch (Throwable t) {
      LOG.error(t);
      return null;
    }
    finally {
      Foundation.invoke(pool, "release");
    }
  }
}
package org.apache.ojb.broker.util.batch;

/* Copyright 2002-2005 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.lang.reflect.Proxy;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

import org.apache.ojb.broker.PersistenceBroker;
import org.apache.ojb.broker.metadata.ClassDescriptor;
import org.apache.ojb.broker.metadata.CollectionDescriptor;
import org.apache.ojb.broker.metadata.DescriptorRepository;
import org.apache.ojb.broker.metadata.JdbcConnectionDescriptor;
import org.apache.ojb.broker.metadata.ObjectReferenceDescriptor;
import org.apache.ojb.broker.util.WrappedConnection;

/**
 * The implementation of {@link java.sql.Connection} which
 * automatically gathers INSERT, UPDATE and DELETE
 * PreparedStatements into batches.
 *
 * Batches are flushed (in first-seen SQL order) whenever referential
 * integrity could otherwise be violated — see {@link #nextExecuted} —
 * and on {@link #commit()}, or once {@link #MAX_COUNT} statements have
 * accumulated ({@link #executeBatchIfNecessary()}).
 *
 * @author Oleg Nitz (<a href="mailto:olegnitz@apache.org">olegnitz@apache.org</a>)
 * @version $Id: BatchConnection.java,v 1.1 2007-08-24 22:17:42 ewestfal Exp $
 */
public class BatchConnection extends WrappedConnection
{
    // Flush threshold: once this many statements are buffered,
    // executeBatchIfNecessary() triggers a flush.
    private static final int MAX_COUNT = 100;

    /**
     * Maps PBKey to another HashMap,
     * which maps table name to List of related tables (N:1 or 1:1)
     */
    // NOTE(review): shared static cache mutated without synchronization —
    // presumably brokers are set up single-threaded; confirm before reuse.
    private static HashMap _pbkeyToFKInfo = new HashMap();

    private boolean _useBatchInserts = true;
    // SQL string -> batching PreparedStatement proxy (see prepareBatchStatement)
    private HashMap _statements = new HashMap();
    // Distinct SQL strings in first-execution order; flush replays this order.
    private ArrayList _order = new ArrayList();
    // table name -> set of tables it references via FK (for this broker's PBKey)
    private HashMap _fkInfo;
    // Tables with pending DELETEs / INSERT-blocked tables since the last flush.
    private HashSet _deleted;
    private HashSet _dontInsert;
    // Tables touched by any buffered statement since the last flush.
    private HashSet _touched = new HashSet();
    // Number of statements buffered since the last flush.
    private int count = 0;
    private JdbcConnectionDescriptor m_jcd;

    /**
     * Wraps the given connection and builds (or fetches from the static
     * cache) the FK-dependency table map for the broker's repository:
     * for every table, the set of tables it references via N:1/1:1
     * references, and for M:N indirection tables, both sides.
     */
    public BatchConnection(Connection conn, PersistenceBroker broker)
    {
        super(conn);
        m_jcd = broker.serviceConnectionManager().getConnectionDescriptor();
        _fkInfo = (HashMap) _pbkeyToFKInfo.get(broker.getPBKey());
        if (_fkInfo != null)
        {
            return; // cached FK info for this PBKey — nothing to build
        }
        DescriptorRepository repos = broker.getDescriptorRepository();
        _fkInfo = new HashMap();
        for (Iterator it = repos.iterator(); it.hasNext();)
        {
            ClassDescriptor desc = (ClassDescriptor) it.next();
            List ordList = desc.getObjectReferenceDescriptors();
            if (!ordList.isEmpty())
            {
                // N:1 / 1:1: this table references each referenced class's table(s).
                HashSet fkTables = getFKTablesFor(desc.getFullTableName());
                for (Iterator it2 = ordList.iterator(); it2.hasNext();)
                {
                    ObjectReferenceDescriptor ord = (ObjectReferenceDescriptor) it2.next();
                    ClassDescriptor oneDesc = repos.getDescriptorFor(ord.getItemClass());
                    fkTables.addAll(getFullTableNames(oneDesc, repos));
                }
            }
            List codList = desc.getCollectionDescriptors();
            for (Iterator it2 = codList.iterator(); it2.hasNext();)
            {
                CollectionDescriptor cod = (CollectionDescriptor) it2.next();
                ClassDescriptor manyDesc = repos.getDescriptorFor(cod.getItemClass());
                if (cod.isMtoNRelation())
                {
                    // M:N: the indirection table references both sides.
                    HashSet fkTables = getFKTablesFor(cod.getIndirectionTable());
                    fkTables.addAll(getFullTableNames(desc, repos));
                    fkTables.addAll(getFullTableNames(manyDesc, repos));
                }
                else
                {
                    // 1:N: each "many" table references this ("one") table.
                    HashSet manyTableNames = getFullTableNames(manyDesc, repos);
                    for (Iterator it3 = manyTableNames.iterator(); it3.hasNext();)
                    {
                        HashSet fkTables = getFKTablesFor((String) it3.next());
                        fkTables.addAll(getFullTableNames(desc, repos));
                    }
                }
            }
        }
        _pbkeyToFKInfo.put(broker.getPBKey(), _fkInfo);
    }

    /** Returns (creating on demand) the set of tables referenced by {@code tableName}. */
    private HashSet getFKTablesFor(String tableName)
    {
        HashSet fkTables = (HashSet) _fkInfo.get(tableName);
        if (fkTables == null)
        {
            fkTables = new HashSet();
            _fkInfo.put(tableName, fkTables);
        }
        return fkTables;
    }

    /** Collects the table names of {@code desc} and all of its extent (subclass) descriptors. */
    private HashSet getFullTableNames(ClassDescriptor desc, DescriptorRepository repos)
    {
        String tableName;
        HashSet tableNamesSet = new HashSet();
        Collection extents = desc.getExtentClasses();

        tableName = desc.getFullTableName();
        if (tableName != null)
        {
            tableNamesSet.add(tableName);
        }
        for (Iterator it = extents.iterator(); it.hasNext();)
        {
            Class extClass = (Class) it.next();
            ClassDescriptor extDesc = repos.getDescriptorFor(extClass);
            tableName = extDesc.getFullTableName();
            if (tableName != null)
            {
                tableNamesSet.add(tableName);
            }
        }
        return tableNamesSet;
    }

    /** Enables/disables batching of INSERTs (UPDATE/DELETE are always batched). */
    public void setUseBatchInserts(boolean useBatchInserts)
    {
        _useBatchInserts = useBatchInserts;
    }

    /**
     * Remember the order of execution
     */
    // Called for each executed statement. Flushes buffered batches whenever
    // reordering could violate referential integrity, then records the SQL
    // in _order and updates the bookkeeping sets.
    // NOTE(review): assumes sql starts with "UPDATE ", "INSERT INTO " or
    // "DELETE FROM " — shorter/other SQL would break the substring calls.
    void nextExecuted(String sql) throws SQLException
    {
        count++;
        if (_order.contains(sql))
        {
            return; // already batched; position in _order is fixed
        }
        String sqlCmd = sql.substring(0, 7);
        String rest = sql.substring(sqlCmd.equals("UPDATE ") ? 7 // "UPDATE "
                : 12); // "INSERT INTO " or "DELETE FROM "
        String tableName = rest.substring(0, rest.indexOf(' '));
        HashSet fkTables = (HashSet) _fkInfo.get(tableName);

        // we should not change order of INSERT/DELETE/UPDATE
        // statements for the same table
        if (_touched.contains(tableName))
        {
            executeBatch();
        }
        if (sqlCmd.equals("INSERT "))
        {
            if (_dontInsert != null && _dontInsert.contains(tableName))
            {
                // one of the previous INSERTs contained a table
                // that references this table.
                // Let's execute that previous INSERT right now so that
                // in the future INSERTs into this table will go first
                // in the _order array.
                executeBatch();
            }
        }
        else //if (sqlCmd.equals("DELETE ") || sqlCmd.equals("UPDATE "))
        {
            // We process UPDATEs in the same way as DELETEs
            // because setting FK to NULL in UPDATE is equivalent
            // to DELETE from the referential integrity point of view.
            if (_deleted != null && fkTables != null)
            {
                HashSet intersection = (HashSet) _deleted.clone();

                intersection.retainAll(fkTables);
                if (!intersection.isEmpty())
                {
                    // one of the previous DELETEs contained a table
                    // that is referenced from this table.
                    // Let's execute that previous DELETE right now so that
                    // in the future DELETEs into this table will go first
                    // in the _order array.
                    executeBatch();
                }
            }
        }
        _order.add(sql);
        _touched.add(tableName);
        if (sqlCmd.equals("INSERT "))
        {
            if (fkTables != null)
            {
                if (_dontInsert == null)
                {
                    _dontInsert = new HashSet();
                }
                _dontInsert.addAll(fkTables);
            }
        }
        else if (sqlCmd.equals("DELETE "))
        {
            if (_deleted == null)
            {
                _deleted = new HashSet();
            }
            _deleted.add(tableName);
        }
    }

    /**
     * If UPDATE, INSERT or DELETE, return BatchPreparedStatement,
     * otherwise return null.
     */
    // Returns a cached dynamic-proxy statement per SQL string; the proxy
    // (PreparedStatementInvocationHandler) accumulates parameter sets until
    // executeBatch() replays them against the real connection.
    private PreparedStatement prepareBatchStatement(String sql)
    {
        String sqlCmd = sql.substring(0, 7);

        if (sqlCmd.equals("UPDATE ") || sqlCmd.equals("DELETE ")
                || (_useBatchInserts && sqlCmd.equals("INSERT ")))
        {
            PreparedStatement stmt = (PreparedStatement) _statements.get(sql);
            if (stmt == null)
            {
                // [olegnitz] for JDK 1.2 we need to list both PreparedStatement and Statement
                // interfaces, otherwise proxy.jar works incorrectly
                stmt = (PreparedStatement) Proxy.newProxyInstance(getClass().getClassLoader(),
                        new Class[]{PreparedStatement.class, Statement.class, BatchPreparedStatement.class},
                        new PreparedStatementInvocationHandler(this, sql, m_jcd));
                _statements.put(sql, stmt);
            }
            return stmt;
        }
        else
        {
            return null;
        }
    }

    /** Returns a batching proxy for batchable SQL, otherwise delegates to the real connection. */
    public PreparedStatement prepareStatement(String sql) throws SQLException
    {
        PreparedStatement stmt = null;
        stmt = prepareBatchStatement(sql);
        if (stmt == null)
        {
            stmt = getDelegate().prepareStatement(sql);
        }
        return stmt;
    }

    /** Same as {@link #prepareStatement(String)}; result-set options only apply to non-batched SQL. */
    public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency)
            throws SQLException
    {
        PreparedStatement stmt = null;
        stmt = prepareBatchStatement(sql);
        if (stmt == null)
        {
            stmt = getDelegate().prepareStatement(sql, resultSetType, resultSetConcurrency);
        }
        return stmt;
    }

    /**
     * Flushes all buffered batches against the delegate connection, in the
     * recorded first-seen SQL order, then resets the bookkeeping state
     * (even if a batch fails part-way).
     */
    public void executeBatch() throws SQLException
    {
        BatchPreparedStatement batchStmt;
        Connection conn = getDelegate();

        try
        {
            for (Iterator it = _order.iterator(); it.hasNext();)
            {
                batchStmt = (BatchPreparedStatement) _statements.get(it.next());
                batchStmt.doExecute(conn);
            }
        }
        finally
        {
            _order.clear();
            if (_dontInsert != null)
            {
                _dontInsert.clear();
            }
            if (_deleted != null)
            {
                _deleted.clear();
            }
            _touched.clear();
            count = 0;
        }
    }

    /** Flushes only once {@link #MAX_COUNT} statements have accumulated. */
    public void executeBatchIfNecessary() throws SQLException
    {
        if (count >= MAX_COUNT)
        {
            executeBatch();
        }
    }

    /** Discards all buffered statements without executing them. */
    // NOTE(review): unlike executeBatch(), this does not clear _touched or
    // reset count — looks like an oversight (harmless: only causes an extra
    // early flush later); confirm before changing.
    public void clearBatch()
    {
        _order.clear();
        _statements.clear();
        if (_dontInsert != null)
        {
            _dontInsert.clear();
        }
        if (_deleted != null)
        {
            _deleted.clear();
        }
    }

    /** Flushes pending batches, then commits on the delegate connection. */
    public void commit() throws SQLException
    {
        executeBatch();
        _statements.clear();
        getDelegate().commit();
    }

    /** Discards pending batches, then rolls back on the delegate connection. */
    public void rollback() throws SQLException
    {
        clearBatch();
        getDelegate().rollback();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.taskmanager;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.TaskInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.accumulators.AccumulatorRegistry;
import org.apache.flink.runtime.broadcast.BroadcastVariableManager;
import org.apache.flink.runtime.checkpoint.CheckpointMetrics;
import org.apache.flink.runtime.checkpoint.TaskStateSnapshot;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.network.TaskEventDispatcher;
import org.apache.flink.runtime.io.network.api.writer.ResultPartitionWriter;
import org.apache.flink.runtime.io.network.partition.consumer.IndexedInputGate;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.tasks.InputSplitProvider;
import org.apache.flink.runtime.jobgraph.tasks.TaskOperatorEventGateway;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.metrics.groups.TaskMetricGroup;
import org.apache.flink.runtime.query.TaskKvStateRegistry;
import org.apache.flink.runtime.state.TaskStateManager;
import org.apache.flink.runtime.taskexecutor.GlobalAggregateManager;

import java.util.Map;
import java.util.concurrent.Future;

import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * An implementation of the {@link Environment}: an immutable holder of all
 * per-task runtime services and metadata (IDs, configurations, managers,
 * writers/gates), built once per task attempt. Checkpoint acknowledge/decline
 * calls are forwarded to the {@link CheckpointResponder}, and external
 * failures to the containing {@link Task}; everything else is plain
 * constructor-injected state exposed through getters.
 */
public class RuntimeEnvironment implements Environment {

    private final JobID jobId;
    private final JobVertexID jobVertexId;
    private final ExecutionAttemptID executionId;

    private final TaskInfo taskInfo;

    private final Configuration jobConfiguration;
    private final Configuration taskConfiguration;
    private final ExecutionConfig executionConfig;

    private final ClassLoader userCodeClassLoader;

    private final MemoryManager memManager;
    private final IOManager ioManager;
    private final BroadcastVariableManager bcVarManager;
    private final TaskStateManager taskStateManager;
    private final GlobalAggregateManager aggregateManager;
    private final InputSplitProvider splitProvider;

    private final Map<String, Future<Path>> distCacheEntries;

    private final ResultPartitionWriter[] writers;
    private final IndexedInputGate[] inputGates;

    private final TaskEventDispatcher taskEventDispatcher;

    private final CheckpointResponder checkpointResponder;
    private final TaskOperatorEventGateway operatorEventGateway;

    private final AccumulatorRegistry accumulatorRegistry;

    private final TaskKvStateRegistry kvStateRegistry;

    private final TaskManagerRuntimeInfo taskManagerInfo;

    // metrics and containingTask are the only fields stored without a
    // checkNotNull — presumably to allow tests to pass null; confirm before
    // tightening.
    private final TaskMetricGroup metrics;

    private final Task containingTask;

    // ------------------------------------------------------------------------

    /**
     * Creates the environment for one task attempt. All arguments except
     * {@code metrics} and {@code containingTask} must be non-null
     * ({@link NullPointerException} otherwise).
     */
    public RuntimeEnvironment(
            JobID jobId,
            JobVertexID jobVertexId,
            ExecutionAttemptID executionId,
            ExecutionConfig executionConfig,
            TaskInfo taskInfo,
            Configuration jobConfiguration,
            Configuration taskConfiguration,
            ClassLoader userCodeClassLoader,
            MemoryManager memManager,
            IOManager ioManager,
            BroadcastVariableManager bcVarManager,
            TaskStateManager taskStateManager,
            GlobalAggregateManager aggregateManager,
            AccumulatorRegistry accumulatorRegistry,
            TaskKvStateRegistry kvStateRegistry,
            InputSplitProvider splitProvider,
            Map<String, Future<Path>> distCacheEntries,
            ResultPartitionWriter[] writers,
            IndexedInputGate[] inputGates,
            TaskEventDispatcher taskEventDispatcher,
            CheckpointResponder checkpointResponder,
            TaskOperatorEventGateway operatorEventGateway,
            TaskManagerRuntimeInfo taskManagerInfo,
            TaskMetricGroup metrics,
            Task containingTask) {

        this.jobId = checkNotNull(jobId);
        this.jobVertexId = checkNotNull(jobVertexId);
        this.executionId = checkNotNull(executionId);
        this.taskInfo = checkNotNull(taskInfo);
        this.executionConfig = checkNotNull(executionConfig);
        this.jobConfiguration = checkNotNull(jobConfiguration);
        this.taskConfiguration = checkNotNull(taskConfiguration);
        this.userCodeClassLoader = checkNotNull(userCodeClassLoader);
        this.memManager = checkNotNull(memManager);
        this.ioManager = checkNotNull(ioManager);
        this.bcVarManager = checkNotNull(bcVarManager);
        this.taskStateManager = checkNotNull(taskStateManager);
        this.aggregateManager = checkNotNull(aggregateManager);
        this.accumulatorRegistry = checkNotNull(accumulatorRegistry);
        this.kvStateRegistry = checkNotNull(kvStateRegistry);
        this.splitProvider = checkNotNull(splitProvider);
        this.distCacheEntries = checkNotNull(distCacheEntries);
        this.writers = checkNotNull(writers);
        this.inputGates = checkNotNull(inputGates);
        this.taskEventDispatcher = checkNotNull(taskEventDispatcher);
        this.checkpointResponder = checkNotNull(checkpointResponder);
        this.operatorEventGateway = checkNotNull(operatorEventGateway);
        this.taskManagerInfo = checkNotNull(taskManagerInfo);
        this.containingTask = containingTask;
        this.metrics = metrics;
    }

    // ------------------------------------------------------------------------
    //  Plain accessors for the constructor-injected state.
    // ------------------------------------------------------------------------

    @Override
    public ExecutionConfig getExecutionConfig() {
        return this.executionConfig;
    }

    @Override
    public JobID getJobID() {
        return jobId;
    }

    @Override
    public JobVertexID getJobVertexId() {
        return jobVertexId;
    }

    @Override
    public ExecutionAttemptID getExecutionId() {
        return executionId;
    }

    @Override
    public TaskInfo getTaskInfo() {
        return this.taskInfo;
    }

    @Override
    public Configuration getJobConfiguration() {
        return jobConfiguration;
    }

    @Override
    public Configuration getTaskConfiguration() {
        return taskConfiguration;
    }

    @Override
    public TaskManagerRuntimeInfo getTaskManagerInfo() {
        return taskManagerInfo;
    }

    @Override
    public TaskMetricGroup getMetricGroup() {
        return metrics;
    }

    @Override
    public ClassLoader getUserClassLoader() {
        return userCodeClassLoader;
    }

    @Override
    public MemoryManager getMemoryManager() {
        return memManager;
    }

    @Override
    public IOManager getIOManager() {
        return ioManager;
    }

    @Override
    public BroadcastVariableManager getBroadcastVariableManager() {
        return bcVarManager;
    }

    @Override
    public TaskStateManager getTaskStateManager() {
        return taskStateManager;
    }

    @Override
    public GlobalAggregateManager getGlobalAggregateManager() {
        return aggregateManager;
    }

    @Override
    public AccumulatorRegistry getAccumulatorRegistry() {
        return accumulatorRegistry;
    }

    @Override
    public TaskKvStateRegistry getTaskKvStateRegistry() {
        return kvStateRegistry;
    }

    @Override
    public InputSplitProvider getInputSplitProvider() {
        return splitProvider;
    }

    @Override
    public Map<String, Future<Path>> getDistributedCacheEntries() {
        return distCacheEntries;
    }

    @Override
    public ResultPartitionWriter getWriter(int index) {
        return writers[index];
    }

    @Override
    public ResultPartitionWriter[] getAllWriters() {
        return writers;
    }

    @Override
    public IndexedInputGate getInputGate(int index) {
        return inputGates[index];
    }

    @Override
    public IndexedInputGate[] getAllInputGates() {
        return inputGates;
    }

    @Override
    public TaskEventDispatcher getTaskEventDispatcher() {
        return taskEventDispatcher;
    }

    /** Convenience overload: acknowledge with no state snapshot attached. */
    @Override
    public void acknowledgeCheckpoint(long checkpointId, CheckpointMetrics checkpointMetrics) {
        acknowledgeCheckpoint(checkpointId, checkpointMetrics, null);
    }

    /** Forwards the checkpoint acknowledgement to the {@link CheckpointResponder}. */
    @Override
    public void acknowledgeCheckpoint(
            long checkpointId,
            CheckpointMetrics checkpointMetrics,
            TaskStateSnapshot checkpointStateHandles) {

        checkpointResponder.acknowledgeCheckpoint(
                jobId, executionId, checkpointId, checkpointMetrics,
                checkpointStateHandles);
    }

    /** Forwards the checkpoint decline (with its cause) to the {@link CheckpointResponder}. */
    @Override
    public void declineCheckpoint(long checkpointId, Throwable cause) {
        checkpointResponder.declineCheckpoint(jobId, executionId, checkpointId, cause);
    }

    @Override
    public TaskOperatorEventGateway getOperatorCoordinatorEventGateway() {
        return operatorEventGateway;
    }

    /** Fails the whole task attempt; delegates to the containing {@link Task}. */
    @Override
    public void failExternally(Throwable cause) {
        this.containingTask.failExternally(cause);
    }
}
/** * Copyright (c) 2013-2017, phandom.org * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. 3) Neither the name of the phandom.org nor * the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package org.phandom;

import com.rexsl.test.XhtmlMatchers;
import java.io.File;
import java.net.URI;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * Test case for {@link Phandom}.
 *
 * <p>All tests are skipped when phantomjs is not installed on the host
 * (see {@link #installed()}).
 *
 * @author Yegor Bugayenko (yegor@tpc2.com)
 * @version $Id$
 */
public final class PhandomTest {

    /**
     * Temporary folder.
     * @checkstyle VisibilityModifier (3 lines)
     */
    @Rule
    public transient TemporaryFolder temp = new TemporaryFolder();

    /**
     * Check for Phantomjs availability.
     */
    @Before
    public void installed() {
        Assume.assumeTrue(Phandom.isInstalled());
    }

    /**
     * Phandom can build DOM document.
     * @throws Exception If some problem inside
     */
    @Test
    public void buildsDomDocument() throws Exception {
        MatcherAssert.assertThat(
            XhtmlMatchers.xhtml(
                new Phandom(
                    StringUtils.join(
                        "<!DOCTYPE html>\n",
                        "<html xmlns='http://www.w3.org/1999/xhtml'><head>\n",
                        "<meta content='hi there' name='description'/>\n",
                        "</head><body><p>&euro;</p><a href='#'/></body></html>"
                    )
                ).dom()
            ),
            XhtmlMatchers.hasXPaths(
                "/xhtml:html/body",
                "/xhtml:html/head/meta[@name='description']",
                // &euro; must come back as the literal euro character
                "//p[.='\u20ac']"
            )
        );
    }

    /**
     * Phandom can succeed on a broken DOM.
     * @throws Exception If some problem inside
     */
    @Test
    public void succeedsOnBrokenDom() throws Exception {
        MatcherAssert.assertThat(
            XhtmlMatchers.xhtml(
                new Phandom(
                    IOUtils.toInputStream(
                        "<html xmlns='http://www.w3.org/1999/xhtml'>\nbroken"
                    )
                ).dom()
            ),
            XhtmlMatchers.hasXPath("/xhtml:html[head and body]")
        );
    }

    /**
     * Phandom can fail on a broken javascript.
     * @throws Exception If some problem inside
     */
    @Test(expected = RuntimeException.class)
    public void failsOnBrokenJavascript() throws Exception {
        new Phandom(
            StringUtils.join(
                "<html xmlns='http://www.w3.org/1999/xhtml' >",
                "<body><script>a.call();</script>\n</body></html>"
            )
        ).dom();
    }

    /**
     * Phandom can parse a huge HTML.
     * @throws Exception If some problem inside
     */
    @Test
    public void parsesLongHtml() throws Exception {
        MatcherAssert.assertThat(
            XhtmlMatchers.xhtml(
                new Phandom(
                    StringUtils.join(
                        "<html xmlns='http://www.w3.org/1999/xhtml'>",
                        "<head><script>//<![CDATA[\n",
                        "function onLoad() {",
                        "for (i=0; i<1000; ++i) {",
                        "var div = document.createElement('div');",
                        "div.innerHTML = i + '&lt;&#10;<b>&gt;</b>&#10;&amp;';",
                        "div.style.color = 'red';",
                        "div.setAttribute('class', 'foo');",
                        "document.body.appendChild(div);",
                        "document.body.removeChild(div);",
                        "}}\n//]]></script></head>",
                        "<body onload='onLoad();'></body></html>\n\n"
                    )
                ).dom()
            ),
            // every appended div is removed again, so the body must end up empty
            XhtmlMatchers.hasXPath("/xhtml:html/body[count(div)=0]")
        );
    }

    /**
     * Phandom can parse a web page.
     * @throws Exception If some problem inside
     * @since 0.3
     */
    @Test
    public void parsesWebPage() throws Exception {
        MatcherAssert.assertThat(
            XhtmlMatchers.xhtml(
                new Phandom(new URI("http://www.xembly.org/")).dom()
            ).toString(),
            Matchers.containsString("<html>")
        );
    }

    /**
     * Phandom can parse a file on disc.
     * @throws Exception If some problem inside
     * @since 0.3
     */
    @Test
    public void parsesFile() throws Exception {
        final File file = this.temp.newFile("a.html");
        FileUtils.write(
            file,
            "<html xmlns='http://www.w3.org/1999/xhtml'><p>hi!</p></html>"
        );
        MatcherAssert.assertThat(
            XhtmlMatchers.xhtml(
                new Phandom(file).dom()
            ),
            XhtmlMatchers.hasXPath("//body[p='hi!']")
        );
    }

    /**
     * Phandom can parse XML+XSL.
     * @throws Exception If some problem inside
     * @since 0.3
     * @see http://stackoverflow.com/questions/23342952/does-phantomjs-render-xmlxsl
     */
    @Test
    @Ignore
    public void parsesXmlAndXsl() throws Exception {
        final File dir = this.temp.newFolder();
        final File main = new File(dir, "main.xml");
        FileUtils.write(
            main,
            "<?xml-stylesheet href='i.xsl' type='text/xsl'?><index/>"
        );
        FileUtils.write(
            new File(dir, "i.xsl"),
            StringUtils.join(
                "<xsl:stylesheet",
                " xmlns:xsl='http://www.w3.org/1999/XSL/Transform'",
                " xmlns='http://www.w3.org/1999/xhtml' version='1.0'>",
                "<xsl:template match='/'>",
                "<html>hello, XML!</html>",
                "</xsl:template>",
                // Bug fix: the root element was never closed, producing a
                // malformed stylesheet that no XSLT processor could apply.
                "</xsl:stylesheet>"
            )
        );
        MatcherAssert.assertThat(
            XhtmlMatchers.xhtml(
                new Phandom(main).dom()
            ),
            XhtmlMatchers.hasXPath("//head and //body")
        );
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Range; import com.google.common.collect.Sets; import com.google.devtools.build.lib.actions.ActionKeyContext; import com.google.devtools.build.lib.actions.ArtifactRoot; import com.google.devtools.build.lib.actions.CommandLineExpansionException; import com.google.devtools.build.lib.actions.FileStateValue; import com.google.devtools.build.lib.actions.FileValue; import com.google.devtools.build.lib.analysis.AnalysisProtos.ActionGraphContainer; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.WorkspaceStatusAction.Factory; import com.google.devtools.build.lib.analysis.buildinfo.BuildInfoFactory; import com.google.devtools.build.lib.analysis.config.BuildOptions; import 
com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.concurrent.Uninterruptibles; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.packages.AspectClass; import com.google.devtools.build.lib.packages.BuildFileName; import com.google.devtools.build.lib.packages.NoSuchPackageException; import com.google.devtools.build.lib.packages.NoSuchTargetException; import com.google.devtools.build.lib.packages.Package; import com.google.devtools.build.lib.packages.PackageFactory; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.RuleClass; import com.google.devtools.build.lib.packages.SkylarkSemanticsOptions; import com.google.devtools.build.lib.pkgcache.PackageCacheOptions; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.profiler.AutoProfiler; import com.google.devtools.build.lib.skyframe.AspectValue.AspectKey; import com.google.devtools.build.lib.skyframe.DirtinessCheckerUtils.BasicFilesystemDirtinessChecker; import com.google.devtools.build.lib.skyframe.DirtinessCheckerUtils.ExternalDirtinessChecker; import com.google.devtools.build.lib.skyframe.DirtinessCheckerUtils.MissingDiffDirtinessChecker; import com.google.devtools.build.lib.skyframe.DirtinessCheckerUtils.UnionDirtinessChecker; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFilesKnowledge; import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.FileType; import com.google.devtools.build.lib.skyframe.PackageFunction.ActionOnIOExceptionReadingBuildFile; import 
com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy; import com.google.devtools.build.lib.skyframe.actiongraph.ActionGraphDump; import com.google.devtools.build.lib.util.AbruptExitException; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.util.ResourceUsage; import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor; import com.google.devtools.build.lib.vfs.BatchStat; import com.google.devtools.build.lib.vfs.FileSystem; import com.google.devtools.build.lib.vfs.ModifiedFileSet; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.skyframe.BuildDriver; import com.google.devtools.build.skyframe.Differencer; import com.google.devtools.build.skyframe.GraphInconsistencyReceiver; import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator; import com.google.devtools.build.skyframe.Injectable; import com.google.devtools.build.skyframe.MemoizingEvaluator.EvaluatorSupplier; import com.google.devtools.build.skyframe.NodeEntry; import com.google.devtools.build.skyframe.RecordingDifferencer; import com.google.devtools.build.skyframe.SequencedRecordingDifferencer; import com.google.devtools.build.skyframe.SequentialBuildDriver; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import com.google.devtools.common.options.OptionsClassProvider; import java.io.PrintStream; import java.util.ArrayList; import java.util.Collection; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.Callable; import java.util.logging.Logger; 
import javax.annotation.Nullable; /** * A SkyframeExecutor that implicitly assumes that builds can be done incrementally from the most * recent build. In other words, builds are "sequenced". */ public final class SequencedSkyframeExecutor extends SkyframeExecutor { private static final Logger logger = Logger.getLogger(SequencedSkyframeExecutor.class.getName()); private boolean lastAnalysisDiscarded = false; /** * If false, the graph will not store state useful for incremental builds, saving memory but * leaving the graph un-reusable. Subsequent builds will therefore not be incremental. * * <p>Avoids storing edges entirely and dereferences each action after execution. */ private boolean trackIncrementalState = true; private boolean evaluatorNeedsReset = false; // This is intentionally not kept in sync with the evaluator: we may reset the evaluator without // ever losing injected/invalidated data here. This is safe because the worst that will happen is // that on the next build we try to inject/invalidate some nodes that aren't needed for the build. private final RecordingDifferencer recordingDiffer = new SequencedRecordingDifferencer(); private final DiffAwarenessManager diffAwarenessManager; private final Iterable<SkyValueDirtinessChecker> customDirtinessCheckers; private Set<String> previousClientEnvironment = ImmutableSet.of(); private SequencedSkyframeExecutor( EvaluatorSupplier evaluatorSupplier, PackageFactory pkgFactory, FileSystem fileSystem, BlazeDirectories directories, ActionKeyContext actionKeyContext, Factory workspaceStatusActionFactory, ImmutableList<BuildInfoFactory> buildInfoFactories, Iterable<? 
extends DiffAwareness.Factory> diffAwarenessFactories, ImmutableMap<SkyFunctionName, SkyFunction> extraSkyFunctions, Iterable<SkyValueDirtinessChecker> customDirtinessCheckers, ImmutableSet<PathFragment> hardcodedBlacklistedPackagePrefixes, PathFragment additionalBlacklistedPackagePrefixesFile, CrossRepositoryLabelViolationStrategy crossRepositoryLabelViolationStrategy, List<BuildFileName> buildFilesByPriority, ActionOnIOExceptionReadingBuildFile actionOnIOExceptionReadingBuildFile, BuildOptions defaultBuildOptions, MutableArtifactFactorySupplier mutableArtifactFactorySupplier) { super( evaluatorSupplier, pkgFactory, fileSystem, directories, actionKeyContext, workspaceStatusActionFactory, buildInfoFactories, extraSkyFunctions, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, hardcodedBlacklistedPackagePrefixes, additionalBlacklistedPackagePrefixesFile, crossRepositoryLabelViolationStrategy, buildFilesByPriority, actionOnIOExceptionReadingBuildFile, /*shouldUnblockCpuWorkWhenFetchingDeps=*/ false, GraphInconsistencyReceiver.THROWING, defaultBuildOptions, new PackageProgressReceiver(), mutableArtifactFactorySupplier); this.diffAwarenessManager = new DiffAwarenessManager(diffAwarenessFactories); this.customDirtinessCheckers = customDirtinessCheckers; } public static SequencedSkyframeExecutor create( PackageFactory pkgFactory, FileSystem fileSystem, BlazeDirectories directories, ActionKeyContext actionKeyContext, Factory workspaceStatusActionFactory, ImmutableList<BuildInfoFactory> buildInfoFactories, Iterable<? 
extends DiffAwareness.Factory> diffAwarenessFactories, ImmutableMap<SkyFunctionName, SkyFunction> extraSkyFunctions, Iterable<SkyValueDirtinessChecker> customDirtinessCheckers, ImmutableSet<PathFragment> hardcodedBlacklistedPackagePrefixes, PathFragment additionalBlacklistedPackagePrefixesFile, CrossRepositoryLabelViolationStrategy crossRepositoryLabelViolationStrategy, List<BuildFileName> buildFilesByPriority, ActionOnIOExceptionReadingBuildFile actionOnIOExceptionReadingBuildFile, BuildOptions defaultBuildOptions) { return create( pkgFactory, fileSystem, directories, actionKeyContext, workspaceStatusActionFactory, buildInfoFactories, diffAwarenessFactories, extraSkyFunctions, customDirtinessCheckers, hardcodedBlacklistedPackagePrefixes, additionalBlacklistedPackagePrefixesFile, crossRepositoryLabelViolationStrategy, buildFilesByPriority, actionOnIOExceptionReadingBuildFile, defaultBuildOptions, new MutableArtifactFactorySupplier()); } public static SequencedSkyframeExecutor create( PackageFactory pkgFactory, FileSystem fileSystem, BlazeDirectories directories, ActionKeyContext actionKeyContext, Factory workspaceStatusActionFactory, ImmutableList<BuildInfoFactory> buildInfoFactories, Iterable<? 
extends DiffAwareness.Factory> diffAwarenessFactories, ImmutableMap<SkyFunctionName, SkyFunction> extraSkyFunctions, Iterable<SkyValueDirtinessChecker> customDirtinessCheckers, ImmutableSet<PathFragment> hardcodedBlacklistedPackagePrefixes, PathFragment additionalBlacklistedPackagePrefixesFile, CrossRepositoryLabelViolationStrategy crossRepositoryLabelViolationStrategy, List<BuildFileName> buildFilesByPriority, ActionOnIOExceptionReadingBuildFile actionOnIOExceptionReadingBuildFile, BuildOptions defaultBuildOptions, MutableArtifactFactorySupplier mutableArtifactFactorySupplier) { SequencedSkyframeExecutor skyframeExecutor = new SequencedSkyframeExecutor( InMemoryMemoizingEvaluator.SUPPLIER, pkgFactory, fileSystem, directories, actionKeyContext, workspaceStatusActionFactory, buildInfoFactories, diffAwarenessFactories, extraSkyFunctions, customDirtinessCheckers, hardcodedBlacklistedPackagePrefixes, additionalBlacklistedPackagePrefixesFile, crossRepositoryLabelViolationStrategy, buildFilesByPriority, actionOnIOExceptionReadingBuildFile, defaultBuildOptions, mutableArtifactFactorySupplier); skyframeExecutor.init(); return skyframeExecutor; } @Override protected BuildDriver getBuildDriver() { return new SequentialBuildDriver(memoizingEvaluator); } @Override public void resetEvaluator() { super.resetEvaluator(); diffAwarenessManager.reset(); } @Override protected Differencer evaluatorDiffer() { return recordingDiffer; } @Override protected Injectable injectable() { return recordingDiffer; } @VisibleForTesting public RecordingDifferencer getDifferencerForTesting() { return recordingDiffer; } @Override public void sync( ExtendedEventHandler eventHandler, PackageCacheOptions packageCacheOptions, SkylarkSemanticsOptions skylarkSemanticsOptions, Path outputBase, Path workingDirectory, String defaultsPackageContents, UUID commandId, Map<String, String> clientEnv, TimestampGranularityMonitor tsgm, OptionsClassProvider options) throws InterruptedException, AbruptExitException { 
if (evaluatorNeedsReset) { // Recreate MemoizingEvaluator so that graph is recreated with correct edge-clearing status, // or if the graph doesn't have edges, so that a fresh graph can be used. resetEvaluator(); evaluatorNeedsReset = false; } super.sync(eventHandler, packageCacheOptions, skylarkSemanticsOptions, outputBase, workingDirectory, defaultsPackageContents, commandId, clientEnv, tsgm, options); handleDiffs(eventHandler, packageCacheOptions.checkOutputFiles, options); } /** * The value types whose builders have direct access to the package locator, rather than accessing * it via an explicit Skyframe dependency. They need to be invalidated if the package locator * changes. */ private static final ImmutableSet<SkyFunctionName> PACKAGE_LOCATOR_DEPENDENT_VALUES = ImmutableSet.of( SkyFunctions.AST_FILE_LOOKUP, FileStateValue.FILE_STATE, FileValue.FILE, SkyFunctions.DIRECTORY_LISTING_STATE, SkyFunctions.TARGET_PATTERN, SkyFunctions.PREPARE_DEPS_OF_PATTERN, SkyFunctions.WORKSPACE_FILE, SkyFunctions.EXTERNAL_PACKAGE, SkyFunctions.TARGET_PATTERN, SkyFunctions.TARGET_PATTERN_PHASE); @Override protected ImmutableMap<Root, ArtifactRoot> createSourceArtifactRootMapOnNewPkgLocator( PathPackageLocator oldLocator, PathPackageLocator pkgLocator) { invalidate(SkyFunctionName.functionIsIn(PACKAGE_LOCATOR_DEPENDENT_VALUES)); return super.createSourceArtifactRootMapOnNewPkgLocator(oldLocator, pkgLocator); } @Override protected void invalidate(Predicate<SkyKey> pred) { recordingDiffer.invalidate(Iterables.filter(memoizingEvaluator.getValues().keySet(), pred)); } private void invalidateDeletedPackages(Iterable<PackageIdentifier> deletedPackages) { ArrayList<SkyKey> packagesToInvalidate = Lists.newArrayList(); for (PackageIdentifier deletedPackage : deletedPackages) { packagesToInvalidate.add(PackageLookupValue.key(deletedPackage)); } recordingDiffer.invalidate(packagesToInvalidate); } /** * Sets the packages that should be treated as deleted and ignored. 
*/ @Override @VisibleForTesting // productionVisibility = Visibility.PRIVATE public void setDeletedPackages(Iterable<PackageIdentifier> pkgs) { // Invalidate the old deletedPackages as they may exist now. invalidateDeletedPackages(deletedPackages.get()); deletedPackages.set(ImmutableSet.copyOf(pkgs)); // Invalidate the new deletedPackages as we need to pretend that they don't exist now. invalidateDeletedPackages(deletedPackages.get()); } /** Uses diff awareness on all the package paths to invalidate changed files. */ @VisibleForTesting public void handleDiffs(ExtendedEventHandler eventHandler) throws InterruptedException { handleDiffs(eventHandler, /*checkOutputFiles=*/false, OptionsClassProvider.EMPTY); } private void handleDiffs( ExtendedEventHandler eventHandler, boolean checkOutputFiles, OptionsClassProvider options) throws InterruptedException { if (lastAnalysisDiscarded) { // Values were cleared last build, but they couldn't be deleted because they were needed for // the execution phase. We can delete them now. 
dropConfiguredTargetsNow(eventHandler); lastAnalysisDiscarded = false; } TimestampGranularityMonitor tsgm = this.tsgm.get(); modifiedFiles = 0; Map<Root, DiffAwarenessManager.ProcessableModifiedFileSet> modifiedFilesByPathEntry = Maps.newHashMap(); Set<Pair<Root, DiffAwarenessManager.ProcessableModifiedFileSet>> pathEntriesWithoutDiffInformation = Sets.newHashSet(); for (Root pathEntry : pkgLocator.get().getPathEntries()) { DiffAwarenessManager.ProcessableModifiedFileSet modifiedFileSet = diffAwarenessManager.getDiff(eventHandler, pathEntry, options); if (modifiedFileSet.getModifiedFileSet().treatEverythingAsModified()) { pathEntriesWithoutDiffInformation.add(Pair.of(pathEntry, modifiedFileSet)); } else { modifiedFilesByPathEntry.put(pathEntry, modifiedFileSet); } } handleDiffsWithCompleteDiffInformation(tsgm, modifiedFilesByPathEntry); handleDiffsWithMissingDiffInformation(eventHandler, tsgm, pathEntriesWithoutDiffInformation, checkOutputFiles); handleClientEnvironmentChanges(); } /** Invalidates entries in the client environment. */ private void handleClientEnvironmentChanges() { // Remove deleted client environmental variables. Iterable<SkyKey> deletedKeys = Sets.difference(previousClientEnvironment, clientEnv.get().keySet()) .stream() .map(ClientEnvironmentFunction::key) .collect(ImmutableList.toImmutableList()); recordingDiffer.invalidate(deletedKeys); previousClientEnvironment = clientEnv.get().keySet(); // Inject current client environmental values. We can inject unconditionally without fearing // over-invalidation; skyframe will not invalidate an injected key if the key's new value is the // same as the old value. 
ImmutableMap.Builder<SkyKey, SkyValue> newValuesBuilder = ImmutableMap.builder(); for (Map.Entry<String, String> entry : clientEnv.get().entrySet()) { newValuesBuilder.put( ClientEnvironmentFunction.key(entry.getKey()), new ClientEnvironmentValue(entry.getValue())); } recordingDiffer.inject(newValuesBuilder.build()); } /** * Invalidates files under path entries whose corresponding {@link DiffAwareness} gave an exact * diff. Removes entries from the given map as they are processed. All of the files need to be * invalidated, so the map should be empty upon completion of this function. */ private void handleDiffsWithCompleteDiffInformation( TimestampGranularityMonitor tsgm, Map<Root, DiffAwarenessManager.ProcessableModifiedFileSet> modifiedFilesByPathEntry) throws InterruptedException { for (Root pathEntry : ImmutableSet.copyOf(modifiedFilesByPathEntry.keySet())) { DiffAwarenessManager.ProcessableModifiedFileSet processableModifiedFileSet = modifiedFilesByPathEntry.get(pathEntry); ModifiedFileSet modifiedFileSet = processableModifiedFileSet.getModifiedFileSet(); Preconditions.checkState(!modifiedFileSet.treatEverythingAsModified(), pathEntry); handleChangedFiles(ImmutableList.of(pathEntry), getDiff(tsgm, modifiedFileSet.modifiedSourceFiles(), pathEntry)); processableModifiedFileSet.markProcessed(); } } /** * Finds and invalidates changed files under path entries whose corresponding {@link * DiffAwareness} said all files may have been modified. 
*/ private void handleDiffsWithMissingDiffInformation( ExtendedEventHandler eventHandler, TimestampGranularityMonitor tsgm, Set<Pair<Root, DiffAwarenessManager.ProcessableModifiedFileSet>> pathEntriesWithoutDiffInformation, boolean checkOutputFiles) throws InterruptedException { ExternalFilesKnowledge externalFilesKnowledge = externalFilesHelper.getExternalFilesKnowledge(); if (pathEntriesWithoutDiffInformation.isEmpty() && Iterables.isEmpty(customDirtinessCheckers) && ((!externalFilesKnowledge.anyOutputFilesSeen || !checkOutputFiles) && !externalFilesKnowledge.anyNonOutputExternalFilesSeen)) { // Avoid a full graph scan if we have good diff information for all path entries, there are // no custom checkers that need to look at the whole graph, and no external (not under any // path) files need to be checked. return; } // Before running the FilesystemValueChecker, ensure that all values marked for invalidation // have actually been invalidated (recall that invalidation happens at the beginning of the // next evaluate() call), because checking those is a waste of time. buildDriver.evaluate(ImmutableList.<SkyKey>of(), false, DEFAULT_THREAD_COUNT, eventHandler); FilesystemValueChecker fsvc = new FilesystemValueChecker(tsgm, null); // We need to manually check for changes to known files. This entails finding all dirty file // system values under package roots for which we don't have diff information. If at least // one path entry doesn't have diff information, then we're going to have to iterate over // the skyframe values at least once no matter what. Set<Root> diffPackageRootsUnderWhichToCheck = new HashSet<>(); for (Pair<Root, DiffAwarenessManager.ProcessableModifiedFileSet> pair : pathEntriesWithoutDiffInformation) { diffPackageRootsUnderWhichToCheck.add(pair.getFirst()); } // We freshly compute knowledge of the presence of external files in the skyframe graph. 
We use // a fresh ExternalFilesHelper instance and only set the real instance's knowledge *after* we // are done with the graph scan, lest an interrupt during the graph scan causes us to // incorrectly think there are no longer any external files. ExternalFilesHelper tmpExternalFilesHelper = externalFilesHelper.cloneWithFreshExternalFilesKnowledge(); // See the comment for FileType.OUTPUT for why we need to consider output files here. EnumSet<FileType> fileTypesToCheck = checkOutputFiles ? EnumSet.of(FileType.EXTERNAL, FileType.EXTERNAL_REPO, FileType.OUTPUT) : EnumSet.of(FileType.EXTERNAL, FileType.EXTERNAL_REPO); logger.info( "About to scan skyframe graph checking for filesystem nodes of types " + Iterables.toString(fileTypesToCheck)); Differencer.Diff diff = fsvc.getDirtyKeys( memoizingEvaluator.getValues(), new UnionDirtinessChecker( Iterables.concat( customDirtinessCheckers, ImmutableList.<SkyValueDirtinessChecker>of( new ExternalDirtinessChecker( tmpExternalFilesHelper, fileTypesToCheck), new MissingDiffDirtinessChecker(diffPackageRootsUnderWhichToCheck))))); handleChangedFiles(diffPackageRootsUnderWhichToCheck, diff); for (Pair<Root, DiffAwarenessManager.ProcessableModifiedFileSet> pair : pathEntriesWithoutDiffInformation) { pair.getSecond().markProcessed(); } // We use the knowledge gained during the graph scan that just completed. Otherwise, naively, // once an external file gets into the Skyframe graph, we'll overly-conservatively always think // the graph needs to be scanned. 
    externalFilesHelper.setExternalFilesKnowledge(
        tmpExternalFilesHelper.getExternalFilesKnowledge());
  }

  /**
   * Feeds a file-diff into the evaluator: invalidates keys whose new value is unknown, injects
   * keys whose new value was computed by the differ, and updates modified-file accounting.
   */
  private void handleChangedFiles(
      Collection<Root> diffPackageRootsUnderWhichToCheck, Differencer.Diff diff) {
    Collection<SkyKey> changedKeysWithoutNewValues = diff.changedKeysWithoutNewValues();
    Map<SkyKey, SkyValue> changedKeysWithNewValues = diff.changedKeysWithNewValues();

    logDiffInfo(diffPackageRootsUnderWhichToCheck, changedKeysWithoutNewValues,
        changedKeysWithNewValues);

    recordingDiffer.invalidate(changedKeysWithoutNewValues);
    recordingDiffer.inject(changedKeysWithNewValues);
    // Count FileStateValue keys from both halves of the diff (see getNumberOfModifiedFiles).
    modifiedFiles += getNumberOfModifiedFiles(changedKeysWithoutNewValues);
    modifiedFiles += getNumberOfModifiedFiles(changedKeysWithNewValues.keySet());
    incrementalBuildMonitor.accrue(changedKeysWithoutNewValues);
    incrementalBuildMonitor.accrue(changedKeysWithNewValues.keySet());
  }

  // Cap on how many individual changed keys are spelled out in the diff log line.
  private static final int MAX_NUMBER_OF_CHANGED_KEYS_TO_LOG = 10;

  /**
   * Logs a one-line summary of the diff: total modified count, the path entries checked, and up
   * to {@link #MAX_NUMBER_OF_CHANGED_KEYS_TO_LOG} of the changed keys themselves.
   */
  private static void logDiffInfo(
      Iterable<Root> pathEntries,
      Collection<SkyKey> changedWithoutNewValue,
      Map<SkyKey, ? extends SkyValue> changedWithNewValue) {
    int numModified = changedWithNewValue.size() + changedWithoutNewValue.size();
    StringBuilder result = new StringBuilder("DiffAwareness found ")
        .append(numModified)
        .append(" modified source files and directory listings");
    if (!Iterables.isEmpty(pathEntries)) {
      result.append(" for ");
      result.append(Joiner.on(", ").join(pathEntries));
    }

    if (numModified > 0) {
      Iterable<SkyKey> allModifiedKeys =
          Iterables.concat(changedWithoutNewValue, changedWithNewValue.keySet());
      Iterable<SkyKey> trimmed =
          Iterables.limit(allModifiedKeys, MAX_NUMBER_OF_CHANGED_KEYS_TO_LOG);

      result.append(": ")
          .append(Joiner.on(", ").join(trimmed));

      if (numModified > MAX_NUMBER_OF_CHANGED_KEYS_TO_LOG) {
        result.append(", ...");
      }
    }

    logger.info(result.toString());
  }

  /** Counts how many of the given keys are FILE_STATE keys, i.e. actual changed files. */
  private static int getNumberOfModifiedFiles(Iterable<SkyKey> modifiedValues) {
    // We are searching only for changed files, DirectoryListingValues don't depend on
    // child values, that's why they are invalidated separately
    return Iterables.size(
        Iterables.filter(modifiedValues, SkyFunctionName.functionIs(FileStateValue.FILE_STATE)));
  }

  /**
   * {@inheritDoc}
   *
   * <p>Necessary conditions to not store graph edges are either
   *
   * <ol>
   *   <li>batch (since incremental builds are not possible) and discard_analysis_cache (since
   *       otherwise user isn't concerned about saving memory this way).
   *   <li>track_incremental_state set to false.
   * </ol>
   */
  @Override
  public void decideKeepIncrementalState(
      boolean batch,
      boolean keepStateAfterBuild,
      boolean shouldTrackIncrementalState,
      boolean discardAnalysisCache,
      EventHandler eventHandler) {
    // Must not be mid-evaluation when this decision is made.
    Preconditions.checkState(!active);
    boolean oldValueOfTrackIncrementalState = trackIncrementalState;

    // First check if the incrementality state should be kept around during the build.
    boolean explicitlyRequestedNoIncrementalData = !shouldTrackIncrementalState;
    boolean implicitlyRequestedNoIncrementalData = (batch && discardAnalysisCache);
    trackIncrementalState =
        !explicitlyRequestedNoIncrementalData && !implicitlyRequestedNoIncrementalData;
    // Warn only when exactly one of the two discard signals is present, since the combinations
    // below are likely user mistakes rather than deliberate configuration.
    if (explicitlyRequestedNoIncrementalData != implicitlyRequestedNoIncrementalData) {
      if (!explicitlyRequestedNoIncrementalData) {
        eventHandler.handle(
            Event.warn(
                "--batch and --discard_analysis_cache specified, but --notrack_incremental_state "
                    + "not specified: incrementality data is implicitly discarded, but you may need"
                    + " to specify --notrack_incremental_state in the future if you want to "
                    + "maximize memory savings."));
      }
      if (!batch && keepStateAfterBuild) {
        eventHandler.handle(
            Event.warn(
                "--notrack_incremental_state was specified, but without "
                    + "--nokeep_state_after_build. Inmemory state from this build will not be "
                    + "reusable, but it will not get fully wiped until the beginning of the next "
                    + "build. Use --nokeep_state_after_build to clean up eagerly."));
      }
    }

    // Now check if it is necessary to wipe the previous state. We do this if either the previous
    // or current incrementalStateRetentionStrategy requires the build to have been isolated.
    if (oldValueOfTrackIncrementalState != trackIncrementalState) {
      logger.info("Set incremental state to " + trackIncrementalState);
      evaluatorNeedsReset = true;
    } else if (!trackIncrementalState) {
      // Even when unchanged, a non-incremental evaluator is reset before every build.
      evaluatorNeedsReset = true;
    }
  }

  @Override
  public boolean tracksStateForIncrementality() {
    return trackIncrementalState;
  }

  @Override
  public void invalidateFilesUnderPathForTesting(
      ExtendedEventHandler eventHandler, ModifiedFileSet modifiedFileSet, Root pathEntry)
      throws InterruptedException {
    if (lastAnalysisDiscarded) {
      // Values were cleared last build, but they couldn't be deleted because they were needed for
      // the execution phase. We can delete them now.
      dropConfiguredTargetsNow(eventHandler);
      lastAnalysisDiscarded = false;
    }
    TimestampGranularityMonitor tsgm = this.tsgm.get();
    Differencer.Diff diff;
    if (modifiedFileSet.treatEverythingAsModified()) {
      // No explicit file list: stat-check every file value in the graph for dirtiness.
      diff = new FilesystemValueChecker(tsgm, null).getDirtyKeys(memoizingEvaluator.getValues(),
          new BasicFilesystemDirtinessChecker());
    } else {
      diff = getDiff(tsgm, modifiedFileSet.modifiedSourceFiles(), pathEntry);
    }
    // NOTE(review): 42 looks like an arbitrary concurrency level for a test-only path — confirm.
    syscalls.set(getPerBuildSyscallCache(/*concurrencyLevel=*/ 42));
    recordingDiffer.invalidate(diff.changedKeysWithoutNewValues());
    recordingDiffer.inject(diff.changedKeysWithNewValues());
    // Blaze invalidates transient errors on every build.
    invalidateTransientErrors();
  }

  @Override
  public void invalidateTransientErrors() {
    checkActive();
    recordingDiffer.invalidateTransientErrors();
  }

  @Override
  public void detectModifiedOutputFiles(
      ModifiedFileSet modifiedOutputFiles, @Nullable Range<Long> lastExecutionTimeRange)
      throws AbruptExitException, InterruptedException {
    // Detect external modifications in the output tree.
    FilesystemValueChecker fsvc =
        new FilesystemValueChecker(Preconditions.checkNotNull(tsgm.get()), lastExecutionTimeRange);
    // Batch-stat support is optional; null falls back to per-file stats inside the checker.
    BatchStat batchStatter = outputService == null ? null : outputService.getBatchStatter();
    recordingDiffer.invalidate(
        fsvc.getDirtyActionValues(
            memoizingEvaluator.getValues(), batchStatter, modifiedOutputFiles));
    modifiedFiles += fsvc.getNumberOfModifiedOutputFiles();
    outputDirtyFiles += fsvc.getNumberOfModifiedOutputFiles();
    modifiedFilesDuringPreviousBuild += fsvc.getNumberOfModifiedOutputFilesDuringPreviousBuild();
    informAboutNumberOfModifiedFiles();
  }

  // Loading-phase node types that can be deleted when incrementality is off (see
  // discardAnalysisCache below).
  // NOTE(review): field is mutable (not final) — presumably an oversight; confirm before relying
  // on it being constant.
  private static ImmutableSet<SkyFunctionName> LOADING_TYPES =
      ImmutableSet.of(
          SkyFunctions.PACKAGE,
          SkyFunctions.SKYLARK_IMPORTS_LOOKUP,
          SkyFunctions.AST_FILE_LOOKUP,
          SkyFunctions.GLOB);

  /**
   * Save memory by removing references to configured targets and aspects in Skyframe.
   *
   * <p>These nodes must be recreated on subsequent builds. We do not clear the top-level target
   * nodes, since their configured targets are needed for the target completion middleman values.
   *
   * <p>The nodes are not deleted during this method call, because they are needed for the
   * execution phase. Instead, their analysis-time data is cleared while preserving the generating
   * action info needed for execution. The next build will delete the nodes (and recreate them if
   * necessary).
   *
   * <p>If {@link #tracksStateForIncrementality} is false, then also delete loading-phase nodes (as
   * determined by {@link #LOADING_TYPES}) from the graph, since there will be no future builds to
   * use them for.
   */
  private void discardAnalysisCache(
      Collection<ConfiguredTarget> topLevelTargets, Collection<AspectValue> topLevelAspects) {
    // Snapshot into sets for O(1) contains() checks in the loop below.
    topLevelTargets = ImmutableSet.copyOf(topLevelTargets);
    topLevelAspects = ImmutableSet.copyOf(topLevelAspects);
    // This is to prevent throwing away Packages we may need during execution.
    ImmutableSet.Builder<PackageIdentifier> packageSetBuilder = ImmutableSet.builder();
    packageSetBuilder.addAll(
        Collections2.transform(
            topLevelTargets, (target) -> target.getLabel().getPackageIdentifier()));
    packageSetBuilder.addAll(
        Collections2.transform(
            topLevelAspects, (aspect) -> aspect.getLabel().getPackageIdentifier()));
    ImmutableSet<PackageIdentifier> topLevelPackages = packageSetBuilder.build();
    try (AutoProfiler p = AutoProfiler.logged("discarding analysis cache", logger)) {
      lastAnalysisDiscarded = true;
      Iterator<? extends Map.Entry<SkyKey, ? extends NodeEntry>> it =
          memoizingEvaluator.getGraphMap().entrySet().iterator();
      while (it.hasNext()) {
        Map.Entry<SkyKey, ? extends NodeEntry> keyAndEntry = it.next();
        NodeEntry entry = keyAndEntry.getValue();
        if (entry == null || !entry.isDone()) {
          continue;
        }
        SkyKey key = keyAndEntry.getKey();
        SkyFunctionName functionName = key.functionName();
        // Keep packages for top-level targets and aspects in memory to get the target from later.
        if (functionName.equals(SkyFunctions.PACKAGE)
            && topLevelPackages.contains((key.argument()))) {
          continue;
        }
        if (!tracksStateForIncrementality() && LOADING_TYPES.contains(functionName)) {
          it.remove();
          continue;
        }
        if (functionName.equals(SkyFunctions.CONFIGURED_TARGET)) {
          ConfiguredTargetValue ctValue;
          try {
            ctValue = (ConfiguredTargetValue) entry.getValue();
          } catch (InterruptedException e) {
            throw new IllegalStateException("No interruption in sequenced evaluation", e);
          }
          // ctValue may be null if target was not successfully analyzed.
          if (ctValue != null) {
            // Clear analysis data; keep action info only for top-level targets.
            ctValue.clear(!topLevelTargets.contains(ctValue.getConfiguredTarget()));
          }
        } else if (functionName.equals(SkyFunctions.ASPECT)) {
          AspectValue aspectValue;
          try {
            aspectValue = (AspectValue) entry.getValue();
          } catch (InterruptedException e) {
            throw new IllegalStateException("No interruption in sequenced evaluation", e);
          }
          // value may be null if target was not successfully analyzed.
          if (aspectValue != null) {
            aspectValue.clear(!topLevelAspects.contains(aspectValue));
          }
        }
      }
    }
  }

  @Override
  public void clearAnalysisCache(
      Collection<ConfiguredTarget> topLevelTargets, Collection<AspectValue> topLevelAspects) {
    discardAnalysisCache(topLevelTargets, topLevelAspects);
  }

  /**
   * Walks the whole Skyframe graph and aggregates per-rule-class (and per-aspect-class)
   * statistics from done CONFIGURED_TARGET and ASPECT nodes.
   */
  @Override
  public List<RuleStat> getRuleStats(ExtendedEventHandler eventHandler) {
    Map<String, RuleStat> ruleStats = new HashMap<>();
    for (Map.Entry<SkyKey, ? extends NodeEntry> skyKeyAndNodeEntry :
        memoizingEvaluator.getGraphMap().entrySet()) {
      NodeEntry entry = skyKeyAndNodeEntry.getValue();
      if (entry == null || !entry.isDone()) {
        continue;
      }
      SkyKey key = skyKeyAndNodeEntry.getKey();
      SkyFunctionName functionName = key.functionName();
      if (functionName.equals(SkyFunctions.CONFIGURED_TARGET)) {
        try {
          ConfiguredTargetValue ctValue = (ConfiguredTargetValue) entry.getValue();
          ConfiguredTarget configuredTarget = ctValue.getConfiguredTarget();
          if (configuredTarget instanceof RuleConfiguredTarget) {

            Rule rule;
            try {
              rule =
                  (Rule) getPackageManager().getTarget(eventHandler, configuredTarget.getLabel());
            } catch (NoSuchPackageException | NoSuchTargetException | InterruptedException e) {
              throw new IllegalStateException(
                  "Failed to get Rule target from package when calculating stats.", e);
            }
            RuleConfiguredTarget ruleConfiguredTarget = (RuleConfiguredTarget) configuredTarget;
            RuleClass ruleClass = rule.getRuleClassObject();
            RuleStat ruleStat =
                ruleStats.computeIfAbsent(
                    ruleClass.getKey(), k -> new RuleStat(k, ruleClass.getName(), true));
            ruleStat.addRule(ctValue.getNumActions());
          }
        } catch (InterruptedException e) {
          throw new IllegalStateException("No interruption in sequenced evaluation", e);
        }
      } else if (functionName.equals(SkyFunctions.ASPECT)) {
        try {
          AspectValue aspectValue = (AspectValue) entry.getValue();
          AspectClass aspectClass = aspectValue.getAspect().getAspectClass();
          RuleStat ruleStat =
              ruleStats.computeIfAbsent(
                  aspectClass.getKey(), k -> new RuleStat(k, aspectClass.getName(), false));
          ruleStat.addRule(aspectValue.getNumActions());
        } catch (InterruptedException e) {
          throw new IllegalStateException("No interruption in sequenced evaluation", e);
        }
      }
    }
    return new ArrayList<>(ruleStats.values());
  }

  /**
   * Dumps the action graph for the requested targets by visiting every CONFIGURED_TARGET and
   * ASPECT node in the evaluator graph.
   */
  @Override
  public ActionGraphContainer getActionGraphContainer(
      List<String> actionGraphTargets, boolean includeActionCmdLine)
      throws CommandLineExpansionException {
    ActionGraphDump actionGraphDump = new ActionGraphDump(actionGraphTargets, includeActionCmdLine);
    for (Map.Entry<SkyKey, ? extends NodeEntry> skyKeyAndNodeEntry :
        memoizingEvaluator.getGraphMap().entrySet()) {
      NodeEntry entry = skyKeyAndNodeEntry.getValue();
      SkyKey key = skyKeyAndNodeEntry.getKey();
      SkyFunctionName functionName = key.functionName();
      try {
        SkyValue skyValue = entry.getValue();
        // The skyValue may be null in case analysis of the previous build failed.
        if (skyValue != null) {
          if (functionName.equals(SkyFunctions.CONFIGURED_TARGET)) {
            actionGraphDump.dumpConfiguredTarget((ConfiguredTargetValue) skyValue);
          } else if (functionName.equals(SkyFunctions.ASPECT)) {
            AspectValue aspectValue = (AspectValue) skyValue;
            AspectKey aspectKey = aspectValue.getKey();
            // Look up the configured target the aspect is attached to.
            ConfiguredTargetValue configuredTargetValue =
                (ConfiguredTargetValue)
                    memoizingEvaluator.getExistingValue(aspectKey.getBaseConfiguredTargetKey());
            actionGraphDump.dumpAspect(aspectValue, configuredTargetValue);
          }
        }
      } catch (InterruptedException e) {
        // Restore the interrupt flag before converting to an unchecked failure.
        Thread.currentThread().interrupt();
        throw new IllegalStateException("No interruption in sequenced evaluation", e);
      }
    }
    return actionGraphDump.build();
  }

  /**
   * In addition to calling the superclass method, deletes all ConfiguredTarget values from the
   * Skyframe cache. This is done to save memory (e.g. on a configuration change); since the
   * configuration is part of the key, these key/value pairs will be sitting around doing nothing
   * until the configuration changes back to the previous value.
   *
   * <p>The next evaluation will delete all invalid values.
   */
  @Override
  public void handleConfiguredTargetChange() {
    super.handleConfiguredTargetChange();
    memoizingEvaluator.delete(
        // We delete any value that can hold an action -- all subclasses of ActionLookupValue -- as
        // well as ActionExecutionValues, since they do not depend on ActionLookupValues.
        SkyFunctionName.functionIsIn(ImmutableSet.of(
            SkyFunctions.CONFIGURED_TARGET,
            SkyFunctions.BUILD_INFO,
            SkyFunctions.TARGET_COMPLETION,
            SkyFunctions.BUILD_INFO_COLLECTION,
            SkyFunctions.ACTION_EXECUTION))
    );
  }

  /**
   * Deletes all ConfiguredTarget values from the Skyframe cache.
   *
   * <p>After the execution of this method all invalidated and marked for deletion values (and the
   * values depending on them) will be deleted from the cache.
   *
   * <p>WARNING: Note that a call to this method leaves legacy data inconsistent with Skyframe. The
   * next build should clear the legacy caches.
   */
  private void dropConfiguredTargetsNow(final ExtendedEventHandler eventHandler) {
    handleConfiguredTargetChange();
    // Run the invalidator to actually delete the values.
    try {
      // Suppress progress reporting while the no-op evaluation performs the deletions.
      progressReceiver.ignoreInvalidations = true;
      Uninterruptibles.callUninterruptibly(new Callable<Void>() {
        @Override
        public Void call() throws InterruptedException {
          buildDriver.evaluate(ImmutableList.<SkyKey>of(), false,
              ResourceUsage.getAvailableProcessors(), eventHandler);
          return null;
        }
      });
    } catch (Exception e) {
      throw new IllegalStateException(e);
    } finally {
      progressReceiver.ignoreInvalidations = false;
    }
  }

  @Override
  public void deleteOldNodes(long versionWindowForDirtyGc) {
    // TODO(bazel-team): perhaps we should come up with a separate GC class dedicated to maintaining
    // value garbage. If we ever do so, this logic should be moved there.
    memoizingEvaluator.deleteDirty(versionWindowForDirtyGc);
  }

  /** Prints every Package currently held in the graph to the given stream, for debugging. */
  @Override
  public void dumpPackages(PrintStream out) {
    Iterable<SkyKey> packageSkyKeys = Iterables.filter(memoizingEvaluator.getValues().keySet(),
        SkyFunctions.isSkyFunction(SkyFunctions.PACKAGE));
    out.println(Iterables.size(packageSkyKeys) + " packages");
    for (SkyKey packageSkyKey : packageSkyKeys) {
      Package pkg = ((PackageValue) memoizingEvaluator.getValues().get(packageSkyKey)).getPackage();
      pkg.dump(out);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.auth;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import com.google.common.collect.Lists;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

import static org.apache.hadoop.fs.s3a.auth.RoleModel.*;

/**
 * Operations, statements and policies covering the operations
 * needed to work with S3 and S3Guard.
 */
@InterfaceAudience.LimitedPrivate("Tests")
@InterfaceStability.Unstable
public final class RolePolicies {

  private RolePolicies() {
  }

  /** All KMS operations: {@value}.*/
  public static final String KMS_ALL_OPERATIONS = "kms:*";

  /** KMS encryption. This is <i>Not</i> used by SSE-KMS: {@value}. */
  public static final String KMS_ENCRYPT = "kms:Encrypt";

  /**
   * Decrypt data encrypted with SSE-KMS: {@value}.
   */
  public static final String KMS_DECRYPT = "kms:Decrypt";

  /**
   * Arn for all KMS keys: {@value}.
   */
  public static final String KMS_ALL_KEYS = "*";

  /**
   * This is used by S3 to generate a per-object encryption key and
   * the encrypted value of this, the latter being what it tags
   * the object with for later decryption: {@value}.
   */
  public static final String KMS_GENERATE_DATA_KEY = "kms:GenerateDataKey";

  /**
   * Actions needed to read and write SSE-KMS data.
   */
  // NOTE(review): currently unreferenced — STATEMENT_ALLOW_SSE_KMS_RW grants
  // KMS_ALL_OPERATIONS instead of this narrower set. Confirm which was intended.
  private static final String[] KMS_KEY_RW =
      new String[]{KMS_DECRYPT, KMS_GENERATE_DATA_KEY, KMS_ENCRYPT};

  /**
   * Actions needed to read SSE-KMS data.
   */
  private static final String[] KMS_KEY_READ =
      new String[] {KMS_DECRYPT};

  /**
   * Statement to allow KMS R/W access, so full use of
   * SSE-KMS.
   */
  public static final Statement STATEMENT_ALLOW_SSE_KMS_RW =
      statement(true, KMS_ALL_KEYS, KMS_ALL_OPERATIONS);

  /**
   * Statement to allow decrypt access to KMS keys (the only action in
   * {@link #KMS_KEY_READ}), so the ability to read SSE-KMS data, but not
   * to encrypt or generate new data keys.
   */
  public static final Statement STATEMENT_ALLOW_SSE_KMS_READ =
      statement(true, KMS_ALL_KEYS, KMS_KEY_READ);

  /**
   * All S3 operations: {@value}.
   */
  public static final String S3_ALL_OPERATIONS = "s3:*";

  /**
   * All S3 buckets: {@value}.
   */
  public static final String S3_ALL_BUCKETS = "arn:aws:s3:::*";

  /**
   * All bucket list operations, including
   * {@link #S3_BUCKET_LIST_BUCKET} and
   * {@link #S3_BUCKET_LIST_MULTIPART_UPLOADS}.
   */
  public static final String S3_BUCKET_ALL_LIST = "s3:ListBucket*";

  /**
   * List the contents of a bucket.
   * It applies to a bucket, not to a path in a bucket.
   */
  public static final String S3_BUCKET_LIST_BUCKET = "s3:ListBucket";

  /**
   * This is used by the abort operation in S3A commit work.
   * It applies to a bucket, not to a path in a bucket.
   */
  public static final String S3_BUCKET_LIST_MULTIPART_UPLOADS =
      "s3:ListBucketMultipartUploads";

  /**
   * List multipart upload is needed for the S3A Commit protocols.
   * It applies to a path in a bucket.
   */
  public static final String S3_LIST_MULTIPART_UPLOAD_PARTS
      = "s3:ListMultipartUploadParts";

  /**
   * Abort multipart upload is needed for the S3A Commit protocols.
   * It applies to a path in a bucket.
   */
  public static final String S3_ABORT_MULTIPART_UPLOAD
      = "s3:AbortMultipartUpload";

  /**
   * All s3:Delete* operations.
   */
  public static final String S3_ALL_DELETE = "s3:Delete*";

  public static final String S3_DELETE_OBJECT = "s3:DeleteObject";

  public static final String S3_DELETE_OBJECT_TAGGING
      = "s3:DeleteObjectTagging";

  public static final String S3_DELETE_OBJECT_VERSION
      = "s3:DeleteObjectVersion";

  public static final String S3_DELETE_OBJECT_VERSION_TAGGING
      = "s3:DeleteObjectVersionTagging";

  /**
   * All s3:Get* operations.
   */
  public static final String S3_ALL_GET = "s3:Get*";

  public static final String S3_GET_OBJECT = "s3:GetObject";

  public static final String S3_GET_OBJECT_ACL = "s3:GetObjectAcl";

  public static final String S3_GET_OBJECT_TAGGING = "s3:GetObjectTagging";

  public static final String S3_GET_OBJECT_TORRENT = "s3:GetObjectTorrent";

  public static final String S3_GET_OBJECT_VERSION = "s3:GetObjectVersion";

  public static final String S3_GET_BUCKET_LOCATION = "s3:GetBucketLocation";

  public static final String S3_GET_OBJECT_VERSION_ACL
      = "s3:GetObjectVersionAcl";

  public static final String S3_GET_OBJECT_VERSION_TAGGING
      = "s3:GetObjectVersionTagging";

  public static final String S3_GET_OBJECT_VERSION_TORRENT
      = "s3:GetObjectVersionTorrent";

  /**
   * S3 Put*.
   * This covers single and multipart uploads, but not list/abort of the latter.
   */
  public static final String S3_ALL_PUT = "s3:Put*";

  public static final String S3_PUT_OBJECT = "s3:PutObject";

  public static final String S3_PUT_OBJECT_ACL = "s3:PutObjectAcl";

  public static final String S3_PUT_OBJECT_TAGGING = "s3:PutObjectTagging";

  public static final String S3_PUT_OBJECT_VERSION_ACL
      = "s3:PutObjectVersionAcl";

  public static final String S3_PUT_OBJECT_VERSION_TAGGING
      = "s3:PutObjectVersionTagging";

  public static final String S3_RESTORE_OBJECT = "s3:RestoreObject";

  /**
   * Actions needed to read a file in S3 through S3A, excluding
   * S3Guard and SSE-KMS.
   */
  private static final String[] S3_PATH_READ_OPERATIONS =
      new String[]{
          S3_GET_OBJECT,
      };

  /**
   * Base actions needed to read data from S3 through S3A,
   * excluding:
   * <ol>
   *   <li>bucket-level operations</li>
   *   <li>SSE-KMS key operations</li>
   *   <li>DynamoDB operations for S3Guard.</li>
   * </ol>
   * As this excludes the bucket list operations, it is not sufficient
   * to read from a bucket on its own.
   */
  private static final String[] S3_ROOT_READ_OPERATIONS =
      new String[]{
          S3_ALL_GET,
      };

  public static final List<String> S3_ROOT_READ_OPERATIONS_LIST =
      Collections.unmodifiableList(Arrays.asList(S3_ALL_GET));

  /**
   * Policies which can be applied to bucket resources for read operations.
   * <ol>
   *   <li>SSE-KMS key operations</li>
   *   <li>DynamoDB operations for S3Guard.</li>
   * </ol>
   */
  public static final String[] S3_BUCKET_READ_OPERATIONS =
      new String[]{
          S3_ALL_GET,
          S3_BUCKET_ALL_LIST,
      };

  /**
   * Actions needed to write data to an S3A Path.
   * This includes the appropriate read operations, but
   * not SSE-KMS or S3Guard support.
   */
  public static final List<String> S3_PATH_RW_OPERATIONS =
      Collections.unmodifiableList(Arrays.asList(new String[]{
          S3_ALL_GET,
          S3_PUT_OBJECT,
          S3_DELETE_OBJECT,
          S3_ABORT_MULTIPART_UPLOAD,
      }));

  /**
   * Actions needed to write data to an S3A Path.
   * This is purely the extra operations needed for writing atop
   * of the read operation set.
   * Deny these and a path is still readable, but not writeable.
   * Excludes: bucket-ARN, SSE-KMS and S3Guard permissions.
   */
  public static final List<String> S3_PATH_WRITE_OPERATIONS =
      Collections.unmodifiableList(Arrays.asList(new String[]{
          S3_PUT_OBJECT,
          S3_DELETE_OBJECT,
          S3_ABORT_MULTIPART_UPLOAD
      }));

  /**
   * Actions needed for R/W IO from the root of a bucket.
   * Excludes: bucket-ARN, SSE-KMS and S3Guard permissions.
   */
  public static final List<String> S3_ROOT_RW_OPERATIONS =
      Collections.unmodifiableList(Arrays.asList(new String[]{
          S3_ALL_GET,
          S3_PUT_OBJECT,
          S3_DELETE_OBJECT,
          S3_ABORT_MULTIPART_UPLOAD,
      }));

  /**
   * All DynamoDB operations: {@value}.
   */
  public static final String DDB_ALL_OPERATIONS = "dynamodb:*";

  /**
   * Operations needed for DDB/S3Guard Admin.
   * For now: make this {@link #DDB_ALL_OPERATIONS}.
   */
  public static final String DDB_ADMIN = DDB_ALL_OPERATIONS;

  /**
   * Permission for DDB describeTable() operation: {@value}.
   * This is used during initialization.
   */
  public static final String DDB_DESCRIBE_TABLE = "dynamodb:DescribeTable";

  /**
   * Permission to query the DDB table: {@value}.
   */
  public static final String DDB_QUERY = "dynamodb:Query";

  /**
   * Permission for DDB operation to get a record: {@value}.
   */
  public static final String DDB_GET_ITEM = "dynamodb:GetItem";

  /**
   * Permission for DDB write record operation: {@value}.
   */
  public static final String DDB_PUT_ITEM = "dynamodb:PutItem";

  /**
   * Permission for DDB update single item operation: {@value}.
   */
  public static final String DDB_UPDATE_ITEM = "dynamodb:UpdateItem";

  /**
   * Permission for DDB delete operation: {@value}.
   */
  public static final String DDB_DELETE_ITEM = "dynamodb:DeleteItem";

  /**
   * Permission for DDB operation: {@value}.
   */
  public static final String DDB_BATCH_GET_ITEM = "dynamodb:BatchGetItem";

  /**
   * Batch write permission for DDB: {@value}.
   */
  public static final String DDB_BATCH_WRITE_ITEM = "dynamodb:BatchWriteItem";

  /**
   * All DynamoDB tables: {@value}.
   */
  public static final String ALL_DDB_TABLES = "arn:aws:dynamodb:*";

  /**
   * Statement to allow all DDB access.
   */
  public static final Statement STATEMENT_ALL_DDB =
      allowAllDynamoDBOperations(ALL_DDB_TABLES);

  /**
   * Statement to allow all client operations needed for S3Guard,
   * but none of the admin operations.
   */
  public static final Statement STATEMENT_S3GUARD_CLIENT =
      allowS3GuardClientOperations(ALL_DDB_TABLES);

  /**
   * Allow all S3 Operations.
   * This does not cover DDB or S3-KMS.
   */
  public static final Statement STATEMENT_ALL_S3 = statement(true,
      S3_ALL_BUCKETS,
      S3_ALL_OPERATIONS);

  /**
   * The s3:GetBucketLocation permission is for all buckets, not for
   * any named bucket, which complicates permissions.
   */
  public static final Statement STATEMENT_ALL_S3_GET_BUCKET_LOCATION =
      statement(true,
          S3_ALL_BUCKETS,
          S3_GET_BUCKET_LOCATION);

  /**
   * Policy for all S3 and S3Guard operations, and SSE-KMS.
   */
  public static final Policy ALLOW_S3_AND_SGUARD = policy(
      STATEMENT_ALL_S3,
      STATEMENT_ALL_DDB,
      STATEMENT_ALLOW_SSE_KMS_RW,
      STATEMENT_ALL_S3_GET_BUCKET_LOCATION
  );

  /**
   * Builds the statement granting the non-admin DynamoDB operations S3Guard
   * clients need against the given table.
   * @param tableArn ARN of the DynamoDB table.
   * @return an allow statement for those operations.
   */
  public static Statement allowS3GuardClientOperations(String tableArn) {
    return statement(true,
        tableArn,
        DDB_BATCH_GET_ITEM,
        DDB_BATCH_WRITE_ITEM,
        DDB_DELETE_ITEM,
        DDB_DESCRIBE_TABLE,
        DDB_GET_ITEM,
        DDB_PUT_ITEM,
        DDB_QUERY,
        DDB_UPDATE_ITEM
    );
  }

  /**
   * Builds the statement granting every DynamoDB operation on the given table.
   * @param tableArn ARN of the DynamoDB table.
   * @return an allow statement for all DDB operations.
   */
  public static Statement allowAllDynamoDBOperations(String tableArn) {
    return statement(true,
        tableArn,
        DDB_ALL_OPERATIONS);
  }

  /**
   * From an S3 bucket name, build the list of statements granting
   * read (or read/write) access to the bucket and its objects.
   * @param bucket bucket name.
   * @param write are write permissions required
   * @return statements granting access.
   */
  public static List<Statement> allowS3Operations(String bucket,
      boolean write) {
    // add the bucket operations for the specific bucket ARN
    ArrayList<Statement> statements =
        Lists.newArrayList(
            statement(true,
                bucketToArn(bucket),
                S3_GET_BUCKET_LOCATION, S3_BUCKET_ALL_LIST));
    // then add the statements for objects in the buckets
    if (write) {
      statements.add(
          statement(true,
              bucketObjectsToArn(bucket),
              S3_ROOT_RW_OPERATIONS));
    } else {
      statements.add(
          statement(true,
              bucketObjectsToArn(bucket),
              S3_ROOT_READ_OPERATIONS_LIST));
    }
    return statements;
  }

  /**
   * From an S3 bucket name, build an ARN to refer to all objects in
   * it.
   * @param bucket bucket name.
   * @return return the ARN to use in statements.
   */
  public static String bucketObjectsToArn(String bucket) {
    return String.format("arn:aws:s3:::%s/*", bucket);
  }

  /**
   * From an S3 bucket name, build an ARN to refer to it.
   * @param bucket bucket name.
   * @return return the ARN to use in statements.
   */
  public static String bucketToArn(String bucket) {
    return String.format("arn:aws:s3:::%s", bucket);
  }
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * DeleteDhcpOptions.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.1  Built on : Oct 19, 2009 (10:59:34 EDT)
 *
 * NOTE: generated ADB bean — do not hand-edit logic; regenerate from the WSDL instead.
 */

package com.amazon.ec2;

/**
 * DeleteDhcpOptions bean class
 */
public class DeleteDhcpOptions implements org.apache.axis2.databinding.ADBBean {

    // Qualified name of the element this bean (de)serializes.
    public static final javax.xml.namespace.QName MY_QNAME =
        new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2010-11-15/",
            "DeleteDhcpOptions",
            "ns1");

    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://ec2.amazonaws.com/doc/2010-11-15/")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for DeleteDhcpOptions
     */
    protected com.amazon.ec2.DeleteDhcpOptionsType localDeleteDhcpOptions;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.DeleteDhcpOptionsType
     */
    public com.amazon.ec2.DeleteDhcpOptionsType getDeleteDhcpOptions() {
        return localDeleteDhcpOptions;
    }

    /**
     * Auto generated setter method
     * @param param DeleteDhcpOptions
     */
    public void setDeleteDhcpOptions(com.amazon.ec2.DeleteDhcpOptionsType param) {
        this.localDeleteDhcpOptions = param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // Reader does not recognize the property; treat as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     *
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory)
        throws org.apache.axis2.databinding.ADBException {

        // Lazily serialized: the data source invokes serialize() only when the element is consumed.
        org.apache.axiom.om.OMDataSource dataSource =
            new org.apache.axis2.databinding.ADBDataSource(this, MY_QNAME) {

                public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
                    throws javax.xml.stream.XMLStreamException {
                    DeleteDhcpOptions.this.serialize(MY_QNAME, factory, xmlWriter);
                }
            };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
            MY_QNAME, factory, dataSource);
    }

    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
        serialize(parentQName, factory, xmlWriter, false);
    }

    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {

        //We can safely assume an element has only one type associated with it
        // NOTE(review): serializeType is intentionally ignored by this generated wrapper —
        // serialization is fully delegated to the inner type.

        if (localDeleteDhcpOptions == null) {
            throw new org.apache.axis2.databinding.ADBException("Property cannot be null!");
        }
        localDeleteDhcpOptions.serialize(MY_QNAME, factory, xmlWriter);
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix, java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute, resolving the value's prefix first
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }

            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the character data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }

                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace)
        throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);

        if (prefix == null) {
            prefix = generatePrefix(namespace);

            // Keep generating until the prefix does not collide with one already bound.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }

            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }

        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException {

        //We can safely assume an element has only one type associated with it
        // NOTE(review): throws NullPointerException if the wrapped element was never set —
        // matches generated-code behavior; confirm callers always populate it first.
        return localDeleteDhcpOptions.getPullParser(MY_QNAME);
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {

        /**
         * static method to create the object
         * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         *                If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         *                If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DeleteDhcpOptions parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
            DeleteDhcpOptions object = new DeleteDhcpOptions();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";
            try {
                // Skip to the first start (or end) element.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                while (!reader.isEndElement()) {
                    if (reader.isStartElement()) {

                        if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/", "DeleteDhcpOptions").equals(reader.getName())) {

                            object.setDeleteDhcpOptions(com.amazon.ec2.DeleteDhcpOptionsType.Factory.parse(reader));

                        }  // End of if for expected property start element

                        else {
                            // A start element we are not expecting indicates an invalid parameter was passed
                            throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                        }

                    } else {
                        reader.next();
                    }
                }  // end of while loop

            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }

    }  //end of factory class

}
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.undertow.server.protocol.http;

import java.io.IOException;
import java.nio.channels.Channel;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.xnio.ChannelExceptionHandler;
import org.xnio.ChannelListener;
import org.xnio.ChannelListeners;
import org.xnio.channels.StreamSinkChannel;

import io.undertow.UndertowMessages;
import io.undertow.io.IoCallback;
import io.undertow.server.HttpHandler;
import io.undertow.server.HttpServerExchange;
import io.undertow.util.AttachmentKey;
import io.undertow.util.HeaderMap;
import io.undertow.util.Headers;
import io.undertow.util.HttpString;
import io.undertow.util.Protocols;
import io.undertow.util.StatusCodes;

/**
 * Class that provides support for dealing with HTTP 100 (Continue) responses.
 * <p>
 * Note that if a client is pipelining some requests and sending continue for others this
 * could cause problems if the pipelining buffer is enabled.
 *
 * @author Stuart Douglas
 */
public class HttpContinue {

    // Protocols for which a 100 Continue interim response may be sent
    // (HTTP/1.1 and HTTP/2; HTTP/1.0 clients must not get one).
    private static final Set<HttpString> COMPATIBLE_PROTOCOLS;

    static {
        Set<HttpString> compat = new HashSet<>();
        compat.add(Protocols.HTTP_1_1);
        compat.add(Protocols.HTTP_2_0);
        COMPATIBLE_PROTOCOLS = Collections.unmodifiableSet(compat);
    }

    // The Expect header token that triggers a continue response.
    public static final String CONTINUE = "100-continue";

    // Marks an exchange whose continue response has already been sent, so we
    // never send it twice for the same request.
    private static final AttachmentKey<Boolean> ALREADY_SENT = AttachmentKey.create(Boolean.class);

    /**
     * Returns true if this exchange requires the server to send a 100 (Continue) response.
     *
     * @param exchange The exchange
     * @return <code>true</code> if the server needs to send a continue response
     */
    public static boolean requiresContinueResponse(final HttpServerExchange exchange) {
        // No continue response for incompatible protocols, responses already under way,
        // connections that cannot carry one, or exchanges we already answered.
        if (!COMPATIBLE_PROTOCOLS.contains(exchange.getProtocol()) || exchange.isResponseStarted() || !exchange.getConnection().isContinueResponseSupported() || exchange.getAttachment(ALREADY_SENT) != null) {
            return false;
        }

        // An explicitly empty body means there is nothing for the client to send,
        // so a continue response would be pointless.
        if(exchange.getRequestContentLength() == 0) {
            return false;
        }

        if (exchange.getConnection() instanceof HttpServerConnection) {
            if (((HttpServerConnection) exchange.getConnection()).getExtraBytes() != null) {
                //we have already received some of the request body
                //so according to the RFC we do not need to send the Continue
                return false;
            }
        }

        HeaderMap requestHeaders = exchange.getRequestHeaders();
        return requiresContinueResponse(requestHeaders);
    }

    /**
     * Returns true if the given request headers contain an
     * {@code Expect: 100-continue} token (case-insensitive).
     *
     * @param requestHeaders the request headers to inspect
     * @return {@code true} if a continue response is expected by the client
     */
    public static boolean requiresContinueResponse(HeaderMap requestHeaders) {
        List<String> expect = requestHeaders.get(Headers.EXPECT);
        if (expect != null) {
            for (String header : expect) {
                if (header.equalsIgnoreCase(CONTINUE)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Sends a continuation using async IO, and calls back when it is complete.
     *
     * @param exchange The exchange
     * @param callback The completion callback
     */
    public static void sendContinueResponse(final HttpServerExchange exchange, final IoCallback callback) {
        if (!exchange.isResponseChannelAvailable()) {
            // Cannot obtain a fresh response channel; report failure via the callback
            // rather than throwing, to match the async contract.
            callback.onException(exchange, null, UndertowMessages.MESSAGES.cannotSendContinueResponse());
            return;
        }
        internalSendContinueResponse(exchange, callback);
    }

    /**
     * Creates a response sender that can be used to send a HTTP 100-continue response.
     *
     * @param exchange The exchange
     * @return The response sender
     */
    public static ContinueResponseSender createResponseSender(final HttpServerExchange exchange) throws IOException {
        if (!exchange.isResponseChannelAvailable()) {
            throw UndertowMessages.MESSAGES.cannotSendContinueResponse();
        }
        if(exchange.getAttachment(ALREADY_SENT) != null) {
            // Continue response already sent: return a no-op sender that reports
            // immediate completion.
            return new ContinueResponseSender() {
                @Override
                public boolean send() throws IOException {
                    return true;
                }

                @Override
                public void awaitWritable() throws IOException {

                }

                @Override
                public void awaitWritable(long time, TimeUnit timeUnit) throws IOException {

                }
            };
        }
        // The interim 100 response is sent on a separate out-of-band exchange so the
        // real response for this request can still be produced afterwards.
        HttpServerExchange newExchange = exchange.getConnection().sendOutOfBandResponse(exchange);
        exchange.putAttachment(ALREADY_SENT, true);
        newExchange.setStatusCode(StatusCodes.CONTINUE);
        newExchange.getResponseHeaders().put(Headers.CONTENT_LENGTH, 0);
        final StreamSinkChannel responseChannel = newExchange.getResponseChannel();
        return new ContinueResponseSender() {
            // Whether shutdownWrites() has been issued; it must happen exactly once
            // before flushing, even across repeated send() calls.
            boolean shutdown = false;

            @Override
            public boolean send() throws IOException {
                if (!shutdown) {
                    shutdown = true;
                    responseChannel.shutdownWrites();
                }
                return responseChannel.flush();
            }

            @Override
            public void awaitWritable() throws IOException {
                responseChannel.awaitWritable();
            }

            @Override
            public void awaitWritable(final long time, final TimeUnit timeUnit) throws IOException {
                responseChannel.awaitWritable(time, timeUnit);
            }
        };
    }

    /**
     * Sends a continue response using blocking IO
     *
     * @param exchange The exchange
     */
    public static void sendContinueResponseBlocking(final HttpServerExchange exchange) throws IOException {
        if (!exchange.isResponseChannelAvailable()) {
            throw UndertowMessages.MESSAGES.cannotSendContinueResponse();
        }
        if(exchange.getAttachment(ALREADY_SENT) != null) {
            // Already sent for this request; nothing to do.
            return;
        }
        HttpServerExchange newExchange = exchange.getConnection().sendOutOfBandResponse(exchange);
        exchange.putAttachment(ALREADY_SENT, true);
        newExchange.setStatusCode(StatusCodes.CONTINUE);
        newExchange.getResponseHeaders().put(Headers.CONTENT_LENGTH, 0);
        newExchange.startBlocking();
        // Closing the (empty) streams flushes the interim response to the client.
        newExchange.getOutputStream().close();
        newExchange.getInputStream().close();
    }

    /**
     * Sets a 417 response code and ends the exchange.
     *
     * @param exchange The exchange to reject
     */
    public static void rejectExchange(final HttpServerExchange exchange) {
        exchange.setStatusCode(StatusCodes.EXPECTATION_FAILED);
        // The request body was never read, so the connection cannot be reused.
        exchange.setPersistent(false);
        exchange.endExchange();
    }

    // Performs the actual async send: writes the 100 response on an out-of-band
    // exchange, and if the flush cannot complete immediately, registers a flushing
    // listener that dispatches the callback once the write finishes (or fails).
    private static void internalSendContinueResponse(final HttpServerExchange exchange, final IoCallback callback) {
        if(exchange.getAttachment(ALREADY_SENT) != null) {
            callback.onComplete(exchange, null);
            return;
        }
        HttpServerExchange newExchange = exchange.getConnection().sendOutOfBandResponse(exchange);
        exchange.putAttachment(ALREADY_SENT, true);
        newExchange.setStatusCode(StatusCodes.CONTINUE);
        newExchange.getResponseHeaders().put(Headers.CONTENT_LENGTH, 0);
        final StreamSinkChannel responseChannel = newExchange.getResponseChannel();
        try {
            responseChannel.shutdownWrites();
            if (!responseChannel.flush()) {
                // Flush did not complete; finish it asynchronously and only then
                // hand control back to the caller via a dispatched handler.
                responseChannel.getWriteSetter().set(ChannelListeners.flushingChannelListener(
                        new ChannelListener<StreamSinkChannel>() {
                            @Override
                            public void handleEvent(StreamSinkChannel channel) {
                                channel.suspendWrites();
                                exchange.dispatch(new HttpHandler() {
                                    @Override
                                    public void handleRequest(HttpServerExchange exchange) throws Exception {
                                        callback.onComplete(exchange, null);
                                    }
                                });
                            }
                        }, new ChannelExceptionHandler<Channel>() {
                            @Override
                            public void handleException(Channel channel, final IOException e) {
                                exchange.dispatch(new HttpHandler() {
                                    @Override
                                    public void handleRequest(HttpServerExchange exchange) throws Exception {
                                        callback.onException(exchange, null, e);
                                    }
                                });
                            }
                        }));
                responseChannel.resumeWrites();
                exchange.dispatch();
            } else {
                // Flushed synchronously; complete right away.
                callback.onComplete(exchange, null);
            }
        } catch (IOException e) {
            callback.onException(exchange, null, e);
        }
    }

    /**
     * A continue response that is in the process of being sent.
     */
    public interface ContinueResponseSender {

        /**
         * Continue sending the response.
         *
         * @return true if the response is fully sent, false otherwise.
         */
        boolean send() throws IOException;

        void awaitWritable() throws IOException;

        void awaitWritable(long time, final TimeUnit timeUnit) throws IOException;

    }

}
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2013, Red Hat, Inc. and/or its affiliates, and individual
 * contributors by the @authors tag. See the copyright.txt in the
 * distribution for a full listing of individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.arquillian.extension.recorder.video;

import java.io.File;

import org.arquillian.extension.recorder.Configuration;
import org.arquillian.recorder.reporter.ReporterConfiguration;

/**
 * Video configuration for every recorder extension implementation.
 *
 * @author <a href="mailto:smikloso@redhat.com">Stefan Miklosovic</a>
 *
 */
public class VideoConfiguration extends Configuration<VideoConfiguration> {

    // Defaults; the effective values are resolved through getProperty(...) so
    // user-supplied configuration (e.g. arquillian.xml) always wins.
    private String rootDir = "target/videos";
    private String videoType = VideoType.MP4.name();
    private String startBeforeTest = "false";
    private String startBeforeClass = "false";
    private String startBeforeSuite = "false";
    private String takeOnlyOnFail = "true";
    private String videoName = "record";
    private String testTimeout = "1800"; // 30 minutes
    private String width = "0"; // pixels
    private String height = "0"; // pixels

    private ReporterConfiguration reporterConfiguration;

    public VideoConfiguration(ReporterConfiguration reporterConfiguration) {
        this.reporterConfiguration = reporterConfiguration;
    }

    /**
     * By default set to true
     *
     * @return true if video should be taken only for the failed tests
     */
    public boolean getTakeOnlyOnFail() {
        return Boolean.parseBoolean(getProperty("takeOnlyOnFail", takeOnlyOnFail));
    }

    /**
     * By default set to 1800, i.e. 30 minutes
     *
     * @return timeout for each test in order to stop recording and save the video file if the test gets stuck
     */
    public int getTestTimeout() {
        return Integer.parseInt(getProperty("testTimeout", testTimeout));
    }

    /**
     * By default set to "record"
     *
     * @return video name which will be used in case you want to record whole test suite into one file.
     */
    public String getVideoName() {
        return getProperty("videoName", videoName);
    }

    /**
     * By default set to "target/videos"
     *
     * @return root folder where all videos will be placed. Directory structure is left on the extension itself.
     */
    public File getRootDir() {
        return new File(getProperty("rootDir", rootDir));
    }

    /**
     * By default set to "MP4".
     *
     * @return type of video we want our screenshots to be of
     */
    public String getVideoType() {
        return getProperty("videoType", videoType).toUpperCase();
    }

    /**
     * By default set to false.
     *
     * @return true if video recording should start before test, false otherwise
     */
    public boolean getStartBeforeTest() {
        return Boolean.parseBoolean(getProperty("startBeforeTest", startBeforeTest));
    }

    /**
     * By default set to false.
     *
     * @return true if video should be taken before class, false otherwise
     */
    public boolean getStartBeforeClass() {
        return Boolean.parseBoolean(getProperty("startBeforeClass", startBeforeClass));
    }

    /**
     * By default set to false.
     *
     * @return true if screenshot should be taken before suite, false otherwise
     */
    public boolean getStartBeforeSuite() {
        return Boolean.parseBoolean(getProperty("startBeforeSuite", startBeforeSuite));
    }

    /**
     * By default set to 0, you have to set this in configuration in order to use it.
     *
     * @return height of recorded video
     */
    public int getHeight() {
        return Integer.parseInt(getProperty("height", height));
    }

    /**
     * By default set to 0, you have to set this in configuration in order to use it.
     *
     * @return width of recorder video
     */
    public int getWidth() {
        return Integer.parseInt(getProperty("width", width));
    }

    @Override
    public void validate() throws VideoConfigurationException {
        validate(reporterConfiguration);
    }

    // Validates the resolved configuration: video type, root directory
    // (possibly redirected under the reporter's root), and video dimensions.
    private void validate(ReporterConfiguration reporterConfiguration) {
        try {
            VideoType.valueOf(VideoType.class, getVideoType());
        } catch (IllegalArgumentException ex) {
            // FIX: the two concatenated sentences previously ran together
            // ("...video type.Supported..."); a separating space was missing.
            throw new VideoConfigurationException(
                "Video type you specified in arquillian.xml is not valid video type. "
                    + "Supported video types are: " + VideoType.getAll());
        }

        // When an HTML/AsciiDoc style report is produced, place videos under the
        // reporter's own root directory so the report can link to them.
        final String report = reporterConfiguration.getReport().toLowerCase();
        if (report.contains("htm") || report.contains("ad") || report.equals("asciidoc")) {
            final File recorderRootDir = new File(reporterConfiguration.getRootDir(), "videos");
            if (!getRootDir().equals(recorderRootDir)) {
                setProperty("rootDir", recorderRootDir.getAbsolutePath());
            }
        }

        try {
            if (!getRootDir().exists()) {
                // FIX: use mkdirs() instead of mkdir() — the root directory is
                // typically nested (e.g. "target/videos"), and mkdir() fails when
                // intermediate directories do not exist yet.
                boolean created = getRootDir().mkdirs();
                if (!created) {
                    throw new VideoConfigurationException("Unable to create root directory " + getRootDir().getAbsolutePath());
                }
            } else {
                if (!getRootDir().isDirectory()) {
                    throw new VideoConfigurationException("Root directory you specified is not a directory - "
                        + getRootDir().getAbsolutePath());
                }
                if (!getRootDir().canWrite()) {
                    throw new VideoConfigurationException(
                        "You can not write to '" + getRootDir().getAbsolutePath() + "'.");
                }
            }
        } catch (SecurityException ex) {
            // FIX: balanced the quotes around the path in the message (it previously
            // ended with a dangling single quote).
            throw new VideoConfigurationException(
                "You are not permitted to operate on specified resource: '" + getRootDir().getAbsolutePath() + "'.");
        }

        try {
            if (Integer.parseInt(getProperty("width", this.width)) < 0) {
                throw new VideoConfigurationException("It seems you have set width of video to be lower then 0.");
            }
        } catch (NumberFormatException ex) {
            throw new VideoConfigurationException("Provided width of video is not recognized to be an integer number.");
        }

        try {
            if (Integer.parseInt(getProperty("height", this.height)) < 0) {
                throw new VideoConfigurationException("It seems you have set height of video to be lower then 0.");
            }
        } catch (NumberFormatException ex) {
            throw new VideoConfigurationException("Provided height of video is not recognized to be an integer number.");
        }
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("%-40s %s\n", "startBeforeSuite", getStartBeforeSuite()));
        sb.append(String.format("%-40s %s\n", "startBeforeClass", getStartBeforeClass()));
        sb.append(String.format("%-40s %s\n", "startBeforeTest", getStartBeforeTest()));
        sb.append(String.format("%-40s %s\n", "takeOnlyOnFail", getTakeOnlyOnFail()));
        sb.append(String.format("%-40s %s\n", "testTimeOut", getTestTimeout()));
        sb.append(String.format("%-40s %s\n", "rootDir", getRootDir()));
        sb.append(String.format("%-40s %s\n", "videoName", getVideoName()));
        sb.append(String.format("%-40s %s\n", "videoType", getVideoType()));
        sb.append(String.format("%-40s %s\n", "width", getWidth()));
        sb.append(String.format("%-40s %s\n", "height", getHeight()));
        return sb.toString();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /* * This package is based on the work done by Keiron Liddle, Aftex Software * <keiron@aftexsw.com> to whom the Ant project is very grateful for his * great code. */ package org.apache.commons.compress.compressors.bzip2; import java.io.IOException; import java.io.InputStream; import org.apache.commons.compress.compressors.CompressorInputStream; /** * An input stream that decompresses from the BZip2 format to be read as any other stream. * * @NotThreadSafe */ public class BZip2CompressorInputStream extends CompressorInputStream implements BZip2Constants { /** * Index of the last char in the block, so the block size == last + 1. */ private int last; /** * Index in zptr[] of original string after sorting. */ private int origPtr; /** * always: in the range 0 .. 9. The current block size is 100000 * this * number. 
*/ private int blockSize100k; private boolean blockRandomised; private int bsBuff; private int bsLive; private final CRC crc = new CRC(); private int nInUse; private InputStream in; private final boolean decompressConcatenated; private static final int EOF = 0; private static final int START_BLOCK_STATE = 1; private static final int RAND_PART_A_STATE = 2; private static final int RAND_PART_B_STATE = 3; private static final int RAND_PART_C_STATE = 4; private static final int NO_RAND_PART_A_STATE = 5; private static final int NO_RAND_PART_B_STATE = 6; private static final int NO_RAND_PART_C_STATE = 7; private int currentState = START_BLOCK_STATE; private int storedBlockCRC, storedCombinedCRC; private int computedBlockCRC, computedCombinedCRC; // Variables used by setup* methods exclusively private int su_count; private int su_ch2; private int su_chPrev; private int su_i2; private int su_j2; private int su_rNToGo; private int su_rTPos; private int su_tPos; private char su_z; /** * All memory intensive stuff. This field is initialized by initBlock(). */ private BZip2CompressorInputStream.Data data; /** * Constructs a new BZip2CompressorInputStream which decompresses bytes * read from the specified stream. This doesn't suppprt decompressing * concatenated .bz2 files. * * @param in the InputStream from which this object should be created * @throws IOException * if the stream content is malformed or an I/O error occurs. * @throws NullPointerException * if {@code in == null} */ public BZip2CompressorInputStream(final InputStream in) throws IOException { this(in, false); } /** * Constructs a new BZip2CompressorInputStream which decompresses bytes * read from the specified stream. 
* * @param in the InputStream from which this object should be created * @param decompressConcatenated * if true, decompress until the end of the input; * if false, stop after the first .bz2 stream and * leave the input position to point to the next * byte after the .bz2 stream * * @throws IOException * if {@code in == null}, the stream content is malformed, or an I/O error occurs. */ public BZip2CompressorInputStream(final InputStream in, final boolean decompressConcatenated) throws IOException { this.in = in; this.decompressConcatenated = decompressConcatenated; init(true); initBlock(); } @Override public int read() throws IOException { if (this.in != null) { final int r = read0(); count(r < 0 ? -1 : 1); return r; } throw new IOException("stream closed"); } /* * (non-Javadoc) * * @see java.io.InputStream#read(byte[], int, int) */ @Override public int read(final byte[] dest, final int offs, final int len) throws IOException { if (offs < 0) { throw new IndexOutOfBoundsException("offs(" + offs + ") < 0."); } if (len < 0) { throw new IndexOutOfBoundsException("len(" + len + ") < 0."); } if (offs + len > dest.length) { throw new IndexOutOfBoundsException("offs(" + offs + ") + len(" + len + ") > dest.length(" + dest.length + ")."); } if (this.in == null) { throw new IOException("stream closed"); } if (len == 0) { return 0; } final int hi = offs + len; int destOffs = offs; int b; while (destOffs < hi && ((b = read0()) >= 0)) { dest[destOffs++] = (byte) b; count(1); } final int c = (destOffs == offs) ? 
-1 : (destOffs - offs); return c; } private void makeMaps() { final boolean[] inUse = this.data.inUse; final byte[] seqToUnseq = this.data.seqToUnseq; int nInUseShadow = 0; for (int i = 0; i < 256; i++) { if (inUse[i]) { seqToUnseq[nInUseShadow++] = (byte) i; } } this.nInUse = nInUseShadow; } private int read0() throws IOException { switch (currentState) { case EOF: return -1; case START_BLOCK_STATE: return setupBlock(); case RAND_PART_A_STATE: throw new IllegalStateException(); case RAND_PART_B_STATE: return setupRandPartB(); case RAND_PART_C_STATE: return setupRandPartC(); case NO_RAND_PART_A_STATE: throw new IllegalStateException(); case NO_RAND_PART_B_STATE: return setupNoRandPartB(); case NO_RAND_PART_C_STATE: return setupNoRandPartC(); default: throw new IllegalStateException(); } } private boolean init(final boolean isFirstStream) throws IOException { if (null == in) { throw new IOException("No InputStream"); } final int magic0 = this.in.read(); if (magic0 == -1 && !isFirstStream) { return false; } final int magic1 = this.in.read(); final int magic2 = this.in.read(); if (magic0 != 'B' || magic1 != 'Z' || magic2 != 'h') { throw new IOException(isFirstStream ? "Stream is not in the BZip2 format" : "Garbage after a valid BZip2 stream"); } final int blockSize = this.in.read(); if ((blockSize < '1') || (blockSize > '9')) { throw new IOException("BZip2 block size is invalid"); } this.blockSize100k = blockSize - '0'; this.bsLive = 0; this.computedCombinedCRC = 0; return true; } private void initBlock() throws IOException { char magic0; char magic1; char magic2; char magic3; char magic4; char magic5; while (true) { // Get the block magic bytes. magic0 = bsGetUByte(); magic1 = bsGetUByte(); magic2 = bsGetUByte(); magic3 = bsGetUByte(); magic4 = bsGetUByte(); magic5 = bsGetUByte(); // If isn't end of stream magic, break out of the loop. 
if (magic0 != 0x17 || magic1 != 0x72 || magic2 != 0x45 || magic3 != 0x38 || magic4 != 0x50 || magic5 != 0x90) { break; } // End of stream was reached. Check the combined CRC and // advance to the next .bz2 stream if decoding concatenated // streams. if (complete()) { return; } } if (magic0 != 0x31 || // '1' magic1 != 0x41 || // ')' magic2 != 0x59 || // 'Y' magic3 != 0x26 || // '&' magic4 != 0x53 || // 'S' magic5 != 0x59 // 'Y' ) { this.currentState = EOF; throw new IOException("bad block header"); } this.storedBlockCRC = bsGetInt(); this.blockRandomised = bsR(1) == 1; /** * Allocate data here instead in constructor, so we do not allocate * it if the input file is empty. */ if (this.data == null) { this.data = new Data(this.blockSize100k); } // currBlockNo++; getAndMoveToFrontDecode(); this.crc.initialiseCRC(); this.currentState = START_BLOCK_STATE; } private void endBlock() throws IOException { this.computedBlockCRC = this.crc.getFinalCRC(); // A bad CRC is considered a fatal error. if (this.storedBlockCRC != this.computedBlockCRC) { // make next blocks readable without error // (repair feature, not yet documented, not tested) this.computedCombinedCRC = (this.storedCombinedCRC << 1) | (this.storedCombinedCRC >>> 31); this.computedCombinedCRC ^= this.storedBlockCRC; throw new IOException("BZip2 CRC error"); } this.computedCombinedCRC = (this.computedCombinedCRC << 1) | (this.computedCombinedCRC >>> 31); this.computedCombinedCRC ^= this.computedBlockCRC; } private boolean complete() throws IOException { this.storedCombinedCRC = bsGetInt(); this.currentState = EOF; this.data = null; if (this.storedCombinedCRC != this.computedCombinedCRC) { throw new IOException("BZip2 CRC error"); } // Look for the next .bz2 stream if decompressing // concatenated files. 
return !decompressConcatenated || !init(false); } @Override public void close() throws IOException { final InputStream inShadow = this.in; if (inShadow != null) { try { if (inShadow != System.in) { inShadow.close(); } } finally { this.data = null; this.in = null; } } } private int bsR(final int n) throws IOException { int bsLiveShadow = this.bsLive; int bsBuffShadow = this.bsBuff; if (bsLiveShadow < n) { final InputStream inShadow = this.in; do { final int thech = inShadow.read(); if (thech < 0) { throw new IOException("unexpected end of stream"); } bsBuffShadow = (bsBuffShadow << 8) | thech; bsLiveShadow += 8; } while (bsLiveShadow < n); this.bsBuff = bsBuffShadow; } this.bsLive = bsLiveShadow - n; return (bsBuffShadow >> (bsLiveShadow - n)) & ((1 << n) - 1); } private boolean bsGetBit() throws IOException { return bsR(1) != 0; } private char bsGetUByte() throws IOException { return (char) bsR(8); } private int bsGetInt() throws IOException { return (((((bsR(8) << 8) | bsR(8)) << 8) | bsR(8)) << 8) | bsR(8); } /** * Called by createHuffmanDecodingTables() exclusively. 
*/ private static void hbCreateDecodeTables(final int[] limit, final int[] base, final int[] perm, final char[] length, final int minLen, final int maxLen, final int alphaSize) { for (int i = minLen, pp = 0; i <= maxLen; i++) { for (int j = 0; j < alphaSize; j++) { if (length[j] == i) { perm[pp++] = j; } } } for (int i = MAX_CODE_LEN; --i > 0;) { base[i] = 0; limit[i] = 0; } for (int i = 0; i < alphaSize; i++) { base[length[i] + 1]++; } for (int i = 1, b = base[0]; i < MAX_CODE_LEN; i++) { b += base[i]; base[i] = b; } for (int i = minLen, vec = 0, b = base[i]; i <= maxLen; i++) { final int nb = base[i + 1]; vec += nb - b; b = nb; limit[i] = vec - 1; vec <<= 1; } for (int i = minLen + 1; i <= maxLen; i++) { base[i] = ((limit[i - 1] + 1) << 1) - base[i]; } } private void recvDecodingTables() throws IOException { final Data dataShadow = this.data; final boolean[] inUse = dataShadow.inUse; final byte[] pos = dataShadow.recvDecodingTables_pos; final byte[] selector = dataShadow.selector; final byte[] selectorMtf = dataShadow.selectorMtf; int inUse16 = 0; /* Receive the mapping table */ for (int i = 0; i < 16; i++) { if (bsGetBit()) { inUse16 |= 1 << i; } } for (int i = 256; --i >= 0;) { inUse[i] = false; } for (int i = 0; i < 16; i++) { if ((inUse16 & (1 << i)) != 0) { final int i16 = i << 4; for (int j = 0; j < 16; j++) { if (bsGetBit()) { inUse[i16 + j] = true; } } } } makeMaps(); final int alphaSize = this.nInUse + 2; /* Now the selectors */ final int nGroups = bsR(3); final int nSelectors = bsR(15); for (int i = 0; i < nSelectors; i++) { int j = 0; while (bsGetBit()) { j++; } selectorMtf[i] = (byte) j; } /* Undo the MTF values for the selectors. 
*/
        // ---- tail of recvDecodingTables() (method head is above this chunk) ----
        // Undo the move-to-front encoding of the selector values: pos[] starts
        // as the identity permutation, and each MTF index is resolved by moving
        // the selected entry back to the front.
        for (int v = nGroups; --v >= 0;) {
            pos[v] = (byte) v;
        }

        for (int i = 0; i < nSelectors; i++) {
            int v = selectorMtf[i] & 0xff;
            final byte tmp = pos[v];
            while (v > 0) {
                // nearly all times v is zero, 4 in most other cases
                pos[v] = pos[v - 1];
                v--;
            }
            pos[0] = tmp;
            selector[i] = tmp;
        }

        final char[][] len = dataShadow.temp_charArray2d;

        /* Now the coding tables */
        // Code lengths are delta-coded: read a 5-bit start value, then a
        // unary-coded sequence of +1/-1 adjustments per symbol.
        for (int t = 0; t < nGroups; t++) {
            int curr = bsR(5);
            final char[] len_t = len[t];
            for (int i = 0; i < alphaSize; i++) {
                while (bsGetBit()) {
                    curr += bsGetBit() ? -1 : 1;
                }
                len_t[i] = (char) curr;
            }
        }

        // finally create the Huffman tables
        createHuffmanDecodingTables(alphaSize, nGroups);
    }

    /**
     * Called by recvDecodingTables() exclusively.
     *
     * Builds the canonical-Huffman decode tables (limit/base/perm) for each of
     * the nGroups coding tables from the per-symbol code lengths gathered in
     * {@code data.temp_charArray2d}, and records each table's minimum code
     * length in {@code data.minLens}.
     */
    private void createHuffmanDecodingTables(final int alphaSize,
        final int nGroups) {
        final Data dataShadow = this.data;
        final char[][] len = dataShadow.temp_charArray2d;
        final int[] minLens = dataShadow.minLens;
        final int[][] limit = dataShadow.limit;
        final int[][] base = dataShadow.base;
        final int[][] perm = dataShadow.perm;

        for (int t = 0; t < nGroups; t++) {
            // Find the shortest and longest code length actually used by
            // this table (32 is larger than any legal bzip2 code length).
            int minLen = 32;
            int maxLen = 0;
            final char[] len_t = len[t];
            for (int i = alphaSize; --i >= 0;) {
                final char lent = len_t[i];
                if (lent > maxLen) {
                    maxLen = lent;
                }
                if (lent < minLen) {
                    minLen = lent;
                }
            }
            hbCreateDecodeTables(limit[t], base[t], perm[t], len[t], minLen,
                maxLen, alphaSize);
            minLens[t] = minLen;
        }
    }

    /**
     * Decodes one block's Huffman/MTF/RLE1 stage into {@code data.ll8}.
     *
     * Reads the original pointer (24 bits) and the decoding tables, then
     * decodes symbols until the end-of-block symbol (nInUse + 1) is seen.
     * RUNA/RUNB pairs encode run lengths (bijective base-2); all other
     * symbols go through the move-to-front list {@code yy}. The bit reader
     * is manually inlined into local shadows (bsBuffShadow/bsLiveShadow)
     * for speed; the shadows are written back to the fields at the end.
     */
    private void getAndMoveToFrontDecode() throws IOException {
        this.origPtr = bsR(24);
        recvDecodingTables();

        final InputStream inShadow = this.in;
        final Data dataShadow = this.data;
        final byte[] ll8 = dataShadow.ll8;
        final int[] unzftab = dataShadow.unzftab;
        final byte[] selector = dataShadow.selector;
        final byte[] seqToUnseq = dataShadow.seqToUnseq;
        final char[] yy = dataShadow.getAndMoveToFrontDecode_yy;
        final int[] minLens = dataShadow.minLens;
        final int[][] limit = dataShadow.limit;
        final int[][] base = dataShadow.base;
        final int[][] perm = dataShadow.perm;
        final int limitLast = this.blockSize100k * 100000;

        /*
         * Setting up the unzftab entries here is not strictly necessary, but it
         * does save having to do it later in a separate pass, and so saves a
         * block's worth of cache misses.
         */
        for (int i = 256; --i >= 0;) {
            yy[i] = (char) i;
            unzftab[i] = 0;
        }

        int groupNo = 0;
        int groupPos = G_SIZE - 1;
        final int eob = this.nInUse + 1;
        int nextSym = getAndMoveToFrontDecode0(0);
        int bsBuffShadow = this.bsBuff;
        int bsLiveShadow = this.bsLive;
        int lastShadow = -1;
        // Cache the decode tables of the current selector group; refreshed
        // every G_SIZE symbols when the group changes.
        int zt = selector[groupNo] & 0xff;
        int[] base_zt = base[zt];
        int[] limit_zt = limit[zt];
        int[] perm_zt = perm[zt];
        int minLens_zt = minLens[zt];

        while (nextSym != eob) {
            if ((nextSym == RUNA) || (nextSym == RUNB)) {
                // Run-length: accumulate s from a sequence of RUNA/RUNB
                // symbols (RUNA adds n, RUNB adds 2n, n doubling each step).
                int s = -1;

                for (int n = 1; true; n <<= 1) {
                    if (nextSym == RUNA) {
                        s += n;
                    } else if (nextSym == RUNB) {
                        s += n << 1;
                    } else {
                        break;
                    }

                    if (groupPos == 0) {
                        groupPos = G_SIZE - 1;
                        zt = selector[++groupNo] & 0xff;
                        base_zt = base[zt];
                        limit_zt = limit[zt];
                        perm_zt = perm[zt];
                        minLens_zt = minLens[zt];
                    } else {
                        groupPos--;
                    }

                    int zn = minLens_zt;

                    // Inlined:
                    // int zvec = bsR(zn);
                    while (bsLiveShadow < zn) {
                        final int thech = inShadow.read();
                        if (thech >= 0) {
                            bsBuffShadow = (bsBuffShadow << 8) | thech;
                            bsLiveShadow += 8;
                            continue;
                        }
                        throw new IOException("unexpected end of stream");
                    }
                    int zvec = (bsBuffShadow >> (bsLiveShadow - zn))
                        & ((1 << zn) - 1);
                    bsLiveShadow -= zn;

                    // Extend the code one bit at a time until it falls below
                    // the canonical-Huffman limit for its length.
                    while (zvec > limit_zt[zn]) {
                        zn++;
                        while (bsLiveShadow < 1) {
                            final int thech = inShadow.read();
                            if (thech >= 0) {
                                bsBuffShadow = (bsBuffShadow << 8) | thech;
                                bsLiveShadow += 8;
                                continue;
                            }
                            throw new IOException(
                                "unexpected end of stream");
                        }
                        bsLiveShadow--;
                        zvec = (zvec << 1)
                            | ((bsBuffShadow >> bsLiveShadow) & 1);
                    }
                    nextSym = perm_zt[zvec - base_zt[zn]];
                }

                // Emit s+1 copies of the current front-of-MTF byte.
                final byte ch = seqToUnseq[yy[0]];
                unzftab[ch & 0xff] += s + 1;

                while (s-- >= 0) {
                    ll8[++lastShadow] = ch;
                }

                // NOTE(review): this overrun check runs AFTER the writes
                // above; a corrupt stream with a huge run could index past
                // ll8 before this throws — confirm whether the check should
                // bound the write loop instead.
                if (lastShadow >= limitLast) {
                    throw new IOException("block overrun");
                }
            } else {
                if (++lastShadow >= limitLast) {
                    throw new IOException("block overrun");
                }

                // Ordinary symbol: fetch from position nextSym-1 of the MTF
                // list and move it to the front.
                final char tmp = yy[nextSym - 1];
                unzftab[seqToUnseq[tmp] & 0xff]++;
                ll8[lastShadow] = seqToUnseq[tmp];

                /*
                 * This loop is hammered during decompression, hence avoid
                 * native method call overhead of System.arraycopy for very
                 * small ranges to copy.
                 */
                if (nextSym <= 16) {
                    for (int j = nextSym - 1; j > 0;) {
                        yy[j] = yy[--j];
                    }
                } else {
                    System.arraycopy(yy, 0, yy, 1, nextSym - 1);
                }

                yy[0] = tmp;

                if (groupPos == 0) {
                    groupPos = G_SIZE - 1;
                    zt = selector[++groupNo] & 0xff;
                    base_zt = base[zt];
                    limit_zt = limit[zt];
                    perm_zt = perm[zt];
                    minLens_zt = minLens[zt];
                } else {
                    groupPos--;
                }

                int zn = minLens_zt;

                // Inlined:
                // int zvec = bsR(zn);
                while (bsLiveShadow < zn) {
                    final int thech = inShadow.read();
                    if (thech >= 0) {
                        bsBuffShadow = (bsBuffShadow << 8) | thech;
                        bsLiveShadow += 8;
                        continue;
                    }
                    throw new IOException("unexpected end of stream");
                }
                int zvec = (bsBuffShadow >> (bsLiveShadow - zn))
                    & ((1 << zn) - 1);
                bsLiveShadow -= zn;

                while (zvec > limit_zt[zn]) {
                    zn++;
                    while (bsLiveShadow < 1) {
                        final int thech = inShadow.read();
                        if (thech >= 0) {
                            bsBuffShadow = (bsBuffShadow << 8) | thech;
                            bsLiveShadow += 8;
                            continue;
                        }
                        throw new IOException("unexpected end of stream");
                    }
                    bsLiveShadow--;
                    zvec = (zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1);
                }
                nextSym = perm_zt[zvec - base_zt[zn]];
            }
        }

        // Publish the shadowed bit-reader state and the block length.
        this.last = lastShadow;
        this.bsLive = bsLiveShadow;
        this.bsBuff = bsBuffShadow;
    }

    /**
     * Decodes the very first symbol of a block (non-inlined variant used
     * before the main loop of getAndMoveToFrontDecode() takes over with its
     * shadowed bit-reader state).
     *
     * @param groupNo selector-group index to decode with (always 0 at the
     *                single call site)
     * @return the decoded symbol index
     * @throws IOException if the underlying stream ends prematurely
     */
    private int getAndMoveToFrontDecode0(final int groupNo) throws IOException {
        final InputStream inShadow = this.in;
        final Data dataShadow = this.data;
        final int zt = dataShadow.selector[groupNo] & 0xff;
        final int[] limit_zt = dataShadow.limit[zt];
        int zn = dataShadow.minLens[zt];
        int zvec = bsR(zn);
        int bsLiveShadow = this.bsLive;
        int bsBuffShadow = this.bsBuff;

        while (zvec > limit_zt[zn]) {
            zn++;
            while (bsLiveShadow < 1) {
                final int thech = inShadow.read();
                if (thech >= 0) {
                    bsBuffShadow = (bsBuffShadow << 8) | thech;
                    bsLiveShadow += 8;
                    continue;
                }
                throw new IOException("unexpected end of stream");
            }
            bsLiveShadow--;
            zvec = (zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1);
        }
        this.bsLive = bsLiveShadow;
        this.bsBuff = bsBuffShadow;

        return dataShadow.perm[zt][zvec - dataShadow.base[zt][zn]];
    }

    /**
     * Performs the inverse Burrows-Wheeler transform set-up for the current
     * block: builds the cumulative frequency table (cftab), fills the
     * transform vector tt, validates origPtr, and dispatches to the
     * randomised or non-randomised output state machine.
     *
     * @return the first decoded byte of the block, or -1 at EOF
     * @throws IOException if the stream is corrupted (origPtr out of range)
     */
    private int setupBlock() throws IOException {
        if (currentState == EOF || this.data == null) {
            return -1;
        }

        final int[] cftab = this.data.cftab;
        final int[] tt = this.data.initTT(this.last + 1);
        final byte[] ll8 = this.data.ll8;
        cftab[0] = 0;
        System.arraycopy(this.data.unzftab, 0, cftab, 1, 256);

        // Prefix-sum the byte frequencies: cftab[i] = count of bytes < i.
        for (int i = 1, c = cftab[0]; i <= 256; i++) {
            c += cftab[i];
            cftab[i] = c;
        }

        for (int i = 0, lastShadow = this.last; i <= lastShadow; i++) {
            tt[cftab[ll8[i] & 0xff]++] = i;
        }

        if ((this.origPtr < 0) || (this.origPtr >= tt.length)) {
            throw new IOException("stream corrupted");
        }

        this.su_tPos = tt[this.origPtr];
        this.su_count = 0;
        this.su_i2 = 0;
        this.su_ch2 = 256; /* not a char and not EOF */

        if (this.blockRandomised) {
            this.su_rNToGo = 0;
            this.su_rTPos = 0;
            return setupRandPartA();
        }
        return setupNoRandPartA();
    }

    /**
     * State A of the de-randomising output state machine: emit the next
     * byte, XOR-ing with 1 where the legacy randomisation table says so;
     * falls through to the next block when this one is exhausted.
     */
    private int setupRandPartA() throws IOException {
        if (this.su_i2 <= this.last) {
            this.su_chPrev = this.su_ch2;
            int su_ch2Shadow = this.data.ll8[this.su_tPos] & 0xff;
            this.su_tPos = this.data.tt[this.su_tPos];
            if (this.su_rNToGo == 0) {
                this.su_rNToGo = Rand.rNums(this.su_rTPos) - 1;
                if (++this.su_rTPos == 512) {
                    this.su_rTPos = 0;
                }
            } else {
                this.su_rNToGo--;
            }
            this.su_ch2 = su_ch2Shadow ^= (this.su_rNToGo == 1) ? 1 : 0;
            this.su_i2++;
            this.currentState = RAND_PART_B_STATE;
            this.crc.updateCRC(su_ch2Shadow);
            return su_ch2Shadow;
        }
        endBlock();
        initBlock();
        return setupBlock();
    }

    /**
     * State A of the non-randomised output state machine: emit the next
     * byte of the inverse-BWT walk; moves on to the next block at the end.
     */
    private int setupNoRandPartA() throws IOException {
        if (this.su_i2 <= this.last) {
            this.su_chPrev = this.su_ch2;
            final int su_ch2Shadow = this.data.ll8[this.su_tPos] & 0xff;
            this.su_ch2 = su_ch2Shadow;
            this.su_tPos = this.data.tt[this.su_tPos];
            this.su_i2++;
            this.currentState = NO_RAND_PART_B_STATE;
            this.crc.updateCRC(su_ch2Shadow);
            return su_ch2Shadow;
        }
        this.currentState = NO_RAND_PART_A_STATE;
        endBlock();
        initBlock();
        return setupBlock();
    }

    /**
     * State B (randomised): detects runs of four equal bytes, in which case
     * the following byte is a run-length count (state C); otherwise falls
     * back to state A.
     */
    private int setupRandPartB() throws IOException {
        if (this.su_ch2 != this.su_chPrev) {
            this.currentState = RAND_PART_A_STATE;
            this.su_count = 1;
            return setupRandPartA();
        } else if (++this.su_count >= 4) {
            this.su_z = (char) (this.data.ll8[this.su_tPos] & 0xff);
            this.su_tPos = this.data.tt[this.su_tPos];
            if (this.su_rNToGo == 0) {
                this.su_rNToGo = Rand.rNums(this.su_rTPos) - 1;
                if (++this.su_rTPos == 512) {
                    this.su_rTPos = 0;
                }
            } else {
                this.su_rNToGo--;
            }
            this.su_j2 = 0;
            this.currentState = RAND_PART_C_STATE;
            if (this.su_rNToGo == 1) {
                this.su_z ^= 1;
            }
            return setupRandPartC();
        } else {
            this.currentState = RAND_PART_A_STATE;
            return setupRandPartA();
        }
    }

    /**
     * State C (randomised): replays the run of su_z repeated bytes, then
     * returns to state A.
     */
    private int setupRandPartC() throws IOException {
        if (this.su_j2 < this.su_z) {
            this.crc.updateCRC(this.su_ch2);
            this.su_j2++;
            return this.su_ch2;
        }
        this.currentState = RAND_PART_A_STATE;
        this.su_i2++;
        this.su_count = 0;
        return setupRandPartA();
    }

    /**
     * State B (non-randomised): same four-equal-bytes run detection as
     * setupRandPartB(), without the randomisation bookkeeping.
     */
    private int setupNoRandPartB() throws IOException {
        if (this.su_ch2 != this.su_chPrev) {
            this.su_count = 1;
            return setupNoRandPartA();
        } else if (++this.su_count >= 4) {
            this.su_z = (char) (this.data.ll8[this.su_tPos] & 0xff);
            this.su_tPos = this.data.tt[this.su_tPos];
            this.su_j2 = 0;
            return setupNoRandPartC();
        } else {
            return setupNoRandPartA();
        }
    }

    /**
     * State C (non-randomised): replays the run of su_z repeated bytes,
     * then returns to state A.
     */
    private int setupNoRandPartC() throws IOException {
        if (this.su_j2 < this.su_z) {
            final int su_ch2Shadow = this.su_ch2;
            this.crc.updateCRC(su_ch2Shadow);
            this.su_j2++;
            this.currentState = NO_RAND_PART_C_STATE;
            return su_ch2Shadow;
        }
        this.su_i2++;
        this.su_count = 0;
        return setupNoRandPartA();
    }

    /**
     * Scratch-memory holder for one decompression run. All per-block working
     * arrays live here so a single allocation survives across blocks.
     */
    private static final class Data {

        // (with blockSize 900k)
        final boolean[] inUse = new boolean[256]; // 256 byte

        final byte[] seqToUnseq = new byte[256]; // 256 byte
        final byte[] selector = new byte[MAX_SELECTORS]; // 18002 byte
        final byte[] selectorMtf = new byte[MAX_SELECTORS]; // 18002 byte

        /**
         * Freq table collected to save a pass over the data during
         * decompression.
         */
        final int[] unzftab = new int[256]; // 1024 byte

        final int[][] limit = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte
        final int[][] base = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte
        final int[][] perm = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte
        final int[] minLens = new int[N_GROUPS]; // 24 byte

        final int[] cftab = new int[257]; // 1028 byte
        final char[] getAndMoveToFrontDecode_yy = new char[256]; // 512 byte
        final char[][] temp_charArray2d = new char[N_GROUPS][MAX_ALPHA_SIZE]; // 3096
                                                                              // byte
        final byte[] recvDecodingTables_pos = new byte[N_GROUPS]; // 6 byte
        // ---------------
        // 60798 byte

        int[] tt; // 3600000 byte
        byte[] ll8; // 900000 byte
        // ---------------
        // 4560782 byte
        // ===============

        Data(final int blockSize100k) {
            this.ll8 = new byte[blockSize100k * BZip2Constants.BASEBLOCKSIZE];
        }

        /**
         * Initializes the {@link #tt} array.
         *
         * This method is called when the required length of the array is known.
         * I don't initialize it at construction time to avoid unneccessary
         * memory allocation when compressing small files.
         */
        int[] initTT(final int length) {
            int[] ttShadow = this.tt;

            // tt.length should always be >= length, but theoretically
            // it can happen, if the compressor mixed small and large
            // blocks. Normally only the last block will be smaller
            // than others.
            if ((ttShadow == null) || (ttShadow.length < length)) {
                this.tt = ttShadow = new int[length];
            }

            return ttShadow;
        }
    }

    /**
     * Checks if the signature matches what is expected for a bzip2 file.
     *
     * @param signature
     *            the bytes to check
     * @param length
     *            the number of bytes to check
     * @return true, if this stream is a bzip2 compressed stream, false otherwise
     *
     * @since 1.1
     */
    public static boolean matches(final byte[] signature, final int length) {
        // A bzip2 stream starts with the three ASCII bytes 'B', 'Z', 'h'.
        if (length < 3) {
            return false;
        }

        if (signature[0] != 'B') {
            return false;
        }

        if (signature[1] != 'Z') {
            return false;
        }

        if (signature[2] != 'h') {
            return false;
        }

        return true;
    }
}
/* * SQLConsole.java * * This file is part of SQL Workbench/J, http://www.sql-workbench.net * * Copyright 2002-2015, Thomas Kellerer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * To contact the author please send an email to: support@sql-workbench.net * */ package workbench.console; import java.io.IOException; import java.sql.SQLException; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import workbench.AppArguments; import workbench.WbManager; import workbench.log.LogMgr; import workbench.resource.ResourceMgr; import workbench.resource.Settings; import workbench.db.ConnectionMgr; import workbench.db.ConnectionProfile; import workbench.db.WbConnection; import workbench.gui.WindowTitleBuilder; import workbench.gui.profiles.ProfileKey; import workbench.sql.BatchRunner; import workbench.sql.CommandRegistry; import workbench.sql.OutputPrinter; import workbench.sql.RefreshAnnotation; import workbench.sql.StatementHistory; import workbench.sql.macros.MacroManager; import workbench.sql.wbcommands.CommandTester; import workbench.sql.wbcommands.WbConnInfo; import workbench.sql.wbcommands.WbConnect; import workbench.sql.wbcommands.WbDescribeObject; import workbench.sql.wbcommands.WbHelp; import workbench.sql.wbcommands.WbHistory; import workbench.sql.wbcommands.WbList; import workbench.sql.wbcommands.WbListCatalogs; import workbench.sql.wbcommands.WbListProcedures; import workbench.sql.wbcommands.WbListSchemas; import 
workbench.sql.wbcommands.WbProcSource; import workbench.sql.wbcommands.WbSysExec; import workbench.sql.wbcommands.console.WbRun; import workbench.sql.wbcommands.console.WbToggleDisplay; import workbench.util.ExceptionUtil; import workbench.util.PlatformHelper; import workbench.util.SqlUtil; import workbench.util.StringUtil; import workbench.util.WbFile; import workbench.util.WbThread; import sun.misc.Signal; import sun.misc.SignalHandler; /** * A simple console interface for SQL Workbench/J * <br> * Commandline editing under Unix-style Operating systems is done using the * JLine library. * * @see jline.ConsoleReader * @see workbench.console.WbConsoleFactory * * @author Thomas Kellerer */ public class SQLConsole implements OutputPrinter, Runnable, SignalHandler { private static final String HISTORY_FILENAME = "sqlworkbench_history.txt"; private final ConsolePrompter prompter; private static final String DEFAULT_PROMPT = "SQL> "; private static final String CONTINUE_PROMPT = "..> "; private static final String PROMPT_END = "> "; private final WbThread shutdownHook = new WbThread(this, "ShutdownHook"); private final Map<String, String> abbreviations = new HashMap<>(); private final StatementHistory history; private BatchRunner runner; private WbThread cancelThread; private ConsoleRefresh refreshHandler = new ConsoleRefresh(); private final boolean changeTerminalTitle; private final String titlePrefix = "\033]0;"; private final String titleSuffix = "\007"; private WindowTitleBuilder titleBuilder = new WindowTitleBuilder(); public SQLConsole() { prompter = new ConsolePrompter(); history = new StatementHistory(Settings.getInstance().getConsoleHistorySize()); history.doAppend(true); installSignalHandler(); changeTerminalTitle = !PlatformHelper.isWindows() && ConsoleSettings.changeTerminalTitle(); titleBuilder.setShowWorkspace(false); titleBuilder.setShowProductNameAtEnd(ConsoleSettings.termTitleAppNameAtEnd()); titleBuilder.setShowProfileGroup(false); 
titleBuilder.setShowURL(ConsoleSettings.termTitleIncludeUrl()); titleBuilder.setShowNotConnected(false); CommandRegistry.getInstance().scanForExtensions(); } public void startConsole() { AppArguments cmdLine = WbManager.getInstance().getCommandLine(); if (cmdLine.isArgPresent("help")) { System.out.println(cmdLine.getHelp()); WbManager.getInstance().doShutdown(0); } boolean optimizeColWidths = cmdLine.getBoolean(AppArguments.ARG_CONSOLE_OPT_COLS, true); runner = initBatchRunner(cmdLine, optimizeColWidths); String currentPrompt = DEFAULT_PROMPT; try { showStartupMessage(cmdLine); currentPrompt = connectRunner(runner, currentPrompt); ResultSetPrinter printer = createPrinter(cmdLine, optimizeColWidths, runner); loadHistory(); initAbbreviations(); String previousPrompt = null; boolean startOfStatement = true; InputBuffer buffer = new InputBuffer(); buffer.setConnection(runner == null ? null : runner.getConnection()); while (true) { String line = WbConsoleFactory.getConsole().readLine(currentPrompt); if (line == null) continue; if (buffer.getLength() == 0 && StringUtil.isEmptyString(line)) continue; boolean isCompleteStatement = buffer.addLine(line); String stmt = buffer.getScript().trim(); if (startOfStatement && ("exit".equalsIgnoreCase(stmt) || "\\q".equals(stmt))) { break; } String firstWord = getFirstWord(line); String macro = getMacroText(stmt); if (StringUtil.isNonEmpty(macro)) { isCompleteStatement = true; stmt = macro; } else if (startOfStatement && abbreviations.containsKey(firstWord)) { stmt = replaceShortcuts(stmt); isCompleteStatement = true; } boolean changeHistory = false; boolean addToHistory = true; // WbConnect might change the history file, so we need to detect a change WbFile lastHistory = getHistoryFile(); if (isCompleteStatement) { stmt = replaceShortcuts(stmt); String verb = getFirstWord(stmt); try { prompter.resetExecuteAll(); if (verb.equalsIgnoreCase(WbHistory.VERB)) { stmt = handleHistory(runner, stmt); verb = getFirstWord(stmt); addToHistory = 
false; } else if (verb.equalsIgnoreCase(RefreshAnnotation.ANNOTATION)) { addToHistory = false; } if (StringUtil.isNonEmpty(stmt)) { if (addToHistory) history.add(stmt); changeHistory = verb.equalsIgnoreCase(WbConnect.VERB) && ConsoleSettings.useHistoryPerProfile(); if (changeHistory) { saveHistory(); } setTerminalTitle(runner.getConnection(), true); HandlerState state = refreshHandler.handleRefresh(runner, stmt, history); if (state == HandlerState.notHandled) { runner.runScript(stmt); if (ConsoleSettings.showScriptFinishTime()) { printMessage("(" + StringUtil.getCurrentTimestamp() + ")"); } } } } catch (Throwable th) { System.err.println(ExceptionUtil.getDisplay(th)); LogMgr.logError("SQLConsole.main()", "Error running statement", th); } finally { buffer.clear(); currentPrompt = checkConnection(runner, previousPrompt == null ? currentPrompt : previousPrompt); previousPrompt = null; startOfStatement = true; } // this needs to be done after each statement as the connection might have changed. 
buffer.setConnection(runner.getConnection()); if (changeHistory && !lastHistory.equals(getHistoryFile())) { loadHistory(); } // Restore the printing consumer in case a WbExport changed it if (printer != null && runner.getResultSetConsumer() == null) { runner.setResultSetConsumer(printer); } } else { startOfStatement = false; if (previousPrompt == null) previousPrompt = currentPrompt; currentPrompt = CONTINUE_PROMPT; } } } catch (Throwable th) { // this should not happen System.err.println(ExceptionUtil.getDisplay(th)); } finally { Runtime.getRuntime().removeShutdownHook(shutdownHook); saveHistory(); WbConsoleFactory.getConsole().shutdown(); ConnectionMgr.getInstance().disconnectAll(); if (Settings.getInstance().isModified()) { Settings.getInstance().saveSettings(false); } } try { WbManager.getInstance().doShutdown(0); } catch (Throwable th) { System.err.println(ExceptionUtil.getDisplay(th)); System.exit(1); } } private String replaceShortcuts(String sql) { if (StringUtil.isEmptyString(sql)) return sql; // this will change the original statement // but trimming whitespace from the start and end // doesn't matter for a valid SQL statement sql = sql.trim(); for (Map.Entry<String, String> entry : abbreviations.entrySet()) { if (sql.startsWith(entry.getKey())) { return entry.getValue() + sql.substring(entry.getKey().length()); } } return sql; } private ResultSetPrinter createPrinter(AppArguments cmdLine, boolean optimizeColWidths, BatchRunner runner) throws SQLException { boolean bufferResults = cmdLine.getBoolean(AppArguments.ARG_CONSOLE_BUFFER_RESULTS, true); ResultSetPrinter printer = null; if (!bufferResults) { printer = new ResultSetPrinter(System.out); printer.setFormatColumns(optimizeColWidths); printer.setPrintRowCount(true); runner.setResultSetConsumer(printer); ConsoleSettings.getInstance().addChangeListener(printer); } return printer; } private void showStartupMessage(AppArguments cmdLine) { LogMgr.logInfo("SQLConsole.main()", "SQL Workbench/J Console 
interface started"); System.out.println(ResourceMgr.getFormattedString("MsgConsoleStarted", ResourceMgr.getBuildNumber().toString())); WbFile f = new WbFile(Settings.getInstance().getConfigDir()); System.out.println(ResourceMgr.getFormattedString("MsgConfigDir", f.getFullPath())); System.out.println(""); // check the presence of the Profile again to put a possible error message after the startup messages. String profilename = cmdLine.getValue(AppArguments.ARG_PROFILE); String group = cmdLine.getValue(AppArguments.ARG_PROFILE_GROUP); if (StringUtil.isNonBlank(profilename)) { ProfileKey def = new ProfileKey(StringUtil.trimQuotes(profilename), StringUtil.trimQuotes(group)); ConnectionProfile profile = ConnectionMgr.getInstance().getProfile(def); if (profile == null) { String msg = ResourceMgr.getFormattedString("ErrProfileNotFound", def); System.err.println(); System.err.println(msg); } } if (cmdLine.hasUnknownArguments()) { StringBuilder err = new StringBuilder(ResourceMgr.getString("ErrUnknownParameter")); err.append(' '); err.append(cmdLine.getUnknownArguments()); System.err.println(err.toString()); System.err.println(); } } private String connectRunner(BatchRunner runner, String currentPrompt) { if (runner.hasProfile()) { try { runner.connect(); } catch (Exception e) { // nothing to log, already done by the runner } if (runner.isConnected() && !runner.getVerboseLogging()) { WbConnection conn = runner.getConnection(); System.out.println(ResourceMgr.getFormattedString("MsgBatchConnectOk", conn.getDisplayString())); String warn = conn.getWarnings(); if (StringUtil.isNonBlank(warn)) { System.out.println(warn); } } currentPrompt = checkConnection(runner, null); } return currentPrompt; } private BatchRunner initBatchRunner(AppArguments cmdLine, boolean optimizeColWidths) { BatchRunner batchRunner = BatchRunner.createBatchRunner(cmdLine, false); batchRunner.showResultSets(true); batchRunner.setShowStatementWithResult(false); batchRunner.setShowStatementSummary(false); 
batchRunner.setOptimizeColWidths(optimizeColWidths); batchRunner.setShowDataLoading(false); batchRunner.setConnectionId("Console"); batchRunner.setTraceOutput(this); // initialize a default max rows. // In console mode it doesn't really make sense to display that many rows int maxRows = Settings.getInstance().getIntProperty("workbench.console.default.maxrows", 5000); batchRunner.setMaxRows(maxRows); if (cmdLine.isArgNotPresent(AppArguments.ARG_SHOWPROGRESS)) { batchRunner.setShowProgress(true); } // Make the current directory the base directory for the BatchRunner // so that e.g. WbIncludes work properly WbFile currentDir = new WbFile(System.getProperty("user.dir")); batchRunner.setBaseDir(currentDir.getFullPath()); boolean showTiming = cmdLine.getBoolean(AppArguments.ARG_SHOW_TIMING, false); batchRunner.setShowTiming(showTiming); batchRunner.setShowStatementTiming(!showTiming); batchRunner.setHistoryProvider(this.history); batchRunner.setPersistentConnect(true); batchRunner.setExecutionController(prompter); batchRunner.setParameterPrompter(prompter); batchRunner.setShowRowCounts(true); return batchRunner; } private void initAbbreviations() { CommandTester cmd = new CommandTester(); // Some limited psql compatibility String last = cmd.formatVerb(WbHistory.VERB) + " last"; abbreviations.put("\\x", cmd.formatVerb(WbToggleDisplay.VERB)); abbreviations.put("\\?", cmd.formatVerb(WbHelp.VERB)); abbreviations.put("\\h", cmd.formatVerb(WbHelp.VERB)); abbreviations.put("\\i", cmd.formatVerb(WbRun.VERB)); abbreviations.put("\\d", cmd.formatVerb(WbList.VERB)); abbreviations.put("\\g", last); abbreviations.put("\\s", cmd.formatVerb(WbHistory.VERB)); abbreviations.put("\\!", cmd.formatVerb(WbSysExec.VERB)); abbreviations.put("\\dt", cmd.formatVerb(WbDescribeObject.VERB)); abbreviations.put("\\ds", cmd.formatVerb(WbList.VERB) + " -types=sequence"); abbreviations.put("\\sf", cmd.formatVerb(WbProcSource.VERB)); abbreviations.put("\\l", cmd.formatVerb(WbListCatalogs.VERB)); 
abbreviations.put("\\df", cmd.formatVerb(WbListProcedures.VERB)); abbreviations.put("\\dn", cmd.formatVerb(WbListSchemas.VERB)); abbreviations.put("\\conninfo", cmd.formatVerb(WbConnInfo.VERB)); abbreviations.put("\\connect", cmd.formatVerb(WbConnect.VERB)); abbreviations.put("\\c", cmd.formatVerb(WbConnect.VERB)); abbreviations.put("\\watch", RefreshAnnotation.ANNOTATION); // some limited SQL*Plus compatibility abbreviations.put("/", last); } @Override public void printMessage(String trace) { System.out.println(trace); } private String handleHistory(BatchRunner runner, String stmt) throws IOException { adjustHistoryDisplay(runner); String arg = SqlUtil.stripVerb(SqlUtil.makeCleanSql(stmt, false, false)); int index = -1; if (StringUtil.isBlank(arg)) { RowDisplay display = ConsoleSettings.getInstance().getRowDisplay(); try { ConsoleSettings.getInstance().setRowDisplay(RowDisplay.SingleLine); runner.runScript(stmt); // WbHistory without parameters was executed prompt for an index to be executed System.out.println(""); String input = WbConsoleFactory.getConsole().readLineWithoutHistory(">>> " + ResourceMgr.getString("TxtEnterStmtIndex") + " >>> "); index = StringUtil.getIntValue(input, -1); } finally { ConsoleSettings.getInstance().setRowDisplay(display); } } else { if (arg.equalsIgnoreCase("last")) { index = history.size(); } else { index = StringUtil.getIntValue(arg, -1); } } if (index > 0 && index <= history.size()) { return history.get(index - 1); } return null; } private String getMacroText(String sql) { return MacroManager.getInstance().getMacroText(MacroManager.DEFAULT_STORAGE, SqlUtil.trimSemicolon(sql)); } private void saveHistory() { history.saveTo(getHistoryFile()); } private void loadHistory() { history.clear(); WbFile histFile = getHistoryFile(); LogMgr.logDebug("SQLConsole.loadHistory()", "Loading history file: " + histFile.getFullPath()); history.readFrom(histFile); WbConsole console = WbConsoleFactory.getConsole(); console.clearHistory(); 
console.addToHistory(history.getHistoryEntries()); } private WbFile getHistoryFile() { String fname = null; if (ConsoleSettings.useHistoryPerProfile() && runner != null && runner.getConnection() != null) { fname = runner.getConnection().createFilename() + "_history.txt"; } if (fname == null) { fname = Settings.getInstance().getProperty("workbench.console.history.file", HISTORY_FILENAME); } WbFile result = new WbFile(Settings.getInstance().getConfigDir(), fname); return result; } private void adjustHistoryDisplay(BatchRunner runner) { int columns = WbConsoleFactory.getConsole().getColumns(); LogMgr.logDebug("SQLConsole.adjustHistoryDisplay()", "Console width: " + columns); if (columns < 0) { columns = Settings.getInstance().getIntProperty("workbench.console.history.displaylength", 100); } WbHistory wb = (WbHistory)runner.getCommand(WbHistory.VERB); wb.setMaxDisplayLength(columns); } private String getFirstWord(String input) { // I can't use SqlUtil.getSqlVerb() because that would not return e.g. \! 
if (StringUtil.isBlank(input)) return null; input = input.trim(); int pos = StringUtil.findFirstWhiteSpace(input); if (pos <= 0) return SqlUtil.trimSemicolon(input); return SqlUtil.trimSemicolon(input.substring(0, pos)); } private String appendSuffix(String prompt) { if (prompt == null) return null; if (prompt.endsWith(PROMPT_END)) return prompt; return prompt + PROMPT_END; } private String checkConnection(BatchRunner runner, String currentPrompt) { String newprompt = currentPrompt; WbConnection current = runner.getConnection(); if (current != null && ConsoleSettings.showProfileInPrompt()) { newprompt = current.getProfile().getName(); } else if (current != null && !runner.hasPendingActions()) { String user = current.getCurrentUser(); String catalog = current.getDisplayCatalog(); if (catalog == null) catalog = current.getCurrentCatalog(); String schema = current.getDisplaySchema(); if (schema == null) current.getCurrentSchema(); if (StringUtil.isBlank(catalog) && StringUtil.isNonBlank(schema)) { if (schema.equalsIgnoreCase(user)) { newprompt = user; } else { newprompt = user + "@" + schema; } } else if (StringUtil.isNonBlank(catalog) && StringUtil.isBlank(schema)) { newprompt = user + "@" + catalog; } else if (StringUtil.isNonBlank(catalog) && StringUtil.isNonBlank(schema)) { newprompt = user + "@" + catalog + "/" + schema; } } setTerminalTitle(current, false); return (newprompt == null ? DEFAULT_PROMPT : appendSuffix(newprompt)); } private void setTerminalTitle(WbConnection conn, boolean isRunning) { if (!changeTerminalTitle) return; ConnectionProfile profile = null; if (conn != null) { profile = conn.getProfile(); } String indicator = isRunning ? 
"> " : ""; String toPrint = titlePrefix + indicator + titleBuilder.getWindowTitle(profile) + titleSuffix; System.out.println(toPrint); } public static void main(String[] args) { AppArguments cmdLine = new AppArguments(); cmdLine.parse(args); if (cmdLine.isArgPresent(AppArguments.ARG_SCRIPT) || cmdLine.isArgPresent(AppArguments.ARG_COMMAND)) { // Allow batch mode through SQL Console // This way sqlwbconsole.exe can be used to start batch mode as well. WbManager.main(args); } else { System.setProperty("workbench.log.console", "false"); WbManager.initConsoleMode(args); SQLConsole console = new SQLConsole(); console.setTerminalTitle(null, false); console.startConsole(); } } public void abortStatement() { if (cancelThread != null) { try { LogMgr.logInfo("SQLConsole.cancelStatement()", "Trying to forcefully abort current statement"); printMessage(ResourceMgr.getString("MsgAbortStmt")); cancelThread.interrupt(); cancelThread.stop(); if (runner != null) { runner.abort(); } } catch (Exception ex) { LogMgr.logWarning("SQLConsole.cancelStatement()", "Could not cancel statement", ex); } finally { cancelThread = null; } } } public void cancelStatement() { if (cancelThread != null) { abortStatement(); } else if (runner != null && runner.isBusy() && cancelThread == null) { cancelThread = new WbThread("ConsoleStatementCancel") { @Override public void run() { LogMgr.logInfo("SQLConsole.cancelStatement()", "Trying to cancel the current statement"); printMessage(ResourceMgr.getString("MsgCancellingStmt")); runner.cancel(); } }; try { cancelThread.start(); cancelThread.join(Settings.getInstance().getIntProperty("workbench.sql.cancel.timeout", 5000)); } catch (Exception ex) { printMessage(ResourceMgr.getString("MsgAbortStmt")); LogMgr.logWarning("SQLConsole.cancelStatement()", "Could not cancel statement. 
Trying to forcefully abort the statemnt", ex); abortStatement(); } cancelThread = null; } } public void exit() { LogMgr.logWarning("SQLConsole.shutdownHook()", "SQL Workbench/J process has been interrupted."); cancelStatement(); boolean exitImmediately = Settings.getInstance().getBoolProperty("workbench.exitonbreak", true); if (exitImmediately) { LogMgr.logWarning("SQLConsole.shutdownHook()", "Aborting process..."); LogMgr.shutdown(); Runtime.getRuntime().halt(15); // exit() doesn't work properly from inside a shutdownhook! } else { ConnectionMgr.getInstance().abortAll(Collections.singletonList(runner.getConnection())); LogMgr.shutdown(); } } /** * Callback for the shutdown hook */ @Override public void run() { exit(); } private void installSignalHandler() { List<String> signals = Settings.getInstance().getListProperty("workbench.console.signal", false, "INT,QUIT"); for (String name : signals) { try { Signal signal = new Signal(name.toUpperCase()); Signal.handle(signal, this); LogMgr.logInfo("SQLConsole.installSignalHandler()", "Installed signal handler for " + name); } catch (Throwable th) { LogMgr.logInfo("SQLConsole.installSignalHandler()", "could not register signal handler for: " + name, th); } } } @Override public void handle(Signal signal) { LogMgr.logDebug("SQLConsole.handl()", "Received signal: " + signal.getName()); if (signal.getName().equals("INT")) { cancelStatement(); } if (signal.getName().equals("QUIT")) { exit(); } } }
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver15;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import com.google.common.collect.ImmutableList;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * Immutable OpenFlow 1.5 message: BSN (Big Switch Networks) experimenter
 * stats reply carrying per-VLAN counters. Auto-generated by LoxiGen; all
 * wire constants (version 6, type 19, experimenter 0x5c16c7, subtype 9)
 * are fixed by the protocol definition.
 */
class OFBsnVlanCounterStatsReplyVer15 implements OFBsnVlanCounterStatsReply {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnVlanCounterStatsReplyVer15.class);
    // version: 1.5
    final static byte WIRE_VERSION = 6;
    final static int MINIMUM_LENGTH = 24;

    private final static long DEFAULT_XID = 0x0L;
    private final static Set<OFStatsReplyFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsReplyFlags>of();
    private final static List<OFBsnVlanCounterStatsEntry> DEFAULT_ENTRIES = ImmutableList.<OFBsnVlanCounterStatsEntry>of();

    // OF message fields
    private final long xid;
    private final Set<OFStatsReplyFlags> flags;
    private final List<OFBsnVlanCounterStatsEntry> entries;

    // Immutable default instance
    final static OFBsnVlanCounterStatsReplyVer15 DEFAULT = new OFBsnVlanCounterStatsReplyVer15(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_ENTRIES
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnVlanCounterStatsReplyVer15(long xid, Set<OFStatsReplyFlags> flags, List<OFBsnVlanCounterStatsEntry> entries) {
        if(flags == null) {
            throw new NullPointerException("OFBsnVlanCounterStatsReplyVer15: property flags cannot be null");
        }
        if(entries == null) {
            throw new NullPointerException("OFBsnVlanCounterStatsReplyVer15: property entries cannot be null");
        }
        this.xid = xid;
        this.flags = flags;
        this.entries = entries;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REPLY;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.EXPERIMENTER;
    }

    @Override
    public Set<OFStatsReplyFlags> getFlags() {
        return flags;
    }

    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0x9L;
    }

    @Override
    public List<OFBsnVlanCounterStatsEntry> getEntries() {
        return entries;
    }

    public OFBsnVlanCounterStatsReply.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that inherits unset properties from an existing message instance.
    static class BuilderWithParent implements OFBsnVlanCounterStatsReply.Builder {
        final OFBsnVlanCounterStatsReplyVer15 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFBsnVlanCounterStatsEntry> entries;

        BuilderWithParent(OFBsnVlanCounterStatsReplyVer15 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnVlanCounterStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnVlanCounterStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x9L;
        }

        @Override
        public List<OFBsnVlanCounterStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFBsnVlanCounterStatsReply.Builder setEntries(List<OFBsnVlanCounterStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        @Override
        public OFBsnVlanCounterStatsReply build() {
            // Unset properties fall back to the parent message's values.
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFBsnVlanCounterStatsEntry> entries = this.entriesSet ? this.entries : parentMessage.entries;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");

            return new OFBsnVlanCounterStatsReplyVer15(
                    xid,
                    flags,
                    entries
                    );
        }
    }

    // Standalone builder; unset properties fall back to the class defaults.
    static class Builder implements OFBsnVlanCounterStatsReply.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsReplyFlags> flags;
        private boolean entriesSet;
        private List<OFBsnVlanCounterStatsEntry> entries;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REPLY;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnVlanCounterStatsReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsReplyFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnVlanCounterStatsReply.Builder setFlags(Set<OFStatsReplyFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x9L;
        }

        @Override
        public List<OFBsnVlanCounterStatsEntry> getEntries() {
            return entries;
        }

        @Override
        public OFBsnVlanCounterStatsReply.Builder setEntries(List<OFBsnVlanCounterStatsEntry> entries) {
            this.entries = entries;
            this.entriesSet = true;
            return this;
        }

        @Override
        public OFBsnVlanCounterStatsReply build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsReplyFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            List<OFBsnVlanCounterStatsEntry> entries = this.entriesSet ? this.entries : DEFAULT_ENTRIES;
            if(entries == null)
                throw new NullPointerException("Property entries must not be null");

            return new OFBsnVlanCounterStatsReplyVer15(
                    xid,
                    flags,
                    entries
                    );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes the message from a ByteBuf, validating all fixed-value
    // wire fields. Returns null (and rewinds) if the buffer is incomplete.
    static class Reader implements OFMessageReader<OFBsnVlanCounterStatsReply> {
        @Override
        public OFBsnVlanCounterStatsReply readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 6
            byte version = bb.readByte();
            if(version != (byte) 0x6)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_15(6), got="+version);
            // fixed value property type == 19
            byte type = bb.readByte();
            if(type != (byte) 0x13)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REPLY(19), got="+type);
            int length = U16.f(bb.readShort());
            if(length < MINIMUM_LENGTH)
                throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 65535
            short statsType = bb.readShort();
            if(statsType != (short) 0xffff)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.EXPERIMENTER(65535), got="+statsType);
            Set<OFStatsReplyFlags> flags = OFStatsReplyFlagsSerializerVer15.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x9L
            int subtype = bb.readInt();
            if(subtype != 0x9)
                throw new OFParseError("Wrong subtype: Expected=0x9L(0x9L), got="+subtype);
            // entries fill the remainder of the declared message length
            List<OFBsnVlanCounterStatsEntry> entries = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFBsnVlanCounterStatsEntryVer15.READER);

            OFBsnVlanCounterStatsReplyVer15 bsnVlanCounterStatsReplyVer15 = new OFBsnVlanCounterStatsReplyVer15(
                    xid,
                    flags,
                    entries
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnVlanCounterStatsReplyVer15);
            return bsnVlanCounterStatsReplyVer15;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnVlanCounterStatsReplyVer15Funnel FUNNEL = new OFBsnVlanCounterStatsReplyVer15Funnel();

    // Feeds the message's wire-relevant fields into a Guava hashing sink.
    static class OFBsnVlanCounterStatsReplyVer15Funnel implements Funnel<OFBsnVlanCounterStatsReplyVer15> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnVlanCounterStatsReplyVer15 message, PrimitiveSink sink) {
            // fixed value property version = 6
            sink.putByte((byte) 0x6);
            // fixed value property type = 19
            sink.putByte((byte) 0x13);
            // FIXME: skip funnel of length
            sink.putLong(message.xid);
            // fixed value property statsType = 65535
            sink.putShort((short) 0xffff);
            OFStatsReplyFlagsSerializerVer15.putTo(message.flags, sink);
            // skip pad (4 bytes)
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x9L
            sink.putInt(0x9);
            FunnelUtils.putList(message.entries, sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializes the message; the 16-bit length field is back-patched once
    // the variable-length entries list has been written.
    static class Writer implements OFMessageWriter<OFBsnVlanCounterStatsReplyVer15> {
        @Override
        public void write(ByteBuf bb, OFBsnVlanCounterStatsReplyVer15 message) {
            int startIndex = bb.writerIndex();
            // fixed value property version = 6
            bb.writeByte((byte) 0x6);
            // fixed value property type = 19
            bb.writeByte((byte) 0x13);
            // length is length of variable message, will be updated at the end
            int lengthIndex = bb.writerIndex();
            bb.writeShort(U16.t(0));
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 65535
            bb.writeShort((short) 0xffff);
            OFStatsReplyFlagsSerializerVer15.writeTo(bb, message.flags);
            // pad: 4 bytes
            bb.writeZero(4);
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0x9L
            bb.writeInt(0x9);
            ChannelUtils.writeList(bb, message.entries);

            // update length field
            int length = bb.writerIndex() - startIndex;
            bb.setShort(lengthIndex, length);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnVlanCounterStatsReplyVer15(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("entries=").append(entries);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnVlanCounterStatsReplyVer15 other = (OFBsnVlanCounterStatsReplyVer15) obj;

        if( xid != other.xid)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (entries == null) {
            if (other.entries != null)
                return false;
        } else if (!entries.equals(other.entries))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // NOTE(review): the xid term omits the usual "result +" of the
        // prime-accumulator pattern; the equals/hashCode contract is still
        // satisfied (hash depends only on the compared fields), so it is left
        // untouched in this generated ("Do not modify") file.
        result = prime * (int) (xid ^ (xid >>> 32));
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((entries == null) ? 0 : entries.hashCode());
        return result;
    }

}
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.map.impl.record;

import com.hazelcast.internal.serialization.Data;
import com.hazelcast.internal.serialization.SerializationService;
import com.hazelcast.map.impl.recordstore.expiry.ExpiryMetadata;
import com.hazelcast.map.impl.recordstore.expiry.ExpiryMetadataImpl;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;

import java.io.IOException;

import static com.hazelcast.map.impl.record.Record.NOT_CACHED;
import static com.hazelcast.map.impl.record.RecordReaderWriter.getById;

/**
 * Contains various factory &amp; helper methods for a {@link
 * com.hazelcast.map.impl.record.Record} object.
 */
public final class Records {

    private Records() {
    }

    /**
     * Serializes a record: writes the id of its matching reader/writer
     * first so {@link #readRecord} can pick the same strategy back.
     *
     * @param out            destination stream
     * @param record         record to serialize
     * @param dataValue      serialized value of the record
     * @param expiryMetadata expiry info to write alongside the record
     * @throws IOException on stream failure
     */
    public static void writeRecord(ObjectDataOutput out, Record record,
                                   Data dataValue, ExpiryMetadata expiryMetadata) throws IOException {
        RecordReaderWriter readerWriter = record.getMatchingRecordReaderWriter();
        out.writeByte(readerWriter.getId());
        readerWriter.writeRecord(out, record, dataValue, expiryMetadata);
    }

    /**
     * Counterpart of {@link #writeRecord}: reads the reader/writer id byte
     * and delegates the rest of the record to the matching implementation.
     *
     * @param in             source stream
     * @param expiryMetadata expiry holder to populate
     * @return the deserialized record
     * @throws IOException on stream failure
     */
    public static Record readRecord(ObjectDataInput in,
                                    ExpiryMetadata expiryMetadata) throws IOException {
        byte matchingDataRecordId = in.readByte();
        return getById(matchingDataRecordId).readRecord(in, expiryMetadata);
    }

    /**
     * Writes a boolean "has expiry" marker followed, when set, by the
     * expiry metadata itself.
     *
     * @param out            destination stream
     * @param expiryMetadata expiry info to write
     * @throws IOException on stream failure
     */
    public static void writeExpiry(ObjectDataOutput out,
                                   ExpiryMetadata expiryMetadata) throws IOException {
        boolean hasExpiry = expiryMetadata.hasExpiry();
        out.writeBoolean(hasExpiry);
        if (hasExpiry) {
            expiryMetadata.write(out);
        }
    }

    /**
     * Counterpart of {@link #writeExpiry}.
     *
     * @param in source stream
     * @return the deserialized expiry metadata, or {@link ExpiryMetadata#NULL}
     *         when the record was written without expiry
     * @throws IOException on stream failure
     */
    public static ExpiryMetadata readExpiry(ObjectDataInput in) throws IOException {
        ExpiryMetadata expiryMetadata = ExpiryMetadata.NULL;
        boolean hasExpiry = in.readBoolean();
        if (hasExpiry) {
            expiryMetadata = new ExpiryMetadataImpl();
            expiryMetadata.read(in);
        }
        return expiryMetadata;
    }

    /**
     * Except transient field {@link com.hazelcast.query.impl.Metadata},
     * all record-metadata is copied from one record to another.
     *
     * @param fromRecord source of the metadata
     * @param toRecord   record to populate
     * @return populated record object with new metadata
     */
    public static Record copyMetadataFrom(Record fromRecord, Record toRecord) {
        toRecord.setHits(fromRecord.getHits());
        toRecord.setVersion(fromRecord.getVersion());
        toRecord.setCreationTime(fromRecord.getCreationTime());
        toRecord.setLastAccessTime(fromRecord.getLastAccessTime());
        toRecord.setLastStoredTime(fromRecord.getLastStoredTime());
        toRecord.setLastUpdateTime(fromRecord.getLastUpdateTime());
        return toRecord;
    }

    /**
     * Get current cached value from the record.
     * This method protects you against accidental exposure
     * of cached value mutex into rest of the code.
     * <p>
     * Use it instead of raw {@link Record#getCachedValueUnsafe()} See
     * {@link #getValueOrCachedValue(Record, SerializationService)}
     * for details.
     *
     * @param record record to read the cached value from
     * @return the cached value, never the in-progress Thread marker
     */
    public static Object getCachedValue(Record record) {
        // Spin until the slot holds something other than the Thread-based
        // "deserialization in-progress" marker placed by tryStoreIntoCache.
        for (; ; ) {
            Object cachedValue = record.getCachedValueUnsafe();

            if (!(cachedValue instanceof Thread)) {
                return cachedValue;
            }

            // A Thread in the slot is either the lock marker or a genuine
            // Thread value wrapped in ThreadWrapper; unwrap distinguishes them.
            cachedValue = ThreadWrapper.unwrapOrNull(cachedValue);
            if (cachedValue != null) {
                return cachedValue;
            }
        }
    }

    /**
     * Return cached value where appropriate,
     * otherwise return the actual value.
     * Value caching makes sense when:
     * <ul>
     * <li>OBJECT InMemoryFormat is not used</li>
     * <li>Portable serialization is not used</li>
     * <li>HazelcastJsonValue objects are not used</li>
     * </ul>
     * <p>
     * If Record does not contain cached value and is found
     * appropriate (see above) then new cache value is created
     * by de-serializing the {@link Record#getValue()}
     * <p>
     * The newly de-deserialized value may not be stored into the Record
     * cache when the record has been modified while the method was running.
     * <p>
     * WARNING: This method may temporarily set an arbitrary object into the
     * Record cache - this object acts as mutex. The mutex should never be
     * returned to the outside world. Use {@link #getCachedValue(Record)}
     * instead of raw {@link Record#getCachedValueUnsafe()} to
     * protect from accidental mutex exposure to the user-code.
     *
     * @param record               record whose value is requested
     * @param serializationService used to deserialize the stored Data value
     * @return the cached (deserialized) value when available, otherwise the record's value
     */
    public static Object getValueOrCachedValue(Record record, SerializationService serializationService) {
        Object cachedValue = record.getCachedValueUnsafe();
        if (cachedValue == NOT_CACHED) {
            //record does not support caching at all
            return record.getValue();
        }
        for (; ; ) {
            if (cachedValue == null) {
                Object valueBeforeCas = record.getValue();
                if (!shouldCache(valueBeforeCas)) {
                    //it's either a null or value which we do not want to cache. let's just return it.
                    return valueBeforeCas;
                }
                Object fromCache = tryStoreIntoCache(record, valueBeforeCas, serializationService);
                if (fromCache != null) {
                    return fromCache;
                }
            } else if (cachedValue instanceof Thread) {
                //the cachedValue is either locked by another thread or it contains a wrapped thread
                cachedValue = ThreadWrapper.unwrapOrNull(cachedValue);
                if (cachedValue != null) {
                    //exceptional case: the cachedValue is not locked, it just contains an instance of Thread.
                    //this can happen when user put an instance of Thread into a map
                    //(=it should never happen, but never say never...)
                    return cachedValue;
                }
                //it looks like some other thread actually locked the cachedValue. let's give it another try (iteration)
            } else {
                //it's not the 'in-progress' marker/lock && it's not a null -> it has to be the actual cachedValue
                return cachedValue;
            }
            // Back off before re-reading the slot to reduce CAS contention.
            Thread.yield();
            cachedValue = record.getCachedValueUnsafe();
        }
    }

    /**
     * Attempts to deserialize {@code valueBeforeCas} and store the result in
     * the record's value cache, using the current thread object as a CAS
     * lock marker.
     *
     * @return the deserialized object, or {@code null} when another thread
     *         holds the lock (caller should retry)
     */
    private static Object tryStoreIntoCache(Record record, Object valueBeforeCas,
                                            SerializationService serializationService) {
        Thread currentThread = Thread.currentThread();
        if (!record.casCachedValue(null, currentThread)) {
            return null;
        }

        //we managed to lock the record for ourselves
        Object valueAfterCas = record.getValue();
        Object object = null;
        try {
            object = serializationService.toObject(valueBeforeCas);
        } catch (Exception e) {
            // Deserialization failed: release the lock before propagating.
            record.casCachedValue(currentThread, null);
            throw e;
        }
        if (valueAfterCas == valueBeforeCas) {
            //this check is needed to make sure a partition thread had not changed the value
            //right before we won the CAS
            Object wrappedObject = ThreadWrapper.wrapIfNeeded(object);
            record.casCachedValue(currentThread, wrappedObject);
            //we can return the object no matter of the CAS outcome. if we lose the CAS it means
            //the value had been mutated concurrently and partition thread removed our lock.
        } else {
            //the value has changed -> we can return the object to the caller as it was valid at some point in time
            //we are just not storing it into the cache as apparently it's not valid anymore.
            //we have to CAS the lock out as it could had been already removed by the partition thread
            record.casCachedValue(currentThread, null);
        }
        return object;
    }

    /**
     * Decides whether a stored value is worth caching in deserialized form.
     *
     * @param value candidate value (only {@link Data} values are cacheable)
     * @return {@code true} when caching the deserialized form is safe
     */
    static boolean shouldCache(Object value) {
        // For portables, we cannot extract information from the deserialized form.
        // For HazelcastJsonValue objects, if we pass the instanceof Data check, that
        // means the metadata is created from the Data representation of the object.
        // If we allow using the deserialized values, the metadata might not be safe to use.
        if (value instanceof Data) {
            Data data = (Data) value;
            return !(data.isPortable() || data.isJson() || data.isCompact());
        }
        return false;
    }

    /**
     * currentThread inside cachedValue acts as "deserialization in-progress" marker
     * if the actual deserialized value is instance of Thread then we need to wrap it
     * otherwise it might be mistaken for the "deserialization in-progress" marker.
     */
    private static final class ThreadWrapper extends Thread {
        private final Thread wrappedValue;

        private ThreadWrapper(Thread wrappedValue) {
            this.wrappedValue = wrappedValue;
        }

        static Object unwrapOrNull(Object o) {
            if (o instanceof ThreadWrapper) {
                return ((ThreadWrapper) o).wrappedValue;
            }
            return null;
        }

        static Object wrapIfNeeded(Object object) {
            if (object instanceof Thread) {
                //exceptional case: deserialized value is an instance of Thread
                //we need to wrap it as we use currentThread to mark the cacheValue is 'deserilization in-progress'
                //this is the only case where we allocate a new object.
                return new ThreadWrapper((Thread) object);
            }
            return object;
        }
    }
}
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.bluetoothchat; import android.app.Activity; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.util.Log; import android.view.View; import android.view.Window; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.ListView; import android.widget.TextView; import java.util.Set; /** * This Activity appears as a dialog. It lists any paired devices and * devices detected in the area after discovery. When a device is chosen * by the user, the MAC address of the device is sent back to the parent * Activity in the result Intent. 
*/ public class DeviceListActivity extends Activity { /** * Tag for Log */ private static final String TAG = "DeviceListActivity"; /** * Return Intent extra */ public static String EXTRA_DEVICE_ADDRESS = "device_address"; /** * Member fields */ private BluetoothAdapter mBtAdapter; /** * Newly discovered devices */ private ArrayAdapter<String> mNewDevicesArrayAdapter; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Setup the window requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); setContentView(R.layout.activity_device_list); // Set result CANCELED in case the user backs out setResult(Activity.RESULT_CANCELED); // Initialize the button to perform device discovery Button scanButton = (Button) findViewById(R.id.button_scan); scanButton.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { doDiscovery(); v.setVisibility(View.GONE); } }); // Initialize array adapters. One for already paired devices and // one for newly discovered devices ArrayAdapter<String> pairedDevicesArrayAdapter = new ArrayAdapter<String>(this, R.layout.device_name); mNewDevicesArrayAdapter = new ArrayAdapter<String>(this, R.layout.device_name); // Find and set up the ListView for paired devices ListView pairedListView = (ListView) findViewById(R.id.paired_devices); pairedListView.setAdapter(pairedDevicesArrayAdapter); pairedListView.setOnItemClickListener(mDeviceClickListener); // Find and set up the ListView for newly discovered devices ListView newDevicesListView = (ListView) findViewById(R.id.new_devices); newDevicesListView.setAdapter(mNewDevicesArrayAdapter); newDevicesListView.setOnItemClickListener(mDeviceClickListener); // Register for broadcasts when a device is discovered IntentFilter filter = new IntentFilter(BluetoothDevice.ACTION_FOUND); this.registerReceiver(mReceiver, filter); // Register for broadcasts when discovery has finished filter = new 
IntentFilter(BluetoothAdapter.ACTION_DISCOVERY_FINISHED); this.registerReceiver(mReceiver, filter); // Get the local Bluetooth adapter mBtAdapter = BluetoothAdapter.getDefaultAdapter(); // Get a set of currently paired devices Set<BluetoothDevice> pairedDevices = mBtAdapter.getBondedDevices(); // If there are paired devices, add each one to the ArrayAdapter if (pairedDevices.size() > 0) { findViewById(R.id.title_paired_devices).setVisibility(View.VISIBLE); for (BluetoothDevice device : pairedDevices) { pairedDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress()); } } else { String noDevices = getResources().getText(R.string.none_paired).toString(); pairedDevicesArrayAdapter.add(noDevices); } } @Override protected void onDestroy() { super.onDestroy(); // Make sure we're not doing discovery anymore if (mBtAdapter != null) { mBtAdapter.cancelDiscovery(); } // Unregister broadcast listeners this.unregisterReceiver(mReceiver); } /** * Start device discover with the BluetoothAdapter */ private void doDiscovery() { Log.d(TAG, "doDiscovery()"); // Indicate scanning in the title setProgressBarIndeterminateVisibility(true); setTitle(R.string.scanning); // Turn on sub-title for new devices findViewById(R.id.title_new_devices).setVisibility(View.VISIBLE); // If we're already discovering, stop it if (mBtAdapter.isDiscovering()) { mBtAdapter.cancelDiscovery(); } // Request discover from BluetoothAdapter mBtAdapter.startDiscovery(); } /** * The on-click listener for all devices in the ListViews */ private AdapterView.OnItemClickListener mDeviceClickListener = new AdapterView.OnItemClickListener() { public void onItemClick(AdapterView<?> av, View v, int arg2, long arg3) { // Cancel discovery because it's costly and we're about to connect mBtAdapter.cancelDiscovery(); // Get the device MAC address, which is the last 17 chars in the View String info = ((TextView) v).getText().toString(); String address = info.substring(info.length() - 17); // Create the result Intent 
and include the MAC address Intent intent = new Intent(); intent.putExtra(EXTRA_DEVICE_ADDRESS, address); // Set result and finish this Activity setResult(Activity.RESULT_OK, intent); finish(); } }; /** * The BroadcastReceiver that listens for discovered devices and changes the title when * discovery is finished */ private final BroadcastReceiver mReceiver = new BroadcastReceiver() { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); // When discovery finds a device if (BluetoothDevice.ACTION_FOUND.equals(action)) { // Get the BluetoothDevice object from the Intent BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); // If it's already paired, skip it, because it's been listed already if (device.getBondState() != BluetoothDevice.BOND_BONDED) { mNewDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress()); } // When discovery is finished, change the Activity title } else if (BluetoothAdapter.ACTION_DISCOVERY_FINISHED.equals(action)) { setProgressBarIndeterminateVisibility(false); setTitle(R.string.select_device); if (mNewDevicesArrayAdapter.getCount() == 0) { String noDevices = getResources().getText(R.string.none_found).toString(); mNewDevicesArrayAdapter.add(noDevices); } } } }; }
package com.fsck.k9.mail;

import java.io.IOException;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;

import android.util.Log;

import com.fsck.k9.activity.MessageReference;
import com.fsck.k9.mail.filter.CountingOutputStream;
import com.fsck.k9.mail.filter.EOLConvertingOutputStream;
import com.fsck.k9.mail.internet.MimeMultipart;
import com.fsck.k9.mail.internet.MimeUtility;
import com.fsck.k9.mail.store.UnavailableStorageException;

import com.imaeses.squeaky.K9;

/**
 * Abstract base class for an email message. Concrete subclasses supply storage-specific
 * implementations of the abstract accessors (headers, body, dates, addresses).
 *
 * <p>A message is identified by the triple (account UUID, folder name, UID) — see
 * {@link #equals(Object)} / {@link #hashCode()} and {@link #makeMessageReference()}.
 */
public abstract class Message implements Part, CompositeBody {
    // Shared zero-length array so getFlags() never allocates for the common empty case.
    private static final Flag[] EMPTY_FLAG_ARRAY = new Flag[0];

    // Lazily built by makeMessageReference(); invalidated when the UID changes (see setUid).
    private MessageReference mReference = null;

    public enum RecipientType {
        TO, CC, BCC,
    }

    protected String mUid;
    protected Set<Flag> mFlags = new HashSet<Flag>();
    protected Date mInternalDate;
    protected Folder mFolder;

    // this is the content that was originally signed for unencrypted PGP/MIME messages
    protected MimeMultipart signedMultipart;

    // the original charset before the messages was converted and stored as utf-8 (useful for inline signed messages)
    protected String mOriginalCharset;

    public void setSignedMultipart( MimeMultipart signedMultipart ) {
        this.signedMultipart = signedMultipart;
    }

    public MimeMultipart getSignedMultipart() {
        return signedMultipart;
    }

    public String getOriginalCharset() {
        return mOriginalCharset;
    }

    public void setOriginalCharset( String originalCharset ) {
        mOriginalCharset = originalCharset;
    }

    /**
     * Returns true if this message is strictly older than {@code earliestDate}.
     * Uses the sent date when available, otherwise falls back to the internal date;
     * returns false when {@code earliestDate} is null or no date is known at all.
     */
    public boolean olderThan(Date earliestDate) {
        if (earliestDate == null) {
            return false;
        }
        Date myDate = getSentDate();
        if (myDate == null) {
            myDate = getInternalDate();
        }
        if (myDate != null) {
            return myDate.before(earliestDate);
        }
        return false;
    }

    /**
     * Two messages are equal when they share the same UID, folder name and account UUID.
     * NOTE(review): throws NullPointerException if mUid or mFolder has not been set —
     * presumably callers only compare fully-initialized messages; confirm before relying on it.
     */
    @Override
    public boolean equals(Object o) {
        if (o == null || !(o instanceof Message)) {
            return false;
        }
        Message other = (Message)o;
        return (mUid.equals(other.getUid())
                && mFolder.getName().equals(other.getFolder().getName())
                && mFolder.getAccount().getUuid().equals(other.getFolder().getAccount().getUuid()));
    }

    /**
     * Hash over the same (folder name, account UUID, UID) triple used by equals,
     * keeping the equals/hashCode contract consistent.
     */
    @Override
    public int hashCode() {
        final int MULTIPLIER = 31;
        int result = 1;
        result = MULTIPLIER * result + mFolder.getName().hashCode();
        result = MULTIPLIER * result + mFolder.getAccount().getUuid().hashCode();
        result = MULTIPLIER * result + mUid.hashCode();
        return result;
    }

    public String getUid() {
        return mUid;
    }

    public void setUid(String uid) {
        // The cached MessageReference embeds the UID, so it must be rebuilt.
        mReference = null;
        this.mUid = uid;
    }

    public Folder getFolder() {
        return mFolder;
    }

    public abstract String getSubject();

    public abstract void setSubject(String subject) throws MessagingException;

    public Date getInternalDate() {
        return mInternalDate;
    }

    public void setInternalDate(Date internalDate) {
        this.mInternalDate = internalDate;
    }

    public abstract Date getSentDate();

    public abstract void setSentDate(Date sentDate) throws MessagingException;

    public abstract Address[] getRecipients(RecipientType type) throws MessagingException;

    public abstract void setRecipients(RecipientType type, Address[] addresses)
    throws MessagingException;

    /** Convenience wrapper: sets a single recipient of the given type. */
    public void setRecipient(RecipientType type, Address address) throws MessagingException {
        setRecipients(type, new Address[] {
                          address
                      });
    }

    public abstract Address[] getFrom();

    public abstract void setFrom(Address from) throws MessagingException;

    public abstract Address[] getReplyTo();

    public abstract void setReplyTo(Address[] from) throws MessagingException;

    public abstract String getMessageId() throws MessagingException;

    public abstract void setInReplyTo(String inReplyTo) throws MessagingException;

    public abstract String[] getReferences() throws MessagingException;

    public abstract void setReferences(String references) throws MessagingException;

    public abstract Body getBody();

    public abstract String getContentType() throws MessagingException;

    public abstract void addHeader(String name, String value) throws MessagingException;

    public abstract void setHeader(String name, String value) throws MessagingException;

    public abstract String[] getHeader(String name) throws MessagingException;

    public abstract Set<String> getHeaderNames() throws UnavailableStorageException;

    public abstract void removeHeader(String name) throws MessagingException;

    public abstract void setBody(Body body) throws MessagingException;

    public abstract long getId();

    public abstract String getPreview();

    public abstract boolean hasAttachments();

    /*
     * calculateContentPreview
     * Takes a plain text message body as a string.
     * Returns a message summary as a string suitable for showing in a message list
     *
     * A message summary should be about the first 160 characters
     * of unique text written by the message sender
     * Quoted text, "On $date" and so on will be stripped out.
     * All newlines and whitespace will be compressed.
     *
     */
    public static String calculateContentPreview(String text) {
        if (text == null) {
            return null;
        }

        // Only look at the first 8k of a message when calculating
        // the preview.  This should avoid unnecessary
        // memory usage on large messages
        if (text.length() > 8192) {
            text = text.substring(0, 8192);
        }

        // Remove (correctly delimited by '-- \n') signatures
        text = text.replaceAll("(?ms)^-- [\\r\\n]+.*", "");
        // try to remove lines of dashes in the preview
        text = text.replaceAll("(?m)^----.*?$", "");
        // remove quoted text from the preview
        text = text.replaceAll("(?m)^[#>].*$", "");
        // Remove a common quote header from the preview
        text = text.replaceAll("(?m)^On .*wrote.?$", "");
        // Remove a more generic quote header from the preview
        text = text.replaceAll("(?m)^.*\\w+:$", "");
        // Remove horizontal rules.
        text = text.replaceAll("\\s*([-=_]{30,}+)\\s*", " ");

        // URLs in the preview should just be shown as "..." - They're not
        // clickable and they usually overwhelm the preview
        text = text.replaceAll("https?://\\S+", "...");

        // Don't show newlines in the preview
        text = text.replaceAll("(\\r|\\n)+", " ");
        // Collapse whitespace in the preview
        text = text.replaceAll("\\s+", " ");
        // Remove any whitespace at the beginning and end of the string.
        text = text.trim();

        // Cap the preview at 512 characters.
        return (text.length() <= 512) ? text : text.substring(0, 512);
    }

    public void delete(String trashFolderName) throws MessagingException {}

    /*
     * TODO Refactor Flags at some point to be able to store user defined flags.
     */
    public Flag[] getFlags() {
        return mFlags.toArray(EMPTY_FLAG_ARRAY);
    }

    /**
     * @param flag
     *            Flag to set. Never <code>null</code>.
     * @param set
     *            If <code>true</code>, the flag is added. If <code>false</code>
     *            , the flag is removed.
     * @throws MessagingException
     */
    public void setFlag(Flag flag, boolean set) throws MessagingException {
        if (set) {
            mFlags.add(flag);
        } else {
            mFlags.remove(flag);
        }
    }

    /**
     * This method calls setFlag(Flag, boolean)
     * @param flags
     * @param set
     */
    public void setFlags(Flag[] flags, boolean set) throws MessagingException {
        for (Flag flag : flags) {
            setFlag(flag, set);
        }
    }

    public boolean isSet(Flag flag) {
        return mFlags.contains(flag);
    }

    public void destroy() throws MessagingException {}

    public abstract void setEncoding(String encoding) throws UnavailableStorageException, MessagingException;

    public abstract void setCharset(String charset) throws MessagingException;

    /**
     * Builds (and caches) a lightweight reference to this message consisting of the
     * account UUID, folder name and UID. The cache is invalidated by {@link #setUid(String)}.
     */
    public MessageReference makeMessageReference() {
        if (mReference == null) {
            mReference = new MessageReference();
            mReference.accountUuid = getFolder().getAccount().getUuid();
            mReference.folderName = getFolder().getName();
            mReference.uid = mUid;
        }
        return mReference;
    }

    /**
     * Computes the on-the-wire size of the message by writing it through an
     * EOL-converting, byte-counting sink. Returns 0 if writing fails (the error is logged).
     */
    public long calculateSize() {
        try {
            CountingOutputStream out = new CountingOutputStream();
            EOLConvertingOutputStream eolOut = new EOLConvertingOutputStream(out);
            writeTo(eolOut);
            eolOut.flush();
            return out.getCount();
        } catch (IOException e) {
            Log.e(K9.LOG_TAG, "Failed to calculate a message size", e);
        } catch (MessagingException e) {
            Log.e(K9.LOG_TAG, "Failed to calculate a message size", e);
        }
        return 0;
    }

    /**
     * Copy the contents of this object into another {@code Message} object.
     *
     * @param destination
     *         The {@code Message} object to receive the contents of this instance.
     */
    protected void copy(Message destination) {
        copyInternals( destination );
    }

    public void copyInternals(Message destination) {
        destination.mUid = mUid;
        destination.mInternalDate = mInternalDate;
        destination.mFolder = mFolder;
        destination.mReference = mReference;

        // mFlags contents can change during the object lifetime, so copy the Set
        destination.mFlags = new HashSet<Flag>(mFlags);
    }

    /**
     * Creates a new {@code Message} object with the same content as this object.
     *
     * <p>
     * <strong>Note:</strong>
     * This method was introduced as a hack to prevent {@code ConcurrentModificationException}s. It
     * shouldn't be used unless absolutely necessary. See the comment in
     * {@link com.fsck.k9.activity.MessageView.Listener#loadMessageForViewHeadersAvailable(com.fsck.k9.Account, String, String, Message)}
     * for more information.
     * </p>
     */
    public abstract Message clone();

    public abstract void setUsing7bitTransport() throws MessagingException;
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.repositories.blobstore.testkit;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.repositories.RepositoryVerificationException;
import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskAwareRequest;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.NoSuchFileException;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.zip.CRC32;

/**
 * Action which instructs a node to read a range of a blob from a {@link org.elasticsearch.repositories.blobstore.BlobStoreRepository}
 * (possibly the entire blob) and compute its checksum. It is acceptable if the blob is not found but we do not accept the blob being
 * otherwise unreadable.
 */
public class GetBlobChecksumAction extends ActionType<GetBlobChecksumAction.Response> {

    private static final Logger logger = LogManager.getLogger(GetBlobChecksumAction.class);

    public static final GetBlobChecksumAction INSTANCE = new GetBlobChecksumAction();

    public static final String NAME = "cluster:admin/repository/analyze/blob/read";

    private GetBlobChecksumAction() {
        super(NAME, Response::new);
    }

    public static class TransportAction extends HandledTransportAction<Request, Response> {

        private static final Logger logger = GetBlobChecksumAction.logger;

        // Read the blob in 8kB chunks; each chunk is folded into the CRC32 and counted.
        private static final int BUFFER_SIZE = ByteSizeUnit.KB.toIntBytes(8);

        private final RepositoriesService repositoriesService;

        @Inject
        public TransportAction(TransportService transportService, ActionFilters actionFilters, RepositoriesService repositoriesService) {
            // Runs on the SNAPSHOT pool since blob reads may block for a long time.
            super(NAME, transportService, actionFilters, Request::new, ThreadPool.Names.SNAPSHOT);
            this.repositoriesService = repositoriesService;
        }

        /**
         * Reads the requested blob (whole or range), computing a CRC32 checksum and timing
         * stats as it goes. A missing blob yields {@link Response#BLOB_NOT_FOUND}; any other
         * read failure is reported via {@code listener.onFailure}. Cancellation of the task
         * is checked after each read and aborts with a {@link RepositoryVerificationException}.
         */
        @Override
        protected void doExecute(Task task, Request request, ActionListener<Response> listener) {

            assert task instanceof CancellableTask;
            CancellableTask cancellableTask = (CancellableTask) task;

            final Repository repository = repositoriesService.repository(request.getRepositoryName());
            if (repository instanceof BlobStoreRepository == false) {
                throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob store repository");
            }
            final BlobStoreRepository blobStoreRepository = (BlobStoreRepository) repository;
            final BlobContainer blobContainer = blobStoreRepository.blobStore()
                .blobContainer(blobStoreRepository.basePath().add(request.getBlobPath()));

            logger.trace("handling [{}]", request);

            final InputStream rawInputStream;
            try {
                if (request.isWholeBlob()) {
                    rawInputStream = blobContainer.readBlob(request.getBlobName());
                } else {
                    rawInputStream = blobContainer.readBlob(request.getBlobName(), request.getRangeStart(), request.getRangeLength());
                }
            } catch (FileNotFoundException | NoSuchFileException e) {
                // A missing blob is an acceptable outcome, not a failure (see class javadoc).
                logger.trace("blob not found for [{}]", request);
                listener.onResponse(Response.BLOB_NOT_FOUND);
                return;
            } catch (IOException e) {
                // NOTE(review): the caught exception is not included in this warn log;
                // it does still reach the caller via listener.onFailure(e) below.
                logger.warn("failed to read blob for [{}]", request);
                listener.onFailure(e);
                return;
            }

            logger.trace("reading blob for [{}]", request);

            // Wraps the stream in the repository's restore rate limiter; throttleNanos
            // accumulates the total time spent waiting on the limiter.
            final AtomicLong throttleNanos = new AtomicLong();
            final InputStream throttledInputStream = blobStoreRepository.maybeRateLimitRestores(rawInputStream, throttleNanos::addAndGet);

            final CRC32 crc32 = new CRC32();
            final byte[] buffer = new byte[BUFFER_SIZE];
            long bytesRead = 0L;
            final long startTimeNanos = System.nanoTime();
            long firstByteNanos = startTimeNanos;

            // Ensure the stream is closed on any abnormal exit; on the success path it is
            // closed explicitly below so a close() failure can be reported.
            boolean success = false;
            try {
                while (true) {
                    final int readSize;
                    try {
                        readSize = throttledInputStream.read(buffer, 0, buffer.length);
                    } catch (IOException e) {
                        logger.warn("exception while read blob for [{}]", request);
                        listener.onFailure(e);
                        return;
                    }

                    if (readSize == -1) {
                        break;
                    }

                    if (readSize > 0) {
                        // Record time-to-first-byte on the first successful read.
                        if (bytesRead == 0L) {
                            firstByteNanos = System.nanoTime();
                        }

                        crc32.update(buffer, 0, readSize);
                        bytesRead += readSize;
                    }

                    if (cancellableTask.isCancelled()) {
                        throw new RepositoryVerificationException(
                            request.repositoryName,
                            "cancelled [" + request.getDescription() + "] after reading [" + bytesRead + "] bytes"
                        );
                    }
                }

                success = true;
            } finally {
                if (success == false) {
                    IOUtils.closeWhileHandlingException(throttledInputStream);
                }
            }

            try {
                throttledInputStream.close();
            } catch (IOException e) {
                throw new RepositoryVerificationException(
                    request.repositoryName,
                    "failed to close input stream when handling [" + request.getDescription() + "]",
                    e
                );
            }

            final long endTimeNanos = System.nanoTime();

            // A short read of a requested range indicates a corrupt/misbehaving repository.
            if (request.isWholeBlob() == false && bytesRead != request.getRangeLength()) {
                throw new RepositoryVerificationException(
                    request.repositoryName,
                    "unexpectedly read [" + bytesRead + "] bytes when handling [" + request.getDescription() + "]"
                );
            }

            final Response response = new Response(
                bytesRead,
                crc32.getValue(),
                firstByteNanos - startTimeNanos,
                endTimeNanos - startTimeNanos,
                throttleNanos.get()
            );
            logger.trace("responding to [{}] with [{}]", request, response);
            listener.onResponse(response);
        }

    }

    public static class Request extends ActionRequest implements TaskAwareRequest {

        private final String repositoryName;
        private final String blobPath;
        private final String blobName;

        // Requesting the range [rangeStart, rangeEnd), but we treat the range [0, 0) as a special case indicating to read the whole blob.
        private final long rangeStart;
        private final long rangeEnd;

        Request(StreamInput in) throws IOException {
            super(in);
            repositoryName = in.readString();
            blobPath = in.readString();
            blobName = in.readString();
            rangeStart = in.readVLong();
            rangeEnd = in.readVLong();
        }

        @Override
        public ActionRequestValidationException validate() {
            return null;
        }

        Request(String repositoryName, String blobPath, String blobName, long rangeStart, long rangeEnd) {
            // Either the whole-blob sentinel [0, 0) or a well-formed non-empty range.
            assert rangeStart == 0L && rangeEnd == 0L || 0L <= rangeStart && rangeStart < rangeEnd : rangeStart + "-" + rangeEnd;
            this.repositoryName = repositoryName;
            this.blobPath = blobPath;
            this.blobName = blobName;
            this.rangeStart = rangeStart;
            this.rangeEnd = rangeEnd;
        }

        public String getRepositoryName() {
            return repositoryName;
        }

        public String getBlobPath() {
            return blobPath;
        }

        public String getBlobName() {
            return blobName;
        }

        public long getRangeStart() {
            assert isWholeBlob() == false;
            return rangeStart;
        }

        public long getRangeEnd() {
            assert isWholeBlob() == false;
            return rangeEnd;
        }

        public long getRangeLength() {
            assert isWholeBlob() == false;
            return rangeEnd - rangeStart;
        }

        /**
         * @return whether we should read the whole blob or a range.
         */
        boolean isWholeBlob() {
            return rangeEnd == 0L;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(repositoryName);
            out.writeString(blobPath);
            out.writeString(blobName);
            out.writeVLong(rangeStart);
            out.writeVLong(rangeEnd);
        }

        @Override
        public String getDescription() {
            return "retrieve ["
                + (isWholeBlob() ? "whole blob" : (getRangeStart() + "-" + getRangeEnd()))
                + "] from ["
                + getRepositoryName()
                + ":"
                + getBlobPath()
                + "/"
                + getBlobName()
                + "]";
        }

        @Override
        public String toString() {
            return "GetRepositoryBlobChecksumRequest{" + getDescription() + "}";
        }

        @Override
        public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
            return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers) {
                @Override
                public boolean shouldCancelChildrenOnCancellation() {
                    return false; // no children
                }
            };
        }
    }

    public static class Response extends ActionResponse {

        // Sentinel response for a missing blob: all-zero fields (see isNotFound()).
        static Response BLOB_NOT_FOUND = new Response(0L, 0L, 0L, 0L, 0L);

        private final long bytesRead; // 0 if not found
        private final long checksum; // 0 if not found
        private final long firstByteNanos; // 0 if not found
        private final long elapsedNanos; // 0 if not found
        private final long throttleNanos; // 0 if not found

        Response(long bytesRead, long checksum, long firstByteNanos, long elapsedNanos, long throttleNanos) {
            this.bytesRead = bytesRead;
            this.checksum = checksum;
            this.firstByteNanos = firstByteNanos;
            this.elapsedNanos = elapsedNanos;
            this.throttleNanos = throttleNanos;
        }

        Response(StreamInput in) throws IOException {
            super(in);
            this.bytesRead = in.readVLong();
            // checksum is a full 64-bit value, so readLong/writeLong (not VLong).
            this.checksum = in.readLong();
            this.firstByteNanos = in.readVLong();
            this.elapsedNanos = in.readVLong();
            this.throttleNanos = in.readVLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVLong(bytesRead);
            out.writeLong(checksum);
            out.writeVLong(firstByteNanos);
            out.writeVLong(elapsedNanos);
            out.writeVLong(throttleNanos);
        }

        @Override
        public String toString() {
            return "GetRepositoryBlobChecksumResponse{"
                + "bytesRead="
                + bytesRead
                + ", checksum="
                + checksum
                + ", firstByteNanos="
                + firstByteNanos
                + ", elapsedNanos="
                + elapsedNanos
                + ", throttleNanos="
                + throttleNanos
                + '}';
        }

        public long getBytesRead() {
            return bytesRead;
        }

        public long getChecksum() {
            return checksum;
        }

        public long getFirstByteNanos() {
            return firstByteNanos;
        }

        public long getElapsedNanos() {
            return elapsedNanos;
        }

        public long getThrottleNanos() {
            return throttleNanos;
        }

        public boolean isNotFound() {
            return bytesRead == 0L && checksum == 0L && firstByteNanos == 0L && elapsedNanos == 0L && throttleNanos == 0L;
        }

    }

}
/* Copyright 2012 Google, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.arbeitspferde.groningen.http;

import com.google.gson.Gson;
import com.google.inject.Inject;

import org.arbeitspferde.groningen.Pipeline;
import org.arbeitspferde.groningen.PipelineId;
import org.arbeitspferde.groningen.PipelineManager;
import org.arbeitspferde.groningen.common.EvaluatedSubject;
import org.arbeitspferde.groningen.config.GroningenConfig.ClusterConfig;
import org.arbeitspferde.groningen.config.GroningenConfig.SubjectGroupConfig;
import org.arbeitspferde.groningen.display.DisplayMediator;
import org.arbeitspferde.groningen.display.DisplayableObject;
import org.arbeitspferde.groningen.experimentdb.PauseTime;
import org.arbeitspferde.groningen.experimentdb.ResourceMetric;
import org.arbeitspferde.groningen.experimentdb.SubjectStateBridge;
import org.arbeitspferde.groningen.proto.Params.GroningenParamsOrBuilder;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Map;

/**
 * RESTful Service, serves data for the dashboard.
 */
@Path("/pipelines")
@Produces(MediaType.APPLICATION_JSON)
public class Pipelines {
  private final Gson gson;
  private final PipelineManager pipelineManager;

  // TODO(sanragsood): This is configurable via params.proto; switch once the code is moved.
  private static final int NUM_EXPERIMENT_SCORES = 5;

  // NOTE(review): SimpleDateFormat is not thread-safe; concurrent JAX-RS requests sharing this
  // static instance can corrupt formatting. Consider a per-call instance or DateTimeFormatter.
  private static final DateFormat df = new SimpleDateFormat("d MMM yyyy hh:mm a z");

  @Inject
  public Pipelines(Gson gson, PipelineManager pipelineManager) {
    this.gson = gson;
    this.pipelineManager = pipelineManager;
  }

  // TODO(sanragsood): These methods assume the base yet prominent use case where the experiment is
  // run on a single job in a single cluster. This needs to be fixed.

  /** Returns the name of the first subject group found in the pipeline's config, or "". */
  private static String extractJobName(Pipeline pipeline) {
    for (ClusterConfig clusterConfig : pipeline.getConfig().getClusterConfigs()) {
      for (SubjectGroupConfig subjectGroupConfig : clusterConfig.getSubjectGroupConfigs()) {
        return subjectGroupConfig.getName();
      }
    }
    return "";
  }

  /** Returns the user of the first subject group found in the pipeline's config, or "". */
  private static String extractUserName(Pipeline pipeline) {
    for (ClusterConfig clusterConfig : pipeline.getConfig().getClusterConfigs()) {
      for (SubjectGroupConfig subjectGroupConfig : clusterConfig.getSubjectGroupConfigs()) {
        return subjectGroupConfig.getUser();
      }
    }
    return "";
  }

  /**
   * Returns a JSON array of summary info (id, job, user) for every registered pipeline.
   */
  @GET
  public String getAllPipelines() {
    ArrayList<PipelineInfo> pipelineGroup = new ArrayList<>();
    Map<PipelineId, Pipeline> pipelineMap = pipelineManager.getAllPipelines();
    for (Pipeline pipeline : pipelineMap.values()) {
      pipelineGroup.add(new PipelineInfo(pipeline.id().toString(),
          extractJobName(pipeline),
          extractUserName(pipeline)));
    }
    return this.gson.toJson(pipelineGroup);
  }

  /**
   * Converts up to NUM_EXPERIMENT_SCORES evaluated subjects into ExperimentInfo entries,
   * ranking them by their position in the input array.
   *
   * <p>NOTE(review): cumulativeExperimentIdSum is currently unused here — kept so existing
   * call sites compile; confirm whether it was meant to feed the score display.
   */
  private static ExperimentInfo[] extractExperimentInfo(
      EvaluatedSubject[] subjects, long cumulativeExperimentIdSum) {
    ArrayList<ExperimentInfo> experimentScores = new ArrayList<>();
    int count = 1;
    for (EvaluatedSubject subject : subjects) {
      experimentScores.add(new ExperimentInfo(
          subject.getExperimentId(),
          count,
          subject.isDefault() ?
              "DEFAULT SETTINGS" : subject.getBridge().getCommandLine().toArgumentString(),
          df.format(subject.getTimeStamp().getMillis())));
      if (count++ >= NUM_EXPERIMENT_SCORES) {
        break;
      }
    }
    return experimentScores.toArray(new ExperimentInfo[0]);
  }

  /**
   * Returns detailed JSON for one or more comma-separated pipeline ids: warnings, status,
   * best experiment scores and full experiment history. Unknown or empty ids are skipped.
   */
  @GET
  @Path("/{pipelineIds}")
  public String getPipeline(@PathParam("pipelineIds") String pipelineIds) {
    String[] pipelineIdList = pipelineIds.split(",");
    ArrayList<DetailedPipelineInfo> pipelines = new ArrayList<>();
    for (String pipelineId : pipelineIdList) {
      if (!pipelineId.isEmpty()) {
        Pipeline pipeline = pipelineManager.findPipelineById(new PipelineId(pipelineId));
        // Fix: findPipelineById can return null for an unknown id (the CSV and config
        // endpoints below already guard for this); skip instead of throwing an NPE.
        if (pipeline == null) {
          continue;
        }
        DetailedPipelineInfo pipelineInfo = new DetailedPipelineInfo(
            pipelineId,
            extractJobName(pipeline),
            extractUserName(pipeline));
        // TODO(sanragsood): Possibly rename DisplayMediator?
        DisplayMediator infoProvider = pipeline.getDisplayableInformationProvider();

        // Warnings
        pipelineInfo.warnings = infoProvider.getWarnings();

        // Status
        ArrayList<StatusData> status = new ArrayList<>();
        for (DisplayableObject statusObj : infoProvider.getMonitoredObjects()) {
          status.add(new StatusData(statusObj.getInfoString(), statusObj.getObject().toString()));
        }
        pipelineInfo.status = status.toArray(new StatusData[0]);

        // Best Experiment Scores
        EvaluatedSubject[] alltimeSubjects = infoProvider.getAlltimeExperimentSubjects();
        long cumulativeExperimentIdSum = infoProvider.getCumulativeExperimentIdSum();
        pipelineInfo.bestExperimentScores = extractExperimentInfo(
            alltimeSubjects, cumulativeExperimentIdSum);

        // Experiment History
        EvaluatedSubject[] allSubjects = infoProvider.getAllExperimentSubjects();
        ArrayList<HistoricalData> historyData = new ArrayList<>();
        for (EvaluatedSubject subject : allSubjects) {
          historyData.add(new HistoricalData(
              subject.getExperimentId(),
              subject.getFitness(),
              subject.isDefault() ?
                  "DEFAULT SETTINGS" : subject.getBridge().getCommandLine().toArgumentString()));
        }
        pipelineInfo.history = historyData.toArray(new HistoricalData[0]);
        pipelines.add(pipelineInfo);
      }
    }
    return this.gson.toJson(pipelines);
  }

  /**
   * Streams the full experiment history of one pipeline as a CSV attachment. Returns an
   * empty CSV (header only) when the pipeline id is unknown.
   */
  @GET
  @Path("/{pipelineId}/csv")
  @Produces("text/csv")
  public Response getPipelineDataCsv(@PathParam("pipelineId") String pipelineId) {
    StringBuilder sb = new StringBuilder();
    sb.append("Timestamp,TaskID,Job,User,ExperimentIteration,JVMParamString,"
        + "FitnessScore,LatencyWeight,LatencyScore,ThroughputWeight,"
        + "ThroughputScore,FootprintWeight,FootprintScore\n");
    Pipeline pipeline = pipelineManager.findPipelineById(new PipelineId(pipelineId));
    if (pipeline != null) {
      DisplayMediator infoProvider = pipeline.getDisplayableInformationProvider();
      EvaluatedSubject[] allSubjects = infoProvider.getAllExperimentSubjects();
      GroningenParamsOrBuilder params = pipeline.getConfig().getParamBlock();
      for (EvaluatedSubject subject : allSubjects) {
        SubjectStateBridge subjectBridge = subject.getBridge();
        sb.append('"')
            .append(df.format(subject.getTimeStamp().getMillis())) // Timestamp
            .append('"')
            .append(',')
            .append(subject.getSubjectGroupIndex()) // TaskID
            .append(',')
            .append(subject.getSubjectGroupName()) // Job
            .append(',')
            .append(subject.getUserName()) // User
            .append(',')
            .append(subject.getExperimentId()) // ExperimentIteration
            .append(',')
            .append('"')
            .append(subject.isDefault() ?
                "DEFAULT SETTINGS" : subjectBridge.getCommandLine().toArgumentString()) // JVMParamString
            .append('"')
            .append(',')
            .append(subject.getFitness()) // FitnessScore
            .append(',')
            .append(params.getLatencyWeight()) // LatencyWeight
            .append(',')
            .append(subjectBridge.getPauseTime().computeScore(
                PauseTime.ScoreType.LATENCY)) // LatencyScore
            .append(',')
            .append(params.getThroughputWeight()) // ThroughputWeight
            .append(',')
            .append(subjectBridge.getPauseTime().computeScore(
                PauseTime.ScoreType.THROUGHPUT)) // ThroughputScore
            .append(',')
            .append(params.getMemoryWeight()) // FootprintWeight
            .append(',')
            .append(subjectBridge.getResourceMetric().computeScore(
                ResourceMetric.ScoreType.MEMORY)) // FootprintScore
            .append('\n');
      }
    }
    return Response
        .ok(sb.toString())
        .header("Content-Disposition", "attachment; filename=" + pipelineId + ".csv")
        .build();
  }

  /**
   * Returns the pipeline's proto config as a plain-text attachment; empty body for an
   * unknown pipeline id.
   */
  @GET
  @Path("/{pipelineId}/config")
  @Produces(MediaType.TEXT_PLAIN)
  public Response getPipelineConfig(@PathParam("pipelineId") String pipelineId) {
    String config = "";
    Pipeline pipeline = this.pipelineManager.findPipelineById(new PipelineId(pipelineId));
    if (pipeline != null) {
      config = pipeline.getConfig().getProtoConfig().toString();
    }
    return Response
        .ok(config)
        .header("Content-Disposition", "attachment; filename=" + pipelineId + ".config")
        .build();
  }
}
package net.graphical.model.causality.model.adjImpl;

import net.graphical.model.causality.graph.model.AdjImpl.Dag;
import net.graphical.model.causality.graph.model.intervention.InterventionFamily;
import net.graphical.model.causality.graph.algorithm.graphTypeChecking.DagChecker;
import net.graphical.model.causality.graph.algorithm.transformer.DagGenerator;
import net.graphical.model.causality.graph.algorithm.transformer.EssentialGraphGenerator;
import net.graphical.model.causality.graph.model.AdjImpl.ChainGraph;
import net.graphical.model.causality.graph.model.Edge;
import net.graphical.model.causality.graph.model.EdgeType;
import net.graphical.model.causality.graph.model.Node;
import net.graphical.model.causality.graph.model.intervention.Intervention;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Unit tests for {@link EssentialGraphGenerator} and the related DAG
 * transformers: DAG validity checking, DAG &lt;-&gt; essential-graph round
 * trips, essential-edge ordering, and interventional essential graphs.
 *
 * <p>Methods tagged {@code REF_HB} reproduce figure-numbered examples from the
 * reference literature on interventional essential graphs.
 *
 * <p>Fix over previous revision: JUnit's {@code assertEquals(expected, actual)}
 * calls had their arguments reversed, and boolean checks now use
 * {@code assertTrue}/{@code assertFalse} for clearer failure messages.
 * Duplicated node-list construction is factored into helpers. Assertion
 * semantics are unchanged.
 *
 * Created by sli on 11/18/15.
 */
public class EssentialGraphTest {

    /** Builds nodes numbered 1..count, each carrying the binary levels "0"/"1". */
    private static List<Node> nodesWithLevels(int count) {
        List<Node> nodes = new ArrayList<>();
        for (int i = 1; i <= count; i++) {
            Node node = new Node(i);
            node.addLevel("0", "1");
            nodes.add(node);
        }
        return nodes;
    }

    /** Builds plain nodes numbered 1..count (no levels attached). */
    private static List<Node> plainNodes(int count) {
        List<Node> nodes = new ArrayList<>();
        for (int i = 1; i <= count; i++) {
            nodes.add(new Node(i));
        }
        return nodes;
    }

    /**
     * Builds an intervention targeting the given node ids; with no arguments
     * this is the empty (purely observational) intervention.
     */
    private static Intervention interventionOn(int... nodeIds) {
        List<Node> targets = new ArrayList<>();
        for (int id : nodeIds) {
            targets.add(new Node(id));
        }
        return new Intervention(targets);
    }

    @Test
    public void testDagEssentialGraphConversion() throws Exception {
        // getDag() encodes an edge pattern that is not a valid DAG, so the
        // checker is expected to reject it.
        Dag dag = getDag();
        DagChecker checker = new DagChecker(dag);
        assertFalse(checker.isTrue());
    }

    @Test
    public void testDagEssentialGraphConversion_1() throws Exception {
        Dag dag = getDag2();
        DagChecker checker = new DagChecker(dag);
        assertTrue(checker.isTrue());

        // Round trip: essential graph -> member DAG -> essential graph must
        // reproduce the same essential graph (same edge set).
        EssentialGraphGenerator eg = new EssentialGraphGenerator(dag);
        eg.toEssentialGraph();
        ChainGraph ed0 = dag.deepCopy();

        DagGenerator dg = new DagGenerator(dag);
        dg.toDag();
        EssentialGraphGenerator eg1 = new EssentialGraphGenerator(dag);
        eg1.toEssentialGraph();
        assertTrue(ed0.hasSameEdges(dag));
    }

    /** A valid 7-node DAG (all edges consistently orientable). */
    private Dag getDag2() {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_MINUS), new Edge(1, 5, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_PLUS), new Edge(2, 5, EdgeType.DIRECTED_PLUS),
                new Edge(2, 6, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS), new Edge(3, 7, EdgeType.DIRECTED_PLUS),
                new Edge(3, 6, EdgeType.DIRECTED_PLUS),
                new Edge(4, 7, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS));
        return new Dag(nodesWithLevels(7), edges);
    }

    /** Same as {@link #getDag2()} except edge 2-6 is DIRECTED_MINUS, breaking DAG validity. */
    private Dag getDag() {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_MINUS), new Edge(1, 5, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_PLUS), new Edge(2, 5, EdgeType.DIRECTED_PLUS),
                new Edge(2, 6, EdgeType.DIRECTED_MINUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS), new Edge(3, 7, EdgeType.DIRECTED_PLUS),
                new Edge(3, 6, EdgeType.DIRECTED_PLUS),
                new Edge(4, 7, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS));
        return new Dag(nodesWithLevels(7), edges);
    }

    @Test
    public void testEssentialOrdering() throws Exception {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.UNDIRECTED), new Edge(1, 5, EdgeType.UNDIRECTED),
                new Edge(2, 3, EdgeType.UNDIRECTED), new Edge(2, 5, EdgeType.UNDIRECTED),
                new Edge(2, 6, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS), new Edge(3, 7, EdgeType.DIRECTED_PLUS),
                new Edge(3, 6, EdgeType.DIRECTED_PLUS),
                new Edge(4, 7, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS));
        ChainGraph ed = new ChainGraph(plainNodes(7), edges);
        // Orienting the undirected component in this node order must yield a
        // valid DAG.
        ed.orderEdge(Arrays.asList(new Node(3), new Node(2), new Node(1), new Node(5)));
        DagChecker checker = new DagChecker(ed);
        assertTrue(checker.isTrue());
    }

    // REF_HB
    @Test
    public void testEssentialGraphGenerator_fig4a() throws Exception {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_MINUS), new Edge(1, 5, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS), new Edge(2, 5, EdgeType.DIRECTED_PLUS),
                new Edge(2, 6, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS), new Edge(3, 7, EdgeType.DIRECTED_PLUS),
                new Edge(3, 6, EdgeType.DIRECTED_PLUS),
                new Edge(4, 7, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS));
        Dag dag = new Dag(plainNodes(7), edges);
        InterventionFamily invf =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(4)));
        // Smoke test: generation must complete without throwing.
        new EssentialGraphGenerator(dag).toEssentialGraph(invf);
    }

    // REF_HB
    @Test
    public void testEssentialGraphGenerator_fig4b() throws Exception {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_MINUS), new Edge(1, 5, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS), new Edge(2, 4, EdgeType.DIRECTED_MINUS),
                new Edge(2, 5, EdgeType.DIRECTED_PLUS), new Edge(2, 6, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS), new Edge(3, 7, EdgeType.DIRECTED_PLUS),
                new Edge(3, 6, EdgeType.DIRECTED_PLUS),
                new Edge(4, 7, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS));
        Dag dag = new Dag(plainNodes(7), edges);
        InterventionFamily invf =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(4)));
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph(invf);
        assertEquals(1, result.noOfUndirectEdge());
    }

    @Test
    public void testEssentialGraphGenerator_fig4b_outOfOrder() throws Exception {
        // Same CPDAG as fig4b but the edges are supplied in reversed
        // orientation / different order; the result must be identical.
        List<Edge> edges = Arrays.asList(
                new Edge(2, 1, EdgeType.DIRECTED_PLUS), new Edge(1, 5, EdgeType.DIRECTED_PLUS),
                new Edge(3, 2, EdgeType.DIRECTED_PLUS), new Edge(4, 2, EdgeType.DIRECTED_PLUS),
                new Edge(2, 5, EdgeType.DIRECTED_PLUS), new Edge(2, 6, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS), new Edge(3, 7, EdgeType.DIRECTED_PLUS),
                new Edge(3, 6, EdgeType.DIRECTED_PLUS),
                new Edge(4, 7, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS));
        Dag dag = new Dag(plainNodes(7), edges);
        InterventionFamily invf =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(4)));
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph(invf);
        assertEquals(1, result.noOfUndirectEdge());
    }

    // REF_HB
    @Test
    public void testEssentialGraphGenerator_fig5a() throws Exception {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_MINUS), new Edge(1, 5, EdgeType.DIRECTED_MINUS),
                new Edge(2, 3, EdgeType.DIRECTED_PLUS), new Edge(2, 5, EdgeType.DIRECTED_PLUS),
                new Edge(2, 6, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS), new Edge(3, 7, EdgeType.DIRECTED_PLUS),
                new Edge(3, 6, EdgeType.DIRECTED_PLUS),
                new Edge(4, 7, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS));
        Dag dag = new Dag(plainNodes(7), edges);
        InterventionFamily invf =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(4)));
        // Smoke test: generation must complete without throwing.
        new EssentialGraphGenerator(dag).toEssentialGraph(invf);
    }

    // REF_HB
    @Test
    public void testEssentialGraphGenerator_fig6a() throws Exception {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_MINUS), new Edge(1, 5, EdgeType.DIRECTED_MINUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS), new Edge(2, 5, EdgeType.DIRECTED_PLUS),
                new Edge(2, 6, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS), new Edge(3, 7, EdgeType.DIRECTED_PLUS),
                new Edge(3, 6, EdgeType.DIRECTED_PLUS),
                new Edge(4, 7, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS));
        Dag dag = new Dag(plainNodes(7), edges);
        InterventionFamily invf =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(4)));
        // Smoke test: generation must complete without throwing.
        new EssentialGraphGenerator(dag).toEssentialGraph(invf);
    }

    // REF_HB
    @Test
    public void testEssentialGraphGenerator_fig6b() throws Exception {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_MINUS), new Edge(1, 5, EdgeType.DIRECTED_MINUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS), new Edge(2, 5, EdgeType.DIRECTED_MINUS),
                new Edge(2, 6, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS), new Edge(3, 7, EdgeType.DIRECTED_PLUS),
                new Edge(3, 6, EdgeType.DIRECTED_PLUS),
                new Edge(4, 7, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS));
        Dag dag = new Dag(plainNodes(7), edges);
        InterventionFamily invf =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(4)));
        // Smoke test: generation must complete without throwing.
        new EssentialGraphGenerator(dag).toEssentialGraph(invf);
    }

    // REF_HB
    @Test
    public void testEssentialGraphGenerator_fig7b() throws Exception {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_MINUS), new Edge(1, 5, EdgeType.DIRECTED_PLUS),
                new Edge(1, 4, EdgeType.DIRECTED_MINUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS), new Edge(2, 5, EdgeType.DIRECTED_PLUS),
                new Edge(4, 5, EdgeType.DIRECTED_PLUS));
        Dag dag = new Dag(plainNodes(5), edges);
        InterventionFamily invf =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(4)));
        // Smoke test: generation must complete without throwing.
        new EssentialGraphGenerator(dag).toEssentialGraph(invf);
    }

    @Test
    public void testEssentialGraphGenerator_patternA() throws Exception {
        // A chain with no v-structure: the essential graph is fully undirected.
        Dag dag = getSimpleDag();
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph();
        assertTrue(result.isUnDirectedGraph());
    }

    @Test
    public void testEssentialGraphGenerator_patternB() throws Exception {
        // A v-structure (collider): every edge stays directed.
        Dag dag = getSimpleV();
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph();
        assertTrue(result.isDirectedGraph());
    }

    @Test
    public void testEssentialGraphGenerator_patternC() throws Exception {
        // A complete triangle: no edge is compelled, result is undirected.
        Dag dag = getPatternC();
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph();
        assertTrue(result.isUnDirectedGraph());
    }

    private Dag getPatternC() {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_PLUS), new Edge(1, 3, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS));
        return new Dag(plainNodes(3), edges);
    }

    @Test
    public void testEssentialGraphGenerator_patternD() throws Exception {
        // Mixed pattern: the essential graph keeps both directed and
        // undirected edges.
        Dag dag = getPatternD();
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph();
        assertFalse(result.isUnDirectedGraph());
        assertFalse(result.isDirectedGraph());
    }

    private Dag getPatternD() {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_PLUS), new Edge(1, 3, EdgeType.UNDIRECTED),
                new Edge(1, 4, EdgeType.UNDIRECTED),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS), new Edge(2, 4, EdgeType.DIRECTED_MINUS));
        return new Dag(plainNodes(4), edges);
    }

    @Test
    public void testEssentialGraphGenerator_allProtected() throws Exception {
        // Every edge is protected, so the essential graph is fully directed.
        Dag dag = getAllProtected();
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph();
        assertFalse(result.isUnDirectedGraph());
        assertTrue(result.isDirectedGraph());
    }

    private Dag getAllProtected() {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS), new Edge(2, 4, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS));
        return new Dag(plainNodes(4), edges);
    }

    @Test
    public void testEssentialGraphGenerator_intervention() throws Exception {
        // Intervening on node 4 of a chain orients the edges around the
        // target, leaving a mixed graph.
        Dag dag = getintervention();
        InterventionFamily invf =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(4)));
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph(invf);
        assertFalse(result.isUnDirectedGraph());
        assertFalse(result.isDirectedGraph());
    }

    @Test
    public void testEssentialGraphGenerator_intervention_2() throws Exception {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.UNDIRECTED), new Edge(1, 3, EdgeType.DIRECTED_MINUS),
                new Edge(1, 4, EdgeType.DIRECTED_MINUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS), new Edge(2, 4, EdgeType.DIRECTED_MINUS),
                new Edge(3, 4, EdgeType.DIRECTED_MINUS));
        ChainGraph ed = new ChainGraph(plainNodes(4), edges);
        InterventionFamily interventionFamily =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(4)));
        ChainGraph result = new EssentialGraphGenerator(ed).toEssentialGraph(interventionFamily);
        assertFalse(result.isUnDirectedGraph());
        assertFalse(result.isDirectedGraph());
    }

    @Test
    public void testEssentialGraphGenerator_3nodes() throws Exception {
        // Complete 3-node DAG under pure observation: fully undirected.
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_PLUS),
                new Edge(1, 3, EdgeType.DIRECTED_PLUS));
        Dag dag = new Dag(nodesWithLevels(3), edges);
        InterventionFamily invf = new InterventionFamily(Arrays.asList(interventionOn()));
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph(invf);
        assertTrue(result.isUnDirectedGraph());
        assertFalse(result.isDirectedGraph());
    }

    @Test
    public void testEssentialGraphGenerator_intervention2() throws Exception {
        // Intervening on nodes 1 and 2 of the chain yields a mixed graph.
        Dag dag = getintervention();
        InterventionFamily invf =
                new InterventionFamily(Arrays.asList(interventionOn(), interventionOn(1, 2)));
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph(invf);
        assertFalse(result.isUnDirectedGraph());
        assertFalse(result.isDirectedGraph());
    }

    @Test
    public void testEssentialGraphGenerator_observe() throws Exception {
        // Pure observation of a chain (no v-structures): fully undirected.
        Dag dag = getintervention();
        InterventionFamily invf = new InterventionFamily(Arrays.asList(interventionOn()));
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph(invf);
        assertTrue(result.isUnDirectedGraph());
    }

    /** A 7-node directed chain 1 -> 2 -> ... -> 7. */
    private Dag getintervention() {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_PLUS),
                new Edge(3, 4, EdgeType.DIRECTED_PLUS),
                new Edge(4, 5, EdgeType.DIRECTED_PLUS),
                new Edge(5, 6, EdgeType.DIRECTED_PLUS),
                new Edge(6, 7, EdgeType.DIRECTED_PLUS));
        return new Dag(plainNodes(7), edges);
    }

    /** A 3-node collider (v-structure) at node 2. */
    private Dag getSimpleV() {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS));
        return new Dag(plainNodes(3), edges);
    }

    /** A 3-node directed chain 1 -> 2 -> 3. */
    private Dag getSimpleDag() {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_PLUS),
                new Edge(2, 3, EdgeType.DIRECTED_PLUS));
        return new Dag(plainNodes(3), edges);
    }

    /** The chain 3 -> 2 -> 1 expressed with DIRECTED_MINUS edges. */
    private Dag getSimpleDag_other() {
        List<Edge> edges = Arrays.asList(
                new Edge(1, 2, EdgeType.DIRECTED_MINUS),
                new Edge(2, 3, EdgeType.DIRECTED_MINUS));
        return new Dag(plainNodes(3), edges);
    }

    @Test
    public void testEssentialGraphGenerator_patternA_other() throws Exception {
        // Reversed chain: still no v-structure, still fully undirected.
        Dag dag = getSimpleDag_other();
        ChainGraph result = new EssentialGraphGenerator(dag).toEssentialGraph();
        assertTrue(result.isUnDirectedGraph());
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.client.internal;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.logging.log4j.Logger;

import org.apache.geode.SystemFailure;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionDestroyedException;
import org.apache.geode.cache.client.internal.PoolImpl.PoolTask;
import org.apache.geode.internal.cache.ClientServerObserver;
import org.apache.geode.internal.cache.ClientServerObserverHolder;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.ha.ThreadIdentifier;
import org.apache.geode.logging.internal.log4j.api.LogService;

/**
 * Client-side subscription queue state. Tracks, per originating thread, the highest event
 * sequence id received so far so that duplicate events can be detected and dropped; sends
 * periodic acks for received events to the primary server; and expires stale tracking
 * entries via a scheduled task.
 */
public class QueueStateImpl implements QueueState {
  private static final Logger logger = LogService.getLogger();

  protected QueueManager qManager = null;

  // Whether the initial subscription "marker" message has been processed.
  // NOTE(review): read/written without synchronization or volatile — presumably only
  // touched by the single queue-dispatch thread; confirm before relying on that.
  private boolean processedMarker = false;

  // Number of invalidate operations applied from the queue (see incrementInvalidatedStats).
  private final AtomicInteger invalidateCount = new AtomicInteger();

  /**
   * This will store the ThreadId to latest received sequence Id
   *
   * Keys are instances of {@link ThreadIdentifier} Values are instances of
   * {@link SequenceIdAndExpirationObject}
   *
   * A LinkedHashMap is used deliberately: entries are refreshed via remove-then-put so
   * that iteration order tracks entry creation time, which lets the expiry task stop
   * scanning at the first entry that has not yet expired. All access must be
   * synchronized on the map itself.
   */
  protected final Map threadIdToSequenceId = new LinkedHashMap();

  public QueueStateImpl(QueueManager qm) {
    qManager = qm;
  }

  /**
   * Processes the queue marker exactly once; any further markers are ignored
   * (debug-logged only).
   */
  @Override
  public void processMarker() {
    if (!processedMarker) {
      handleMarker();
      processedMarker = true;
    } else {
      if (logger.isDebugEnabled()) {
        logger.debug("{}: extra marker received", this);
      }
    }
  }

  @Override
  public boolean getProcessedMarker() {
    return processedMarker;
  }

  /**
   * Walks every region in the cache (roots plus all subregions) and invokes
   * {@code handleMarker()} on each region whose pool matches this queue's pool.
   * Regions destroyed concurrently are silently skipped.
   */
  public void handleMarker() {
    ArrayList regions = new ArrayList();
    Cache cache = GemFireCacheImpl.getInstance();
    if (cache == null) {
      // no cache (e.g. during shutdown) — nothing to notify
      return;
    }
    Set rootRegions = cache.rootRegions();
    for (final Object value : rootRegions) {
      Region rootRegion = (Region) value;
      regions.add(rootRegion);
      try {
        Set subRegions = rootRegion.subregions(true); // throws RDE
        for (final Object subRegion : subRegions) {
          regions.add(subRegion);
        }
      } catch (RegionDestroyedException e) {
        continue; // region is gone go to the next one bug 38705
      }
    }
    for (final Object o : regions) {
      LocalRegion region = (LocalRegion) o;
      try {
        // only notify regions that are served by this queue's pool
        if (region.getAttributes().getPoolName() != null
            && region.getAttributes().getPoolName().equals(qManager.getPool().getName())) {
          region.handleMarker(); // can this throw RDE??
        }
      } catch (RegionDestroyedException e) {
        continue; // region is gone go to the next one bug 38705
      }
    }
  }

  @Override
  public void incrementInvalidatedStats() {
    invalidateCount.incrementAndGet();
  }

  public int getInvalidateCount() {
    return invalidateCount.get();
  }

  /**
   * test hook - access to this map should be synchronized on the map to avoid concurrent
   * modification exceptions
   */
  @Override
  public Map getThreadIdToSequenceIdMap() {
    return threadIdToSequenceId;
  }

  @Override
  public boolean verifyIfDuplicate(EventID eid) {
    return verifyIfDuplicate(eid, true);
  }

  /**
   * Returns {@code true} when the event has already been seen (i.e. a sequence id at
   * least as large has been recorded for its thread), otherwise records the event's
   * sequence id (when {@code addToMap} is set) and returns {@code false}.
   *
   * @param eid the event's identity (membership id, thread id, sequence id)
   * @param addToMap whether a non-duplicate event should update the tracking map
   */
  @Override
  public boolean verifyIfDuplicate(EventID eid, boolean addToMap) {
    ThreadIdentifier tid = new ThreadIdentifier(eid.getMembershipID(), eid.getThreadID());
    long seqId = eid.getSequenceID();
    SequenceIdAndExpirationObject seo = null;

    // Fix 36930: save the max sequence id for each non-putAll operation's thread
    // There're totally 3 cases to consider:
    // check the tid:
    // 1) if duplicated, (both putall or non-putall): reject
    // 2) if not duplicate
    // 2.1)if putAll, check via real thread id again,
    // if duplicate, reject (because one non-putall operation with bigger
    // seqno has happened)
    // otherwise save the putAllSeqno for real thread id
    // and save seqno for tid
    // 2.2) if not putAll,
    // check putAllSequenceId with real thread id
    // if request's seqno is smaller, reject (because one putAll operation
    // with bigger seqno has happened)
    // otherwise, update the seqno for tid

    // lock taken to avoid concurrentModification
    // while the objects are being expired
    synchronized (threadIdToSequenceId) {
      seo = (SequenceIdAndExpirationObject) threadIdToSequenceId.get(tid);
      if (seo != null && seo.getSequenceId() >= seqId) {
        if (logger.isDebugEnabled()) {
          logger.debug(" got a duplicate entry with EventId {}. Ignoring the entry", eid);
        }
        seo.setAckSend(false);
        return true;
      } else if (addToMap) {
        ThreadIdentifier real_tid = new ThreadIdentifier(eid.getMembershipID(),
            ThreadIdentifier.getRealThreadIDIncludingWan(eid.getThreadID()));
        if (ThreadIdentifier.isPutAllFakeThreadID(eid.getThreadID())) {
          // it's a putAll
          seo = (SequenceIdAndExpirationObject) threadIdToSequenceId.get(real_tid);
          if (seo != null && seo.getSequenceId() >= seqId) {
            if (logger.isDebugEnabled()) {
              logger.debug(
                  "got a duplicate putAll entry with eventId {}. Other operation with same thread id and bigger seqno {} has happened. Ignoring the entry",
                  eid, seo.getSequenceId());
            }
            seo.setAckSend(false); // bug #41289: send ack to servers that send old events
            return true;
          } else {
            // save the seqno for real thread id into a putAllSequenceId
            // remove-then-put refreshes the entry's position in the LinkedHashMap so the
            // expiry task sees entries in creation-time order
            threadIdToSequenceId.remove(real_tid);
            threadIdToSequenceId.put(real_tid,
                seo == null ? new SequenceIdAndExpirationObject(-1, seqId)
                    : new SequenceIdAndExpirationObject(seo.getSequenceId(), seqId));
            // save seqno for tid
            // here tid!=real_tid, for fake tid, putAllSeqno should be 0
            threadIdToSequenceId.remove(tid);
            threadIdToSequenceId.put(tid, new SequenceIdAndExpirationObject(seqId, -1));
          }
        } else {
          // non-putAll operation:
          // check putAllSeqno for real thread id
          // if request's seqno is smaller, reject
          // otherwise, update the seqno for tid
          seo = (SequenceIdAndExpirationObject) threadIdToSequenceId.get(real_tid);
          if (seo != null && seo.getPutAllSequenceId() >= seqId) {
            if (logger.isDebugEnabled()) {
              logger.debug(
                  "got a duplicate non-putAll entry with eventId {}. One putAll operation with same real thread id and bigger seqno {} has happened. Ignoring the entry",
                  eid, seo.getPutAllSequenceId());
            }
            seo.setAckSend(false); // bug #41289: send ack to servers that send old events
            return true;
          } else {
            // here tid==real_tid
            // remove-then-put refreshes insertion order (see field comment)
            threadIdToSequenceId.remove(tid);
            threadIdToSequenceId.put(tid,
                seo == null ? new SequenceIdAndExpirationObject(seqId, -1)
                    : new SequenceIdAndExpirationObject(seqId, seo.getPutAllSequenceId()));
          }
        }
      }
    }
    return false;
  }

  /**
   * Schedules the periodic ack/expiry task on the given timer with a fixed delay of
   * {@code interval} milliseconds.
   */
  @Override
  public void start(ScheduledExecutorService timer, int interval) {
    timer.scheduleWithFixedDelay(new ThreadIdToSequenceIdExpiryTask(), interval, interval,
        TimeUnit.MILLISECONDS);
  }

  /**
   *
   * Thread which will iterate over threadIdToSequenceId map
   *
   * 1)It will send an ack primary server for all threadIds for which it has not send an ack. 2)It
   * will expire the entries which have exceeded the specified expiry time and for which ack has
   * been alerady sent.
   *
   * (Deliberately a non-static inner class: it reads qManager and threadIdToSequenceId
   * from the enclosing instance.)
   *
   * @since GemFire 5.1
   *
   */
  private class ThreadIdToSequenceIdExpiryTask extends PoolTask {
    /**
     * The expiry time of the entries in the map
     */
    private final long expiryTime;

    /**
     * constructs the Thread and initializes the expiry time
     *
     */
    public ThreadIdToSequenceIdExpiryTask() {
      expiryTime = qManager.getPool().getSubscriptionMessageTrackingTimeout();
    }

    @Override
    public void run2() {
      SystemFailure.checkFailure();
      if (qManager.getPool().getCancelCriterion().isCancelInProgress()) {
        // pool is shutting down — skip this cycle
        return;
      }
      if (PoolImpl.BEFORE_SENDING_CLIENT_ACK_CALLBACK_FLAG) {
        // test hook: observer callback before the ack goes out
        ClientServerObserver bo = ClientServerObserverHolder.getInstance();
        bo.beforeSendingClientAck();
      }
      sendPeriodicAck();
      checkForExpiry();
    }

    /**
     * Removes entries older than expiryTime that have been acked (or need no ack because
     * the client is non-redundant and non-durable). Relies on the map's insertion order
     * tracking creation time: iteration stops at the first entry that is not yet expired.
     */
    void checkForExpiry() {
      synchronized (threadIdToSequenceId) {
        Iterator iterator = threadIdToSequenceId.entrySet().iterator();
        long currentTime = System.currentTimeMillis();
        Map.Entry entry;
        SequenceIdAndExpirationObject seo;
        while (iterator.hasNext()) {
          entry = (Map.Entry) iterator.next();
          seo = (SequenceIdAndExpirationObject) entry.getValue();
          if ((currentTime - seo.getCreationTime() > expiryTime)) {
            if (seo.getAckSend() || (qManager.getPool().getSubscriptionRedundancy() == 0
                && !qManager.getPool().isDurableClient())) {
              iterator.remove();
            }
          } else {
            // entries are in creation-time order, so everything after this one
            // is newer and cannot have expired either
            break;
          }
        }
      }
    }

    /**
     * Sends Periodic ack to the primary server for all threadIds for which it has not send an ack.
     * If the send fails, the collected entries are re-inserted with a fresh timestamp so
     * they will be retried rather than expired.
     */
    void sendPeriodicAck() {
      List events = new ArrayList();
      boolean success = false;
      synchronized (threadIdToSequenceId) {
        // collect every entry not yet acked and mark it acked optimistically;
        // the finally block below rolls this back on failure
        for (final Object o : threadIdToSequenceId.entrySet()) {
          Map.Entry entry = (Map.Entry) o;
          SequenceIdAndExpirationObject seo = (SequenceIdAndExpirationObject) entry.getValue();
          if (!seo.getAckSend()) {
            ThreadIdentifier tid = (ThreadIdentifier) entry.getKey();
            events.add(new EventID(tid.getMembershipID(), tid.getThreadID(), seo.getSequenceId()));
            seo.setAckSend(true);
          } // if ends
        } // while ends
      } // synchronized ends

      if (events.size() > 0) {
        try {
          PrimaryAckOp.execute(qManager.getAllConnections().getPrimary(), qManager.getPool(),
              events);
          success = true;
        } catch (Exception ex) {
          if (logger.isDebugEnabled()) {
            logger.debug("Exception while sending an ack to the primary server: {}", ex);
          }
        } finally {
          if (!success) {
            // roll back: re-insert each entry (remove+put refreshes the creation
            // timestamp and insertion order) so the ack is retried next cycle
            for (final Object event : events) {
              EventID eid = (EventID) event;
              ThreadIdentifier tid = new ThreadIdentifier(eid.getMembershipID(), eid.getThreadID());
              synchronized (threadIdToSequenceId) {
                SequenceIdAndExpirationObject seo =
                    (SequenceIdAndExpirationObject) threadIdToSequenceId.get(tid);
                if (seo != null && seo.getAckSend()) {
                  seo = (SequenceIdAndExpirationObject) threadIdToSequenceId.remove(tid);
                  if (seo != null) {
                    // put back the old seqId with a new time stamp
                    SequenceIdAndExpirationObject siaeo = new SequenceIdAndExpirationObject(
                        seo.getSequenceId(), seo.getPutAllSequenceId());
                    threadIdToSequenceId.put(tid, siaeo);
                  }
                } // if ends
              } // synchronized ends
            } // while ends
          } // if(!success) ends
        } // finally ends
      } // if(events.size() > 0)ends
    }// method ends
  }

  /**
   * A class to store sequenceId and the creation time of the object to be used for expiring the
   * entry
   *
   * @since GemFire 5.1
   *
   */
  public static class SequenceIdAndExpirationObject {
    /** The sequence Id of the entry * */
    private final long sequenceId;
    /** The sequence Id of the putAll operations * */
    private final long putAllSequenceId;
    /** The time of creation of the object* */
    private final long creationTime;
    /** Client ack is send to server or not* */
    private boolean ackSend;

    SequenceIdAndExpirationObject(long sequenceId, long putAllSequenceId) {
      this.sequenceId = sequenceId;
      this.putAllSequenceId = putAllSequenceId;
      creationTime = System.currentTimeMillis();
      ackSend = false;
    }

    /**
     * @return Returns the creationTime.
     */
    public long getCreationTime() {
      return creationTime;
    }

    /**
     * @return Returns the sequenceId.
     */
    public long getSequenceId() {
      return sequenceId;
    }

    /**
     * @return Returns the putAllSequenceId.
     */
    public long getPutAllSequenceId() {
      return putAllSequenceId;
    }

    /**
     *
     * @return Returns the ackSend
     */
    public boolean getAckSend() {
      return ackSend;
    }

    /**
     * Sets the ackSend
     *
     */
    public void setAckSend(boolean ackSend) {
      this.ackSend = ackSend;
    }

    @Override
    public String toString() {
      return "SequenceIdAndExpirationObject[" + "ackSend = " + ackSend + "; creation = "
          + creationTime + "; seq = " + sequenceId + "; putAll seq = " + putAllSequenceId + "]";
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.adapter.clone; import org.apache.calcite.jdbc.JavaTypeFactoryImpl; import org.apache.calcite.linq4j.Enumerable; import org.apache.calcite.linq4j.Linq4j; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeImpl; import org.apache.calcite.rel.type.RelDataTypeSystem; import org.junit.jupiter.api.Test; import java.util.Arrays; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; /** * Unit test for {@link ArrayTable} and {@link ColumnLoader}. 
 */
class ArrayTableTest {

  /** Tests low-level bit-sliced packing: {@code orLong} ORs an n-bit value
   * into a given ordinal slot of a long array, and {@code getLong} reads it
   * back, including slots that are not aligned to a 64-bit word. */
  @Test void testPrimitiveArray() {
    long[] values = {0, 0};
    ArrayTable.BitSlicedPrimitiveArray.orLong(4, values, 0, 0x0F);
    assertEquals(0x0F, values[0]);
    ArrayTable.BitSlicedPrimitiveArray.orLong(4, values, 2, 0x0F);
    assertEquals(0xF0F, values[0]);

    values = new long[]{
        0x1213141516171819L, 0x232425262728292AL, 0x3435363738393A3BL};
    // 12-bit reads from a pre-populated array; expected values are nibbles
    // extracted from the packed words above.
    assertEquals(
        0x324, ArrayTable.BitSlicedPrimitiveArray.getLong(12, values, 9));
    assertEquals(
        0xa3b, ArrayTable.BitSlicedPrimitiveArray.getLong(12, values, 10));

    // Round-trip: write 10-bit values 0..9, then read them all back.
    Arrays.fill(values, 0);
    for (int i = 0; i < 10; i++) {
      ArrayTable.BitSlicedPrimitiveArray.orLong(10, values, i, i);
    }
    for (int i = 0; i < 10; i++) {
      assertEquals(
          i, ArrayTable.BitSlicedPrimitiveArray.getLong(10, values, i));
    }
  }

  /** Tests {@link ColumnLoader#nextPowerOf2}, including the int-overflow
   * case where the result wraps to {@code 0x80000000}. */
  @Test void testNextPowerOf2() {
    assertEquals(1, ColumnLoader.nextPowerOf2(1));
    assertEquals(2, ColumnLoader.nextPowerOf2(2));
    assertEquals(4, ColumnLoader.nextPowerOf2(3));
    assertEquals(4, ColumnLoader.nextPowerOf2(4));
    assertEquals(0x40000000, ColumnLoader.nextPowerOf2(0x3456789a)); // rounds up
    assertEquals(0x40000000, ColumnLoader.nextPowerOf2(0x40000000));
    // overflow: no positive power of 2 >= 0x7fffffff exists in int range
    assertEquals(0x80000000, ColumnLoader.nextPowerOf2(0x7fffffff));
    assertEquals(0x80000000, ColumnLoader.nextPowerOf2(0x7ffffffe));
  }

  /** Tests {@link ColumnLoader#log2} (floor of base-2 logarithm;
   * by convention log2(0) == 0 here). */
  @Test void testLog2() {
    assertEquals(0, ColumnLoader.log2(0));
    assertEquals(0, ColumnLoader.log2(1));
    assertEquals(1, ColumnLoader.log2(2));
    assertEquals(2, ColumnLoader.log2(4));
    assertEquals(16, ColumnLoader.log2(65536));
    assertEquals(15, ColumnLoader.log2(65535));
    assertEquals(16, ColumnLoader.log2(65537));
    assertEquals(30, ColumnLoader.log2(Integer.MAX_VALUE));
    assertEquals(30, ColumnLoader.log2(Integer.MAX_VALUE - 1));
    assertEquals(29, ColumnLoader.log2(0x3fffffff));
    assertEquals(30, ColumnLoader.log2(0x40000000));
  }

  /** Verifies how an int-valued column widens its representation as values
   * are added: bit-sliced unsigned, then bit-sliced signed at increasing
   * widths, and finally a plain primitive (byte) array at 8 bits. */
  @Test void testValueSetInt() {
    ArrayTable.BitSlicedPrimitiveArray representation;
    ArrayTable.Column pair;

    final ColumnLoader.ValueSet valueSet =
        new ColumnLoader.ValueSet(int.class);
    valueSet.add(0);
    valueSet.add(1);
    valueSet.add(10);
    pair = valueSet.freeze(0, null);
    assertTrue(
        pair.representation instanceof ArrayTable.BitSlicedPrimitiveArray);
    representation =
        (ArrayTable.BitSlicedPrimitiveArray) pair.representation;
    // unsigned 4 bit integer (values 0..15)
    assertEquals(4, representation.bitCount);
    assertFalse(representation.signed);
    assertEquals(0, representation.getInt(pair.dataSet, 0));
    assertEquals(1, representation.getInt(pair.dataSet, 1));
    assertEquals(10, representation.getInt(pair.dataSet, 2));
    assertEquals(10, representation.getObject(pair.dataSet, 2));

    // -32 takes us to 6 bit signed
    valueSet.add(-32);
    pair = valueSet.freeze(0, null);
    assertTrue(
        pair.representation instanceof ArrayTable.BitSlicedPrimitiveArray);
    representation =
        (ArrayTable.BitSlicedPrimitiveArray) pair.representation;
    assertEquals(6, representation.bitCount);
    assertTrue(representation.signed);
    assertEquals(10, representation.getInt(pair.dataSet, 2));
    assertEquals(10, representation.getObject(pair.dataSet, 2));
    assertEquals(-32, representation.getInt(pair.dataSet, 3));
    assertEquals(-32, representation.getObject(pair.dataSet, 3));

    // 63 takes us to 7 bit signed
    valueSet.add(63);
    pair = valueSet.freeze(0, null);
    assertTrue(
        pair.representation instanceof ArrayTable.BitSlicedPrimitiveArray);
    representation =
        (ArrayTable.BitSlicedPrimitiveArray) pair.representation;
    assertEquals(7, representation.bitCount);
    assertTrue(representation.signed);

    // 64 pushes us to 8 bit signed, i.e. byte (7-bit signed tops out at 63)
    valueSet.add(64);
    pair = valueSet.freeze(0, null);
    assertTrue(pair.representation instanceof ArrayTable.PrimitiveArray);
    ArrayTable.PrimitiveArray representation2 =
        (ArrayTable.PrimitiveArray) pair.representation;
    assertEquals(0, representation2.getInt(pair.dataSet, 0));
    assertEquals(-32, representation2.getInt(pair.dataSet, 3));
    assertEquals(-32, representation2.getObject(pair.dataSet, 3));
    assertEquals(64, representation2.getInt(pair.dataSet, 5));
    assertEquals(64, representation2.getObject(pair.dataSet, 5));
  }

  /** A boolean column with both values present freezes to a 1-bit
   * bit-sliced representation. */
  @Test void testValueSetBoolean() {
    final ColumnLoader.ValueSet valueSet =
        new ColumnLoader.ValueSet(boolean.class);
    valueSet.add(0);
    valueSet.add(1);
    valueSet.add(1);
    valueSet.add(0);
    final ArrayTable.Column pair = valueSet.freeze(0, null);
    assertTrue(
        pair.representation instanceof ArrayTable.BitSlicedPrimitiveArray);
    final ArrayTable.BitSlicedPrimitiveArray representation =
        (ArrayTable.BitSlicedPrimitiveArray) pair.representation;
    assertEquals(1, representation.bitCount);
    assertEquals(0, representation.getInt(pair.dataSet, 0));
    assertEquals(1, representation.getInt(pair.dataSet, 1));
    assertEquals(1, representation.getInt(pair.dataSet, 2));
    assertEquals(0, representation.getInt(pair.dataSet, 3));
  }

  /** A column whose only value is 0 collapses to a Constant representation
   * with cardinality 1. */
  @Test void testValueSetZero() {
    final ColumnLoader.ValueSet valueSet =
        new ColumnLoader.ValueSet(boolean.class);
    valueSet.add(0);
    final ArrayTable.Column pair = valueSet.freeze(0, null);
    assertTrue(pair.representation instanceof ArrayTable.Constant);
    final ArrayTable.Constant representation =
        (ArrayTable.Constant) pair.representation;
    assertEquals(0, representation.getInt(pair.dataSet, 0));
    assertEquals(1, pair.cardinality);
  }

  /** Verifies how a String column's representation evolves: ObjectArray for
   * a few rows, then ObjectDictionary whose code column is a Constant (one
   * distinct value) or a 1-bit bit-sliced array (two distinct values). */
  @Test void testStrings() {
    ArrayTable.Column pair;
    final ColumnLoader.ValueSet valueSet =
        new ColumnLoader.ValueSet(String.class);
    valueSet.add("foo");
    valueSet.add("foo");
    pair = valueSet.freeze(0, null);
    assertTrue(pair.representation instanceof ArrayTable.ObjectArray);
    final ArrayTable.ObjectArray representation =
        (ArrayTable.ObjectArray) pair.representation;
    assertEquals("foo", representation.getObject(pair.dataSet, 0));
    assertEquals("foo", representation.getObject(pair.dataSet, 1));
    assertEquals(1, pair.cardinality);

    // Large number of the same string. ObjectDictionary backed by Constant.
    for (int i = 0; i < 2000; i++) {
      valueSet.add("foo");
    }
    pair = valueSet.freeze(0, null);
    final ArrayTable.ObjectDictionary representation2 =
        (ArrayTable.ObjectDictionary) pair.representation;
    assertTrue(
        representation2.representation instanceof ArrayTable.Constant);
    assertEquals("foo", representation2.getObject(pair.dataSet, 0));
    assertEquals("foo", representation2.getObject(pair.dataSet, 1000));
    assertEquals(1, pair.cardinality);

    // One different string. ObjectDictionary backed by 1-bit
    // BitSlicedPrimitiveArray
    valueSet.add("bar");
    pair = valueSet.freeze(0, null);
    final ArrayTable.ObjectDictionary representation3 =
        (ArrayTable.ObjectDictionary) pair.representation;
    assertTrue(
        representation3.representation
            instanceof ArrayTable.BitSlicedPrimitiveArray);
    final ArrayTable.BitSlicedPrimitiveArray representation4 =
        (ArrayTable.BitSlicedPrimitiveArray) representation3.representation;
    assertEquals(1, representation4.bitCount);
    assertFalse(representation4.signed);
    assertEquals("foo", representation3.getObject(pair.dataSet, 0));
    assertEquals("foo", representation3.getObject(pair.dataSet, 1000));
    assertEquals("bar", representation3.getObject(pair.dataSet, 2003));
    assertEquals(2, pair.cardinality);
  }

  /** A column of all nulls: ObjectArray for one row, ObjectDictionary backed
   * by Constant once there are many rows; null counts as one distinct value
   * (cardinality 1). */
  @Test void testAllNull() {
    ArrayTable.Column pair;
    final ColumnLoader.ValueSet valueSet =
        new ColumnLoader.ValueSet(String.class);

    valueSet.add(null);
    pair = valueSet.freeze(0, null);
    assertTrue(pair.representation instanceof ArrayTable.ObjectArray);
    final ArrayTable.ObjectArray representation =
        (ArrayTable.ObjectArray) pair.representation;
    assertNull(representation.getObject(pair.dataSet, 0));
    assertEquals(1, pair.cardinality);

    for (int i = 0; i < 3000; i++) {
      valueSet.add(null);
    }
    pair = valueSet.freeze(0, null);
    final ArrayTable.ObjectDictionary representation2 =
        (ArrayTable.ObjectDictionary) pair.representation;
    assertTrue(
        representation2.representation instanceof ArrayTable.Constant);
    assertEquals(1, pair.cardinality);
  }

  /** A column with one non-null value and many nulls: the dictionary needs
   * 1 bit to distinguish the two distinct values (null and "foo"). */
  @Test void testOneValueOneNull() {
    ArrayTable.Column pair;
    final ColumnLoader.ValueSet valueSet =
        new ColumnLoader.ValueSet(String.class);

    valueSet.add(null);
    valueSet.add("foo");
    pair = valueSet.freeze(0, null);
    assertTrue(pair.representation instanceof ArrayTable.ObjectArray);
    final ArrayTable.ObjectArray representation =
        (ArrayTable.ObjectArray) pair.representation;
    assertNull(representation.getObject(pair.dataSet, 0));
    assertEquals(2, pair.cardinality);

    for (int i = 0; i < 3000; i++) {
      valueSet.add(null);
    }
    pair = valueSet.freeze(0, null);
    final ArrayTable.ObjectDictionary representation2 =
        (ArrayTable.ObjectDictionary) pair.representation;
    assertEquals(
        1,
        ((ArrayTable.BitSlicedPrimitiveArray)
            representation2.representation).bitCount);
    assertEquals("foo", representation2.getObject(pair.dataSet, 1));
    assertNull(representation2.getObject(pair.dataSet, 10));
    assertEquals(2, pair.cardinality);
  }

  /** Loads rows where column #0 (empid) is unique; the loader sorts rows by
   * that column, as reflected in the expected column dumps. */
  @Test void testLoadSorted() {
    final JavaTypeFactoryImpl typeFactory =
        new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    final RelDataType rowType =
        typeFactory.builder()
            .add("empid", typeFactory.createType(int.class))
            .add("deptno", typeFactory.createType(int.class))
            .add("name", typeFactory.createType(String.class))
            .build();
    final Enumerable<Object[]> enumerable =
        Linq4j.asEnumerable(
            Arrays.asList(
                new Object[]{100, 10, "Bill"},
                new Object[]{200, 20, "Eric"},
                new Object[]{150, 10, "Sebastian"},
                new Object[]{160, 10, "Theodore"}));
    final ColumnLoader<Object[]> loader =
        new ColumnLoader<Object[]>(typeFactory, enumerable,
            RelDataTypeImpl.proto(rowType), null);
    checkColumn(
        loader.representationValues.get(0),
        ArrayTable.RepresentationType.BIT_SLICED_PRIMITIVE_ARRAY,
        "Column(representation=BitSlicedPrimitiveArray(ordinal=0, bitCount=8, primitive=INT, signed=false), value=[100, 150, 160, 200, 0, 0, 0, 0])");
    checkColumn(
        loader.representationValues.get(1),
        ArrayTable.RepresentationType.BIT_SLICED_PRIMITIVE_ARRAY,
        "Column(representation=BitSlicedPrimitiveArray(ordinal=1, bitCount=5, primitive=INT, signed=false), value=[10, 10, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0])");
    checkColumn(
        loader.representationValues.get(2),
        ArrayTable.RepresentationType.OBJECT_ARRAY,
        "Column(representation=ObjectArray(ordinal=2), value=[Bill, Sebastian, Theodore, Eric])");
  }

  /** As {@link #testLoadSorted()} but column #1 is the unique column, not
   * column #0. The algorithm needs to go back and permute the values of
   * column #0 after it discovers that column #1 is unique and sorts by it. */
  @Test void testLoadSorted2() {
    final JavaTypeFactoryImpl typeFactory =
        new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
    final RelDataType rowType =
        typeFactory.builder()
            .add("deptno", typeFactory.createType(int.class))
            .add("empid", typeFactory.createType(int.class))
            .add("name", typeFactory.createType(String.class))
            .build();
    final Enumerable<Object[]> enumerable =
        Linq4j.asEnumerable(
            Arrays.asList(
                new Object[]{10, 100, "Bill"},
                new Object[]{20, 200, "Eric"},
                new Object[]{30, 150, "Sebastian"},
                new Object[]{10, 160, "Theodore"}));
    final ColumnLoader<Object[]> loader =
        new ColumnLoader<Object[]>(typeFactory, enumerable,
            RelDataTypeImpl.proto(rowType), null);
    // Note that values have been sorted with {20, 200, Eric} last because the
    // value 200 is the highest value of empid, the unique column.
    checkColumn(
        loader.representationValues.get(0),
        ArrayTable.RepresentationType.BIT_SLICED_PRIMITIVE_ARRAY,
        "Column(representation=BitSlicedPrimitiveArray(ordinal=0, bitCount=5, primitive=INT, signed=false), value=[10, 30, 10, 20, 0, 0, 0, 0, 0, 0, 0, 0])");
    checkColumn(
        loader.representationValues.get(1),
        ArrayTable.RepresentationType.BIT_SLICED_PRIMITIVE_ARRAY,
        "Column(representation=BitSlicedPrimitiveArray(ordinal=1, bitCount=8, primitive=INT, signed=false), value=[100, 150, 160, 200, 0, 0, 0, 0])");
    checkColumn(
        loader.representationValues.get(2),
        ArrayTable.RepresentationType.OBJECT_ARRAY,
        "Column(representation=ObjectArray(ordinal=2), value=[Bill, Sebastian, Theodore, Eric])");
  }

  /** Asserts a column's representation type and its full string dump. */
  private void checkColumn(ArrayTable.Column x,
      ArrayTable.RepresentationType expectedRepresentationType,
      String expectedString) {
    assertEquals(
        expectedRepresentationType,
        x.representation.getType());
    assertEquals(expectedString, x.toString());
  }
}
package psidev.psi.mi.jami.xml.io.writer.elements.impl.abstracts.xml25;

import psidev.psi.mi.jami.model.*;
import psidev.psi.mi.jami.xml.cache.PsiXmlObjectCache;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.XmlConfidenceWriter;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.XmlCvTermWriter;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.XmlDbXrefWriter;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.XmlInferredInteractionWriter;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.xml25.XmlExperimentWriter;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.xml25.XmlParameterWriter;
import psidev.psi.mi.jami.xml.model.extension.XmlExperiment;

import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.util.Collection;
import java.util.Set;

/**
 * Abstract class for XML 2.5 writers of modelled interaction.
 *
 * Specialises the generic abstract writer by plugging in the XML 2.5
 * variants of the sub-element writers (xref, experiment, confidence,
 * parameter, inferred interaction, interaction type) and by serialising
 * cooperative effects as PSI-MI attribute elements.
 *
 * @author Marine Dumousseau (marine@ebi.ac.uk)
 * @version $Id$
 * @since <pre>18/11/13</pre>
 */
public abstract class AbstractXmlModelledInteractionWriter<I extends ModelledInteraction>
        extends psidev.psi.mi.jami.xml.io.writer.elements.impl.abstracts.AbstractXmlModelledInteractionWriter<I> {

    /**
     * <p>Constructor for AbstractXmlModelledInteractionWriter.</p>
     *
     * @param writer a {@link javax.xml.stream.XMLStreamWriter} object.
     * @param objectIndex a {@link psidev.psi.mi.jami.xml.cache.PsiXmlObjectCache} object.
     */
    public AbstractXmlModelledInteractionWriter(XMLStreamWriter writer, PsiXmlObjectCache objectIndex) {
        super(writer, objectIndex);
    }

    /** {@inheritDoc} */
    @Override
    protected void initialiseXrefWriter(){
        // XML 2.5 uses the generic db-xref writer
        super.setXrefWriter(new XmlDbXrefWriter(getStreamWriter()));
    }

    /** {@inheritDoc} */
    @Override
    protected void initialiseExperimentWriter(){
        super.setExperimentWriter(new XmlExperimentWriter(getStreamWriter(), getObjectIndex()));
    }

    /** {@inheritDoc} */
    @Override
    protected void initialiseConfidenceWriter(){
        super.setConfidenceWriter(new XmlConfidenceWriter(getStreamWriter()));
    }

    /** {@inheritDoc} */
    @Override
    protected void initialiseParameterWriter(){
        super.setParameterWriter(new XmlParameterWriter(getStreamWriter(), getObjectIndex()));
    }

    /** {@inheritDoc} */
    @Override
    protected void initialiseInferredInteractionWriter() {
        super.setInferredInteractionWriter(new XmlInferredInteractionWriter(getStreamWriter(), getObjectIndex()));
    }

    /** {@inheritDoc} */
    @Override
    protected void initialiseInteractionTypeWriter() {
        super.setInteractionTypeWriter(new XmlCvTermWriter(getStreamWriter()));
    }

    /** {@inheritDoc} */
    @Override
    protected void initialiseDefaultExperiment() {
        super.initialiseDefaultExperiment();
        // keep the parameter writer's default experiment in sync with ours
        getParameterWriter().setDefaultExperiment(getDefaultExperiment());
    }

    /** {@inheritDoc} */
    @Override
    public void setDefaultExperiment(Experiment defaultExperiment) {
        super.setDefaultExperiment(defaultExperiment);
        // propagate so parameters are attached to the same default experiment
        getParameterWriter().setDefaultExperiment(defaultExperiment);
    }

    /** {@inheritDoc} */
    @Override
    public Experiment extractDefaultExperimentFrom(I interaction) {
        Experiment exp = null;
        // Try to derive an experiment from the first cooperativity evidence
        // publication of the first cooperative effect, if any.
        if (!interaction.getCooperativeEffects().isEmpty()){
            CooperativeEffect effect = interaction.getCooperativeEffects().iterator().next();
            if (!effect.getCooperativityEvidences().isEmpty()){
                CooperativityEvidence evidence = effect.getCooperativityEvidences().iterator().next();
                // set first experiment as default experiment
                if (evidence.getPublication() != null){
                    exp = new XmlExperiment(evidence.getPublication());
                    ((XmlExperiment)exp).setFullName(evidence.getPublication().getTitle());
                }
            }
        }
        // fall back to the writer's configured default experiment
        return exp != null ? exp : getDefaultExperiment() ;
    }

    /**
     * <p>writeInferredInteractions.</p>
     *
     * Writes an {@code inferredInteractionList} element containing one entry
     * per inferred feature set; writes nothing when there are none.
     *
     * @param object a I object.
     * @throws javax.xml.stream.XMLStreamException if any.
     */
    protected void writeInferredInteractions(I object) throws XMLStreamException {
        Collection<Set<Feature>> inferredInteractions = collectInferredInteractionsFrom(object);
        if (inferredInteractions != null && !inferredInteractions.isEmpty()){
            getStreamWriter().writeStartElement("inferredInteractionList");
            for (Set<Feature> inferred : inferredInteractions){
                getInferredInteractionWriter().write(inferred);
            }
            getStreamWriter().writeEndElement();
        }
    }

    /**
     * {@inheritDoc}
     *
     * Serialises the FIRST cooperative effect of the interaction as a series
     * of {@code attribute} elements (mechanism, allosteric molecule/effector,
     * allostery type/mechanism, outcome, response, affected interactions).
     * NOTE(review): assumes the caller only invokes this when
     * {@code object.getCooperativeEffects()} is non-empty — the unguarded
     * {@code iterator().next()} would otherwise throw; confirm at call sites.
     */
    protected void writeCooperativeEffect(I object, boolean startAttributeList) throws XMLStreamException {
        if (startAttributeList){
            // write start attribute list
            getStreamWriter().writeStartElement("attributeList");
        }
        CooperativeEffect effect = object.getCooperativeEffects().iterator().next();
        // write mechanism first
        if (effect instanceof Preassembly){
            writeCooperativeEffectAttribute(CooperativeEffect.PREASSEMBLY, CooperativeEffect.PREASSEMBLY_ID, null);
        }
        else if (effect instanceof Allostery){
            writeCooperativeEffectAttribute(CooperativeEffect.ALLOSTERY, CooperativeEffect.ALLOSTERY_ID, null);
            Allostery allostery = (Allostery)effect;

            // write allosteric molecule (referenced by its participant id)
            writeCooperativeEffectAttribute(CooperativeEffect.ALLOSTERIC_MOLECULE, CooperativeEffect.ALLOSTERIC_MOLECULE_ID,
                    Integer.toString(getObjectIndex().extractIdForParticipant(allostery.getAllostericMolecule())));
            // write allosteric effector
            AllostericEffector effector = allostery.getAllostericEffector();
            switch (effector.getEffectorType()){
                case molecule:
                    MoleculeEffector moleculeEffector = (MoleculeEffector)effector;
                    writeCooperativeEffectAttribute(CooperativeEffect.ALLOSTERIC_EFFECTOR, CooperativeEffect.ALLOSTERIC_EFFECTOR_ID,
                            Integer.toString(getObjectIndex().extractIdForParticipant(moleculeEffector.getMolecule())));
                    break;
                case feature_modification:
                    FeatureModificationEffector featureEffector = (FeatureModificationEffector)effector;
                    writeCooperativeEffectAttribute(CooperativeEffect.ALLOSTERIC_PTM, CooperativeEffect.ALLOSTERIC_PTM_ID,
                            Integer.toString(getObjectIndex().extractIdForFeature(featureEffector.getFeatureModification())));
                    break;
                default:
                    // other effector types are not serialised
                    break;
            }
            // write allostery type
            if (allostery.getAllosteryType() != null){
                writeCooperativeEffectAttribute(allostery.getAllosteryType().getShortName(),
                        allostery.getAllosteryType().getMIIdentifier(), null);
            }
            // write allostery mechanism
            if (allostery.getAllostericMechanism() != null){
                writeCooperativeEffectAttribute(allostery.getAllostericMechanism().getShortName(),
                        allostery.getAllostericMechanism().getMIIdentifier(), null);
            }
        }
        // write outcome
        writeCooperativeEffectAttribute(effect.getOutCome().getShortName(), effect.getOutCome().getMIIdentifier(), null);
        // write response
        if (effect.getResponse() != null){
            writeCooperativeEffectAttribute(effect.getResponse().getShortName(), effect.getResponse().getMIIdentifier(), null);
        }
        // write affected interactions, registering each as a sub-complex so
        // it is assigned an id that can be referenced here
        if (!effect.getAffectedInteractions().isEmpty()){
            for (ModelledInteraction affected : effect.getAffectedInteractions()){
                getObjectIndex().registerSubComplex(affected);
                writeCooperativeEffectAttribute(CooperativeEffect.AFFECTED_INTERACTION, CooperativeEffect.AFFECTED_INTERACTION_ID,
                        Integer.toString(getObjectIndex().extractIdForInteraction(affected)));
            }
        }
        if (startAttributeList){
            // write end attributeList
            getStreamWriter().writeEndElement();
        }
    }

    /**
     * <p>writeCooperativeEffectAttribute.</p>
     *
     * Writes a single {@code attribute} element with a mandatory {@code name},
     * an optional {@code nameAc} attribute, and optional text content.
     *
     * @param name a {@link java.lang.String} object.
     * @param nameAc a {@link java.lang.String} object.
     * @param value a {@link java.lang.String} object.
     * @throws javax.xml.stream.XMLStreamException if any.
     */
    protected void writeCooperativeEffectAttribute(String name, String nameAc, String value) throws XMLStreamException {
        // write start
        getStreamWriter().writeStartElement("attribute");
        // write topic
        getStreamWriter().writeAttribute("name", name);
        if (nameAc != null){
            getStreamWriter().writeAttribute("nameAc", nameAc);
        }
        // write description
        if (value != null){
            getStreamWriter().writeCharacters(value);
        }
        // write end attribute
        getStreamWriter().writeEndElement();
    }

    /** {@inheritDoc} */
    @Override
    protected void writeOtherProperties(I object) {
        // nothing to do
    }

    /** {@inheritDoc} */
    @Override
    protected void writeStartInteraction() throws XMLStreamException {
        getStreamWriter().writeStartElement("interaction");
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Filter; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Numbers; import org.elasticsearch.common.Strings; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; import 
org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.similarity.SimilarityProvider;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeShortValue;
import static org.elasticsearch.index.mapper.MapperBuilders.shortField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;

/**
 * Field mapper for the {@code short} type. Values are indexed using Lucene's
 * 32-bit int encoding (see the {@code newIntRange} / {@code intToPrefixCoded}
 * calls below), so queries and filters operate on int-widened values.
 */
public class ShortFieldMapper extends NumberFieldMapper<Short> {

    public static final String CONTENT_TYPE = "short";

    public static final int DEFAULT_PRECISION_STEP = 8;

    /** Default field type and null-value for short fields. */
    public static class Defaults extends NumberFieldMapper.Defaults {
        public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);

        static {
            FIELD_TYPE.freeze();
        }

        public static final Short NULL_VALUE = null;
    }

    /** Builder for {@link ShortFieldMapper}; adds a {@code null_value} option. */
    public static class Builder extends NumberFieldMapper.Builder<Builder, ShortFieldMapper> {

        protected Short nullValue = Defaults.NULL_VALUE;

        public Builder(String name) {
            super(name, new FieldType(Defaults.FIELD_TYPE), DEFAULT_PRECISION_STEP);
            builder = this;
        }

        public Builder nullValue(short nullValue) {
            this.nullValue = nullValue;
            return this;
        }

        @Override
        public ShortFieldMapper build(BuilderContext context) {
            // norms carry no information when boost is the default 1.0
            fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
            ShortFieldMapper fieldMapper = new ShortFieldMapper(buildNames(context),
                    fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context),
                    coerce(context), similarity, normsLoading, fieldDataSettings, context.indexSettings(),
                    multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }
    }

    /** Parses mapping JSON for a short field, consuming {@code null_value}. */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            ShortFieldMapper.Builder builder = shortField(name);
            parseNumberField(builder, name, node, parserContext);
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = Strings.toUnderscoreCase(entry.getKey());
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    if (propNode == null) {
                        throw new MapperParsingException("Property [null_value] cannot be null.");
                    }
                    builder.nullValue(nodeShortValue(propNode));
                    // remove consumed property so the caller can detect leftovers
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    // Value substituted when the document supplies null/empty for this field.
    private Short nullValue;

    // String form of nullValue, cached for include_in_all handling.
    private String nullValueAsString;

    protected ShortFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
                               Short nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                               SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
                               Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        // shorts are analyzed with the numeric *integer* analyzer since they
        // are stored using the int trie encoding
        super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce,
                new NamedAnalyzer("_short/" + precisionStep, new NumericIntegerAnalyzer(precisionStep)),
                new NamedAnalyzer("_short/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)),
                similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
        this.nullValue = nullValue;
        this.nullValueAsString = nullValue == null ? null : nullValue.toString();
    }

    @Override
    public FieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("short");
    }

    @Override
    protected int maxPrecisionStep() {
        // 32, not 16: values are indexed via the 32-bit int encoding
        return 32;
    }

    /**
     * Converts an arbitrary value to a Short, or null.
     * NOTE(review): BytesRef input is decoded with Numbers.bytesToShort here,
     * while parseValue() below parses BytesRef as a UTF-8 string — confirm
     * which representation callers actually pass.
     */
    @Override
    public Short value(Object value) {
        if (value == null) {
            return null;
        }
        if (value instanceof Number) {
            return ((Number) value).shortValue();
        }
        if (value instanceof BytesRef) {
            return Numbers.bytesToShort((BytesRef) value);
        }
        return Short.parseShort(value.toString());
    }

    @Override
    public BytesRef indexedValueForSearch(Object value) {
        BytesRefBuilder bytesRef = new BytesRefBuilder();
        NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match
        return bytesRef.get();
    }

    /** Parses a value to a primitive short (BytesRef treated as UTF-8 text). */
    private short parseValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).shortValue();
        }
        if (value instanceof BytesRef) {
            return Short.parseShort(((BytesRef) value).utf8ToString());
        }
        return Short.parseShort(value.toString());
    }

    // Widening helper: queries below are built over the int encoding.
    private int parseValueAsInt(Object value) {
        return parseValue(value);
    }

    @Override
    public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
        short iValue = Short.parseShort(value);
        short iSim = fuzziness.asShort();
        // iValue +/- iSim is computed in int, so no short overflow here
        return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
                iValue - iSim,
                iValue + iSim,
                true, true);
    }

    @Override
    public Query termQuery(Object value, @Nullable QueryParseContext context) {
        int iValue = parseValueAsInt(value);
        // exact match expressed as a [v, v] range over the int encoding
        return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
                iValue, iValue, true, true);
    }

    @Override
    public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
        return NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
                lowerTerm == null ? null : parseValueAsInt(lowerTerm),
                upperTerm == null ? null : parseValueAsInt(upperTerm),
                includeLower, includeUpper);
    }

    @Override
    public Filter termFilter(Object value, @Nullable QueryParseContext context) {
        int iValue = parseValueAsInt(value);
        return Queries.wrap(NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
                iValue, iValue, true, true));
    }

    @Override
    public Filter rangeFilter(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
        return Queries.wrap(NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
                lowerTerm == null ? null : parseValueAsInt(lowerTerm),
                upperTerm == null ? null : parseValueAsInt(upperTerm),
                includeLower, includeUpper));
    }

    /** Range filter backed by field data rather than the inverted index. */
    @Override
    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
        return Queries.wrap(NumericRangeFieldDataFilter.newShortRange((IndexNumericFieldData) parseContext.getForField(this),
                lowerTerm == null ? null : parseValue(lowerTerm),
                upperTerm == null ? null : parseValue(upperTerm),
                includeLower, includeUpper));
    }

    @Override
    public Filter nullValueFilter() {
        if (nullValue == null) {
            return null;
        }
        return Queries.wrap(NumericRangeQuery.newIntRange(names.indexName(), precisionStep,
                nullValue.intValue(),
                nullValue.intValue(),
                true, true));
    }

    @Override
    protected boolean customBoost() {
        return true;
    }

    /**
     * Extracts the short value from the parse context — handling external
     * values, null / empty-string substitution via null_value, and the
     * object form {@code {"value": ..., "boost": ...}} — then adds the
     * indexed field and doc values field as configured.
     */
    @Override
    protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
        short value;
        float boost = this.boost;
        if (context.externalValueSet()) {
            Object externalValue = context.externalValue();
            if (externalValue == null) {
                if (nullValue == null) {
                    return; // no value and no substitute: index nothing
                }
                value = nullValue;
            } else if (externalValue instanceof String) {
                String sExternalValue = (String) externalValue;
                if (sExternalValue.length() == 0) {
                    if (nullValue == null) {
                        return;
                    }
                    value = nullValue;
                } else {
                    value = Short.parseShort(sExternalValue);
                }
            } else {
                value = ((Number) externalValue).shortValue();
            }
            if (context.includeInAll(includeInAll, this)) {
                context.allEntries().addText(names.fullName(), Short.toString(value), boost);
            }
        } else {
            XContentParser parser = context.parser();
            if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
                    (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
                if (nullValue == null) {
                    return;
                }
                value = nullValue;
                if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
                    context.allEntries().addText(names.fullName(), nullValueAsString, boost);
                }
            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                // object form: {"value": <short>, "boost": <float>}
                XContentParser.Token token;
                String currentFieldName = null;
                Short objValue = nullValue;
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else {
                        if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
                            if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                                objValue = parser.shortValue(coerce.value());
                            }
                        } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
                            boost = parser.floatValue();
                        } else {
                            throw new ElasticsearchIllegalArgumentException("unknown property [" + currentFieldName + "]");
                        }
                    }
                }
                if (objValue == null) {
                    // no value
                    return;
                }
                value = objValue;
            } else {
                value = parser.shortValue(coerce.value());
                if (context.includeInAll(includeInAll, this)) {
                    context.allEntries().addText(names.fullName(), parser.text(), boost);
                }
            }
        }
        if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
            CustomShortNumericField field = new CustomShortNumericField(this, value, fieldType);
            field.setBoost(boost);
            fields.add(field);
        }
        if (hasDocValues()) {
            addDocValue(context, fields, value);
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    /** Merges null_value settings from another short mapper (unless simulating). */
    @Override
    public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
        super.merge(mergeWith, mergeContext);
        if (!this.getClass().equals(mergeWith.getClass())) {
            return;
        }
        if (!mergeContext.mergeFlags().simulate()) {
            this.nullValue = ((ShortFieldMapper) mergeWith).nullValue;
            this.nullValueAsString = ((ShortFieldMapper) mergeWith).nullValueAsString;
        }
    }

    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);

        if (includeDefaults || precisionStep != DEFAULT_PRECISION_STEP) {
            builder.field("precision_step", precisionStep);
        }
        if (includeDefaults || nullValue != null) {
            builder.field("null_value", nullValue);
        }
        if (includeInAll != null) {
            builder.field("include_in_all", includeInAll);
        } else if (includeDefaults) {
            builder.field("include_in_all", false);
        }
    }

    /** Indexed field holding a short, tokenised via the mapper's cached numeric stream. */
    public static class CustomShortNumericField extends CustomNumericField {

        private final short number;

        private final NumberFieldMapper mapper;

        public CustomShortNumericField(NumberFieldMapper mapper, short number, FieldType fieldType) {
            super(mapper, number, fieldType);
            this.mapper = mapper;
            this.number = number;
        }

        @Override
        public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) throws IOException {
            if (fieldType().indexOptions() != IndexOptions.NONE) {
                // setIntValue, not setShortValue: shorts share the int encoding
                return mapper.popCachedStream().setIntValue(number);
            }
            return null;
        }

        @Override
        public String numericAsString() {
            return Short.toString(number);
        }
    }
}
/* * Copyright 1999-2012 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.core.id; import java.math.BigInteger; import java.security.SecureRandom; import java.util.Arrays; import com.orientechnologies.common.serialization.types.OIntegerSerializer; import com.orientechnologies.common.serialization.types.OLongSerializer; import com.orientechnologies.common.util.MersenneTwister; /** * Id of the server node in autoshareded storage. It is presented as 192 bit number with values from -2<sup>192</sup>+1 till * 2<sup>192</sup>-1. * * Internally it presents as unsigned 192 bit number with signature flag. 
* * @author Andrey Lomakin * @since 12.11.12 */ public class ONodeId extends Number implements Comparable<ONodeId> { private static final int CHUNKS_SIZE = 6; public static final int NODE_SIZE_BYTES = CHUNKS_SIZE * OIntegerSerializer.INT_SIZE; public static final int NODE_SIZE_BITS = NODE_SIZE_BYTES * 8; public static final int SERIALIZED_SIZE = NODE_SIZE_BYTES + 1; private static final long LONG_INT_MASK = 0xFFFFFFFFL; private static final int UNSIGNED_INT_MAX_VALUE = 0xFFFFFFFF; public static final ONodeId MAX_VALUE = new ONodeId(new int[] { UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE }, 1); public static final ONodeId MIN_VALUE = new ONodeId(new int[] { UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE, UNSIGNED_INT_MAX_VALUE }, -1); public static final ONodeId ZERO = new ONodeId(new int[CHUNKS_SIZE], 0); public static final ONodeId ONE = new ONodeId(new int[] { 0, 0, 0, 0, 0, 1 }, 1); public static final ONodeId TWO = new ONodeId(new int[] { 0, 0, 0, 0, 0, 2 }, 1); private static final MersenneTwister random = new MersenneTwister(); private static final SecureRandom secureRandom = new SecureRandom(); static { random.setSeed(OLongSerializer.INSTANCE.deserialize(secureRandom.generateSeed(OLongSerializer.LONG_SIZE), 0)); } private final int[] chunks; private final int signum; private ONodeId(int[] chunks, int signum) { this.chunks = chunks; this.signum = signum; } @Override public int compareTo(ONodeId o) { if (signum > o.signum) return 1; else if (signum < o.signum) return -1; if (signum == 0 && o.signum == 0) return 0; final int result = compareChunks(chunks, o.chunks); if (signum < 0) return -result; return result; } public ONodeId add(final ONodeId idToAdd) { if (idToAdd.signum == 0) return new ONodeId(chunks, signum); if (signum == 0) return new ONodeId(idToAdd.chunks, idToAdd.signum); final 
int[] result; if (signum == idToAdd.signum) { result = addArrays(chunks, idToAdd.chunks); if (Arrays.equals(ZERO.chunks, result)) return ZERO; return new ONodeId(result, signum); } final int cmp = compareChunks(chunks, idToAdd.chunks); if (cmp == 0) return ZERO; if (cmp > 0) result = substructArrays(chunks, idToAdd.chunks); else result = substructArrays(idToAdd.chunks, chunks); return new ONodeId(result, cmp == signum ? 1 : -1); } public ONodeId subtract(final ONodeId idToSubtract) { if (idToSubtract.signum == 0) return this; if (signum == 0) return new ONodeId(idToSubtract.chunks, -idToSubtract.signum); final int[] result; if (signum != idToSubtract.signum) { result = addArrays(chunks, idToSubtract.chunks); if (Arrays.equals(ZERO.chunks, result)) return ZERO; return new ONodeId(result, signum); } int cmp = compareChunks(chunks, idToSubtract.chunks); if (cmp == 0) return ZERO; if (cmp > 0) result = substructArrays(chunks, idToSubtract.chunks); else result = substructArrays(idToSubtract.chunks, chunks); return new ONodeId(result, cmp == signum ? 
1 : -1); } public ONodeId multiply(final int value) { if (value == 0) return ZERO; final int[] result = new int[CHUNKS_SIZE]; long carry = 0; for (int j = CHUNKS_SIZE - 1; j >= 0; j--) { final long product = (chunks[j] & LONG_INT_MASK) * (value & LONG_INT_MASK) + carry; result[j] = (int) product; carry = product >>> 32; } return new ONodeId(result, signum); } public ONodeId shiftLeft(final int shift) { int nInts = shift >>> 5; if (nInts == CHUNKS_SIZE) return ZERO; final int nBits = shift & 0x1f; final int result[] = new int[CHUNKS_SIZE]; if (nBits != 0) { int nBits2 = 32 - nBits; int i = nInts; int j = 0; while (i < CHUNKS_SIZE - 1) result[j++] = chunks[i++] << nBits | chunks[i] >>> nBits2; result[j] = chunks[i] << nBits; } else System.arraycopy(chunks, nInts, result, 0, CHUNKS_SIZE - nInts); if (Arrays.equals(ZERO.chunks, result)) return ZERO; return new ONodeId(result, signum); } public ONodeId shiftRight(final int shift) { int nInts = shift >>> 5; if (nInts == CHUNKS_SIZE) return ZERO; int nBits = shift & 0x1f; final int result[] = new int[CHUNKS_SIZE]; if (nBits != 0) { int nBits2 = 32 - nBits; int i = 0; int j = nInts; result[j++] = chunks[i] >>> nBits; while (j < CHUNKS_SIZE) result[j++] = chunks[i++] << nBits2 | chunks[i] >>> nBits; } else System.arraycopy(chunks, 0, result, nInts, CHUNKS_SIZE - nInts); if (Arrays.equals(ZERO.chunks, result)) return ZERO; return new ONodeId(result, signum); } public static ONodeId generateUniqueId() { final long clusterPosition = random.nextLong(Long.MAX_VALUE); final int[] chunks = new int[CHUNKS_SIZE]; final byte[] uuid = new byte[16]; secureRandom.nextBytes(uuid); chunks[0] = (int) (clusterPosition >>> 32); chunks[1] = (int) clusterPosition; chunks[2] = OIntegerSerializer.INSTANCE.deserialize(uuid, 0); chunks[3] = OIntegerSerializer.INSTANCE.deserialize(uuid, 4); chunks[4] = OIntegerSerializer.INSTANCE.deserialize(uuid, 8); chunks[5] = OIntegerSerializer.INSTANCE.deserialize(uuid, 12); return new ONodeId(chunks, 1); } 
private static int[] addArrays(int[] chunksToAddOne, int[] chunksToAddTwo) { int[] result = new int[CHUNKS_SIZE]; int index = CHUNKS_SIZE; long sum = 0; while (index > 0) { index--; sum = (chunksToAddTwo[index] & LONG_INT_MASK) + (chunksToAddOne[index] & LONG_INT_MASK) + (sum >>> 32); result[index] = (int) sum; } return result; } private static int compareChunks(int[] chunksOne, int[] chunksTwo) { for (int i = 0; i < CHUNKS_SIZE; i++) { final long chunk = chunksOne[i] & LONG_INT_MASK; final long otherChunk = chunksTwo[i] & LONG_INT_MASK; if (chunk == otherChunk) continue; if (chunk > otherChunk) return 1; return -1; } return 0; } private static int[] substructArrays(int[] chunksOne, int[] chunksTwo) { int[] result = new int[CHUNKS_SIZE]; int index = CHUNKS_SIZE; long difference = 0; while (index > 0) { index--; difference = (chunksOne[index] & LONG_INT_MASK) - (chunksTwo[index] & LONG_INT_MASK) + (difference >> 32); result[index] = (int) difference; } return result; } private static void multiplyAndAdd(int[] chunks, int multiplier, int summand) { long carry = 0; for (int j = CHUNKS_SIZE - 1; j >= 0; j--) { final long product = (chunks[j] & LONG_INT_MASK) * (multiplier & LONG_INT_MASK) + carry; chunks[j] = (int) product; carry = product >>> 32; } if (summand == 0) return; long sum = (chunks[CHUNKS_SIZE - 1] & LONG_INT_MASK) + (summand & LONG_INT_MASK); chunks[CHUNKS_SIZE - 1] = (int) sum; int j = CHUNKS_SIZE - 2; while (j >= 0 && sum > 0) { sum = (chunks[j] & LONG_INT_MASK) + (sum >>> 32); chunks[j] = (int) sum; j--; } } public int intValue() { final int reslut = chunks[CHUNKS_SIZE - 1]; if(signum < 0) return -reslut; return reslut; } @Override public long longValue() { final long reslut = (((chunks[CHUNKS_SIZE - 2] & LONG_INT_MASK) << 32) + (chunks[CHUNKS_SIZE - 1] & LONG_INT_MASK)) & Long.MAX_VALUE; if (signum < 0) return -reslut; return reslut; } public long longValueHigh() { final long reslut = (((chunks[0] & LONG_INT_MASK) << 32) + (chunks[1] & LONG_INT_MASK)) 
& Long.MAX_VALUE; if (signum < 0) return -reslut; return reslut; } @Override public float floatValue() { return Float.parseFloat(toString()); } @Override public double doubleValue() { return Double.parseDouble(toString()); } public byte[] toStream() { final byte[] bytes = new byte[SERIALIZED_SIZE]; int pos = 0; for (int i = 0; i < CHUNKS_SIZE; i++) { OIntegerSerializer.INSTANCE.serialize(chunks[i], bytes, pos); pos += OIntegerSerializer.INT_SIZE; } bytes[pos] = (byte) signum; return bytes; } public byte[] chunksToByteArray() { final byte[] bytes = new byte[NODE_SIZE_BYTES]; int pos = 0; for (int i = 0; i < CHUNKS_SIZE; i++) { OIntegerSerializer.INSTANCE.serialize(chunks[i], bytes, pos); pos += OIntegerSerializer.INT_SIZE; } return bytes; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ONodeId oNodeId = (ONodeId) o; if (signum != oNodeId.signum) return false; return Arrays.equals(chunks, oNodeId.chunks); } @Override public int hashCode() { int result = Arrays.hashCode(chunks); result = 31 * result + signum; return result; } public String toString() { return new BigInteger(signum, chunksToByteArray()).toString(); } public static ONodeId valueOf(long value) { final ONodeId constant = findInConstantPool(value); if (constant != null) return constant; final int signum; if (value > 0) signum = 1; else { signum = -1; value = -value; } final int[] chunks = new int[CHUNKS_SIZE]; chunks[5] = (int) (value & LONG_INT_MASK); chunks[4] = (int) (value >>> 32); return new ONodeId(chunks, signum); } public static ONodeId parseString(String value) { final int intChunkLength = 9; final int longChunkLength = 18; int signum; int pos; if (value.charAt(0) == '-') { pos = 1; signum = -1; } else { pos = 0; signum = 1; } while (pos < value.length() && Character.digit(value.charAt(pos), 10) == 0) pos++; if (pos == value.length()) return ZERO; int chunkToRead = Math.min(pos + longChunkLength, value.length()); 
long initialValue = Long.parseLong(value.substring(pos, chunkToRead)); pos = chunkToRead; int[] result = new int[CHUNKS_SIZE]; result[CHUNKS_SIZE - 1] = (int) initialValue; result[CHUNKS_SIZE - 2] = (int) (initialValue >>> 32); while (pos < value.length()) { chunkToRead = Math.min(pos + intChunkLength, value.length()); int parsedValue = Integer.parseInt(value.substring(pos, chunkToRead)); final int multiplier = (chunkToRead == intChunkLength) ? 1000000000 : (int) Math.pow(10, chunkToRead - pos); multiplyAndAdd(result, multiplier, parsedValue); pos = chunkToRead; } return new ONodeId(result, signum); } public static ONodeId fromStream(byte[] content, int start) { final int[] chunks = new int[CHUNKS_SIZE]; int pos = start; for (int i = 0; i < CHUNKS_SIZE; i++) { chunks[i] = OIntegerSerializer.INSTANCE.deserialize(content, pos); pos += OIntegerSerializer.INT_SIZE; } final int signum = content[pos]; return new ONodeId(chunks, signum); } public static ONodeId parseHexSting(String value) { int pos; int signum; if (value.charAt(0) == '-') { pos = 1; signum = -1; } else { pos = 0; signum = 1; } final int[] chunks = new int[6]; for (int i = 0; i < CHUNKS_SIZE; i++) { final String chunk = value.substring(pos, pos + OIntegerSerializer.INT_SIZE * 2); chunks[i] = (int) Long.parseLong(chunk, 16); pos += OIntegerSerializer.INT_SIZE * 2; } if (Arrays.equals(ZERO.chunks, chunks)) return ZERO; return new ONodeId(chunks, signum); } public String toHexString() { final StringBuilder builder = new StringBuilder(); if (signum < 0) builder.append("-"); for (int chunk : chunks) builder.append(String.format("%1$08x", chunk)); return builder.toString(); } private static ONodeId findInConstantPool(long value) { if (value == 0) return ZERO; if (value == 1) return ONE; if (value == 2) return TWO; return null; } }
package com.cedarsoftware.util; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLSession; import javax.net.ssl.SSLSocketFactory; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.ConnectException; import java.net.HttpURLConnection; import java.net.InetSocketAddress; import java.net.MalformedURLException; import java.net.Proxy; import java.net.URL; import java.net.URLConnection; import java.security.SecureRandom; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; import java.text.ParseException; import java.util.Date; import java.util.Iterator; import java.util.Map; import java.util.StringTokenizer; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Useful utilities for working with UrlConnections and IO. * * Anyone using the deprecated api calls for proxying to urls should update to use the new suggested calls. * To let the jvm proxy for you automatically, use the following -D parameters: * * http.proxyHost * http.proxyPort (default: 80) * http.nonProxyHosts (should always include localhost) * https.proxyHost * https.proxyPort * * Example: -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg * * @author Ken Partlow * @author John DeRegnaucourt (john@cedarsoftware.com) * <br> * Copyright (c) Cedar Software LLC * <br><br> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at
 * <br><br>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <br><br>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
public final class UrlUtilities
{
    // Process-wide fallbacks; the ThreadLocals below take precedence per thread.
    private static String globalUserAgent = null;
    private static String globalReferrer = null;
    public static final ThreadLocal<String> userAgent = new ThreadLocal<>();
    public static final ThreadLocal<String> referrer = new ThreadLocal<>();

    public static final String SET_COOKIE = "Set-Cookie";
    public static final String SET_COOKIE_SEPARATOR = "; ";
    public static final String COOKIE = "Cookie";
    public static final String COOKIE_VALUE_DELIMITER = ";";
    public static final String PATH = "path";
    public static final String EXPIRES = "expires";
    // NOTE(review): pattern uses 'hh' (12-hour clock) while cookie 'expires' values
    // are 24-hour — confirm whether 'HH' was intended before changing.
    public static final SafeSimpleDateFormat DATE_FORMAT = new SafeSimpleDateFormat("EEE, dd-MMM-yyyy hh:mm:ss z");
    public static final char NAME_VALUE_SEPARATOR = '=';
    public static final char DOT = '.';

    // Matches the custom "res://" scheme used by getActualUrl() to load from the classpath.
    private static final Pattern resPattern = Pattern.compile("^res\\:\\/\\/", Pattern.CASE_INSENSITIVE);
    private static final Logger LOG = LogManager.getLogger(UrlUtilities.class);

    // Trust manager that accepts every certificate; installed only when callers
    // explicitly ask to skip certificate validation.
    public static final TrustManager[] NAIVE_TRUST_MANAGER = new TrustManager[]
    {
        new X509TrustManager()
        {
            public void checkClientTrusted(X509Certificate[] x509Certificates, String s) throws CertificateException
            {
            }

            public void checkServerTrusted(X509Certificate[] x509Certificates, String s) throws CertificateException
            {
            }

            public X509Certificate[] getAcceptedIssuers()
            {
                return null;
            }
        }
    };

    // Hostname verifier that accepts every host (pairs with NAIVE_TRUST_MANAGER).
    public static final HostnameVerifier NAIVE_VERIFIER = new HostnameVerifier()
    {
        public boolean verify(String s, SSLSession sslSession)
        {
            return true;
        }
    };

    protected static SSLSocketFactory naiveSSLSocketFactory;

    static
    {
        try
        {
            // Default new HTTP connections to follow redirects
            HttpURLConnection.setFollowRedirects(true);
        }
        catch (Exception ignored) {}
        try
        {
            // could be other algorithms (prob need to calculate this another way.
            final SSLContext sslContext = SSLContext.getInstance("SSL");
            sslContext.init(null, NAIVE_TRUST_MANAGER, new SecureRandom());
            naiveSSLSocketFactory = sslContext.getSocketFactory();
        }
        catch (Exception e)
        {
            LOG.warn("Failed to build Naive SSLSocketFactory", e);
        }
    }

    private UrlUtilities()
    {
        super();
    }

    public static void clearGlobalUserAgent()
    {
        globalUserAgent = null;
    }

    public static void clearGlobalReferrer()
    {
        globalReferrer = null;
    }

    /**
     * Sets the referrer for the current thread; the first value ever supplied
     * also becomes the process-wide fallback.
     */
    public static void setReferrer(String referer)
    {
        if (StringUtilities.isEmpty(globalReferrer))
        {
            globalReferrer = referer;
        }
        referrer.set(referer);
    }

    /** Returns the current thread's referrer, falling back to the global one. */
    public static String getReferrer()
    {
        String localReferrer = referrer.get();
        if (StringUtilities.hasContent(localReferrer))
        {
            return localReferrer;
        }
        return globalReferrer;
    }

    /**
     * Sets the User-Agent for the current thread; the first value ever supplied
     * also becomes the process-wide fallback.
     */
    public static void setUserAgent(String agent)
    {
        if (StringUtilities.isEmpty(globalUserAgent))
        {
            globalUserAgent = agent;
        }
        userAgent.set(agent);
    }

    /** Returns the current thread's User-Agent, falling back to the global one. */
    public static String getUserAgent()
    {
        String localAgent = userAgent.get();
        if (StringUtilities.hasContent(localAgent))
        {
            return localAgent;
        }
        return globalUserAgent;
    }

    /**
     * Drains and logs the HTTP error stream of a failed connection so the
     * underlying socket can be reused. Quietly returns when there is no error
     * stream. Errors during the drain are logged, never rethrown.
     */
    public static void readErrorResponse(URLConnection c)
    {
        if (c == null)
        {
            return;
        }
        InputStream in = null;
        try
        {
            int error = ((HttpURLConnection) c).getResponseCode();
            in = ((HttpURLConnection) c).getErrorStream();
            if (in == null)
            {
                return;
            }
            LOG.warn("HTTP error response: " + ((HttpURLConnection) c).getResponseMessage());
            // read the response body
            ByteArrayOutputStream out = new ByteArrayOutputStream(1024);
            int count;
            byte[] bytes = new byte[8192];
            while ((count = in.read(bytes)) != -1)
            {
                out.write(bytes, 0, count);
            }
            LOG.warn("HTTP error Code: " + error);
        }
        catch (ConnectException e)
        {
            LOG.error("Connection exception trying to read HTTP error response", e);
        }
        catch (IOException e)
        {
            LOG.error("IO Exception trying to read HTTP error response", e);
        }
        catch (Exception e)
        {
            LOG.error("Exception trying to read HTTP error response", e);
        }
        finally
        {
            IOUtilities.close(in);
        }
    }

    /** Disconnects quietly; safe to call with null. */
    public static void disconnect(HttpURLConnection c)
    {
        if (c != null)
        {
            try
            {
                c.disconnect();
            }
            catch (Exception ignored) {}
        }
    }

    /**
     * Retrieves and stores cookies returned by the host on the other side of
     * the open java.net.URLConnection.
     * <p>
     * The connection MUST have been opened using the connect() method or a
     * IOException will be thrown.
     *
     * @param conn a java.net.URLConnection - must be open, or IOException will
     * be thrown
     * @param store Map of Maps: top level keyed by cookie domain, second level
     * keyed by cookie name (mutated in place)
     */
    public static void getCookies(URLConnection conn, Map store)
    {
        // let's determine the domain from where these cookies are being sent
        String domain = getCookieDomainFromHost(conn.getURL().getHost());
        Map domainStore; // this is where we will store cookies for this domain

        // now let's check the store to see if we have an entry for this domain
        if (store.containsKey(domain))
        {
            // we do, so lets retrieve it from the store
            domainStore = (Map) store.get(domain);
        }
        else
        {
            // we don't, so let's create it and put it in the store
            domainStore = new ConcurrentHashMap();
            store.put(domain, domainStore);
        }

        if (domainStore.containsKey("JSESSIONID"))
        {
            // No need to continually get the JSESSIONID (and set-cookies header) as this does not change throughout the session.
            return;
        }

        // OK, now we are ready to get the cookies out of the URLConnection
        String headerName;
        for (int i = 1; (headerName = conn.getHeaderFieldKey(i)) != null; i++)
        {
            if (headerName.equalsIgnoreCase(SET_COOKIE))
            {
                Map cookie = new ConcurrentHashMap();
                StringTokenizer st = new StringTokenizer(conn.getHeaderField(i), COOKIE_VALUE_DELIMITER);

                // the specification dictates that the first name/value pair
                // in the string is the cookie name and value, so let's handle
                // them as a special case:
                if (st.hasMoreTokens())
                {
                    String token = st.nextToken();
                    String key = token.substring(0, token.indexOf(NAME_VALUE_SEPARATOR)).trim();
                    String value = token.substring(token.indexOf(NAME_VALUE_SEPARATOR) + 1);
                    domainStore.put(key, cookie);
                    cookie.put(key, value);
                }

                // Remaining tokens are cookie attributes (path, expires, ...);
                // note attribute keys are lower-cased, the cookie name above is not.
                while (st.hasMoreTokens())
                {
                    String token = st.nextToken();
                    int pos = token.indexOf(NAME_VALUE_SEPARATOR);
                    if (pos != -1)
                    {
                        String key = token.substring(0, pos).toLowerCase().trim();
                        String value = token.substring(token.indexOf(NAME_VALUE_SEPARATOR) + 1);
                        cookie.put(key, value);
                    }
                }
            }
        }
    }

    /**
     * Prior to opening a URLConnection, calling this method will set all
     * unexpired cookies that match the path or subpaths for the underlying URL
     * <p>
     * The connection MUST NOT have been opened
     * method or an IOException will be thrown.
     *
     * @param conn a java.net.URLConnection - must NOT be open, or IOException will be thrown
     * @param store cookie store as built by {@link #getCookies(URLConnection, Map)}
     * @throws IOException Thrown if conn has already been opened.
     */
    public static void setCookies(URLConnection conn, Map store) throws IOException
    {
        // let's determine the domain and path to retrieve the appropriate cookies
        URL url = conn.getURL();
        String domain = getCookieDomainFromHost(url.getHost());
        String path = url.getPath();

        Map domainStore = (Map) store.get(domain);
        if (domainStore == null)
        {
            return;
        }
        StringBuilder cookieStringBuffer = new StringBuilder();
        Iterator cookieNames = domainStore.keySet().iterator();

        while (cookieNames.hasNext())
        {
            String cookieName = (String) cookieNames.next();
            Map cookie = (Map) domainStore.get(cookieName);

            // check cookie to ensure path matches and cookie is not expired
            // if all is cool, add cookie to header string
            if (comparePaths((String) cookie.get(PATH), path) && isNotExpired((String) cookie.get(EXPIRES)))
            {
                cookieStringBuffer.append(cookieName);
                cookieStringBuffer.append('=');
                cookieStringBuffer.append((String) cookie.get(cookieName));
                if (cookieNames.hasNext())
                {
                    cookieStringBuffer.append(SET_COOKIE_SEPARATOR);
                }
            }
        }
        try
        {
            conn.setRequestProperty(COOKIE, cookieStringBuffer.toString());
        }
        catch (IllegalStateException e)
        {
            // NOTE(review): this message says AFTER but setCookies must be called
            // BEFORE connect(); the wording looks inverted — left as-is (runtime string).
            throw new IOException("Illegal State! Cookies cannot be set on a URLConnection that is already connected. " + "Only call setCookies(java.net.URLConnection) AFTER calling java.net.URLConnection.connect().");
        }
    }

    /** Strips sub-domains: "a.b.example.com" becomes "example.com". */
    public static String getCookieDomainFromHost(String host)
    {
        while (host.indexOf(DOT) != host.lastIndexOf(DOT))
        {
            host = host.substring(host.indexOf(DOT) + 1);
        }
        return host;
    }

    /**
     * Returns true when the cookie has no expires attribute or the parsed
     * expiration is now or later; unparseable dates count as expired.
     */
    static boolean isNotExpired(String cookieExpires)
    {
        if (cookieExpires == null)
        {
            return true;
        }
        try
        {
            return new Date().compareTo(DATE_FORMAT.parse(cookieExpires)) <= 0;
        }
        catch (ParseException e)
        {
            LOG.info("Parse error on cookie expires value: " + cookieExpires, e);
            return false;
        }
    }

    /** True when the cookie path is absent, root, or a prefix of the target path. */
    static boolean comparePaths(String cookiePath, String targetPath)
    {
        return cookiePath == null || "/".equals(cookiePath) || targetPath.regionMatches(0, cookiePath, 0, cookiePath.length());
    }

    /**
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * String.
     *
     * @param url URL to hit
     * @return UTF-8 String read from URL or null in the case of error.
     */
    public static String getContentFromUrlAsString(String url)
    {
        return getContentFromUrlAsString(url, null, null, true);
    }

    /**
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * String.
     *
     * @param url URL to hit
     * @param allowAllCerts true to not verify certificates
     * @return UTF-8 String read from URL or null in the case of error.
     */
    public static String getContentFromUrlAsString(URL url, boolean allowAllCerts)
    {
        return getContentFromUrlAsString(url, null, null, allowAllCerts);
    }

    /**
     * Get content from the passed in URL. This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * String.
     *
     * @param url URL to hit
     * @param inCookies Map of session cookies (or null if not needed)
     * @param outCookies Map of session cookies (or null if not needed)
     * @param trustAllCerts if true, SSL connection will always be trusted.
     * @return String of content fetched from URL.
     */
    public static String getContentFromUrlAsString(String url, Map inCookies, Map outCookies, boolean trustAllCerts)
    {
        byte[] bytes = getContentFromUrl(url, inCookies, outCookies, trustAllCerts);
        return bytes == null ? null : StringUtilities.createString(bytes, "UTF-8");
    }

    /**
     * Get content from the passed in URL. This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * String.
     *
     * @param url URL to hit
     * @param inCookies Map of session cookies (or null if not needed)
     * @param outCookies Map of session cookies (or null if not needed)
     * @param trustAllCerts if true, SSL connection will always be trusted.
     * @return String of content fetched from URL.
     */
    public static String getContentFromUrlAsString(URL url, Map inCookies, Map outCookies, boolean trustAllCerts)
    {
        byte[] bytes = getContentFromUrl(url, inCookies, outCookies, trustAllCerts);
        return bytes == null ? null : StringUtilities.createString(bytes, "UTF-8");
    }

    /**
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * byte[].
     *
     * @param url URL to hit
     * @return byte[] read from URL or null in the case of error.
     */
    public static byte[] getContentFromUrl(String url)
    {
        return getContentFromUrl(url, null, null, true);
    }

    /**
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * byte[].
     *
     * @param url URL to hit
     * @param allowAllCerts true to skip certificate validation
     * @return byte[] read from URL or null in the case of error.
     */
    public static byte[] getContentFromUrl(URL url, boolean allowAllCerts)
    {
        return getContentFromUrl(url, null, null, allowAllCerts);
    }

    /**
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * byte[].
     *
     * @param url URL to hit
     * @param inCookies Map of session cookies (or null if not needed)
     * @param outCookies Map of session cookies (or null if not needed)
     * @param allowAllCerts if true, SSL connection will always be trusted.
     * @return byte[] of content fetched from URL, or null on any error (logged).
     */
    public static byte[] getContentFromUrl(String url, Map inCookies, Map outCookies, boolean allowAllCerts)
    {
        try
        {
            return getContentFromUrl(getActualUrl(url), inCookies, outCookies, allowAllCerts);
        }
        catch (Exception e)
        {
            LOG.warn("Exception occurred fetching content from url: " + url, e);
            return null;
        }
    }

    /**
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * byte[].
     *
     * @param url URL to hit
     * @param inCookies Map of session cookies (or null if not needed)
     * @param outCookies Map of session cookies (or null if not needed)
     * @param allowAllCerts override certificate validation?
     * @return byte[] of content fetched from URL, or null on any error (logged).
     */
    public static byte[] getContentFromUrl(URL url, Map inCookies, Map outCookies, boolean allowAllCerts)
    {
        URLConnection c = null;
        try
        {
            c = getConnection(url, inCookies, true, false, false, allowAllCerts);

            ByteArrayOutputStream out = new ByteArrayOutputStream(16384);
            InputStream stream = IOUtilities.getInputStream(c);
            IOUtilities.transfer(stream, out);
            stream.close();

            if (outCookies != null)
            {   // [optional] Fetch cookies from server and update outCookie Map (pick up JSESSIONID, other headers)
                getCookies(c, outCookies);
            }

            return out.toByteArray();
        }
        catch (SSLHandshakeException e)
        {   // Don't read error response.  it will just cause another exception.
            LOG.warn("SSL Exception occurred fetching content from url: " + url, e);
            return null;
        }
        catch (Exception e)
        {
            readErrorResponse(c);
            LOG.warn("Exception occurred fetching content from url: " + url, e);
            return null;
        }
        finally
        {
            if (c instanceof HttpURLConnection)
            {
                disconnect((HttpURLConnection)c);
            }
        }
    }

    /**
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * byte[].  Certificate validation is skipped.
     *
     * @param url URL to hit
     * @param inCookies Map of session cookies (or null if not needed)
     * @param outCookies Map of session cookies (or null if not needed)
     * @return byte[] of content fetched from URL.
     */
    public static byte[] getContentFromUrl(String url, Map inCookies, Map outCookies)
    {
        return getContentFromUrl(url, inCookies, outCookies, true);
    }

    /**
     * @param url String url (may use the res:// scheme)
     * @param input boolean indicating whether this connection will be used for input
     * @param output boolean indicating whether this connection will be used for output
     * @param cache boolean allow caching (be careful setting this to true for non-static retrievals).
     * @return URLConnection established URL connection.
     */
    public static URLConnection getConnection(String url, boolean input, boolean output, boolean cache) throws IOException
    {
        return getConnection(getActualUrl(url), null, input, output, cache, true);
    }

    /**
     * @param url URL to connect to
     * @param input boolean indicating whether this connection will be used for input
     * @param output boolean indicating whether this connection will be used for output
     * @param cache boolean allow caching (be careful setting this to true for non-static retrievals).
     * @return URLConnection established URL connection.
     */
    public static URLConnection getConnection(URL url, boolean input, boolean output, boolean cache) throws IOException
    {
        return getConnection(url, null, input, output, cache, true);
    }

    /**
     * Gets a connection from a url.  All getConnection calls should go through this code.
     * @param url URL to connect to
     * @param inCookies Supply cookie Map (received from prior setCookies calls from server)
     * @param input boolean indicating whether this connection will be used for input
     * @param output boolean indicating whether this connection will be used for output
     * @param cache boolean allow caching (be careful setting this to true for non-static retrievals).
     * @param allowAllCerts true installs the naive SSL factory/verifier (no cert validation)
     * @return URLConnection established URL connection.
     */
    public static URLConnection getConnection(URL url, Map inCookies, boolean input, boolean output, boolean cache, boolean allowAllCerts) throws IOException
    {
        URLConnection c = url.openConnection();
        c.setRequestProperty("Accept-Encoding", "gzip, deflate");
        c.setAllowUserInteraction(false);
        c.setDoOutput(output);
        c.setDoInput(input);
        c.setUseCaches(cache);
        // Fixed timeouts: 220s read, 45s connect.
        c.setReadTimeout(220000);
        c.setConnectTimeout(45000);

        String ref = getReferrer();
        if (StringUtilities.hasContent(ref))
        {
            c.setRequestProperty("Referer", ref);
        }
        String agent = getUserAgent();
        if (StringUtilities.hasContent(agent))
        {
            c.setRequestProperty("User-Agent", agent);
        }

        if (c instanceof HttpURLConnection)
        {   // setFollowRedirects is a static (global) method / setting - resetting it in case other code changed it?
            HttpURLConnection.setFollowRedirects(true);
        }

        if (c instanceof HttpsURLConnection && allowAllCerts)
        {
            try
            {
                setNaiveSSLSocketFactory((HttpsURLConnection) c);
            }
            catch(Exception e)
            {
                LOG.warn("Could not access '" + url.toString() + "'", e);
            }
        }

        // Set cookies in the HTTP header
        if (inCookies != null)
        {   // [optional] place cookies (JSESSIONID) into HTTP headers
            setCookies(c, inCookies);
        }
        return c;
    }

    /** Installs the accept-everything SSL socket factory and hostname verifier. */
    private static void setNaiveSSLSocketFactory(HttpsURLConnection sc)
    {
        sc.setSSLSocketFactory(naiveSSLSocketFactory);
        sc.setHostnameVerifier(NAIVE_VERIFIER);
    }

    /**
     * Converts a URL string to a URL; "res://name" is resolved against the
     * classpath via this class's ClassLoader.
     */
    public static URL getActualUrl(String url) throws MalformedURLException
    {
        Matcher m = resPattern.matcher(url);
        return m.find() ? UrlUtilities.class.getClassLoader().getResource(url.substring(m.end())) : new URL(url);
    }

    /************************************   DEPRECATED ITEMS ONLY BELOW  ******************************************/

    /**
     * @return String host name
     * @deprecated As of release 1.13.0, replaced by {@link com.cedarsoftware.util.InetAddressUtilities#getHostName()}
     */
    @Deprecated
    public static String getHostName()
    {
        return InetAddressUtilities.getHostName();
    }

    /**
     * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters:
     * http.proxyHost
     * http.proxyPort (default: 80)
     * http.nonProxyHosts (should always include localhost)
     * https.proxyHost
     * https.proxyPort
     *
     * Example:  -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg
     * @deprecated As of release 1.13.0, replaced by {@link #getConnection(java.net.URL, java.util.Map, boolean, boolean, boolean, boolean)}
     */
    @Deprecated
    public static URLConnection getConnection(URL url, Map inCookies, boolean input, boolean output, boolean cache, Proxy proxy, boolean allowAllCerts) throws IOException
    {
        // Proxy argument ignored; JVM-level proxy settings apply.
        return getConnection(url, inCookies, input, output, cache, allowAllCerts);
    }

    /**
     * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters:
     * http.proxyHost
     * http.proxyPort (default: 80)
     * http.nonProxyHosts (should always include localhost)
     * https.proxyHost
     * https.proxyPort
     *
     * Example:  -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg
     *
     * @deprecated As of release 1.13.0, replaced by {@link #getConnection(java.net.URL, java.util.Map, boolean, boolean, boolean, boolean)}
     */
    @Deprecated
    public static URLConnection getConnection(URL url, String server, int port, Map inCookies, boolean input, boolean output, boolean cache, boolean allowAllCerts) throws IOException
    {
        // server/port arguments ignored; JVM-level proxy settings apply.
        return getConnection(url, inCookies, input, output, cache, allowAllCerts);
    }

    /**
     * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters:
     * http.proxyHost
     * http.proxyPort (default: 80)
     * http.nonProxyHosts (should always include localhost)
     * https.proxyHost
     * https.proxyPort
     *
     * Example:  -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg
     * @deprecated As of release 1.13.0, replaced by {@link #getConnection(java.net.URL, java.util.Map, boolean, boolean, boolean, boolean)}
     */
    @Deprecated
    public static URLConnection getConnection(URL url, Map inCookies, boolean input, boolean output, boolean cache, Proxy proxy, SSLSocketFactory factory, HostnameVerifier verifier) throws IOException
    {
        // Proxy/factory/verifier arguments ignored; delegates with allowAllCerts=true.
        return getConnection(url, inCookies, input, output, cache, true);
    }

    /**
     * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters:
     * http.proxyHost
     * http.proxyPort (default: 80)
     * http.nonProxyHosts (should always include localhost)
     * https.proxyHost
     * https.proxyPort
     *
     * Example:  -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * byte[].
     *
     * @param url URL to hit
     * @param proxy proxy to use to create connection
     * @return byte[] read from URL or null in the case of error.
     * @deprecated As of release 1.13.0, replaced by {@link #getContentFromUrl(String)}
     */
    @Deprecated
    public static byte[] getContentFromUrl(String url, Proxy proxy)
    {
        return getContentFromUrl(url);
    }

    /**
     * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters:
     * http.proxyHost
     * http.proxyPort (default: 80)
     * http.nonProxyHosts (should always include localhost)
     * https.proxyHost
     * https.proxyPort
     *
     * Example:  -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * byte[].
     *
     * @param url URL to hit
     * @param proxy Proxy server to create connection (or null if not needed)
     * @param factory custom SSLSocket factory (or null if not needed)
     * @param verifier custom Hostnameverifier (or null if not needed)
     * @return byte[] of content fetched from URL.
     * @deprecated As of release 1.13.0, replaced by {@link #getContentFromUrl(String)}
     */
    @Deprecated
    public static byte[] getContentFromUrl(String url, Proxy proxy, SSLSocketFactory factory, HostnameVerifier verifier)
    {
        return getContentFromUrl(url);
    }

    /**
     * Get content from the passed in URL.  This code will open a connection to
     * the passed in server, fetch the requested content, and return it as a
     * byte[].
* * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters: * http.proxyHost * http.proxyPort (default: 80) * http.nonProxyHosts (should always include localhost) * https.proxyHost * https.proxyPort * * Example: -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg * @param url URL to hit * @param proxy Proxy server to create connection (or null if not needed) * @param factory custom SSLSocket factory (or null if not needed) * @param verifier custom Hostnameverifier (or null if not needed) * @return byte[] of content fetched from URL. * @deprecated As of release 1.13.0, replaced by {@link #getContentFromUrl(String)} */ @Deprecated public static byte[] getContentFromUrl(String url, Map inCookies, Map outCookies, Proxy proxy, SSLSocketFactory factory, HostnameVerifier verifier) { return getContentFromUrl(url, inCookies, outCookies, true); } /** * Get content from the passed in URL. This code will open a connection to * the passed in server, fetch the requested content, and return it as a * byte[]. * * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters: * http.proxyHost * http.proxyPort (default: 80) * http.nonProxyHosts (should always include localhost) * https.proxyHost * https.proxyPort * * Example: -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg * @param url URL to hit * @param proxy proxy to use to create connection * @return String read from URL or null in the case of error. * * @deprecated As of release 1.13.0, replaced by {@link #getContentFromUrl(String)} */ @Deprecated public static String getContentFromUrlAsString(String url, Proxy proxy) { byte[] bytes = getContentFromUrl(url, proxy); return bytes == null ? 
null : StringUtilities.createString(bytes, "UTF-8"); } /** * Get content from the passed in URL. This code will open a connection to * the passed in server, fetch the requested content, and return it as a * byte[]. * * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters: * http.proxyHost * http.proxyPort (default: 80) * http.nonProxyHosts (should always include localhost) * https.proxyHost * https.proxyPort * * Example: -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg * @param url URL to hit * @param inCookies Map of session cookies (or null if not needed) * @param outCookies Map of session cookies (or null if not needed) * @param proxy Proxy server to create connection (or null if not needed) * @return byte[] of content fetched from URL. * * @deprecated As of release 1.13.0, replaced by {@link #getContentFromUrl(String, java.util.Map, java.util.Map, boolean)} */ @Deprecated public static byte[] getContentFromUrl(URL url, Map inCookies, Map outCookies, Proxy proxy, boolean allowAllCerts) { return getContentFromUrl(url, inCookies, outCookies, allowAllCerts); } /** * Get content from the passed in URL. This code will open a connection to * the passed in server, fetch the requested content, and return it as a * byte[]. 
* * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters: * http.proxyHost * http.proxyPort (default: 80) * http.nonProxyHosts (should always include localhost) * https.proxyHost * https.proxyPort * * Example: -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg * @param url URL to hit * @param inCookies Map of session cookies (or null if not needed) * @param outCookies Map of session cookies (or null if not needed) * @param proxy Proxy server to create connection (or null if not needed) * @return byte[] of content fetched from URL. * * @deprecated As of release 1.13.0, replaced by {@link #getConnection(String, boolean, boolean, boolean)} */ @Deprecated public static byte[] getContentFromUrl(String url, Map inCookies, Map outCookies, Proxy proxy, boolean allowAllCerts) { try { return getContentFromUrl(getActualUrl(url), inCookies, outCookies, proxy, allowAllCerts); } catch (MalformedURLException e) { LOG.warn("Exception occurred fetching content from url: " + url, e); return null; } } /** * Get content from the passed in URL. This code will open a connection to * the passed in server, fetch the requested content, and return it as a * byte[]. 
* * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters: * http.proxyHost * http.proxyPort (default: 80) * http.nonProxyHosts (should always include localhost) * https.proxyHost * https.proxyPort * * Example: -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg * @param url URL to hit * @param proxyServer String named of proxy server * @param port port to access proxy server * @param inCookies Map of session cookies (or null if not needed) * @param outCookies Map of session cookies (or null if not needed) * @param allowAllCerts if true, SSL connection will always be trusted. * @return byte[] of content fetched from URL. * * @deprecated As of release 1.13.0, replaced by {@link #getContentFromUrl(String, java.util.Map, java.util.Map, boolean)} */ @Deprecated public static byte[] getContentFromUrl(String url, String proxyServer, int port, Map inCookies, Map outCookies, boolean allowAllCerts) { // if proxy server is passed Proxy proxy = null; if (proxyServer != null) { proxy = new Proxy(java.net.Proxy.Type.HTTP, new InetSocketAddress(proxyServer, port)); } return getContentFromUrl(url, inCookies, outCookies, proxy, allowAllCerts); } /** * Get content from the passed in URL. This code will open a connection to * the passed in server, fetch the requested content, and return it as a * String. 
* * Anyone using the proxy calls such as this one should have that managed by the jvm with -D parameters: * http.proxyHost * http.proxyPort (default: 80) always * https.proxyHost * https.proxyPort * * Example: -Dhttp.proxyHost=proxy.example.org -Dhttp.proxyPort=8080 -Dhttps.proxyHost=proxy.example.org -Dhttps.proxyPort=8080 -Dhttp.nonProxyHosts=*.foo.com|localhost|*.td.afg * @param url URL to hit * @param proxyServer String named of proxy server * @param port port to access proxy server * @param inCookies Map of session cookies (or null if not needed) * @param outCookies Map of session cookies (or null if not needed) * @param ignoreSec if true, SSL connection will always be trusted. * @return String of content fetched from URL. * * @deprecated As of release 1.13.0, replaced by {@link #getContentFromUrlAsString(String, java.util.Map, java.util.Map, boolean)} */ @Deprecated public static String getContentFromUrlAsString(String url, String proxyServer, int port, Map inCookies, Map outCookies, boolean ignoreSec) { return getContentFromUrlAsString(url, inCookies, outCookies, ignoreSec); } }
/* * Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the United States Government nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package gov.hhs.fha.nhinc.policyengine.adapter.pip;

import gov.hhs.fha.nhinc.common.nhinccommon.AddressType;
import gov.hhs.fha.nhinc.common.nhinccommon.CeType;
import gov.hhs.fha.nhinc.common.nhinccommon.PersonNameType;
import gov.hhs.fha.nhinc.common.nhinccommonadapter.BinaryDocumentPolicyCriterionType;
import gov.hhs.fha.nhinc.common.nhinccommonadapter.PatientPreferencesType;
import gov.hhs.fha.nhinc.common.nhinccommonadapter.PolicyCustodianInfoType;
import gov.hhs.fha.nhinc.common.nhinccommonadapter.PolicyDataEntererInfoType;
import gov.hhs.fha.nhinc.common.nhinccommonadapter.PolicyLegalAuthenticatorType;
import gov.hhs.fha.nhinc.common.nhinccommonadapter.PolicyOriginalAuthorInfoType;
import gov.hhs.fha.nhinc.common.nhinccommonadapter.PolicyPatientInfoType;
import gov.hhs.fha.nhinc.common.nhinccommonadapter.PolicyScannerAuthorInfoType;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants;
import gov.hhs.fha.nhinc.properties.PropertyAccessor;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBElement;
import javax.xml.datatype.XMLGregorianCalendar;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hl7.v3.ADExplicit;
import org.hl7.v3.ActClassClinicalDocument;
import org.hl7.v3.AdxpExplicitCity;
import org.hl7.v3.AdxpExplicitCountry;
import org.hl7.v3.AdxpExplicitPostalCode;
import org.hl7.v3.AdxpExplicitState;
import org.hl7.v3.AdxpExplicitStreetAddressLine;
import org.hl7.v3.BinaryDataEncoding;
import org.hl7.v3.CE;
import org.hl7.v3.CS;
import org.hl7.v3.EDExplicit;
import org.hl7.v3.EnExplicitFamily;
import org.hl7.v3.EnExplicitGiven;
import org.hl7.v3.EnExplicitPrefix;
import org.hl7.v3.EnExplicitSuffix;
import org.hl7.v3.II;
import org.hl7.v3.IVLTSExplicit;
import org.hl7.v3.IVXBTSExplicit;
import org.hl7.v3.ONExplicit;
import org.hl7.v3.ObjectFactory;
import org.hl7.v3.PNExplicit;
import org.hl7.v3.POCDMT000040AssignedAuthor;
import org.hl7.v3.POCDMT000040AssignedCustodian;
import org.hl7.v3.POCDMT000040AssignedEntity;
import org.hl7.v3.POCDMT000040Author;
import org.hl7.v3.POCDMT000040AuthoringDevice;
import org.hl7.v3.POCDMT000040ClinicalDocument;
import org.hl7.v3.POCDMT000040Component2;
import org.hl7.v3.POCDMT000040Custodian;
import org.hl7.v3.POCDMT000040CustodianOrganization;
import org.hl7.v3.POCDMT000040DataEnterer;
import org.hl7.v3.POCDMT000040DocumentationOf;
import org.hl7.v3.POCDMT000040InfrastructureRootTypeId;
import org.hl7.v3.POCDMT000040LegalAuthenticator;
import org.hl7.v3.POCDMT000040NonXMLBody;
import org.hl7.v3.POCDMT000040Organization;
import org.hl7.v3.POCDMT000040Patient;
import org.hl7.v3.POCDMT000040PatientRole;
import org.hl7.v3.POCDMT000040Person;
import org.hl7.v3.POCDMT000040RecordTarget;
import org.hl7.v3.POCDMT000040ServiceEvent;
import org.hl7.v3.SCExplicit;
import org.hl7.v3.STExplicit;
import org.hl7.v3.TSExplicit;

/**
 * This class creates CDA documents from BinaryDocumentPolicyCriteria objects.
 *
 * @author Les Westberg
 */
public class CdaPdfCreator {

    // Commons-logging logger; initialized via createLogger() from the constructor
    // so subclasses can substitute their own Log instance.
    protected Log log = null;

    private static final String HL7_DATE_ONLY_FORMAT = "yyyyMMdd";
    // NOTE(review): SimpleDateFormat is NOT thread-safe, yet these formatters are
    // shared static state. If any method in this class formats dates with them
    // from multiple threads, results can be corrupted - confirm usage and
    // consider per-call instances or thread confinement.
    private static final SimpleDateFormat oHL7DateOnlyFormatter = new SimpleDateFormat(HL7_DATE_ONLY_FORMAT);
    private static final String HL7_DATE_TIME_FORMAT = "yyyyMMddHHmmssZ";
    private static final SimpleDateFormat oHL7DateTimeFormatter = new SimpleDateFormat(HL7_DATE_TIME_FORMAT);

    /**
     * Default constructor.
     */
    public CdaPdfCreator() {
        // NOTE(review): calling the overridable createLogger() from the
        // constructor means a subclass override runs before the subclass is
        // fully initialized - kept for compatibility with existing subclasses.
        log = createLogger();
    }

    /**
     * Sets up the logger object. Returns the existing logger when one has
     * already been assigned; otherwise obtains one for this class.
     */
    protected Log createLogger() {
        return ((log != null) ? log : LogFactory.getLog(getClass()));
    }

    /**
     * This class creates an instance of an II with the given root and extension.
     *
     * @param sRoot The root value for the II object.
     * @param sExtension The extension for the II object.
     *
     * @return The II object that was constructed.
*/ private II createII(String sRoot, String sExtension) { II oII = new II(); boolean bHaveData = false; if (sRoot != null) { oII.setRoot(sRoot); bHaveData = true; } if (sExtension != null) { oII.setExtension(sExtension); bHaveData = true; } if (bHaveData) { return oII; } else { return null; } } /** * Create the template ID tag. * * @return The Template ID tag. */ private II createTemplateId() { return createII(CDAConstants.TEMPLATE_ID_ROOT_XDS_SD_DOCUMENT, null); } /** * This returns the home community ID from the gateway.properties file. * * @return The home community ID. * @throws AdapterPIPException This exception is thrown if there is an error. */ private String getHomeCommunityId() throws AdapterPIPException { String sHomeCommunityId = null; try { sHomeCommunityId = PropertyAccessor.getInstance().getProperty(NhincConstants.GATEWAY_PROPERTY_FILE, NhincConstants.HOME_COMMUNITY_ID_PROPERTY); } catch (Exception e) { String sErrorMessage = "Failed to retrieve home community ID from gateway properties file. Error: " + e.getMessage(); log.error(sErrorMessage, e); throw new AdapterPIPException(sErrorMessage, e); } return sHomeCommunityId; } /** * Creates the ID tag for the CDA document. * * @return The ID tag for the CDA document. * @throws AdapterPIPException This exception is thrown if any error occurs. */ private II createId(String sDocumentUniqueId) throws AdapterPIPException { String sHomeCommunityId = getHomeCommunityId(); return createII(sHomeCommunityId, sDocumentUniqueId); } /** * This creates the type ID tag and returns it. * * @return The TypeId tag. */ private POCDMT000040InfrastructureRootTypeId createTypeId() { POCDMT000040InfrastructureRootTypeId oTypeId = new POCDMT000040InfrastructureRootTypeId(); oTypeId.setExtension(CDAConstants.TYPE_ID_EXTENSION_POCD_HD000040); oTypeId.setRoot(CDAConstants.TYPE_ID_ROOT); return oTypeId; } /** * Transform the CE into an HL7 CE type. * * @param oCe The policy representation of the CE. 
* @return The HL7 representation of the CE. */ private CE createCode(CeType oCe) { CE oHl7Ce = new CE(); boolean bHaveData = false; if (oCe != null) { if (oCe.getCode() != null) { oHl7Ce.setCode(oCe.getCode()); bHaveData = true; } if (oCe.getDisplayName() != null) { oHl7Ce.setDisplayName(oCe.getDisplayName()); bHaveData = true; } if (oCe.getCodeSystem() != null) { oHl7Ce.setCodeSystem(oCe.getCodeSystem()); bHaveData = true; } if (oCe.getCodeSystemName() != null) { oHl7Ce.setCodeSystemName(oCe.getCodeSystemName()); bHaveData = true; } } // if (oCE != null) if (bHaveData) { return oHl7Ce; } else { return null; } } /** * This creates an STExplicit with the given string value. * * @param sValue The value to use when creating the node. * @return The STExplicit object containing the value. */ private STExplicit createST(String sValue) { STExplicit oHl7St = new STExplicit(); boolean bHaveData = false; if (sValue != null) { oHl7St.getContent().add(sValue); bHaveData = true; } if (bHaveData) { return oHl7St; } else { return null; } } /** * This method transforms the given XMLDate into an HL7 date. * * @param sHL7Date The date in HL7 format. * @return The HL7 date. */ private TSExplicit createTS(String sHL7Date) { TSExplicit oHL7Ts = new TSExplicit(); boolean bHaveData = false; if (sHL7Date != null) { oHL7Ts.setValue(sHL7Date); bHaveData = true; } if (bHaveData) { return oHL7Ts; } else { return null; } } /** * Transform the Ce into an HL7 CS type. * * @param oCe The policy representation of the CE. * @return The HL7 representation of the CS. 
*/ private CS createCS(CeType oCs) { CS oHl7Cs = new CS(); boolean bHaveData = false; if (oCs != null) { if (oCs.getCode() != null) { oHl7Cs.setCode(oCs.getCode()); bHaveData = true; } if (oCs.getDisplayName() != null) { oHl7Cs.setDisplayName(oCs.getDisplayName()); bHaveData = true; } if (oCs.getCodeSystem() != null) { oHl7Cs.setCodeSystem(oCs.getCodeSystem()); bHaveData = true; } if (oCs.getCodeSystemName() != null) { oHl7Cs.setCodeSystemName(oCs.getCodeSystemName()); bHaveData = true; } } // if (oCs != null) if (bHaveData) { return oHl7Cs; } else { return null; } } /** * This method creates a CS using the given code. * * @param sCode The code to put into the CS. * @return The CS object to be returned.. */ private CS createCS(String sCode) { CS oHL7Cs = new CS(); boolean bHaveData = false; if (sCode != null) { oHL7Cs.setCode(sCode); bHaveData = true; } if (bHaveData) { return oHL7Cs; } else { return null; } } /** * This creates an HL7 address from an AddressType object. * * @param oAddress The address to get the information from. * @return The HL7 address to be returned. 
*/ private ADExplicit createAD(AddressType oAddress) { ADExplicit oHL7Ad = new ADExplicit(); boolean bHaveData = false; org.hl7.v3.ObjectFactory oObjectFactory = new ObjectFactory(); if (oAddress != null) { // Street // ------- if (oAddress.getStreetAddress() != null) { AdxpExplicitStreetAddressLine oHL7StreetAddressLine = new AdxpExplicitStreetAddressLine(); oHL7StreetAddressLine.setContent(oAddress.getStreetAddress()); JAXBElement<AdxpExplicitStreetAddressLine> oElement = oObjectFactory .createADExplicitStreetAddressLine(oHL7StreetAddressLine); oHL7Ad.getContent().add(oElement); bHaveData = true; } // City // ------ if (oAddress.getCity() != null) { AdxpExplicitCity oHL7City = new AdxpExplicitCity(); oHL7City.setContent(oAddress.getCity()); JAXBElement<AdxpExplicitCity> oElement = oObjectFactory.createADExplicitCity(oHL7City); oHL7Ad.getContent().add(oElement); bHaveData = true; } // State // ------ if (oAddress.getState() != null) { AdxpExplicitState oHL7State = new AdxpExplicitState(); oHL7State.setContent(oAddress.getState()); JAXBElement<AdxpExplicitState> oElement = oObjectFactory.createADExplicitState(oHL7State); oHL7Ad.getContent().add(oElement); bHaveData = true; } // Zip Code // ---------- if (oAddress.getZipCode() != null) { AdxpExplicitPostalCode oHL7Zipcode = new AdxpExplicitPostalCode(); oHL7Zipcode.setContent(oAddress.getZipCode()); JAXBElement<AdxpExplicitPostalCode> oElement = oObjectFactory.createADExplicitPostalCode(oHL7Zipcode); oHL7Ad.getContent().add(oElement); bHaveData = true; } // Country // -------- if (oAddress.getCountry() != null) { AdxpExplicitCountry oHL7Country = new AdxpExplicitCountry(); oHL7Country.setContent(oAddress.getCountry()); JAXBElement<AdxpExplicitCountry> oElement = oObjectFactory.createADExplicitCountry(oHL7Country); oHL7Ad.getContent().add(oElement); bHaveData = true; } } // if (oAddress != null) if (bHaveData) { return oHL7Ad; } else { return null; } } /** * This creates an HL7 PN from a PersonNameType object. 
* * @param oName The name to get the information from. * @return The HL7 PN to be returned. */ private PNExplicit createPN(PersonNameType oName) { PNExplicit oHL7Pn = new PNExplicit(); boolean bHaveData = false; org.hl7.v3.ObjectFactory oObjectFactory = new ObjectFactory(); if (oName != null) { // Prefix // ------- if (oName.getPrefix() != null) { EnExplicitPrefix oHL7Prefix = new EnExplicitPrefix(); oHL7Prefix.setContent(oName.getPrefix()); JAXBElement<EnExplicitPrefix> oElement = oObjectFactory.createENExplicitPrefix(oHL7Prefix); oHL7Pn.getContent().add(oElement); bHaveData = true; } // Given // ------ if (oName.getGivenName() != null) { EnExplicitGiven oHL7Given = new EnExplicitGiven(); oHL7Given.setContent(oName.getGivenName()); JAXBElement<EnExplicitGiven> oElement = oObjectFactory.createENExplicitGiven(oHL7Given); oHL7Pn.getContent().add(oElement); bHaveData = true; } // Family // -------- if (oName.getFamilyName() != null) { EnExplicitFamily oHL7Family = new EnExplicitFamily(); oHL7Family.setContent(oName.getFamilyName()); JAXBElement<EnExplicitFamily> oElement = oObjectFactory.createENExplicitFamily(oHL7Family); oHL7Pn.getContent().add(oElement); bHaveData = true; } // Suffix // ---------- if (oName.getSuffix() != null) { EnExplicitSuffix oHL7Suffix = new EnExplicitSuffix(); oHL7Suffix.setContent(oName.getSuffix()); JAXBElement<EnExplicitSuffix> oElement = oObjectFactory.createENExplicitSuffix(oHL7Suffix); oHL7Pn.getContent().add(oElement); bHaveData = true; } } // if (oName != null) if (bHaveData) { return oHL7Pn; } else { return null; } } /** * This method creates a Record Target from the given data fields. * * @param sAssigningAuthority The assigning authority for the patient ID. * @param sPatientId The patient ID. * @param oPatientInfo The patient information from the criterion object. * @return The RecordTarget object. 
     */
    private POCDMT000040RecordTarget createRecordTarget(String sAssigningAuthority, String sPatientId,
        PolicyPatientInfoType oPatientInfo) throws AdapterPIPException {
        // NOTE(review): AdapterPIPException is declared but nothing in this body
        // visibly throws it - kept for interface compatibility.
        POCDMT000040RecordTarget oRecordTarget = new POCDMT000040RecordTarget();
        boolean bHaveData = false;

        POCDMT000040PatientRole oPatientRole = new POCDMT000040PatientRole();
        oRecordTarget.setPatientRole(oPatientRole);

        // Patient Assigning Authority and ID
        // ------------------------------------
        II oII = createII(sAssigningAuthority, sPatientId);
        if (oII != null) {
            oPatientRole.getId().add(oII);
            bHaveData = true;
        }

        if (oPatientInfo != null) {
            // Patient Address(es) - each non-empty address becomes one <addr>.
            // ----------------
            if ((oPatientInfo.getAddr() != null) && (oPatientInfo.getAddr().getAddress() != null)
                && (oPatientInfo.getAddr().getAddress().size() > 0)) {
                for (AddressType oAddress : oPatientInfo.getAddr().getAddress()) {
                    ADExplicit oHL7Address = createAD(oAddress);
                    if (oHL7Address != null) {
                        oPatientRole.getAddr().add(oHL7Address);
                        bHaveData = true;
                    }
                }   // for (AddressType oAddress : oPatientInfo.getAddr().getAddress())
            }   // if ((oPatientInfo.getAddr() != null) &&

            // Fill in the patient tag. It is only attached to the patient role
            // when at least one of name/gender/birth time is present.
            // -------------------------
            POCDMT000040Patient oPatientTag = new POCDMT000040Patient();
            boolean bHavePatientTag = false;

            // Patient Name
            // -------------
            if (oPatientInfo.getName() != null) {
                PNExplicit oHL7Name = createPN(oPatientInfo.getName());
                if (oHL7Name != null) {
                    oPatientTag.getName().add(oHL7Name);
                    bHavePatientTag = true;
                }
            }

            // Gender
            // --------
            if (oPatientInfo.getGender() != null) {
                CE oHL7Ce = createCode(oPatientInfo.getGender());
                if (oHL7Ce != null) {
                    oPatientTag.setAdministrativeGenderCode(oHL7Ce);
                    bHavePatientTag = true;
                }
            }

            // BirthTime
            // -----------
            if (oPatientInfo.getBirthTime() != null) {
                TSExplicit oHL7BirthTime = createTS(oPatientInfo.getBirthTime());
                if (oHL7BirthTime != null) {
                    oPatientTag.setBirthTime(oHL7BirthTime);
                    bHavePatientTag = true;
                }
            }

            if (bHavePatientTag) {
                oPatientRole.setPatient(oPatientTag);
                bHaveData = true;
            }
        }   // if (oPatientInfo != null)

        // Null when neither an ID nor any patient detail was supplied.
        if (bHaveData) {
            return oRecordTarget;
        } else {
            return null;
        }
    }

    /**
     * Create an HL7 ON from the given name. It will be placed into a single string value in the object.
     *
     * @param sOrgName The name of the organization
     * @return The HL7 ON object that is returned, or null when the name is null.
     */
    private ONExplicit createON(String sOrgName) {
        ONExplicit oHL7On = new ONExplicit();
        boolean bHaveData = false;
        if (sOrgName != null) {
            oHL7On.getContent().add(sOrgName);
            bHaveData = true;
        }
        if (bHaveData) {
            return oHL7On;
        } else {
            return null;
        }
    }

    /**
     * This creates an HL7 organization object with the given data.
     *
     * @param sIdRoot The root attribute for the ID tag.
     * @param sIdExtension The extension attribute for the ID tag.
     * @param sOrgName The name of the organization.
     * @param oAddress The address of the organization.
     * @return The HL7 organization, or null when no field was supplied.
     */
    private POCDMT000040Organization createOrganization(String sIdRoot, String sIdExtension, String sOrgName,
        AddressType oAddress) {
        POCDMT000040Organization oHL7Org = new POCDMT000040Organization();
        boolean bHaveData = false;

        // ID
        II oId = createII(sIdRoot, sIdExtension);
        if (oId != null) {
            oHL7Org.getId().add(oId);
            bHaveData = true;
        }

        // Name
        ONExplicit oName = createON(sOrgName);
        if (oName != null) {
            oHL7Org.getName().add(oName);
            bHaveData = true;
        }

        // Address
        ADExplicit oHL7Addr = createAD(oAddress);
        if (oHL7Addr != null) {
            oHL7Org.getAddr().add(oHL7Addr);
            bHaveData = true;
        }

        if (bHaveData) {
            return oHL7Org;
        } else {
            return null;
        }
    }

    /**
     * Create an HL7 Author from the information in the given author object.
     *
     * @param oAuthor The author information to use when creating the HL7 object.
     * @return The HL7 author object, or null when no field was supplied.
     */
    private POCDMT000040Author createAuthorOriginal(PolicyOriginalAuthorInfoType oAuthor) {
        POCDMT000040Author oHL7Author = new POCDMT000040Author();
        boolean bHaveData = false;

        // Template ID - the "original author" template root; counts as data,
        // unlike the data-enterer variant below.
        // -------------
        II oTemplateId = createII(CDAConstants.TEMPLATE_ID_ROOT_AUTHOR_ORIGINAL, null);
        if (oTemplateId != null) {
            oHL7Author.getTemplateId().add(oTemplateId);
            bHaveData = true;
        }

        if (oAuthor != null) {
            // Time
            // ------
            TSExplicit oTime = createTS(oAuthor.getAuthorTime());
            if (oTime != null) {
                oHL7Author.setTime(oTime);
                bHaveData = true;
            }

            // Assigned Author
            // ----------------
            POCDMT000040AssignedAuthor oAssignedAuthor = new POCDMT000040AssignedAuthor();
            boolean bHaveAssignedAuthorData = false;

            // Author ID
            // ----------
            II oAuthorId = createII(oAuthor.getAuthorIdAssigningAuthority(), oAuthor.getAuthorId());
            if (oAuthorId != null) {
                oAssignedAuthor.getId().add(oAuthorId);
                bHaveAssignedAuthorData = true;
            }

            // Name
            // -----
            PNExplicit oName = createPN(oAuthor.getName());
            if (oName != null) {
                POCDMT000040Person oAssignedPerson = new POCDMT000040Person();
                oAssignedAuthor.setAssignedPerson(oAssignedPerson);
                oAssignedPerson.getName().add(oName);
                bHaveAssignedAuthorData = true;
            }

            // Represented Organization (no address for the "original" variant).
            // -----------------------------
            POCDMT000040Organization oRepOrg = createOrganization(
                oAuthor.getRepresentedOrganizationIdAssigningAuthority(),
                oAuthor.getRepresentedOrganizationId(), oAuthor.getRepresentedOrganizationName(), null);
            if (oRepOrg != null) {
                oAssignedAuthor.setRepresentedOrganization(oRepOrg);
                bHaveAssignedAuthorData = true;
            }

            if (bHaveAssignedAuthorData) {
                oHL7Author.setAssignedAuthor(oAssignedAuthor);
                bHaveData = true;
            }
        }

        if (bHaveData) {
            return oHL7Author;
        } else {
            return null;
        }
    }

    /**
     * Create an SC type with the given value.
     *
     * NOTE(review): method name "creteSC" is a typo for "createSC"; it is kept
     * because sibling methods in this file call it by this name.
     *
     * @param sValue The string value to place in the SC
     * @return The HL7 SC object containing the value.
     */
    private SCExplicit creteSC(String sValue) {
        SCExplicit oHL7Sc = new SCExplicit();
        boolean bHaveData = false;
        if (sValue != null) {
            oHL7Sc.getContent().add(sValue);
            bHaveData = true;
        }
        if (bHaveData) {
            return oHL7Sc;
        } else {
            return null;
        }
    }

    /**
     * Create an HL7 Authoring device node with the given data.
     *
     * @param oAuthoringDeviceCode The authoring device coded data - identifies the device.
     * @param sDeviceManufactureModelName The device manufacture model and name.
     * @param sDeviceSoftwareName The device software name and version.
* @return */ private POCDMT000040AuthoringDevice createAuthoringDevice(CeType oAuthoringDeviceCode, String sDeviceManufactureModelName, String sDeviceSoftwareName) { POCDMT000040AuthoringDevice oHL7AuthoringDevice = new POCDMT000040AuthoringDevice(); boolean bHaveData = false; // Code // ------ CE oHL7AuthoringDeviceCode = createCode(oAuthoringDeviceCode); if (oHL7AuthoringDeviceCode != null) { oHL7AuthoringDevice.setCode(oHL7AuthoringDeviceCode); bHaveData = true; } // Manufacture Model Name // ------------------------ SCExplicit oManufacturerModelName = creteSC(sDeviceManufactureModelName); if (oManufacturerModelName != null) { oHL7AuthoringDevice.setManufacturerModelName(oManufacturerModelName); bHaveData = true; } // Software Name // --------------- SCExplicit oSoftwareName = creteSC(sDeviceSoftwareName); if (oSoftwareName != null) { oHL7AuthoringDevice.setSoftwareName(oSoftwareName); bHaveData = true; } if (bHaveData) { return oHL7AuthoringDevice; } else { return null; } } /** * Create an HL7 Author (Scanner) from the information in the given author object. * * @param oAuthor The author information to use when creating the HL7 object. * @return The HL7 author object. 
     */
    private POCDMT000040Author createAuthorScanner(PolicyScannerAuthorInfoType oAuthor) {
        POCDMT000040Author oHL7Author = new POCDMT000040Author();
        boolean bHaveData = false;

        // Template ID - the "scanner author" template root; counts as data.
        // -------------
        II oTemplateId = createII(CDAConstants.TEMPLATE_ID_ROOT_AUTHOR_SCANNER, null);
        if (oTemplateId != null) {
            oHL7Author.getTemplateId().add(oTemplateId);
            bHaveData = true;
        }

        if (oAuthor != null) {
            // Time
            // ------
            TSExplicit oTime = createTS(oAuthor.getAuthorTime());
            if (oTime != null) {
                oHL7Author.setTime(oTime);
                bHaveData = true;
            }

            // Assigned Author
            // ----------------
            POCDMT000040AssignedAuthor oAssignedAuthor = new POCDMT000040AssignedAuthor();
            boolean bHaveAssignedAuthorData = false;

            // Author ID
            // ----------
            II oAuthorId = createII(oAuthor.getAuthorIdAssigningAuthority(), oAuthor.getAuthorId());
            if (oAuthorId != null) {
                oAssignedAuthor.getId().add(oAuthorId);
                bHaveAssignedAuthorData = true;
            }

            // Assigned Authoring Device (scanner variant uses a device, not a person).
            // ---------------------------
            POCDMT000040AuthoringDevice oAuthoringDevice = createAuthoringDevice(oAuthor.getAuthoringDevice(),
                oAuthor.getDeviceManufactureModelName(), oAuthor.getDeviceSoftwareName());
            if (oAuthoringDevice != null) {
                oAssignedAuthor.setAssignedAuthoringDevice(oAuthoringDevice);
                bHaveAssignedAuthorData = true;
            }

            // Represented Organization - note: no organization ID extension is
            // supplied here (null), unlike the "original author" variant, but an
            // address is included.
            // -----------------------------
            POCDMT000040Organization oRepOrg = createOrganization(
                oAuthor.getRepresentedOrganizationIdAssigningAuthority(), null,
                oAuthor.getRepresentedOrganizationName(), oAuthor.getRepresentedOrganizationAddress());
            if (oRepOrg != null) {
                oAssignedAuthor.setRepresentedOrganization(oRepOrg);
                bHaveAssignedAuthorData = true;
            }

            if (bHaveAssignedAuthorData) {
                oHL7Author.setAssignedAuthor(oAssignedAuthor);
                bHaveData = true;
            }
        }

        if (bHaveData) {
            return oHL7Author;
        } else {
            return null;
        }
    }

    /**
     * Create the HL7 data enterer information from the given data.
     *
     * @param oDataEnterer The data enterer information.
     * @return The HL7 Data enterer information.
 */
private POCDMT000040DataEnterer createDataEnterer(PolicyDataEntererInfoType oDataEnterer) {
    POCDMT000040DataEnterer oHL7DataEnterer = new POCDMT000040DataEnterer();
    boolean bHaveData = false;

    // Template ID - If the template ID is the only thing, we consider this object null...
    // (deliberately does NOT set bHaveData, unlike createAuthorScanner)
    // --------------------------------------------------------------------------------------
    II oTemplateId = createII(CDAConstants.TEMPLATE_ID_ROOT_DATA_ENTERER, null);
    if (oTemplateId != null) {
        oHL7DataEnterer.getTemplateId().add(oTemplateId);
    }

    if (oDataEnterer != null) {
        // Time
        // ------
        TSExplicit oTime = createTS(oDataEnterer.getDataEntererTime());
        if (oTime != null) {
            oHL7DataEnterer.setTime(oTime);
            bHaveData = true;
        }

        // Assigned Entity - only attached when at least one child element has data.
        POCDMT000040AssignedEntity oAssignedEntity = new POCDMT000040AssignedEntity();
        boolean bHaveAssignedEntityData = false;

        // Assigned Entity ID
        // --------------------
        II oId = createII(oDataEnterer.getDataEntererIdAssigningAuthority(),
            oDataEnterer.getDataEntererId());
        if (oId != null) {
            oAssignedEntity.getId().add(oId);
            bHaveAssignedEntityData = true;
        }

        // Assigned Person/Name
        // ----------------------
        PNExplicit oName = createPN(oDataEnterer.getName());
        if (oName != null) {
            oAssignedEntity.setAssignedPerson(new POCDMT000040Person());
            oAssignedEntity.getAssignedPerson().getName().add(oName);
            bHaveAssignedEntityData = true;
        }

        if (bHaveAssignedEntityData) {
            oHL7DataEnterer.setAssignedEntity(oAssignedEntity);
            bHaveData = true;
        }
    }

    if (bHaveData) {
        return oHL7DataEnterer;
    }
    else {
        return null;
    }
}

/**
 * Create the HL7 custodian from the given data.
 *
 * @param oCustodian The data to be transformed.
* @return The HL7 custodian object, */ private POCDMT000040Custodian createCustodian(PolicyCustodianInfoType oCustodian) { POCDMT000040Custodian oHL7Custodian = new POCDMT000040Custodian(); boolean bHaveData = false; oHL7Custodian.setAssignedCustodian(new POCDMT000040AssignedCustodian()); POCDMT000040CustodianOrganization oOrg = new POCDMT000040CustodianOrganization(); oHL7Custodian.getAssignedCustodian().setRepresentedCustodianOrganization(oOrg); if (oCustodian != null) { // Id // ---- II oId = createII(oCustodian.getOrganizationIdAssigningAuthority(), oCustodian.getOrganizationId()); if (oId != null) { oOrg.getId().add(oId); bHaveData = true; } // Name // ----- ONExplicit oName = createON(oCustodian.getOrganizationName()); if (oName != null) { oOrg.setName(oName); bHaveData = true; } // Address // -------- ADExplicit oAddr = createAD(oCustodian.getOrganizationAddress()); if (oAddr != null) { oOrg.setAddr(oAddr); bHaveData = true; } } if (bHaveData) { return oHL7Custodian; } else { return null; } } /** * Create an HL7 legal authenticator object from the given data. * * @param oLegalAuthenticator The legal authenticator data to be placd in the HL7 object. * @return The HL7 legal authenticator object to be returned. 
 */
private POCDMT000040LegalAuthenticator createLegalAuthenticator(PolicyLegalAuthenticatorType oLegalAuthenticator) {
    POCDMT000040LegalAuthenticator oHL7Auth = new POCDMT000040LegalAuthenticator();
    boolean bHaveData = false;

    if (oLegalAuthenticator != null) {
        // Time
        // ------
        TSExplicit oTime = createTS(oLegalAuthenticator.getAuthenticationTime());
        if (oTime != null) {
            oHL7Auth.setTime(oTime);
            bHaveData = true;
        }

        // Signature code
        // ---------------
        CS oSignatureCode = createCS(oLegalAuthenticator.getSignatureCode());
        if (oSignatureCode != null) {
            oHL7Auth.setSignatureCode(oSignatureCode);
            bHaveData = true;
        }

        // Assigned Entity - only attached when at least one child element has data.
        POCDMT000040AssignedEntity oAssignedEntity = new POCDMT000040AssignedEntity();
        boolean bHaveAssignedEntityData = false;

        // Assigned Entity ID
        // --------------------
        II oId = createII(oLegalAuthenticator.getAuthenticatorIdAssigningAuthority(),
            oLegalAuthenticator.getAuthenticatorId());
        if (oId != null) {
            oAssignedEntity.getId().add(oId);
            bHaveAssignedEntityData = true;
        }

        // Assigned Person/Name
        // ----------------------
        PNExplicit oName = createPN(oLegalAuthenticator.getAuthenticatorPersonName());
        if (oName != null) {
            oAssignedEntity.setAssignedPerson(new POCDMT000040Person());
            oAssignedEntity.getAssignedPerson().getName().add(oName);
            bHaveAssignedEntityData = true;
        }

        if (bHaveAssignedEntityData) {
            oHL7Auth.setAssignedEntity(oAssignedEntity);
            bHaveData = true;
        }
    }

    if (bHaveData) {
        return oHL7Auth;
    }
    else {
        return null;
    }
}

/**
 * Create a time interval based on the given low and high date.
 *
 * @param sLowHL7Date The lower date of the range.
 * @param sHighHL7Date The higher date of the range.
 * @return The time interval that has been created.
 */
private IVLTSExplicit createIVLTS(String sLowHL7Date, String sHighHL7Date) {
    IVLTSExplicit oTimeInterval = new IVLTSExplicit();
    boolean bHaveData = false;
    // JAXB object factory used to wrap the low/high bounds in their elements.
    org.hl7.v3.ObjectFactory oFactory = new ObjectFactory();

    // Low bound - only added when createTS yields a non-empty value.
    TSExplicit oTSLowDate = createTS(sLowHL7Date);
    if ((oTSLowDate != null) &&
        (oTSLowDate.getValue() != null) &&
        (oTSLowDate.getValue().length() > 0)) {
        IVXBTSExplicit oLowDateElement = new IVXBTSExplicit();
        oLowDateElement.setValue(oTSLowDate.getValue());
        JAXBElement<IVXBTSExplicit> oLowDateJaxbElement =
            oFactory.createIVLTSExplicitLow(oLowDateElement);
        oTimeInterval.getContent().add(oLowDateJaxbElement);
        bHaveData = true;
    }

    // High bound - same empty-value filtering as the low bound.
    TSExplicit oTSHighDate = createTS(sHighHL7Date);
    if ((oTSHighDate != null) &&
        (oTSHighDate.getValue() != null) &&
        (oTSHighDate.getValue().length() > 0)) {
        IVXBTSExplicit oHighDateElement = new IVXBTSExplicit();
        oHighDateElement.setValue(oTSHighDate.getValue());
        JAXBElement<IVXBTSExplicit> oHighDateJaxbElement =
            oFactory.createIVLTSExplicitHigh(oHighDateElement);
        oTimeInterval.getContent().add(oHighDateJaxbElement);
        bHaveData = true;
    }

    // An interval with neither bound collapses to null so the tag can be omitted.
    if (bHaveData) {
        return oTimeInterval;
    }
    else {
        return null;
    }
}

/**
 * Create the DocumentationOf tag with the given data.
 *
 * @param oCriterion The object containing the data to be in the documentation of tag.
 * @return The HL7 documentation of object that was created.
*/ private POCDMT000040DocumentationOf createDocumentationOf(BinaryDocumentPolicyCriterionType oCriterion) { POCDMT000040DocumentationOf oDocOf = new POCDMT000040DocumentationOf(); oDocOf.getTypeCode().add(CDAConstants.DOCUMENTATION_OF_TYPE_CODE); POCDMT000040ServiceEvent oServiceEvent = new POCDMT000040ServiceEvent(); oDocOf.setServiceEvent(oServiceEvent); oServiceEvent.getMoodCode().add(CDAConstants.SERVICE_EVENT_MOOD_CODE_EVENT); oServiceEvent.getClassCode().add(CDAConstants.SERVICE_EVENT_CLASS_CODE_ACT); II oTemplateId = createII(CDAConstants.SERVICE_EVENT_TEMPLATE_ID_ROOT, null); oServiceEvent.getTemplateId().add(oTemplateId); CE oHL7ConsentCode = new CE(); oHL7ConsentCode.setCode(CDAConstants.CONSENT_CODE_YES); oHL7ConsentCode.setDisplayName(CDAConstants.CONSENT_CODE_YES_DISPLAY_NAME); oHL7ConsentCode.setCodeSystem(CDAConstants.SNOMED_CT_CODE_SYSTEM); oHL7ConsentCode.setCodeSystemName(CDAConstants.SNOMED_CT_CODE_SYSTEM_DISPLAY_NAME); oServiceEvent.setCode(oHL7ConsentCode); IVLTSExplicit oTimeInterval = createIVLTS(oCriterion.getStartDate(), oCriterion.getEndDate()); oServiceEvent.setEffectiveTime(oTimeInterval); return oDocOf; // Always return one of these - even if it is only with our default values. } /** * Create the component tag. * * @param oCriterion The information to be used in the object. * @return The component object that was created. 
 */
private POCDMT000040Component2 createComponent(BinaryDocumentPolicyCriterionType oCriterion) {
    POCDMT000040Component2 oComponent = new POCDMT000040Component2();
    boolean bHaveData = false;

    // The component always carries a non-XML body whose text element is
    // declared base64-encoded.
    POCDMT000040NonXMLBody oNonXMLBody = new POCDMT000040NonXMLBody();
    oComponent.setNonXMLBody(oNonXMLBody);
    EDExplicit oED = new EDExplicit();
    oNonXMLBody.setText(oED);
    oED.setRepresentation(BinaryDataEncoding.B_64);

    // MediaType
    // -----------
    if (oCriterion.getMimeType() != null) {
        oED.setMediaType(oCriterion.getMimeType());
        bHaveData = true;
    }

    // Content
    // --------
    if ((oCriterion.getBinaryDocument() != null) &&
        (oCriterion.getBinaryDocument().length > 0)) {
        // NOTE(review): converts the bytes using the platform default charset;
        // presumably the bytes already contain base64 text given the B_64
        // representation above - TODO confirm with the upstream producer.
        String sBinaryDocument = new String(oCriterion.getBinaryDocument());
        oED.getContent().add(sBinaryDocument);
        bHaveData = true;
    }

    if (bHaveData) {
        return oComponent;
    }
    else {
        return null;
    }
}

/**
 * This method creates a single CDA document from the given BinaryDocumentPolicyCriterionType.
 *
 * @param oPtPref This contains the patient preference information. There is some information in this that is common
 *                to each of the criterion that need to be available when we create the CDA document.
 * @param oCriterion The binary document criterion containing the data.
 * @return The CDA document returned.
 * @throws AdapterPIPException This exception is thrown if there are any errors.
 */
public POCDMT000040ClinicalDocument createCDA(PatientPreferencesType oPtPref,
        BinaryDocumentPolicyCriterionType oCriterion) throws AdapterPIPException {
    POCDMT000040ClinicalDocument oCda = new POCDMT000040ClinicalDocument();
    boolean bHaveData = false;

    if (oCriterion != null) {
        bHaveData = true;

        // Class code and mood code
        // -------------------------
        oCda.setClassCode(ActClassClinicalDocument.DOCCLIN);
        oCda.getMoodCode().add(CDAConstants.CDA_MOOD_CODE);

        // Type ID
        // ---------
        oCda.setTypeId(createTypeId());

        // Template ID
        // -------------
        oCda.getTemplateId().add(createTemplateId());

        // ID
        // ---
        oCda.setId(createId(oCriterion.getDocumentUniqueId()));

        // Code
        // -----
        oCda.setCode(createCode(oCriterion.getDocumentTypeCode()));

        // Title
        // -------
        oCda.setTitle(createST(oCriterion.getDocumentTitle()));

        // EffectiveTime
        // ---------------
        oCda.setEffectiveTime(createTS(oCriterion.getEffectiveTime()));

        // ConfidentialityCode
        // ---------------------
        oCda.setConfidentialityCode(createCode(oCriterion.getConfidentialityCode()));

        // Language Code
        // --------------
        oCda.setLanguageCode(createCS(CDAConstants.LANGUAGE_CODE_ENGLISH));

        // Record Target
        // ---------------
        // NOTE(review): oPtPref is dereferenced here without a null check -
        // callers appear to always pass a non-null preference object; verify.
        oCda.getRecordTarget().add(
            createRecordTarget(oPtPref.getAssigningAuthority(), oPtPref.getPatientId(),
                oCriterion.getPatientInfo()));

        // Author (Original)
        // ------------------
        oCda.getAuthor().add(createAuthorOriginal(oCriterion.getAuthorOriginal()));

        // Author (Scanner)
        // ------------------
        oCda.getAuthor().add(createAuthorScanner(oCriterion.getAuthorScanner()));

        // Data Enterer
        // --------------
        oCda.setDataEnterer(createDataEnterer(oCriterion.getDataEnterer()));

        // Custodian
        // ----------
        oCda.setCustodian(createCustodian(oCriterion.getCustodian()));

        // Legal Authenticator
        // ---------------------
        oCda.setLegalAuthenticator(createLegalAuthenticator(oCriterion.getLegalAuthenticator()));

        // Documentation Of
        // ------------------
        oCda.getDocumentationOf().add(createDocumentationOf(oCriterion));

        // Component
        // ----------
        oCda.setComponent(createComponent(oCriterion));
    }   // if (oCriterion != null)

    if (bHaveData) {
        return oCda;
    }
    else {
        return null;
    }
}

/**
 * This method creates a set of CDA documents from the given BinaryDocumentPolicyCriterion objects.
 *
 * @param oPtPref The patient preferences information for the CDA.
 * @return The list of CDA documents returned, or null when no document could be created.
 * @throws AdapterPIPException This is thrown if any exception occurs in the process.
 */
public List<POCDMT000040ClinicalDocument> createCDA(PatientPreferencesType oPtPref)
        throws AdapterPIPException {
    ArrayList<POCDMT000040ClinicalDocument> olCda = new ArrayList<POCDMT000040ClinicalDocument>();

    // Build one CDA per binary document policy criterion, skipping empty results.
    if ((oPtPref != null) &&
        (oPtPref.getBinaryDocumentPolicyCriteria() != null) &&
        (oPtPref.getBinaryDocumentPolicyCriteria().getBinaryDocumentPolicyCriterion() != null) &&
        (oPtPref.getBinaryDocumentPolicyCriteria().getBinaryDocumentPolicyCriterion().size() > 0)) {
        for (BinaryDocumentPolicyCriterionType oCriterion : oPtPref.getBinaryDocumentPolicyCriteria()
                .getBinaryDocumentPolicyCriterion()) {
            POCDMT000040ClinicalDocument oCda = createCDA(oPtPref, oCriterion);
            if (oCda != null) {
                olCda.add(oCda);
            }
        }
    }

    if (olCda.size() > 0) {
        return olCda;
    }
    else {
        return null;
    }
}
}
/* * Copyright (c) 2012-2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.developerstudio.eclipse.esb.project.provider; import java.io.File; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.eclipse.core.resources.IFile; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.Path; import org.eclipse.core.runtime.content.IContentDescription; import org.eclipse.core.runtime.content.IContentType; import org.eclipse.jface.action.Action; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.TreeSelection; import org.eclipse.ui.IActionBars; import org.eclipse.ui.IWorkbenchPage; import org.eclipse.ui.IWorkbenchWindow; import org.eclipse.ui.PlatformUI; import org.eclipse.ui.navigator.CommonActionProvider; import org.eclipse.ui.navigator.ICommonActionConstants; import org.eclipse.ui.navigator.ICommonActionExtensionSite; import org.eclipse.ui.part.FileEditorInput; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.wso2.developerstudio.eclipse.esb.project.Activator; import org.wso2.developerstudio.eclipse.gmf.esb.ArtifactType; import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog; import 
org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.eclipse.platform.ui.editor.Openable;
import org.wso2.developerstudio.eclipse.platform.ui.startup.ESBGraphicalEditor;
import org.wso2.developerstudio.eclipse.utils.file.FileUtils;

/**
 * Custom NavigatorActionProvider for handling editor switching for ESB files
 */
public class NavigatorActionProvider extends CommonActionProvider {

    private static IDeveloperStudioLog log = Logger.getLog(Activator.PLUGIN_ID);

    // Synapse artifact type names returned by getType() below.
    private static final String SEQUENCE = "sequence";
    private static final String ENDPOINT = "endpoint";
    private static final String PROXY = "proxy";
    private static final String LOCAL_ENTRY = "localentry";
    private static final String TEMPLATE = "template";
    private static final String TASK = "task";
    private static final String API = "api";
    private static final String MESSAGE_STORE = "messageStore";
    private static final String MESSAGE_PROCESSOR = "messageProcessor";
    private static final String INBOUND_ENDPOINT = "inboundEndpoint";

    private OpenEditorAction openEditorAction;

    // Maps ESB content-type ids to file-name prefixes.
    // NOTE(review): not referenced anywhere in this class - possibly dead; verify before removal.
    private static Map<String, String> prefixMap = new HashMap<String, String>();
    static {
        prefixMap.put("application/vnd.wso2.sequence", "sequence_");
        prefixMap.put("application/vnd.wso2.esb.endpoint", "endpoint_");
        prefixMap.put("application/vnd.wso2.esb.localentry", "localentry_");
    }

    /**
     * Registers the custom open action for the currently selected file when the
     * selection is a single IFile with the ESB synapse-config content type.
     */
    @Override
    public void fillActionBars(IActionBars actionBars) {
        IStructuredSelection selection = (IStructuredSelection) getContext().getSelection();
        if (selection instanceof TreeSelection) {
            TreeSelection treeSelection = (TreeSelection) selection;
            Object firstElement = treeSelection.getFirstElement();
            if (firstElement instanceof IFile) {
                IFile file = (IFile) firstElement;
                try {
                    IContentDescription contentDescription = file.getContentDescription();
                    if (contentDescription != null) {
                        IContentType contentType = contentDescription.getContentType();
                        if (contentType.getId() != null) {
                            if ("org.wso2.developerstudio.eclipse.esb.contenttype.esbconfxml"
                                    .equals(contentType.getId())) {
                                openEditorAction.setSelection(file);
                                actionBars.setGlobalActionHandler(ICommonActionConstants.OPEN,
                                        openEditorAction);
                            }
                        }
                    }
                } catch (CoreException e) {
                    /* ignore */ // best-effort: leave the default OPEN handler in place
                }
            }
        }
    }

    @Override
    public void init(ICommonActionExtensionSite aSite) {
        super.init(aSite);
        openEditorAction = new OpenEditorAction();
    }

    /** Action that opens the selected ESB artifact in the appropriate editor. */
    private static class OpenEditorAction extends Action {

        // File picked up from the navigator selection (set via setSelection).
        private IFile selection;

        @Override
        public void run() {
            IFile fileTobeOpen = null;
            String synFilePath = selection.getFullPath().toOSString();
            // Normalize Windows separators to forward slashes.
            synFilePath = synFilePath.replaceAll(Pattern.quote("\\"), "/");
            IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow();
            IWorkbenchPage page = window.getActivePage();
            try {
                String[] type = getType(selection.getLocation().toOSString());
                if (type.length == 0 || "full-synapse".equals(type[0])) {
                    // Unknown artifact or a full synapse config: open the plain ESB editor.
                    fileTobeOpen = selection.getWorkspace().getRoot().getFile(new Path(synFilePath));
                    page.openEditor(new FileEditorInput(fileTobeOpen),
                            "org.wso2.developerstudio.eclipse.esb.presentation.EsbEditor");
                } else {
                    // Known artifact type: open in the graphical editor with the type hint.
                    String location = selection.getLocation().toOSString();
                    String source = FileUtils.getContentAsString(new File(location));
                    String name = selection.getName();
                    String fullPath = selection.getFullPath().removeLastSegments(1).toOSString() + "/";
                    Openable openable = ESBGraphicalEditor.getOpenable();
                    openable.editorOpen(name, type[0], fullPath, source);
                }
            } catch (Exception e) {
                log.error("Can't open " + fileTobeOpen, e);
            }
        }

        public void setSelection(IFile selection) {
            this.selection = selection;
        }

        /*
         * This method will return the type of the artifact(sequence, endpoint etc.)
         * by reading the .xml file
         */
        private String[] getType(String path) throws Exception {
            List<String> lines;
            // NOTE(review): the factory is used with default settings (DTDs/external
            // entities not disabled); input is local workspace XML - confirm acceptable.
            DocumentBuilderFactory dbFactory = DocumentBuilderFactory
                    .newInstance();
            DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
            /*
             * Here we are using two approaches to get the correct synapse type
             * from the .xml file.
             * 1. 
Use org.apache.commons.io.FileUtils.readLines(File) to read xml
             * basically and identify the correct type.
             * 2. If further investigation required other than step 1, use dom parser to parse
             * whole xml to get nested informations for evaluate for advanced
             * types such as main_sequence, template.endpoints etc.
             *
             * 'type' is required in this stage due to architectural problem. So
             * we have to get-rid of following code after fixing the
             * architectural issues.
             */
            // NOTE(review): readLines(File) uses the platform default charset; the
            // scanned tag names are ASCII so this presumably works - TODO confirm.
            lines = org.apache.commons.io.FileUtils.readLines(new File(path));
            int lineCount = lines.size();
            // Scan line by line for the first recognizable artifact tag.
            for (int i = 0; i < lineCount; ++i) {
                String currentLine = lines.get(i);
                if (currentLine.contains("<sequence")) {
                    // Distinguish the "main" sequence from ordinary sequences.
                    Document doc = dBuilder.parse(new File(path));
                    NodeList nodes = doc.getElementsByTagName("sequence");
                    if (nodes.item(0).getNodeType() == Node.ELEMENT_NODE) {
                        Element eElement = (Element) nodes.item(0);
                        if ("main".equals(eElement.getAttribute("name"))) {
                            return new String[] { "main_sequence", SEQUENCE };
                        } else {
                            return new String[] { "sequence", SEQUENCE };
                        }
                    }
                } else if (currentLine.contains("<proxy")) {
                    return new String[] { "proxy", PROXY };
                } else if (currentLine.contains("<endpoint")) {
                    // The endpoint sub-type is determined by its first recognized child.
                    Document doc = dBuilder.parse(new File(path));
                    Node node = doc.getElementsByTagName("endpoint").item(0);
                    if (node.getNodeType() == Node.ELEMENT_NODE) {
                        Element eElement = (Element) node;
                        NodeList endpointChildNodes = eElement.getChildNodes();
                        for (int j = 0; j < endpointChildNodes.getLength(); ++j) {
                            if ("default".equals(endpointChildNodes.item(j)
                                    .getNodeName())) {
                                return new String[] { "endpoint-0", ENDPOINT };
                            } else if ("address".equals(endpointChildNodes
                                    .item(j).getNodeName())) {
                                return new String[] { "endpoint-1", ENDPOINT };
                            } else if ("wsdl".equals(endpointChildNodes.item(j)
                                    .getNodeName())) {
                                return new String[] { "endpoint-2", ENDPOINT };
                            } else if ("loadbalance".equals(endpointChildNodes
                                    .item(j).getNodeName())) {
                                return new String[] { "endpoint-3", ENDPOINT };
                            } else if ("failover".equals(endpointChildNodes
                                    .item(j).getNodeName())) {
                                return new String[] { "endpoint-4", ENDPOINT };
                            } else if ("recipientlist"
                                    .equals(endpointChildNodes.item(j)
                                            .getNodeName())) {
                                return new String[] { "endpoint-5", ENDPOINT };
                            } else if ("http".equals(endpointChildNodes.item(j)
                                    .getNodeName())) {
                                return new String[] { "endpoint-6", ENDPOINT };
                            }
                        }
                        // No recognized child: a "template" attribute on the same
                        // line marks a template endpoint reference.
                        if (currentLine.contains("template")) {
                            return new String[] { "endpoint-7", ENDPOINT };
                        }
                    }
                } else if (currentLine.contains("<localEntry")) {
                    return new String[] { "localentry", LOCAL_ENTRY };
                } else if (currentLine.contains("<template")) {
                    // Templates wrap either a sequence or an endpoint definition.
                    Document doc = dBuilder.parse(new File(path));
                    Node template = doc.getElementsByTagName("template").item(0);
                    if (template.getNodeType() == Node.ELEMENT_NODE) {
                        Element eElement = (Element) template;
                        NodeList templateChildNodes = eElement.getChildNodes();
                        for (int j = 0; j < templateChildNodes.getLength(); ++j) {
                            if ("sequence".equals(templateChildNodes.item(j).getNodeName())) {
                                return new String[] { "template.sequence", TEMPLATE };
                            } else if ("endpoint".equals(templateChildNodes.item(j).getNodeName())) {
                                NodeList endpointChildNodes = templateChildNodes.item(j).getChildNodes();
                                for (int k = 0; k < endpointChildNodes.getLength(); ++k) {
                                    if ("default".equals(endpointChildNodes.item(k).getNodeName())) {
                                        return new String[] { ArtifactType.TEMPLATE_ENDPOINT_DEFAULT.getLiteral(), TEMPLATE };
                                    } else if ("address".equals(endpointChildNodes.item(k).getNodeName())) {
                                        return new String[] { ArtifactType.TEMPLATE_ENDPOINT_ADDRESS.getLiteral(), TEMPLATE };
                                    } else if ("wsdl".equals(endpointChildNodes.item(k).getNodeName())) {
                                        return new String[] { ArtifactType.TEMPLATE_ENDPOINT_WSDL.getLiteral(), TEMPLATE };
                                    } else if ("http".equals(endpointChildNodes.item(k).getNodeName())) {
                                        return new String[] { ArtifactType.TEMPLATE_ENDPOINT_HTTP.getLiteral(), TEMPLATE };
                                    }
                                }
                            }
                        }
                    }
                } else if (currentLine.contains("<task")) {
                    return new String[] { "task", TASK };
                } else if (currentLine.contains("<api")) {
                    return new String[] { "api", API };
                } else if (currentLine.contains("<messageStore")) {
                    return new String[] { MESSAGE_STORE, MESSAGE_STORE };
                } else if (currentLine.contains("<messageProcessor")) {
                    return new String[] { MESSAGE_PROCESSOR, MESSAGE_PROCESSOR };
                } else if (currentLine.contains("<inboundEndpoint")) {
                    return new String[] { INBOUND_ENDPOINT, INBOUND_ENDPOINT };
                } else if (currentLine.contains("<definitions")) {
                    return new String[] { "full-synapse", "full-synapse" };
                }
            }
            // Nothing recognized: caller treats this the same as "full-synapse".
            return new String[] {};
        }
    }
}
package org.batfish.representation.aws;

import static com.google.common.base.Preconditions.checkArgument;
import static org.batfish.representation.aws.Utils.checkNonNull;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.MoreObjects;
import java.io.Serializable;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;
import org.batfish.datamodel.HeaderSpace;
import org.batfish.datamodel.IpProtocol;
import org.batfish.datamodel.SubRange;
import org.batfish.representation.aws.LoadBalancer.Protocol;

/**
 * Represent a listener for an elastic load balancer v2
 * https://docs.aws.amazon.com/elasticloadbalancing/.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
@ParametersAreNonnullByDefault
public final class LoadBalancerListener implements AwsVpcEntity, Serializable {

  /** Action types that may appear in a listener's default actions. */
  enum ActionType {
    FORWARD,
    AUTHENTICATE_OIDC,
    AUTHENTICATE_COGNITO,
    REDIRECT,
    FIXED_RESPONSE,
  }

  /** One default action of a listener; immutable value object. */
  @JsonIgnoreProperties(ignoreUnknown = true)
  @ParametersAreNonnullByDefault
  public static class DefaultAction implements Serializable {

    /** Execution order; may be null when the rule has only one action. */
    @Nullable private final Integer _order;

    @Nonnull private final String _targetGroupArn;

    @Nonnull private final ActionType _type;

    @JsonCreator
    private static DefaultAction create(
        @Nullable @JsonProperty(JSON_KEY_ORDER) Integer order,
        @Nullable @JsonProperty(JSON_KEY_TARGET_GROUP_ARN) String targetGroupArn,
        @Nullable @JsonProperty(JSON_KEY_TYPE) String type) {
      checkNonNull(targetGroupArn, JSON_KEY_TARGET_GROUP_ARN, "Load balancer listener");
      // Fixed: report a missing "type" under its own JSON key (was JSON_KEY_TARGET_TYPE,
      // which produced a misleading error message).
      checkNonNull(type, JSON_KEY_TYPE, "Load balancer listener");
      // AWS spells action types like "fixed-response"; map to enum constants such as
      // FIXED_RESPONSE. Locale.ROOT keeps the upper-casing independent of the default
      // locale (e.g., the Turkish dotless-i rule would otherwise break the mapping).
      return new DefaultAction(
          order,
          targetGroupArn,
          ActionType.valueOf(type.toUpperCase(Locale.ROOT).replace('-', '_')));
    }

    DefaultAction(@Nullable Integer order, String targetGroupArn, ActionType type) {
      _order = order;
      _targetGroupArn = targetGroupArn;
      _type = type;
    }

    @Nullable
    public Integer getOrder() {
      return _order;
    }

    @Nonnull
    public String getTargetGroupArn() {
      return _targetGroupArn;
    }

    @Nonnull
    public ActionType getType() {
      return _type;
    }

    @Override
    public boolean equals(@Nullable Object o) {
      if (this == o) {
        return true;
      }
      if (!(o instanceof DefaultAction)) {
        return false;
      }
      DefaultAction that = (DefaultAction) o;
      return Objects.equals(_order, that._order)
          && _targetGroupArn.equals(that._targetGroupArn)
          && _type == that._type;
    }

    @Override
    public int hashCode() {
      return Objects.hash(_order, _targetGroupArn, _type);
    }
  }

  /** One listener of the load balancer: its protocol/port and default actions. */
  @JsonIgnoreProperties(ignoreUnknown = true)
  @ParametersAreNonnullByDefault
  public static class Listener implements AwsVpcEntity, Serializable {

    // Made final: assigned only in the constructor.
    @Nonnull private final String _listenerArn;

    @Nonnull private final List<DefaultAction> _defaultActions;

    @Nonnull private final Protocol _protocol;

    private final int _port;

    @JsonCreator
    private static Listener create(
        @Nullable @JsonProperty(JSON_KEY_LISTENER_ARN) String listenerArn,
        @Nullable @JsonProperty(JSON_KEY_DEFAULT_ACTIONS) List<DefaultAction> defaultActions,
        @Nullable @JsonProperty(JSON_KEY_PROTOCOL) String protocol,
        @Nullable @JsonProperty(JSON_KEY_PORT) Integer port) {
      checkNonNull(listenerArn, JSON_KEY_LISTENER_ARN, "LoadBalancer listener");
      checkNonNull(defaultActions, JSON_KEY_DEFAULT_ACTIONS, "LoadBalancer listener");
      checkNonNull(protocol, JSON_KEY_PROTOCOL, "LoadBalancer listener");
      checkNonNull(port, JSON_KEY_PORT, "LoadBalancer listener");
      checkDefaultActions(defaultActions);
      // Locale.ROOT: locale-independent upper-casing of the protocol name.
      return new Listener(
          listenerArn, defaultActions, Protocol.valueOf(protocol.toUpperCase(Locale.ROOT)), port);
    }

    Listener(String listenerArn, List<DefaultAction> defaultActions, Protocol protocol, int port) {
      _listenerArn = listenerArn;
      _defaultActions = defaultActions;
      _protocol = protocol;
      _port = port;
    }

    /**
     * Gets {@code HeaderSpace} that this listener will match on for incoming packets.
* * @throws IllegalArgumentException if this mapping cannot be made. */ HeaderSpace getMatchingHeaderSpace() { HeaderSpace.Builder matchHeaderSpace = HeaderSpace.builder().setDstPorts(SubRange.singleton(_port)); switch (_protocol) { case TCP: case TLS: matchHeaderSpace.setIpProtocols(IpProtocol.TCP); break; case TCP_UDP: matchHeaderSpace.setIpProtocols(IpProtocol.TCP, IpProtocol.UDP); break; case UDP: matchHeaderSpace.setIpProtocols(IpProtocol.TCP); break; default: throw new IllegalArgumentException( String.format( "Cannot get matching header space for listener protocol %s", _protocol)); } return matchHeaderSpace.build(); } /** * Checks for the sanity of the list of default actions, based on criteria at * https://boto3.amazonaws.com/v1/documentation/api/1.9.42/reference/services/elbv2.html#ElasticLoadBalancingv2.Client.describe_listeners */ private static void checkDefaultActions(List<DefaultAction> defaultActions) { // The order for the action. This value is required for rules with multiple actions. boolean nullOrderExists = defaultActions.stream().anyMatch(da -> da.getOrder() == null); checkArgument( !nullOrderExists || defaultActions.size() == 1, "Order cannot be null if multiple DefaultActions exist"); // Each rule must include exactly one of the following types of actions: forward , // fixed-response , or redirect . long count = defaultActions.stream() .filter( da -> da.getType() == ActionType.FORWARD || da.getType() == ActionType.FIXED_RESPONSE || da.getType() == ActionType.REDIRECT) .count(); checkArgument( count == 1L, "There must be exactly 1 action of type 'forward', 'fixed-response', or 'redirect'." 
+ " Found %s", count); // The final action to be performed must be a forward or a fixed-response action DefaultAction lastAction = defaultActions.get(defaultActions.size() - 1); checkArgument( lastAction.getType() == ActionType.FORWARD || lastAction.getType() == ActionType.FIXED_RESPONSE, "Last action must be 'forward' or 'fixed-response'"); } @Override public String getId() { return _listenerArn; } @Nonnull public List<DefaultAction> getDefaultActions() { return _defaultActions; } @Nonnull public Protocol getProtocol() { return _protocol; } public int getPort() { return _port; } @Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (!(o instanceof Listener)) { return false; } Listener that = (Listener) o; return _listenerArn.equals(that._listenerArn) && _defaultActions.equals(that._defaultActions) && _protocol == that._protocol && _port == that._port; } @Override public int hashCode() { return Objects.hash(_listenerArn, _defaultActions, _protocol, _port); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("_listenerArn", _listenerArn) .add("_defaultActions", _defaultActions) .add("_protocol", _protocol) .add("_port", _port) .toString(); } } @Nonnull private String _loadBalancerArn; @Nonnull private final List<Listener> _listeners; @JsonCreator private static LoadBalancerListener create( @Nullable @JsonProperty(JSON_KEY_LOAD_BALANCER_ARN) String loadBalancerArn, @Nullable @JsonProperty(JSON_KEY_LISTENERS) List<Listener> listeners) { checkNonNull(listeners, JSON_KEY_LISTENERS, "LoadBalancer listeners"); checkNonNull(loadBalancerArn, JSON_KEY_LOAD_BALANCER_ARN, "LoadBalancer listener"); return new LoadBalancerListener(loadBalancerArn, listeners); } LoadBalancerListener(String loadBalancerArn, List<Listener> listeners) { _loadBalancerArn = loadBalancerArn; _listeners = listeners; } @Override @Nonnull public String getId() { return _loadBalancerArn; } @Nonnull public List<Listener> getListeners() { 
return _listeners; } @Override public boolean equals(@Nullable Object o) { if (this == o) { return true; } if (!(o instanceof LoadBalancerListener)) { return false; } LoadBalancerListener that = (LoadBalancerListener) o; return _loadBalancerArn.equals(that._loadBalancerArn) && _listeners.equals(that._listeners); } @Override public int hashCode() { return Objects.hash(_loadBalancerArn, _listeners); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("_loadBalancerArn", _loadBalancerArn) .add("_listeners", _listeners) .toString(); } }
/*
 * Copyright 1999,2005 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.xmlrpc.applet;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Stack;
import java.util.Vector;

import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.EncoderException;

import org.xml.sax.AttributeList;
import org.xml.sax.HandlerBase;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;

import uk.co.wilson.xml.MinML;

/**
 * A simple XML-RPC client.
 *
 * FIXME: This code is VERY out of date with the rest of the package.
 *
 * @version $Id: SimpleXmlRpcClient.java,v 1.10 2005/04/22 10:25:58 hgomez Exp $
 */
public class SimpleXmlRpcClient {

    // Server endpoint all calls are sent to.
    URL url;

    /**
     * Construct a XML-RPC client with this URL.
     */
    public SimpleXmlRpcClient(URL url) {
        this.url = url;
    }

    /**
     * Construct a XML-RPC client for the URL represented by this String.
     */
    public SimpleXmlRpcClient(String url) throws MalformedURLException {
        this.url = new URL(url);
    }

    /**
     * Construct a XML-RPC client for the specified hostname and port.
     */
    public SimpleXmlRpcClient(String hostname, int port)
            throws MalformedURLException {
        this.url = new URL("http://" + hostname + ":" + port + "/RPC2");
    }

    /**
     * Execute a remote method call by delegating to a fresh XmlRpcSupport.
     *
     * @param method The name of the remote method to invoke.
     * @param params The parameters to pass, as XML-RPC compatible values.
     * @return The value returned by the server.
     * @throws XmlRpcException If the server reports a fault.
     * @throws IOException If the transport fails.
     */
    public Object execute(String method, Vector params)
            throws XmlRpcException, IOException {
        return new XmlRpcSupport (url).execute (method, params);
    }
}

/**
 * FIXME: Leverage the XmlRpc class.
 */
class XmlRpcSupport extends HandlerBase {

    URL url;
    String methodName;

    // Set when the server response contains a <fault> element.
    boolean fault = false;
    // The decoded result of the last call.
    Object result = null;
    Base64 base64 = new Base64();

    // the stack we're parsing our values into.
    Stack values;
    Value currentValue;
    boolean readCdata;

    // formats for parsing and generating dateTime values
    // NOTE(review): SimpleDateFormat is documented as not thread-safe, yet this
    // instance is shared statically - confirm single-threaded use.
    static final DateFormat format = new SimpleDateFormat("yyyyMMdd'T'HH:mm:ss");

    // used to collect character data of parameter values
    StringBuffer cdata = new StringBuffer ();

    // XML RPC parameter types used for dataMode
    static final int STRING = 0;
    static final int INTEGER = 1;
    static final int BOOLEAN = 2;
    static final int DOUBLE = 3;
    static final int DATE = 4;
    static final int BASE64 = 5;
    static final int STRUCT = 6;
    static final int ARRAY = 7;

    // for debugging output
    public static boolean debug = false;

    // Human-readable names indexed by the type constants above.
    final static String types[] = {"String", "Integer", "Boolean", "Double",
                                   "Date", "Base64", "Struct", "Array"};

    /**
     * Create a support object bound to the given server URL.
     *
     * @param url The server endpoint.
     */
    public XmlRpcSupport(URL url) {
        this.url = url;
    }

    /**
     * Switch debugging output on/off.
     */
    public static void setDebug(boolean val) {
        debug = val;
    }

    /**
     * Parse the input stream. For each root level object, method
     * <code>objectParsed</code> is called.
*/ synchronized void parse(InputStream is) throws Exception { values = new Stack(); long now = System.currentTimeMillis(); MinML parser = new MinML(); parser.setDocumentHandler(this); parser.setErrorHandler(this); parser.parse(new InputSource(is)); if (debug) { System.out.println("Spent " + (System.currentTimeMillis() - now) + " parsing"); } } /** * Writes the XML representation of a supported Java object to the XML writer. */ void writeObject (Object what, XmlWriter writer) throws IOException { writer.startElement("value"); if (what instanceof String) { writer.write(what.toString()); } else if (what instanceof Integer) { writer.startElement("int"); writer.write (what.toString()); writer.endElement("int"); } else if (what instanceof Boolean) { writer.startElement("boolean"); writer.write(((Boolean) what).booleanValue() ? "1" : "0"); writer.endElement("boolean"); } else if (what instanceof Double) { writer.startElement("double"); writer.write (what.toString()); writer.endElement("double"); } else if (what instanceof Date) { writer.startElement("dateTime.iso8601"); Date d = (Date) what; writer.write(format.format(d)); writer.endElement("dateTime.iso8601"); } else if (what instanceof byte[]) { writer.startElement("base64"); try { writer.write((byte[]) base64.encode(what)); } catch (EncoderException e) { throw new RuntimeException("Possibly incompatible version " + "of '" + Base64.class.getName() + "' used: " + e); } writer.endElement("base64"); } else if (what instanceof Vector) { writer.startElement("array"); writer.startElement("data"); Vector v = (Vector) what; int l2 = v.size(); for (int i2 = 0; i2 < l2; i2++) { writeObject(v.elementAt(i2), writer); } writer.endElement("data"); writer.endElement("array"); } else if (what instanceof Hashtable) { writer.startElement("struct"); Hashtable h = (Hashtable) what; for (Enumeration e = h.keys (); e.hasMoreElements (); ) { String nextkey = (String) e.nextElement (); Object nextval = h.get(nextkey); 
writer.startElement("member"); writer.startElement("name"); writer.write(nextkey); writer.endElement("name"); writeObject(nextval, writer); writer.endElement("member"); } writer.endElement("struct"); } else { String unsupportedType = what == null ? "null" : what.getClass().toString(); throw new IOException("unsupported Java type: " + unsupportedType); } writer.endElement("value"); } /** * Generate an XML-RPC request and send it to the server. Parse the result * and return the corresponding Java object. * * @exception XmlRpcException If the remote host returned a fault message. * @exception IOException If the call could not be made for lower level * problems. */ public Object execute(String method, Vector arguments) throws XmlRpcException, IOException { fault = false; long now = System.currentTimeMillis(); try { StringBuffer strbuf = new StringBuffer(); XmlWriter writer = new XmlWriter(strbuf); writeRequest(writer, method, arguments); byte[] request = strbuf.toString().getBytes(); URLConnection con = url.openConnection(); con.setDoOutput(true); con.setDoInput(true); con.setUseCaches(false); con.setAllowUserInteraction(false); con.setRequestProperty("Content-Length", Integer.toString(request.length)); con.setRequestProperty("Content-Type", "text/xml"); // con.connect (); OutputStream out = con.getOutputStream(); out.write(request); out.flush(); InputStream in = con.getInputStream(); parse(in); System.out.println("result = " + result); } catch (Exception x) { x.printStackTrace(); throw new IOException(x.getMessage()); } if (fault) { // generate an XmlRpcException XmlRpcException exception = null; try { Hashtable f = (Hashtable) result; String faultString = (String) f.get("faultString"); int faultCode = Integer.parseInt(f.get("faultCode").toString()); exception = new XmlRpcException(faultCode, faultString.trim()); } catch (Exception x) { throw new XmlRpcException(0, "Invalid fault response"); } throw exception; } System.out.println("Spent " + 
(System.currentTimeMillis() - now) + " in request"); return result; } /** * Called when the return value has been parsed. */ void objectParsed(Object what) { result = what; } /** * Generate an XML-RPC request from a method name and a parameter vector. */ void writeRequest (XmlWriter writer, String method, Vector params) throws IOException { writer.startElement("methodCall"); writer.startElement("methodName"); writer.write(method); writer.endElement("methodName"); writer.startElement("params"); int l = params.size(); for (int i = 0; i < l; i++) { writer.startElement("param"); writeObject(params.elementAt (i), writer); writer.endElement("param"); } writer.endElement("params"); writer.endElement("methodCall"); } //////////////////////////////////////////////////////////////// // methods called by XML parser /** * Method called by SAX driver. */ public void characters(char ch[], int start, int length) throws SAXException { if (! readCdata) { return; } cdata.append (ch, start, length); } /** * Method called by SAX driver. */ public void endElement(String name) throws SAXException { if (debug) { System.err.println("endElement: " + name); } // finalize character data, if appropriate if (currentValue != null && readCdata) { currentValue.characterData(cdata.toString()); cdata.setLength(0); readCdata = false; } if ("value".equals(name)) { int depth = values.size(); // Only handle top level objects or objects contained in arrays here. // For objects contained in structs, wait for </member> (see code below). if (depth < 2 || values.elementAt (depth - 2).hashCode () != STRUCT) { Value v = currentValue; values.pop(); if (depth < 2) { // This is a top-level object objectParsed(v.value); currentValue = null; } else { // add object to sub-array; if current container is a struct, add later (at </member>) currentValue = (Value) values.peek(); currentValue.endElement(v); } } } // Handle objects contained in structs. 
if ("member".equals(name)) { Value v = currentValue; values.pop(); currentValue = (Value) values.peek(); currentValue.endElement(v); } else if ("methodName".equals(name)) { methodName = cdata.toString(); cdata.setLength(0); readCdata = false; } } /** * Method called by SAX driver. */ public void startElement (String name, AttributeList atts) throws SAXException { if (debug) { System.err.println("startElement: " + name); } if ("value".equals(name)) { // System.err.println ("starting value"); Value v = new Value(); values.push(v); currentValue = v; // cdata object is reused cdata.setLength(0); readCdata = true; } else if ("methodName".equals(name)) { cdata.setLength(0); readCdata = true; } else if ("name".equals(name)) { cdata.setLength(0); readCdata = true; } else if ("string".equals(name)) { // currentValue.setType (STRING); cdata.setLength(0); readCdata = true; } else if ("i4".equals(name) || "int".equals(name)) { currentValue.setType(INTEGER); cdata.setLength(0); readCdata = true; } else if ("boolean".equals(name)) { currentValue.setType(BOOLEAN); cdata.setLength(0); readCdata = true; } else if ("double".equals(name)) { currentValue.setType(DOUBLE); cdata.setLength(0); readCdata = true; } else if ("dateTime.iso8601".equals(name)) { currentValue.setType(DATE); cdata.setLength(0); readCdata = true; } else if ("base64".equals(name)) { currentValue.setType(BASE64); cdata.setLength(0); readCdata = true; } else if ("struct".equals(name)) { currentValue.setType(STRUCT); } else if ("array".equals(name)) { currentValue.setType(ARRAY); } } /** * * @param e * @throws SAXException */ public void error(SAXParseException e) throws SAXException { System.err.println("Error parsing XML: " + e); // errorLevel = RECOVERABLE; // errorMsg = e.toString (); } /** * * @param e * @throws SAXException */ public void fatalError(SAXParseException e) throws SAXException { System.err.println("Fatal error parsing XML: " + e); // errorLevel = FATAL; // errorMsg = e.toString (); } /** * This 
represents an XML-RPC Value while the request is being parsed. */ class Value { int type; Object value; // the name to use for the next member of struct values String nextMemberName; Hashtable struct; Vector array; /** * Constructor. */ public Value() { this.type = STRING; } /** * Notification that a new child element has been parsed. */ public void endElement(Value child) { if (type == ARRAY) { array.addElement(child.value); } else if (type == STRUCT) { struct.put(nextMemberName, child.value); } } /** * Set the type of this value. If it's a container, create the * corresponding java container. */ public void setType(int type) { // System.err.println ("setting type to "+types[type]); this.type = type; if (type == ARRAY) { value = array = new Vector(); } if (type == STRUCT) { value = struct = new Hashtable(); } } /** * Set the character data for the element and interpret it according to * the element type */ public void characterData (String cdata) { switch (type) { case INTEGER: value = new Integer(cdata.trim()); break; case BOOLEAN: value = new Boolean("1".equals(cdata.trim())); break; case DOUBLE: value = new Double(cdata.trim()); break; case DATE: try { value = format.parse(cdata.trim()); } catch (ParseException p) { // System.err.println ("Exception while parsing date: "+p); throw new RuntimeException(p.getMessage()); } break; case BASE64: try { value = base64.decode((Object) cdata.getBytes()); } catch (DecoderException e) { /* FIXME: We should Probably throw an * Exception here. Punting because this class * is slated for complete overhaul using the * core library. */ value = cdata; } break; case STRING: value = cdata; break; case STRUCT: // this is the name to use for the next member of this struct nextMemberName = cdata; break; } } /** * This is a performance hack to get the type of a value without casting * the Object. It breaks the contract of method hashCode, but it doesn't * matter since Value objects are never used as keys in Hashtables. 
*/ public int hashCode () { return type; } /** * * @return */ public String toString () { return (types[type] + " element " + value); } } /** * A quick and dirty XML writer. * TODO: Replace with core package's XmlWriter class. * * @see <a * href="http://nagoya.apache.org/bugzilla/show_bug.cgi?id=28982">Bugzilla * bug 28982</a> */ class XmlWriter { StringBuffer buf; String enc; /** * * @param buf */ public XmlWriter(StringBuffer buf) { this.buf = buf; buf.append("<?xml version=\"1.0\"?>"); } /** * * @param elem */ public void startElement(String elem) { buf.append("<"); buf.append(elem); buf.append(">"); } /** * * @param elem */ public void endElement(String elem) { buf.append("</"); buf.append(elem); buf.append(">"); } /** * * @param elem */ public void emptyElement(String elem) { buf.append("<"); buf.append(elem); buf.append("/>"); } /** * * @param text */ public void chardata(String text) { int l = text.length(); for (int i = 0; i < l; i++) { char c = text.charAt(i); switch (c) { case '<' : buf.append("&lt;"); break; case '>' : buf.append("&gt;"); break; case '&' : buf.append("&amp;"); break; default : buf.append(c); } } } /** * * @param text */ public void write(byte[] text) { // ### This may cause encoding complications for // ### multi-byte characters. This should be properly // ### fixed by implementing Bugzilla issue 28982. for (int i = 0; i < text.length; i++) { buf.append((char) text[i]); } } /** * * @param text */ public void write(char[] text) { buf.append(text); } /** * * @param text */ public void write(String text) { buf.append(text); } /** * * @return */ public String toString() { return buf.toString(); } /** * * @return * @throws UnsupportedEncodingException */ public byte[] getBytes() throws UnsupportedEncodingException { return buf.toString().getBytes(); } } }
package jp.gr.java_conf.dyama.rink.parser.core;

import jp.gr.java_conf.dyama.rink.common.Resource;

/**
 * State for Parsing Progress
 * @author Hiroyasu Yamada
 */
class State implements Comparable<State> {

    /**
     * A simple fixed-capacity object pool for State instances, used to avoid
     * repeated allocation during beam search.
     */
    static class StatePool {
        // number of pooled (available) states
        private int size_ ;
        // storage for pooled states; slots above size_ - 1 are unused
        private State[] pool_ ;

        /**
         * @param size the size of maximum pooling. the size is set to 1 if the size is less than 1.
         */
        StatePool(int size){
            if (size < 1)
                size = 1;
            pool_ = new State[size];
            // NOTE(review): the loop variable shadows the size_ field, so the
            // field stays 0 after construction and the pre-filled states in
            // pool_ are never handed out by create() until release() is
            // called. This looks unintended — confirm.
            for(int size_ = 0 ; size_ < pool_.length; size_++)
                pool_[size_] = new State();
        }

        /**
         * Creates a new state from this pool. The new instance is generated if the pool is empty.
         * @return a new state.
         */
        State create(){
            if (size_ == 0)
                return new State();
            State state = pool_[size_ - 1];
            pool_[size_ - 1] = null;
            size_ --;
            return state ;
        }

        /**
         * Releases the used state to this pool.
         * @param state the used state.
         * @throws IllegalArgumentException if the used state is null.
         */
        void release(State state){
            if (state == null)
                throw new IllegalArgumentException("the state is null.");
            // silently drop the state when the pool is already full
            if (size_ == pool_.length)
                return ;
            pool_[size_++] = state;
        }
    }

    /** the progress of parsing process */
    private int[] nodes_ ;

    /** the size of parsing progress */
    private int size_;

    /** the current position */
    private int position_;

    /** the flag whether the parsing is complete or not*/
    private boolean complete_;

    /** the last applied actions */
    private ActionImpl last_action_;

    /** analyzed dependencies */
    private DependencyRelations deps_ ;

    /** score */
    private double score_ ;

    /** the number of applied actions */
    int num_actions_ ;

    /**
     * Creates an empty state sized for the maximum supported sentence length.
     */
    State(){
        nodes_ = new int[Resource.MAXIMUM_NUMBER_OF_WORDS];
        size_ = 0;
        position_ = 0 ;
        complete_ = true ;
        last_action_ = null;
        deps_ = new DependencyRelations();
        score_ = 0.0;
        num_actions_ = 0;
    }

    /**
     * Copies internal information from the source state.
     * @param state the source state.
     * @throws IllegalArgumentException if the source state is null.
     */
    void copy(State state){
        if (state == null)
            throw new IllegalArgumentException("the source state is null.");
        size_ = state.size_;
        // only the live prefix [0, size_) of nodes_ is meaningful
        for(int i = 0; i < size_; i++)
            nodes_[i] = state.nodes_[i];
        position_ = state.position_;
        complete_ = state.complete_;
        // last_action_ is shared, not cloned — both states reference the same
        // ActionImpl instance
        last_action_ = state.last_action_;
        deps_.copy(state.deps_);
        score_ = state.score_;
        num_actions_ = state.num_actions_;
    }

    /**
     * Initializes this state as the input sentence.
     * @param sentence the input sentence.
     * @throws IllegalArgumentException if the input sentence is null.
     */
    void setup(SentenceImpl sentence){
        if (sentence == null)
            throw new IllegalArgumentException("the sentence is null.");
        deps_.setup(sentence);
        position_ = 0 ;
        complete_ = true;
        last_action_ = null;
        size_ = deps_.size();
        score_ = 0.0;
        num_actions_ = 0;
        // initially every word is its own active node: nodes_[i] == i
        for(int i = 0 ; i < deps_.size(); i ++)
            nodes_[i] = i;
    }

    /**
     * Removes the active node at the given index by shifting the tail of
     * nodes_ one slot to the left.
     * @param position index into nodes_ of the node to remove.
     */
    private void removeNode(int position){
        for(int k = position; k < size_-1; k++){
            nodes_[k] = nodes_[k+1];
        }
        size_ --;
    }

    /**
     * Records the applied action, accumulates its score for attachment
     * actions (LEFT/RIGHT only), and bumps the action counter.
     * @param action the action just applied.
     */
    private void setLastAction(ActionImpl action){
        last_action_ = action ;
        if (action.getType() == Action.Type.LEFT || action.getType() == Action.Type.RIGHT) // TODO :
            score_ += action.getScore();
        num_actions_ ++ ;
    }

    /**
     * Applies the parsing action to this state.
     * @param action the parsing action.
     * @return true if the parsing action could be applied, otherwise false.
     * @throws IllegalArgumentException if the parsing action is null.
     */
    boolean apply(Action action_){
        if (action_ == null)
            throw new IllegalArgumentException("the action is null.");
        ActionImpl action = (ActionImpl) action_ ;
        int left = getLeftTarget();
        int right = getRightTarget();
        if ( left < 0)
            return false;
        if (action.getType() == Action.Type.SHIFT || action.getType() == Action.Type.WAIT){
            // SHIFT/WAIT: no dependency is built; when there is no right
            // target, record deps_.size() (EOS sentinel) as the right node.
            int r = deps_.size();
            if (right >= 0)
                r = right;
            action.setTarget(left, r);
            setLastAction(action);
            if (position_ < size())
                position_ ++;
            return true;
        }
        if (right < 0)
            return false;
        // a reduce action was applied, so this pass is not "complete"
        complete_ = false;
        action.setTarget(left, right);
        if ( action.getType() == Action.Type.LEFT){
            // left node becomes head of right; the right node is consumed
            deps_.depend(left, right);
            removeNode(position_ + 1);
        } else if ( action.getType() == Action.Type.RIGHT ) {
            // right node becomes head of left; the left node is consumed
            deps_.depend(right, left);
            removeNode(position_);
        } else {
            throw new IllegalArgumentException("unknown type of actions"); // OK
        }
        setLastAction(action);
        if ( position_ > 0)
            position_ --;
        return true;
    }

    /**
     * Checks whether the parsing process is complete or not (= No reduce action can be applied)
     * @return true if the parsing process is complete, otherwise false.
     * the conditions of the completeness:<br>
     * [1] {@link #size()} &lt; 2 <br>
     * [2] the number of applied actions is more than 2 * #words - 1. (#words means the number of words in the input sentence.)<br>
     * [3] No Left/Right actions have been applied when the parsing position arrived at the end of the input sentence.
     */
    boolean isComplete(){
        if (size_ < 2)
            return true;
        if (num_actions_ > ( 2 * deps_.size() - 1))
            return true;
        return isEOS() && complete_ ;
    }

    /**
     * Forces this state to be treated as complete by saturating the action
     * counter (see condition [2] of {@link #isComplete()}).
     */
    void disable(){
        // TODO :
        num_actions_ = Integer.MAX_VALUE;
    }

    /**
     * Returns the current position.
     * @return the current position
     */
    int getPosition(){
        return position_ ;
    }

    /**
     * Returns the ID of the left target node at the current position
     * @return the ID of the left target node. return -1 if the current position is out of range.
     */
    int getLeftTarget(){
        if (position_ < 0 || position_ >= size_)
            return -1 ;
        return nodes_[position_];
    }

    /**
     * Returns the ID of the left node corresponding to the relative position from the current position.
     * @param position the relative position from the current position.
     * @return the ID of the left node. return -1 if the absolute position arrives at BOS.
     * @throws IllegalArgumentException if the relative position is greater than 0.
     */
    int getIDofLeftNode(int position){
        if (position > 0)
            throw new IllegalArgumentException("the relative position is greater than 0.");
        int p = position_ + position ;
        if (p < 0 || p >= size())
            return -1 ;
        return nodes_[p];
    }

    /**
     * Returns the ID of the right target node at the current position
     * @return the ID of the right target node. return -1 if the current position is out of range.
     */
    int getRightTarget(){
        if (position_ + 1 < 0 || position_ + 1 >= size_)
            return -1;
        return nodes_[position_ + 1];
    }

    /**
     * Returns the ID of the right node corresponding to the relative position from the current position.
     * @param position the relative position from the current position.
     * @return the ID of right node corresponding the relative position. return -1 if the absolute position arrives at EOS.
     * @throws IllegalArgumentException if the relative position is less than 0.
     */
    int getRightNode(int position){
        if (position < 0)
            throw new IllegalArgumentException("the relative position is less than 0.");
        int p = position_ + 1 + position ;
        if (p < 0 || p >= size_)
            return -1 ;
        return nodes_[p];
    }

    /**
     * Returns the last action.
     * @return the action. return null if no action have been applied.
     */
    Action getLastAction(){
        return last_action_ ;
    }

    /**
     * Sets the parsing position.
     * @param position the parsing position.
     * @throws IllegalArgumentException if the parsing position is out of range.
     */
    void setPosition(int position){
        if (position < 0 || position > size())
            throw new IllegalArgumentException("the position is out of range.");
        // rewinding to the start of the node list resets the completion flag
        if (position == 0)
            complete_ = true;
        position_ = position;
    }

    /**
     * Returns the number of active dependency nodes
     * @return the number of target dependency nodes
     */
    int size(){
        return size_;
    }

    /**
     * Checks whether the current position arrives at EOS.
     * @return true if the current position arrives at EOS, otherwise false.
     */
    boolean isEOS(){
        return position_ == size_;
    }

    /**
     * Returns the analyzed dependency relations.
     * @return dependency relations
     */
    DependencyRelations getDependencies() {
        return deps_;
    }

    /**
     * Returns the score of this state.
     * @return the score of this state.
     */
    double getScore(){
        return score_ ;
    }

    /**
     * Orders states by descending score (higher score sorts first).
     * NOTE(review): returning -1 for null violates the Comparable contract,
     * which requires a NullPointerException; also this ordering is not
     * consistent with equals — confirm callers rely only on sorting.
     */
    @Override
    public int compareTo(State state) {
        if (state == null)
            return -1;
        if (score_ > state.score_)
            return -1 ;
        if (score_ < state.score_)
            return 1 ;
        return 0 ;
    }
}
/*
 *
 * This file was generated by LLRP Code Generator
 * see http://llrp-toolkit.cvs.sourceforge.net/llrp-toolkit/
 * for more information
 * Generated on: Sun Apr 08 14:14:08 EDT 2012;
 *
 */
/*
 * Copyright 2007 ETH Zurich
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 */
package org.llrp.ltk.generated.messages;

import maximsblog.blogspot.com.llrpexplorer.Logger;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.llrp.ltk.exceptions.InvalidLLRPMessageException;
import org.llrp.ltk.exceptions.MissingParameterException;
import org.llrp.ltk.generated.LLRPConstants;
import org.llrp.ltk.generated.enumerations.GetReaderCapabilitiesRequestedData;
import org.llrp.ltk.generated.parameters.Custom;
import org.llrp.ltk.types.BitList;
import org.llrp.ltk.types.LLRPBitList;
import org.llrp.ltk.types.LLRPMessage;
import org.llrp.ltk.types.SignedShort;
import org.llrp.ltk.types.UnsignedInteger;
import org.llrp.ltk.types.UnsignedShort;
import java.util.LinkedList;
import java.util.List;


/**
 * This message is sent from the Client to the Reader. The Client is able to request only a subset or all the capabilities from the Reader.
 *
 * NOTE: generated code — do not hand-edit the codec logic; regenerate from
 * the LLRP definitions instead.
 *
 * See also {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=44&view=fit">LLRP Specification Section 9.1.1</a>} and {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=117&view=fit">LLRP Specification Section 16.1.1</a>}
 */
public class GET_READER_CAPABILITIES extends LLRPMessage {
    // LLRP message type number of GET_READER_CAPABILITIES (1)
    public static final SignedShort TYPENUM = new SignedShort(1);
    private static final Logger LOGGER = Logger.getLogger(GET_READER_CAPABILITIES.class);
    // name of the expected response message type
    public static final String RESPONSETYPE = "GET_READER_CAPABILITIES_RESPONSE";
    // mandatory field: which capability subset is requested
    protected GetReaderCapabilitiesRequestedData requestedData;
    // optional list of vendor-specific Custom parameters
    protected List<Custom> customList = new LinkedList<Custom>();

    /**
     * empty constructor to create new message
     * with LLRP version set to 1.0 (0x1).
     */
    public GET_READER_CAPABILITIES() {
        setVersion(new BitList(0, 0, 1));
    }

    /**
     * Constructor to create message from binary encoded message
     * calls decodeBinary to decode message.
     * @param list to be decoded
     */
    public GET_READER_CAPABILITIES(final LLRPBitList list)
        throws InvalidLLRPMessageException {
        decodeBinary(list.toByteArray());
    }

    /**
     * Constructor to create message from binary encoded message
     * calls decodeBinary to decode message.
     * @param byteArray representing message
     */
    public GET_READER_CAPABILITIES(final byte[] byteArray)
        throws InvalidLLRPMessageException {
        decodeBinary(byteArray);
    }

    /**
     * Constructor to create message from xml encoded message
     * calls decodeXML to decode message.
     * @param document to be decoded
     */
    public GET_READER_CAPABILITIES(final Document document)
        throws InvalidLLRPMessageException {
        decodeXML(document);
    }

    /**
     * {@inheritDoc}
     */
    protected LLRPBitList encodeBinarySpecific()
        throws InvalidLLRPMessageException {
        LLRPBitList resultBits = new LLRPBitList();

        // requestedData is mandatory: refuse to encode without it
        if (requestedData == null) {
            LOGGER.warn(" requestedData not set");
            throw new InvalidLLRPMessageException(
                " requestedData not set  for Parameter of Type GET_READER_CAPABILITIES");
        }

        resultBits.append(requestedData.encodeBinary());

        if (customList == null) {
            // just warn - it is optional
            LOGGER.info(" customList not set");
        } else {
            for (Custom field : customList) {
                resultBits.append(field.encodeBinary());
            }
        }

        return resultBits;
    }

    /**
     * {@inheritDoc}
     */
    public Document encodeXML() throws InvalidLLRPMessageException {
        try {
            Namespace ns = Namespace.getNamespace("llrp",
                    LLRPConstants.LLRPNAMESPACE);
            Element root = new Element("GET_READER_CAPABILITIES", ns);

            // Element root = new Element("GET_READER_CAPABILITIES");
            root.addNamespaceDeclaration(Namespace.getNamespace("llrp",
                    LLRPConstants.LLRPNAMESPACE));

            if (version == null) {
                throw new InvalidLLRPMessageException("Version not set");
            } else {
                root.setAttribute("Version", version.toInteger().toString());
            }

            if (messageID == null) {
                throw new InvalidLLRPMessageException("MessageID not set");
            } else {
                root.setAttribute("MessageID", messageID.toString(10));
            }

            if (requestedData == null) {
                LOGGER.warn(" requestedData not set");
                throw new MissingParameterException(" requestedData not set");
            } else {
                root.addContent(requestedData.encodeXML("RequestedData", ns));
            }

            //parameters
            if (customList == null) {
                LOGGER.info("customList not set");
            } else {
                for (Custom field : customList) {
                    // element name is the simple (unqualified) class name
                    root.addContent(field.encodeXML(field.getClass().getName()
                                                         .replaceAll(field.getClass()
                                                                          .getPackage()
                                                                          .getName() +
                                ".", ""), ns));
                }
            }

            Document doc = new Document(root);

            // validate against the LLRP message schema before returning
            if (isValidXMLMessage(doc, LLRPConstants.LLRPMESSAGESCHEMAPATH)) {
                return doc;
            } else {
                return null;
            }
        } catch (IllegalArgumentException e) {
            throw new InvalidLLRPMessageException(e.getMessage());
        } catch (MissingParameterException e) {
            throw new InvalidLLRPMessageException(e.getMessage());
        }
    }

    /**
     * {@inheritDoc}
     */
    protected void decodeBinarySpecific(LLRPBitList binary)
        throws InvalidLLRPMessageException {
        int position = 0;
        int tempByteLength;
        int tempLength = 0;
        // NOTE: count, fieldCount and custom are unused — artifacts of the
        // code generator; kept to match the generated source.
        int count;
        SignedShort type;
        int fieldCount;
        Custom custom;
        requestedData = new GetReaderCapabilitiesRequestedData(binary.subList(
                    position, GetReaderCapabilitiesRequestedData.length()));
        position += GetReaderCapabilitiesRequestedData.length();

        // list of parameters
        customList = new LinkedList<Custom>();
        LOGGER.debug("decoding parameter customList ");

        while (position < binary.length()) {
            // store if one parameter matched
            boolean atLeastOnce = false;

            // look ahead to see type
            // if first bit is one it is a TV Parameter
            if (binary.get(position)) {
                // do not take the first bit as it is always 1
                type = new SignedShort(binary.subList(position + 1, 7));
            } else {
                // TLV parameter: type number after reserved bits, then a
                // 16-bit length in bytes (converted to bits below)
                type = new SignedShort(binary.subList(position +
                            RESERVEDLENGTH, TYPENUMBERLENGTH));
                tempByteLength = new UnsignedShort(binary.subList(position +
                            RESERVEDLENGTH + TYPENUMBERLENGTH,
                            UnsignedShort.length())).toShort();
                tempLength = 8 * tempByteLength;
            }

            // custom
            if ((type != null) && type.equals(Custom.TYPENUM)) {
                Custom cus = new Custom(binary.subList(position, tempLength));

                //allowed custom parameters for this parameter
                //end allowed parameters

                //if none matched continue wasn't called and we add just cus as we found no specific vendor implementation
                customList.add(cus);
                position += tempLength;
                atLeastOnce = true;
            }

            if (!atLeastOnce) {
                //no parameter matched therefore we jump out of the loop
                break;
            }
        }

        //if list is still empty no parameter matched
        if (customList.isEmpty()) {
            LOGGER.info(
                "encoded message does not contain parameter for optional customList");
        }
    }

    /**
     * {@inheritDoc}
     */
    public void decodeXML(final Document document)
        throws InvalidLLRPMessageException {
        Element temp = null;
        // NOTE: custom is unused — generator artifact.
        Custom custom;

        // child element are always in default LLRP namespace
        Namespace ns = Namespace.getNamespace(LLRPConstants.LLRPNAMESPACE);

        try {
            isValidXMLMessage(document, LLRPConstants.LLRPMESSAGESCHEMAPATH);

            Element root = (Element) document.getRootElement().clone();
            List<Element> tempList = null;

            // the version field is always 3 bit long
            // if the version attribute is not set in the LTK-XML message,
            // it is set to version 001
            String versionAttribute = root.getAttributeValue("Version");

            if (versionAttribute != null) {
                version = new BitList(3);
                version.setValue(new Integer(versionAttribute));
            } else {
                version = new BitList(0, 0, 1);
            }

            messageID = new UnsignedInteger(root.getAttributeValue("MessageID"));

            temp = root.getChild("RequestedData", root.getNamespace());

            //check if this element exist
            if (temp != null) {
                requestedData = new GetReaderCapabilitiesRequestedData(temp);
            } else {
                LOGGER.warn(
                    "Element requestedData not provided in xml as child of " +
                    root.getName());
                throw new MissingParameterException(
                    "Element requestedData not provided");
            }

            root.removeChild("RequestedData", root.getNamespace());

            //parameter - not choices - no special actions needed
            //we expect a list of parameters
            customList = new LinkedList<Custom>();

            tempList = root.getChildren("Custom", ns);

            if ((tempList == null) || tempList.isEmpty()) {
                LOGGER.info(
                    "GET_READER_CAPABILITIES misses optional parameter of type customList");
            } else {
                for (Element e : tempList) {
                    customList.add(new Custom(e));
                    LOGGER.debug("adding Custom to customList ");
                }
            }

            root.removeChildren("Custom", ns);

            //custom parameter
            //end custom

            // any leftover child element means the input had something this
            // message type does not define — reject it
            if (root.getChildren().size() > 0) {
                String message = "GET_READER_CAPABILITIES has unknown element " +
                    ((Element) root.getChildren().get(0)).getName();
                throw new InvalidLLRPMessageException(message);
            }
        } catch (IllegalArgumentException e) {
            throw new InvalidLLRPMessageException(e.getMessage());
        } catch (MissingParameterException e) {
            throw new InvalidLLRPMessageException(e.getMessage());
        }
    }

    //setters

    /**
     * set requestedData of type GetReaderCapabilitiesRequestedData .
     * @param requestedData to be set
     */
    public void setRequestedData(
        final GetReaderCapabilitiesRequestedData requestedData) {
        this.requestedData = requestedData;
    }

    /**
     * set customList of type  List &lt;Custom>.
     * @param customList to be set
     */
    public void setCustomList(final List<Custom> customList) {
        this.customList = customList;
    }

    // end setter

    //getters

    /**
     * get requestedData of type GetReaderCapabilitiesRequestedData.
     * @return GetReaderCapabilitiesRequestedData
     */
    public GetReaderCapabilitiesRequestedData getRequestedData() {
        return requestedData;
    }

    /**
     * get customList of type List &lt;Custom> .
     * @return List &lt;Custom>
     */
    public List<Custom> getCustomList() {
        return customList;
    }

    // end getters

    //add methods

    /**
     * add element custom of type Custom .
     * @param custom of type Custom
     */
    public void addToCustomList(Custom custom) {
        if (this.customList == null) {
            this.customList = new LinkedList<Custom>();
        }

        this.customList.add(custom);
    }

    // end add

    /**
     * Name of the message type this request expects as a response.
     */
    @Override
    public String getResponseType() {
        return RESPONSETYPE;
    }

    /**
     * LLRP message name of this type.
     */
    @Override
    public String getName() {
        return "GET_READER_CAPABILITIES";
    }

    /**
     * return uniuque type number.
     * @return SignedShort holding unique type number
     */
    public SignedShort getTypeNum() {
        return TYPENUM;
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.unix; /** * <p>Equivalent to UNIX's "struct stat", a FileStatus instance contains * various bits of metadata about a directory entry. * * <p>The Java SDK provides access to some but not all of the information * available via the stat(2) and lstat(2) syscalls, but often requires that * multiple calls be made to obtain it. By reifying stat buffers as Java * objects and providing a wrapper around the stat/lstat calls, we give client * applications access to the richer file metadata and enable a reduction in * the number of system calls, which is critical for high-performance tools. * * <p>This class is optimized for memory usage. Operations that are not yet * required for any client are intentionally unimplemented to save space. * Currently, we only support these fields: st_mode, st_size, st_atime, * st_atimensec, st_mtime, st_mtimensec, st_ctime, st_ctimensec, st_dev, st_ino. * Methods that require other fields throw UnsupportedOperationException. 
*/ public class FileStatus { private final int st_mode; private final int st_atime; // (unsigned) private final int st_atimensec; // (unsigned) private final int st_mtime; // (unsigned) private final int st_mtimensec; // (unsigned) private final int st_ctime; // (unsigned) private final int st_ctimensec; // (unsigned) private final long st_size; private final int st_dev; private final long st_ino; /** * Constructs a FileStatus instance. (Called only from JNI code.) */ protected FileStatus(int st_mode, int st_atime, int st_atimensec, int st_mtime, int st_mtimensec, int st_ctime, int st_ctimensec, long st_size, int st_dev, long st_ino) { this.st_mode = st_mode; this.st_atime = st_atime; this.st_atimensec = st_atimensec; this.st_mtime = st_mtime; this.st_mtimensec = st_mtimensec; this.st_ctime = st_ctime; this.st_ctimensec = st_ctimensec; this.st_size = st_size; this.st_dev = st_dev; this.st_ino = st_ino; } /** * Returns the device number of this inode. */ public int getDeviceNumber() { return st_dev; } /** * Returns the number of this inode. Inode numbers are (usually) unique for * a given device. */ public long getInodeNumber() { return st_ino; } /** * Returns true iff this file is a regular file. */ public boolean isRegularFile() { return (st_mode & S_IFMT) == S_IFREG; } /** * Returns true iff this file is a directory. */ public boolean isDirectory() { return (st_mode & S_IFMT) == S_IFDIR; } /** * Returns true iff this file is a symbolic link. */ public boolean isSymbolicLink() { return (st_mode & S_IFMT) == S_IFLNK; } /** * Returns true iff this file is a character device. */ public boolean isCharacterDevice() { return (st_mode & S_IFMT) == S_IFCHR; } /** * Returns true iff this file is a block device. */ public boolean isBlockDevice() { return (st_mode & S_IFMT) == S_IFBLK; } /** * Returns true iff this file is a FIFO. */ public boolean isFIFO() { return (st_mode & S_IFMT) == S_IFIFO; } /** * Returns true iff this file is a UNIX-domain socket. 
*/ public boolean isSocket() { return (st_mode & S_IFMT) == S_IFSOCK; } /** * Returns true iff this file has its "set UID" bit set. */ public boolean isSetUserId() { return (st_mode & S_ISUID) != 0; } /** * Returns true iff this file has its "set GID" bit set. */ public boolean isSetGroupId() { return (st_mode & S_ISGID) != 0; } /** * Returns true iff this file has its "sticky" bit set. See UNIX manuals for * explanation. */ public boolean isSticky() { return (st_mode & S_ISVTX) != 0; } /** * Returns the user/group/other permissions part of the mode bits (i.e. * st_mode masked with 0777), interpreted according to longstanding UNIX * tradition. */ public int getPermissions() { return st_mode & S_IRWXA; } /** * Returns the total size, in bytes, of this file. */ public long getSize() { return st_size; } /** * Returns the last access time of this file (seconds since UNIX epoch). */ public long getLastAccessTime() { return unsignedIntToLong(st_atime); } /** * Returns the fractional part of the last access time of this file (nanoseconds). */ public long getFractionalLastAccessTime() { return unsignedIntToLong(st_atimensec); } /** * Returns the last modified time of this file (seconds since UNIX epoch). */ public long getLastModifiedTime() { return unsignedIntToLong(st_mtime); } /** * Returns the fractional part of the last modified time of this file (nanoseconds). */ public long getFractionalLastModifiedTime() { return unsignedIntToLong(st_mtimensec); } /** * Returns the last change time of this file (seconds since UNIX epoch). */ public long getLastChangeTime() { return unsignedIntToLong(st_ctime); } /** * Returns the fractional part of the last change time of this file (nanoseconds). 
*/ public long getFractionalLastChangeTime() { return unsignedIntToLong(st_ctimensec); } //////////////////////////////////////////////////////////////////////// @Override public String toString() { return String.format("FileStatus(mode=0%06o,size=%d,mtime=%d)", st_mode, st_size, st_mtime); } @Override public int hashCode() { return st_mode; } //////////////////////////////////////////////////////////////////////// // Platform-specific details. These fields are public so that they can // be used from other packages. See POSIX and/or Linux manuals for details. // // These need to be kept in sync with the native code and system call // interface. (The unit tests ensure that.) Of course, this decoding could // be done in the JNI code to ensure maximum portability, but (a) we don't // expect we'll need that any time soon, and (b) that would require eager // rather than on-demand bitmunging of all attributes. In any case, it's not // part of the interface so it can be easily changed later if necessary. 
public static final int S_IFMT = 0170000; // mask: filetype bitfields public static final int S_IFSOCK = 0140000; // socket public static final int S_IFLNK = 0120000; // symbolic link public static final int S_IFREG = 0100000; // regular file public static final int S_IFBLK = 0060000; // block device public static final int S_IFDIR = 0040000; // directory public static final int S_IFCHR = 0020000; // character device public static final int S_IFIFO = 0010000; // fifo public static final int S_ISUID = 0004000; // set UID bit public static final int S_ISGID = 0002000; // set GID bit (see below) public static final int S_ISVTX = 0001000; // sticky bit (see below) public static final int S_IRWXA = 00777; // mask: all permissions public static final int S_IRWXU = 00700; // mask: file owner permissions public static final int S_IRUSR = 00400; // owner has read permission public static final int S_IWUSR = 00200; // owner has write permission public static final int S_IXUSR = 00100; // owner has execute permission public static final int S_IRWXG = 00070; // mask: group permissions public static final int S_IRGRP = 00040; // group has read permission public static final int S_IWGRP = 00020; // group has write permission public static final int S_IXGRP = 00010; // group has execute permission public static final int S_IRWXO = 00007; // mask: other permissions public static final int S_IROTH = 00004; // others have read permission public static final int S_IWOTH = 00002; // others have write permisson public static final int S_IXOTH = 00001; // others have execute permission public static final int S_IEXEC = 00111; // owner, group, world execute static long unsignedIntToLong(int i) { return (i & 0x7FFFFFFF) - (long) (i & 0x80000000); } public static boolean isFile(int rawType) { int type = rawType & S_IFMT; return type == S_IFREG || isSpecialFile(rawType); } public static boolean isSpecialFile(int rawType) { int type = rawType & S_IFMT; return type == S_IFSOCK || type == 
S_IFBLK || type == S_IFCHR || type == S_IFIFO; } public static boolean isDirectory(int rawType) { int type = rawType & S_IFMT; return type == S_IFDIR; } public static boolean isSymbolicLink(int rawType) { int type = rawType & S_IFMT; return type == S_IFLNK; } }
package com.intellij.tasks.redmine;

import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.tasks.config.BaseRepositoryEditor;
import com.intellij.tasks.impl.TaskUiUtil;
import com.intellij.tasks.redmine.model.RedmineProject;
import com.intellij.ui.ListCellRendererWrapper;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.components.JBLabel;
import com.intellij.util.Consumer;
import com.intellij.util.containers.Stack;
import com.intellij.util.ui.FormBuilder;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Settings editor for a Redmine task repository: adds a project selector, an
 * API-token field (shown only when HTTP auth is off) and an "all assignees"
 * toggle on top of the common repository fields from BaseRepositoryEditor.
 *
 * @author Mikhail Golubev
 * @author Dennis.Ushakov
 */
public class RedmineRepositoryEditor extends BaseRepositoryEditor<RedmineRepository> {
  private ComboBox myProjectCombo;
  private JTextField myAPIKey;
  private JCheckBox myAllAssigneesCheckBox;

  private JBLabel myProjectLabel;
  private JBLabel myAPIKeyLabel;

  public RedmineRepositoryEditor(final Project project, final RedmineRepository repository, Consumer<? super RedmineRepository> changeListener) {
    super(project, repository, changeListener);
    // Custom fields are created in createCustomPanel(), which the super
    // constructor has already invoked by this point.
    myTestButton.setEnabled(myRepository.isConfigured());
    myAPIKey.setText(repository.getAPIKey());
    // Checkbox is the inverse of the stored flag: "include not assigned to me".
    myAllAssigneesCheckBox.setSelected(!repository.isAssignedToMe());

    installListener(myProjectCombo);
    installListener(myAPIKey);
    installListener(myAllAssigneesCheckBox);
    toggleCredentialsVisibility();
    // Defer project-list loading until the editor is on the EDT.
    UIUtil.invokeLaterIfNeeded(() -> initialize());
  }

  @Override
  protected void afterTestConnection(boolean connectionSuccessful) {
    // Refresh the project list on success; clear stale entries on failure.
    if (connectionSuccessful) {
      new FetchProjectsTask().queue();
    }
    else {
      myProjectCombo.removeAllItems();
    }
  }

  /**
   * Populates the project combo box if the repository is already configured;
   * otherwise leaves it empty until a successful connection test.
   */
  private void initialize() {
    final RedmineProject currentProject = myRepository.getCurrentProject();
    if (currentProject != null && myRepository.isConfigured()) {
      new FetchProjectsTask().queue();
    }
    else {
      myProjectCombo.removeAllItems();
    }
  }

  @Override
  public void apply() {
    super.apply();
    RedmineProjectItem selected = (RedmineProjectItem)myProjectCombo.getSelectedItem();
    myRepository.setCurrentProject(selected != null ? selected.myProject : null);
    myRepository.setAPIKey(myAPIKey.getText().trim());
    myRepository.setAssignedToMe(!myAllAssigneesCheckBox.isSelected());
    myTestButton.setEnabled(myRepository.isConfigured());
    toggleCredentialsVisibility();
  }

  /**
   * Shows username/password fields when HTTP authentication is enabled,
   * or the API-token field otherwise (the two schemes are mutually exclusive).
   */
  private void toggleCredentialsVisibility() {
    myPasswordLabel.setVisible(myRepository.isUseHttpAuthentication());
    myPasswordText.setVisible(myRepository.isUseHttpAuthentication());

    myUsernameLabel.setVisible(myRepository.isUseHttpAuthentication());
    myUserNameText.setVisible(myRepository.isUseHttpAuthentication());

    myAPIKeyLabel.setVisible(!myRepository.isUseHttpAuthentication());
    myAPIKey.setVisible(!myRepository.isUseHttpAuthentication());
  }

  @Nullable
  @Override
  protected JComponent createCustomPanel() {
    myProjectLabel = new JBLabel("Project:", SwingConstants.RIGHT);
    myProjectCombo = new ComboBox(300);
    myProjectCombo.setRenderer(new ListCellRendererWrapper<RedmineProjectItem>() {
      @Override
      public void customize(JList list, RedmineProjectItem value, int index, boolean selected, boolean hasFocus) {
        if (value == null) {
          setText("Set URL and password/token first");
        }
        else {
          if (myProjectCombo.isPopupVisible()) {
            // In the dropdown, indent projects by their nesting level.
            setText(StringUtil.repeat(" ", value.myLevel) + value.myProject.getName());
          }
          else {
            // Do not indent selected project
            setText(value.myProject.getName());
          }
        }
      }
    });

    myAPIKeyLabel = new JBLabel("API Token:", SwingConstants.RIGHT);
    myAPIKey = new JPasswordField();
    myAllAssigneesCheckBox = new JBCheckBox("Include issues not assigned to me");
    return FormBuilder.createFormBuilder()
      .addLabeledComponent(myAPIKeyLabel, myAPIKey)
      .addLabeledComponent(myProjectLabel, myProjectCombo)
      .addComponentToRightColumn(myAllAssigneesCheckBox)
      .getPanel();
  }

  @Override
  public void setAnchor(@Nullable final JComponent anchor) {
    super.setAnchor(anchor);
    myProjectLabel.setAnchor(anchor);
    myAPIKeyLabel.setAnchor(anchor);
  }

  /**
   * Combo-box entry pairing a Redmine project with its depth in the
   * project hierarchy (used for indentation in the dropdown).
   */
  private static class RedmineProjectItem {
    public final RedmineProject myProject;
    public final int myLevel;

    RedmineProjectItem(@NotNull RedmineProject project, int level) {
      myProject = project;
      myLevel = level;
    }

    // NOTE(review): this equals() is asymmetric — it also reports equality
    // against bare RedmineProject instances, which a RedmineProject will never
    // reciprocate. Presumably relied on by combo-box selection matching; verify
    // before changing.
    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null) return false;

      if (o instanceof RedmineProject) {
        return myProject.equals(o);
      }
      else if (o instanceof RedmineProjectItem) {
        return myProject.equals(((RedmineProjectItem)o).myProject);
      }
      return false;
    }

    @Override
    public int hashCode() {
      return myProject.hashCode();
    }
  }

  /**
   * Background task that downloads the project list from the repository and
   * repopulates the project combo box.
   */
  private class FetchProjectsTask extends TaskUiUtil.ComboBoxUpdater<RedmineProjectItem> {
    private FetchProjectsTask() {
      super(RedmineRepositoryEditor.this.myProject, "Downloading Redmine projects...", myProjectCombo);
    }

    @Override
    public RedmineProjectItem getExtraItem() {
      // Sentinel entry meaning "no specific project selected".
      return new RedmineProjectItem(RedmineRepository.UNSPECIFIED_PROJECT, 0);
    }

    @Nullable
    @Override
    public RedmineProjectItem getSelectedItem() {
      RedmineProject currentProject = myRepository.getCurrentProject();
      // Level -1 is fine here: equality ignores the level, and the selected
      // item is rendered without indentation anyway.
      return currentProject != null ? new RedmineProjectItem(currentProject, -1) : null;
    }

    @NotNull
    @Override
    protected List<RedmineProjectItem> fetch(@NotNull ProgressIndicator indicator) throws Exception {
      // Seems that Redmine always return its project hierarchy in DFS order.
      // So it's easy to find level of each project using stack of parents.
      Stack<RedmineProject> parents = new Stack<>();
      List<RedmineProjectItem> items = new ArrayList<>();
      for (RedmineProject project : myRepository.fetchProjects()) {
        RedmineProject parentProject = project.getParent();
        if (parentProject == null) {
          // Top-level project: restart the ancestry stack.
          items.add(new RedmineProjectItem(project, 0));
          parents.clear();
        }
        else {
          // Pop until the top of the stack is this project's parent; the
          // remaining stack depth is the project's nesting level.
          while (!parents.isEmpty() && !parents.peek().equals(parentProject)) {
            parents.pop();
          }
          items.add(new RedmineProjectItem(project, parents.size()));
        }
        parents.push(project);
      }
      return items;
    }
  }
}
/* * Copyright 2011 Kazuhiro Sera * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language * governing permissions and limitations under the License. */ package com.github.seratch.signedrequest4j; import org.apache.log4j.Logger; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * OAuth Signed Request Verifier * * @author <a href="mailto:seratch@gmail.com">Kazuhiro Sera</a> * @see <a href="http://oauth.net/core/1.0/">OAuth Core 1.0</a> */ public class SignedRequestVerifier { private static Logger log = Logger.getLogger(SignedRequestVerifier.class); public static Map<String, String> parseAuthorizationHeader(String authorizationHeader) { Map<String, String> urlDecodedElements = new HashMap<String, String>(); String[] keyAndValueArray = authorizationHeader.split(","); for (String keyAndValue : keyAndValueArray) { String[] arr = keyAndValue.split("="); String key = arr[0].trim().replaceAll("\"", ""); String value = arr[1].trim().replaceAll("\"", ""); // all the elements should be url-decoded. 
urlDecodedElements.put(key, OAuthEncoding.decode(value)); } return urlDecodedElements; } public static boolean verifyHMacGetRequest(String url, String queryString, String authorizationHeader, OAuthConsumer consumer) { return verify(url, queryString, authorizationHeader, consumer, HttpMethod.GET, SignatureMethod.HMAC_SHA1); } public static boolean verifyHMacPostRequest(String url, String queryString, String authorizationHeader, OAuthConsumer consumer) { return verify(url, queryString, authorizationHeader, consumer, HttpMethod.POST, SignatureMethod.HMAC_SHA1); } private static final Set<String> oAuthElementNames = new HashSet<String>(); static { oAuthElementNames.add("oauth_consumer_key"); oAuthElementNames.add("oauth_signature_method"); oAuthElementNames.add("oauth_signature"); oAuthElementNames.add("oauth_timestamp"); oAuthElementNames.add("oauth_nonce"); oAuthElementNames.add("oauth_token"); oAuthElementNames.add("oauth_version"); } @Deprecated public static boolean verifyLegacyUncorrected(String url, String authorizationHeader, OAuthConsumer consumer, HttpMethod httpMethod, SignatureMethod signatureMethod) { if (log.isDebugEnabled()) { log.debug("AuthorizationHeader: " + authorizationHeader); } if (authorizationHeader == null) { return false; } Map<String, String> urlDecodedElements = parseAuthorizationHeader(authorizationHeader); SignedRequest req = SignedRequestFactory.create(consumer, signatureMethod); Map<String, Object> additionalParams = new HashMap<String, Object>(); for (String name : urlDecodedElements.keySet()) { String _name = name.replaceFirst("OAuth\\s+", ""); if (!oAuthElementNames.contains(_name)) { // the element already should be url-encoded additionalParams.put(name, urlDecodedElements.get(name)); } } req.setAdditionalAuthorizationHeaderParams(additionalParams); String nonce = urlDecodedElements.get("oauth_nonce"); Long timestamp = Long.valueOf(urlDecodedElements.get("oauth_timestamp")); String signature = req.getSignature(url, httpMethod, 
nonce, timestamp); debugLogSignature(signature, signatureMethod, urlDecodedElements); return signature.equals(urlDecodedElements.get("oauth_signature")); } public static boolean verify(String url, String queryString, String authorizationHeader, OAuthConsumer consumer, HttpMethod httpMethod, SignatureMethod signatureMethod) { if (log.isDebugEnabled()) { log.debug("AuthorizationHeader: " + authorizationHeader); } if (authorizationHeader == null) { return false; } if (httpMethod.equals(HttpMethod.POST)) { throw new IllegalArgumentException("Please use verifyPOST instead for POST requests."); } Map<String, String> urlDecodedElements = parseAuthorizationHeader(authorizationHeader); SignedRequest req = SignedRequestFactory.create(consumer, signatureMethod); Map<String, Object> additionalParams = new HashMap<String, Object>(); for (String name : urlDecodedElements.keySet()) { String _name = name.replaceFirst("OAuth\\s+", ""); if (!oAuthElementNames.contains(_name)) { // the element already should be url-encoded additionalParams.put(name, urlDecodedElements.get(name)); } } req.setAdditionalAuthorizationHeaderParams(additionalParams); String nonce = urlDecodedElements.get("oauth_nonce"); Long timestamp = Long.valueOf(urlDecodedElements.get("oauth_timestamp")); if (queryString == null) { queryString = ""; } req.readQueryStringAndAddToSignatureBaseString(url + "?" 
+ queryString); String signature = req.getSignature(url, httpMethod, nonce, timestamp); debugLogSignature(signature, signatureMethod, urlDecodedElements); return signature.equals(urlDecodedElements.get("oauth_signature")); } public static boolean verifyPOST(String url, String queryString, String authorizationHeader, OAuthConsumer consumer, SignatureMethod signatureMethod, Map<String, String> formParams) { if (log.isDebugEnabled()) { log.debug("AuthorizationHeader: " + authorizationHeader); } if (authorizationHeader == null) { return false; } Map<String, String> urlDecodedElements = parseAuthorizationHeader(authorizationHeader); if (formParams != null) { for (String key : formParams.keySet()) { String value = formParams.get(key); if (value != null) { urlDecodedElements.put(key, value); } } } SignedRequest req = SignedRequestFactory.create(consumer, signatureMethod); Map<String, Object> additionalParams = new HashMap<String, Object>(); for (String name : urlDecodedElements.keySet()) { String _name = name.replaceFirst("OAuth\\s+", ""); if (!oAuthElementNames.contains(_name)) { // the element already should be url-encoded additionalParams.put(name, urlDecodedElements.get(name)); } } req.setAdditionalAuthorizationHeaderParams(additionalParams); String nonce = urlDecodedElements.get("oauth_nonce"); Long timestamp = Long.valueOf(urlDecodedElements.get("oauth_timestamp")); if (queryString == null) { queryString = ""; } req.readQueryStringAndAddToSignatureBaseString(url + "?" 
+ queryString); String signature = req.getSignature(url, HttpMethod.POST, nonce, timestamp); debugLogSignature(signature, signatureMethod, urlDecodedElements); return signature.equals(urlDecodedElements.get("oauth_signature")); } public static boolean verifyHMacGetRequest( String url, String queryString, String authorizationHeader, OAuthConsumer consumer, OAuthAccessToken accessToken) { return verify(url, queryString, authorizationHeader, consumer, accessToken, HttpMethod.GET, SignatureMethod.HMAC_SHA1); } public static boolean verifyHMacPostRequest( String url, String queryString, String authorizationHeader, OAuthConsumer consumer, OAuthAccessToken accessToken) { return verify(url, queryString, authorizationHeader, consumer, accessToken, HttpMethod.POST, SignatureMethod.HMAC_SHA1); } @Deprecated public static boolean verifyLegacyUncorrected( String url, String authorizationHeader, OAuthConsumer consumer, OAuthAccessToken accessToken, HttpMethod httpMethod, SignatureMethod signatureMethod) { if (log.isDebugEnabled()) { log.debug("AuthorizationHeader: " + authorizationHeader); } if (authorizationHeader == null) { return false; } Map<String, String> urlDecodedElements = parseAuthorizationHeader(authorizationHeader); SignedRequest req = SignedRequestFactory.create(consumer, accessToken, signatureMethod); Map<String, Object> additionalParams = new HashMap<String, Object>(); for (String name : urlDecodedElements.keySet()) { String _name = name.replaceFirst("OAuth\\s+", ""); if (!oAuthElementNames.contains(_name)) { // the element already should be url-encoded additionalParams.put(name, urlDecodedElements.get(name)); } } req.setAdditionalAuthorizationHeaderParams(additionalParams); String signature = req.getSignature(url, httpMethod, urlDecodedElements.get("oauth_nonce"), Long.valueOf(urlDecodedElements.get("oauth_timestamp"))); debugLogSignature(signature, signatureMethod, urlDecodedElements); return signature.equals(urlDecodedElements.get("oauth_signature")); } public 
static boolean verify( String url, String queryString, String authorizationHeader, OAuthConsumer consumer, OAuthAccessToken accessToken, HttpMethod httpMethod, SignatureMethod signatureMethod) { if (log.isDebugEnabled()) { log.debug("AuthorizationHeader: " + authorizationHeader); } if (authorizationHeader == null) { return false; } if (httpMethod.equals(HttpMethod.POST)) { throw new IllegalArgumentException("Please use verifyPOST instead for POST requests."); } Map<String, String> urlDecodedElements = parseAuthorizationHeader(authorizationHeader); SignedRequest req = SignedRequestFactory.create(consumer, accessToken, signatureMethod); Map<String, Object> additionalParams = new HashMap<String, Object>(); for (String name : urlDecodedElements.keySet()) { String _name = name.replaceFirst("OAuth\\s+", ""); if (!oAuthElementNames.contains(_name)) { // the element already should be url-encoded additionalParams.put(name, urlDecodedElements.get(name)); } } req.setAdditionalAuthorizationHeaderParams(additionalParams); if (queryString == null) { queryString = ""; } req.readQueryStringAndAddToSignatureBaseString(url + "?" 
+ queryString); String signature = req.getSignature(url, httpMethod, urlDecodedElements.get("oauth_nonce"), Long.valueOf(urlDecodedElements.get("oauth_timestamp"))); debugLogSignature(signature, signatureMethod, urlDecodedElements); return signature.equals(urlDecodedElements.get("oauth_signature")); } public static boolean verifyPOST(String url, String queryString, String authorizationHeader, OAuthConsumer consumer, OAuthAccessToken accessToken, SignatureMethod signatureMethod, Map<String, String> formParams) { if (log.isDebugEnabled()) { log.debug("AuthorizationHeader: " + authorizationHeader); } if (authorizationHeader == null) { return false; } Map<String, String> urlDecodedElements = parseAuthorizationHeader(authorizationHeader); if (formParams != null) { for (String key : formParams.keySet()) { String value = formParams.get(key); if (value != null) { urlDecodedElements.put(key, value); } } } SignedRequest req = SignedRequestFactory.create(consumer, accessToken, signatureMethod); Map<String, Object> additionalParams = new HashMap<String, Object>(); for (String name : urlDecodedElements.keySet()) { String _name = name.replaceFirst("OAuth\\s+", ""); if (!oAuthElementNames.contains(_name)) { // the element already should be url-encoded additionalParams.put(name, urlDecodedElements.get(name)); } } req.setAdditionalAuthorizationHeaderParams(additionalParams); if (queryString == null) { queryString = ""; } req.readQueryStringAndAddToSignatureBaseString(url + "?" 
+ queryString); String signature = req.getSignature(url, HttpMethod.POST, urlDecodedElements.get("oauth_nonce"), Long.valueOf(urlDecodedElements.get("oauth_timestamp"))); debugLogSignature(signature, signatureMethod, urlDecodedElements); return signature.equals(urlDecodedElements.get("oauth_signature")); } private static void debugLogSignature(String signature, SignatureMethod signatureMethod, Map<String, String> urlDecodedElements) { if (log.isDebugEnabled()) { log.debug("Signature by Verifier: " + signature + ", method: " + signatureMethod.toString()); log.debug("Signature in Authorization header: " + urlDecodedElements.get("oauth_signature")); } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.server.protocol; import com.google.common.base.VerifyException; import com.google.common.collect.ImmutableList; import io.prestosql.Session; import io.prestosql.client.ClientTypeSignature; import io.prestosql.client.Column; import io.prestosql.spi.Page; import io.prestosql.spi.PrestoException; import io.prestosql.spi.type.BigintType; import io.prestosql.spi.type.BooleanType; import io.prestosql.spi.type.IntegerType; import io.prestosql.spi.type.TimestampType; import io.prestosql.spi.type.TimestampWithTimeZoneType; import io.prestosql.spi.type.Type; import io.prestosql.testing.TestingSession; import io.prestosql.tests.BogusType; import org.testng.annotations.Test; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.function.Consumer; import java.util.function.Function; import static io.prestosql.RowPagesBuilder.rowPagesBuilder; import static io.prestosql.client.ClientStandardTypes.BIGINT; import static io.prestosql.client.ClientStandardTypes.BOOLEAN; import static io.prestosql.client.ClientStandardTypes.INTEGER; import static io.prestosql.client.ClientStandardTypes.TIMESTAMP; import static io.prestosql.client.ClientStandardTypes.TIMESTAMP_WITH_TIME_ZONE; import static io.prestosql.server.protocol.QueryResultRows.queryResultRowsBuilder; import static org.assertj.core.api.Assertions.assertThat; import static org.testng.Assert.assertFalse; import static 
org.testng.Assert.assertTrue; import static org.testng.collections.Lists.newArrayList; public class TestQueryResultRows { private static final Function<String, Column> BOOLEAN_COLUMN = name -> new Column(name, BOOLEAN, new ClientTypeSignature(BOOLEAN)); private static final Function<String, Column> BIGINT_COLUMN = name -> new Column(name, BIGINT, new ClientTypeSignature(BIGINT)); private static final Function<String, Column> INT_COLUMN = name -> new Column(name, INTEGER, new ClientTypeSignature(INTEGER)); @Test public void shouldNotReturnValues() { QueryResultRows rows = QueryResultRows.empty(getSession()); assertThat((Iterable<? extends List<Object>>) rows).as("rows").isEmpty(); assertThat(getAllValues(rows)).hasSize(0); assertThat(rows.getColumns()).isEmpty(); assertThat(rows.iterator().hasNext()).isFalse(); } @Test public void shouldReturnSingleValue() { Column column = BOOLEAN_COLUMN.apply("_col0"); QueryResultRows rows = queryResultRowsBuilder(getSession()) .withSingleBooleanValue(column, true) .build(); assertThat((Iterable<? extends List<Object>>) rows).as("rows").isNotEmpty(); assertThat(getAllValues(rows)).hasSize(1).containsOnly(ImmutableList.of(true)); assertThat(rows.getColumns().orElseThrow()).containsOnly(column); assertThat(rows.iterator().hasNext()).isFalse(); } @Test public void shouldReturnUpdateCount() { Column column = BIGINT_COLUMN.apply("_col0"); long value = 10123; QueryResultRows rows = queryResultRowsBuilder(getSession()) .withColumnsAndTypes(ImmutableList.of(column), ImmutableList.of(BigintType.BIGINT)) .addPages(rowPagesBuilder(BigintType.BIGINT).row(value).build()) .build(); assertThat((Iterable<? 
extends List<Object>>) rows).as("rows").isNotEmpty();
        assertThat(rows.getUpdateCount()).isPresent();
        assertThat(rows.getUpdateCount().get()).isEqualTo(value);
        assertThat(getAllValues(rows)).containsExactly(ImmutableList.of(value));
        assertThat(rows.getColumns().orElseThrow()).containsOnly(column);
        // The iterator is single-use: it is exhausted once the values have been read.
        assertThat(rows.iterator()).isExhausted();
    }

    // A plain (non-update) result must report no update count while still exposing its rows.
    @Test
    public void shouldNotHaveUpdateCount()
    {
        Column column = BOOLEAN_COLUMN.apply("_col0");
        QueryResultRows rows = queryResultRowsBuilder(getSession())
                .withSingleBooleanValue(column, false)
                .build();

        assertThat((Iterable<? extends List<Object>>) rows).as("rows").isNotEmpty();
        assertThat(rows.getUpdateCount()).isEmpty();
        assertThat(rows.iterator()).hasNext();
    }

    // Values spread over multiple pages must come back in order as one continuous sequence.
    @Test
    public void shouldReadAllValuesFromMultiplePages()
    {
        List<Column> columns = ImmutableList.of(INT_COLUMN.apply("_col0"), BIGINT_COLUMN.apply("_col1"));
        List<Type> types = ImmutableList.of(IntegerType.INTEGER, BigintType.BIGINT);

        // Two pages of five rows each.
        List<Page> pages = rowPagesBuilder(types)
                .row(0, 10L)
                .row(1, 11L)
                .row(2, 12L)
                .row(3, 13L)
                .row(4, 14L)
                .pageBreak()
                .row(100, 110L)
                .row(101, 111L)
                .row(102, 112L)
                .row(103, 113L)
                .row(104, 114L)
                .build();

        TestExceptionConsumer exceptionConsumer = new TestExceptionConsumer();
        QueryResultRows rows = queryResultRowsBuilder(getSession())
                .withColumnsAndTypes(columns, types)
                .addPages(pages)
                .withExceptionConsumer(exceptionConsumer)
                .build();

        assertThat((Iterable<? extends List<Object>>) rows).as("rows").isNotEmpty();
        assertThat(rows.getTotalRowsCount()).isEqualTo(10);
        assertThat(rows.getColumns()).isEqualTo(Optional.of(columns));
        assertThat(rows.getUpdateCount()).isEmpty();

        assertThat(getAllValues(rows)).containsExactly(
                ImmutableList.of(0, 10L),
                ImmutableList.of(1, 11L),
                ImmutableList.of(2, 12L),
                ImmutableList.of(3, 13L),
                ImmutableList.of(4, 14L),
                ImmutableList.of(100, 110L),
                ImmutableList.of(101, 111L),
                ImmutableList.of(102, 112L),
                ImmutableList.of(103, 113L),
                ImmutableList.of(104, 114L));

        assertThat(exceptionConsumer.getExceptions()).isEmpty();
    }

    // Rows that fail serialization are dropped from iteration, and each failure is reported
    // to the exception consumer with its 1-based row:column position.
    @Test
    public void shouldOmitBadRows()
    {
        List<Column> columns = ImmutableList.of(BOOLEAN_COLUMN.apply("_col0"), BOOLEAN_COLUMN.apply("_col1"));
        List<Type> types = ImmutableList.of(BogusType.BOGUS, BogusType.BOGUS);

        // Per the position assertions below, every cell holding 1 fails; only the (0, 0) row survives.
        List<Page> pages = rowPagesBuilder(types)
                .row(0, 1)
                .row(0, 0)
                .row(0, 1)
                .row(1, 0)
                .row(0, 1)
                .build();

        TestExceptionConsumer exceptionConsumer = new TestExceptionConsumer();
        QueryResultRows rows = queryResultRowsBuilder(getSession())
                .withColumnsAndTypes(columns, types)
                .withExceptionConsumer(exceptionConsumer)
                .addPages(pages)
                .build();

        assertFalse(rows.isEmpty(), "rows are empty");
        // The total count still includes the bad rows; only iteration omits them.
        assertThat(rows.getTotalRowsCount()).isEqualTo(5);
        assertThat(rows.getColumns()).isEqualTo(Optional.of(columns));
        assertTrue(rows.getUpdateCount().isEmpty());

        assertThat(getAllValues(rows))
                .containsExactly(ImmutableList.of(0, 0));

        List<Throwable> exceptions = exceptionConsumer.getExceptions();

        assertThat(exceptions)
                .isNotEmpty();
        assertThat(exceptions)
                .hasSize(4);
        assertThat(exceptions.get(0))
                .isInstanceOf(PrestoException.class)
                .hasMessage("Could not serialize column '_col1' of type 'Bogus' at position 1:2")
                .hasRootCauseMessage("This is bogus exception");
        assertThat(exceptions.get(1))
                .isInstanceOf(PrestoException.class)
                .hasMessage("Could not serialize column '_col1' of type 'Bogus' at position 3:2")
                .hasRootCauseMessage("This is bogus exception");
        assertThat(exceptions.get(2))
                .isInstanceOf(PrestoException.class)
                .hasMessage("Could not serialize column '_col0' of type 'Bogus' at position 4:1")
                .hasRootCauseMessage("This is bogus exception");
        assertThat(exceptions.get(3))
                .isInstanceOf(PrestoException.class)
                .hasMessage("Could not serialize column '_col1' of type 'Bogus' at position 5:2")
                .hasRootCauseMessage("This is bogus exception");
    }

    // SQL NULLs must round-trip as Java nulls rather than being dropped or substituted.
    @Test
    public void shouldHandleNullValues()
    {
        List<Column> columns = ImmutableList.of(
                new Column("_col0", INTEGER, new ClientTypeSignature(INTEGER)),
                new Column("_col1", BOOLEAN, new ClientTypeSignature(BOOLEAN)));
        List<Type> types = ImmutableList.of(IntegerType.INTEGER, BooleanType.BOOLEAN);

        List<Page> pages = rowPagesBuilder(types)
                .row(0, null)
                .pageBreak()
                .row(1, null)
                .pageBreak()
                .row(2, true)
                .build();

        TestExceptionConsumer exceptionConsumer = new TestExceptionConsumer();
        QueryResultRows rows = queryResultRowsBuilder(getSession())
                .withColumnsAndTypes(columns, types)
                .withExceptionConsumer(exceptionConsumer)
                .addPages(pages)
                .build();

        assertFalse(rows.isEmpty(), "rows are empty");
        assertThat(rows.getTotalRowsCount()).isEqualTo(3);

        // newArrayList (not ImmutableList) because the expected rows contain nulls.
        assertThat(getAllValues(rows))
                .hasSize(3)
                .containsExactly(newArrayList(0, null), newArrayList(1, null), newArrayList(2, true));
    }

    // Null timestamps (with and without time zone) must not trip the serializer.
    @Test
    public void shouldHandleNullTimestamps()
    {
        List<Column> columns = ImmutableList.of(
                new Column("_col0", TIMESTAMP, new ClientTypeSignature(TIMESTAMP)),
                new Column("_col1", TIMESTAMP_WITH_TIME_ZONE, new ClientTypeSignature(TIMESTAMP_WITH_TIME_ZONE)));
        List<Type> types = ImmutableList.of(TimestampType.TIMESTAMP, TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE);

        List<Page> pages = rowPagesBuilder(types)
                .row(null, null)
                .build();

        TestExceptionConsumer exceptionConsumer = new TestExceptionConsumer();
        QueryResultRows rows = queryResultRowsBuilder(getSession())
                .withColumnsAndTypes(columns, types)
                .withExceptionConsumer(exceptionConsumer)
                .addPages(pages)
                .build();

        assertThat(exceptionConsumer.getExceptions()).isEmpty();
        assertFalse(rows.isEmpty(), "rows are empty");
        assertThat(rows.getTotalRowsCount()).isEqualTo(1);
        assertThat(getAllValues(rows))
                .hasSize(1)
                .containsExactly(newArrayList(null, null));
    }

    // An entirely empty result (no data, no columns) is a valid state and must not throw.
    @Test
    public void shouldNotThrowWhenDataAndColumnsAreMissing()
    {
        QueryResultRows.empty(getSession());
    }

    // Builder precondition: columns and types lists must be the same size.
    @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "columns and types size mismatch")
    public void shouldThrowWhenColumnsAndTypesSizeMismatch()
    {
        List<Column> columns = ImmutableList.of(INT_COLUMN.apply("_col0"));
        List<Type> types = ImmutableList.of(IntegerType.INTEGER, BooleanType.BOOLEAN);

        List<Page> pages = rowPagesBuilder(types)
                .row(0, null)
                .build();

        queryResultRowsBuilder(getSession())
                .addPages(pages)
                .withColumnsAndTypes(columns, types)
                .build();
    }

    // Builder precondition: columns may not be null when types are given.
    @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "columns and types must be present at the same time")
    public void shouldThrowWhenColumnsAreNull()
    {
        List<Type> types = ImmutableList.of(IntegerType.INTEGER, BooleanType.BOOLEAN);

        List<Page> pages = rowPagesBuilder(types)
                .row(0, null)
                .build();

        queryResultRowsBuilder(getSession())
                .addPages(pages)
                .withColumnsAndTypes(null, types)
                .build();
    }

    // Columns and types may however be null together (the "no metadata" case).
    @Test
    public void shouldAcceptNullColumnsAndTypes()
    {
        queryResultRowsBuilder(getSession())
                .withColumnsAndTypes(null, null)
                .build();
    }

    // Builder precondition: types may not be null when columns are given.
    @Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "columns and types must be present at the same time")
    public void shouldThrowWhenTypesAreNull()
    {
        List<Column> columns = ImmutableList.of(INT_COLUMN.apply("_col0"));
        List<Type> types = ImmutableList.of(IntegerType.INTEGER, BooleanType.BOOLEAN);

        List<Page> pages = rowPagesBuilder(types)
                .row(0, null)
                .build();

        queryResultRowsBuilder(getSession())
                .addPages(pages)
                .withColumnsAndTypes(columns, null)
                .build();
    }

    // Pages without any column/type metadata are an internal invariant violation (VerifyException).
    @Test(expectedExceptions = VerifyException.class, expectedExceptionsMessageRegExp = "data present without columns and types")
    public void shouldThrowWhenDataIsPresentWithoutColumns()
    {
        List<Page> pages = rowPagesBuilder(ImmutableList.of(IntegerType.INTEGER, BooleanType.BOOLEAN))
                .row(0, null)
                .build();

        queryResultRowsBuilder(getSession())
                .addPages(pages)
                .build();
    }

    // Drains the rows iterable into a list of rows for easy assertions.
    private static List<List<Object>> getAllValues(QueryResultRows rows)
    {
        ImmutableList.Builder<List<Object>> builder = ImmutableList.builder();

        for (List<Object> values : rows) {
            builder.add(values);
        }

        return builder.build();
    }

    private static Session getSession()
    {
        return TestingSession.testSessionBuilder()
                .build();
    }

    // Captures every exception the serializer reports, for later inspection by the tests.
    private static final class TestExceptionConsumer
            implements Consumer<Throwable>
    {
        private List<Throwable> exceptions = new ArrayList<>();

        @Override
        public void accept(Throwable throwable)
        {
            exceptions.add(throwable);
        }

        public List<Throwable> getExceptions()
        {
            return exceptions;
        }
    }
}
package com.studiogobo.fi.spatialmatchmaking.servlet;

import com.studiogobo.fi.spatialmatchmaking.model.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Groups compatible clients into match sessions.
 *
 * A client either forms a new session with another unmatched compatible client, or joins an
 * existing session whose members all accept it (requirements must pass in both directions).
 *
 * Fixes in this revision:
 *  - removed a duplicated {@code existingMatch.clients = updatedClients;} assignment
 *  - array growth now uses {@link Arrays#copyOf} instead of a manual copy loop
 *  - null guards where a match or client record can disappear concurrently; previously
 *    VerifyMatch could NPE by calling RemoveClientFromMatch for an already-deleted client
 *  - VerifyMatch stops validating a match once it has been removed
 */
public class Matchmaker {
    // Number of clients needed to form a session.
    private static final int WANTED_CLIENTS = 2;

    // Clients idle longer than this many milliseconds are expired during matching.
    private static final long IDLE_EXPIRY_MILLIS = 60000;

    private void Log(String message) {
        System.out.println(" MM: " + message);
    }

    /**
     * Attempts to place client {@code id} into a match session. No-op when the client is
     * unknown, already matched, or inactive. Also expires idle clients encountered during
     * the scan.
     */
    public void UpdateClient(int id) {
        Log("UpdateClient(" + id + ")");
        ServletClientRecord primaryClientRecord = clientData.get(id);
        if (primaryClientRecord == null) {
            Log("client id " + id + " not found");
            return;
        }
        if (primaryClientRecord.match_id != 0) {
            Log("client id " + id + " already matched");
            return;
        }
        if (!primaryClientRecord.active) {
            Log("client id " + id + " is not active. Return");
            return;
        }

        // Start with a list containing only the client we're trying to match
        ArrayList<ServletClientRecord> foundClients = new ArrayList<ServletClientRecord>();
        foundClients.add(primaryClientRecord);

        // Find compatible clients and add them to the list
        for (ServletClientRecord record : clientData.values()) {
            // Expire clients who have been idle for a long time
            if (record.AgeMillis() > IDLE_EXPIRY_MILLIS) {
                Log("client " + record.clientRecord.id + " has been idle for a long time. Delete");
                DeleteClient(record.clientRecord.id, true);
                continue;
            }

            // Ignore the client we're trying to match
            if (record.clientRecord.id == id)
                continue;

            // Ignore incompatible clients (requirements must pass in both directions)
            if (!primaryClientRecord.RequirementsPass(record) || !record.RequirementsPass(primaryClientRecord)) {
                continue;
            }

            // If the candidate already belongs to a match session, only consider it when <id>
            // passes the requirements of every member of that session (and vice versa).
            if (record.match_id != 0) {
                MatchRecord match = matchData.get(record.match_id);
                if (match == null)
                    continue; // FIX: session vanished concurrently; previously an NPE
                boolean addRecord = true;
                for (int clientId : match.clients) {
                    ServletClientRecord recordMatches = clientData.get(clientId);
                    // FIX: null guard — a member record may have been removed concurrently
                    if (recordMatches == null
                            || !primaryClientRecord.RequirementsPass(recordMatches)
                            || !recordMatches.RequirementsPass(primaryClientRecord)) {
                        Log(" " + id + " does not pass the requirements of " + clientId + "(or vice versa)");
                        addRecord = false;
                    }
                }
                if (!addRecord)
                    continue;
            }

            foundClients.add(record);

            // Stop looking if we've found enough clients now.
            //
            // If finding compatible clients is a slow process, some may have quit by the time we finish. In that
            // case, we should verify that the foundClients are still valid, in the loop, before breaking out. So we
            // should be able to be quite confident that the client list is fairly reliable when we leave the loop.
            if (foundClients.size() == WANTED_CLIENTS)
                break;
        }

        if (foundClients.size() == WANTED_CLIENTS) {
            ServletClientRecord host = foundClients.get(foundClients.size() - 1);
            if (host.match_id == 0) { // we are creating a new match record
                // Make a list of client IDs
                int[] clientIdList = new int[foundClients.size()];
                for (int i = 0; i < foundClients.size(); ++i)
                    clientIdList[i] = foundClients.get(i).clientRecord.id;

                // Create a MatchRecord
                MatchRecord match = new MatchRecord(lastMatchId.incrementAndGet(), clientIdList);
                matchData.put(match.id, match);
                Log(" new match id " + match.id);

                // Mark these clients as part of the session.
                for (ServletClientRecord record : foundClients) {
                    Log(" client " + record.clientRecord.id);
                    record.match_id = match.id;
                    // Signal anybody watching the record to say that it has changed
                    record.waitUntilMatched.countDown();
                }
            }
            else { // we are appending to a previously existing match record
                Log(" reusing old match id " + host.match_id);
                MatchRecord existingMatch = matchData.get(host.match_id);
                if (existingMatch == null)
                    return; // FIX: session vanished concurrently; previously an NPE

                // FIX: grow the array with Arrays.copyOf; the manual copy loop and the
                // duplicated `existingMatch.clients = updatedClients;` assignment are gone.
                int[] updatedClients = Arrays.copyOf(existingMatch.clients, existingMatch.clients.length + 1);
                updatedClients[updatedClients.length - 1] = id; // append the current client
                existingMatch.clients = updatedClients;
                matchData.replace(host.match_id, existingMatch);

                // mark the <id> client as part of host.match_id session
                primaryClientRecord.match_id = host.match_id;

                for (int clientId : updatedClients) {
                    Log(" client " + clientId);
                    ServletClientRecord record = clientData.get(clientId);
                    if (record != null) // FIX: null guard for concurrently removed members
                        record.waitUntilMatched.countDown(); // Signal anybody watching the record
                }
            }
        }
    }

    /**
     * Re-validates match {@code id}: every member must still pass every other member's
     * requirements. Cancels the whole match (2-member sessions) or evicts the offending
     * non-host member otherwise.
     *
     * @param id match id
     */
    public void VerifyMatch(int id) {
        MatchRecord match = matchData.get(id);
        if (match == null)
            return;

        // Check all the clients are still compatible with the match, and cancel the match if any are unhappy
        for (int clientId : match.clients) {
            ServletClientRecord client = clientData.get(clientId);
            if (client == null) {
                if (match.clients.length == 2)
                    RemoveMatch(id);
                else
                    RemoveClientFromMatch(id, clientId);
                return;
            }
            for (int otherClientId : match.clients) {
                ServletClientRecord otherClient = clientData.get(otherClientId);
                if (!client.RequirementsPass(otherClient)) {
                    if (match.clients.length == 2) {
                        RemoveMatch(id);
                        return; // FIX: the match is gone — stop validating it
                    }
                    else if (clientId == match.clients[0]) // don't delete the host, delete the other client instead
                        RemoveClientFromMatch(id, otherClientId);
                    else
                        RemoveClientFromMatch(id, clientId);
                }
            }
        }
    }

    /**
     * Removes one client from a match record. This is different from RemoveMatch() in that the
     * match record itself is kept; only the client id is dropped from its clients array. The
     * removed client is then re-queued for matching.
     */
    public void RemoveClientFromMatch(int matchId, int clientId) {
        MatchRecord match = matchData.get(matchId);
        if (match == null)
            return; // FIX: match already removed (e.g. by VerifyMatch); previously an NPE

        Log(" Remove client (" + clientId + ") from match " + matchId);

        // Copy all member ids except the one being removed.
        int[] newClients = new int[match.clients.length - 1];
        int index = 0;
        for (int i = 0; i < match.clients.length; i++) {
            if (match.clients[i] != clientId) {
                newClients[index] = match.clients[i];
                index++;
            }
        }
        match.clients = newClients;
        matchData.replace(matchId, match); // replace the old matchRecord, with the new one (one less client)

        // Remove the match reference from this client record
        ServletClientRecord client = clientData.get(clientId);
        if (client != null && client.match_id == matchId) { // FIX: client may already be gone; previously an NPE
            client.ClearMatch();
            if (client.deleted)
                clientData.remove(clientId);
        }

        UpdateClient(clientId); // Search again for a match — the client may fit a different host/session
    }

    /**
     * Removes the match record from the map, clears the back-reference on each member record,
     * and re-queues the members for matching.
     */
    public void RemoveMatch(int id) {
        Log(" Remove match " + id);
        MatchRecord match = matchData.remove(id);
        if (match == null)
            return;

        // Remove the match reference from the client records
        for (int clientId : match.clients) {
            ServletClientRecord client = clientData.get(clientId);
            if (client == null)
                continue;
            if (client.match_id == id) {
                client.ClearMatch();
                // NOTE(review): with clients no longer deleted upon connecting, this may always be false
                if (client.deleted)
                    clientData.remove(clientId);
            }
        }

        // Search again for matches for these clients
        for (int clientId : match.clients) {
            UpdateClient(clientId);
        }
    }

    /**
     * Deletes a client from the service.
     *
     * removeFromMatch is set only to "true" after clicking the "quit" button in the Unity example.
     * If a client sets removeFromMatch=true, the match record is kept in the map and only the
     * client's id is dropped from its clients array. Deleting the host deletes the whole match
     * record, regardless of removeFromMatch.
     */
    public void DeleteClient(int id, boolean removeFromMatch) {
        // clientData will not contain <id>, for example, when the host leaves the session, deleting the
        // match record and clients in the process; a client of this previous session then quits the
        // service, but as far as the code (and clientData) is concerned, it has already been deleted
        if (clientData.containsKey(id)) {
            final ServletClientRecord client = clientData.get(id);
            client.deleted = true;
            if (client.match_id == 0) {
                clientData.remove(id);
            }
            else {
                MatchRecord match = GetMatchRecord(client.match_id);
                if (match == null) {
                    clientData.remove(id);
                    return;
                }
                // The session can be torn down once every remaining member is marked deleted.
                boolean okToDelete = true;
                for (int clientId : match.clients) {
                    ServletClientRecord otherClient = clientData.get(clientId);
                    if (otherClient != null) {
                        if (!otherClient.deleted)
                            okToDelete = false;
                    }
                }
                // Deleting the host (clients[0]) always tears down the whole session.
                if (okToDelete || match.clients[0] == id) {
                    for (int clientId : match.clients) {
                        clientData.remove(clientId);
                    }
                    RemoveMatch(client.match_id);
                }
                // FIX: reuse the already-fetched (and null-checked) match instead of a second lookup
                else if (removeFromMatch && match.clients.length > 1)
                    RemoveClientFromMatch(client.match_id, id);
            }
        }
    }

    /** @param data shared client-id -> record map owned by the servlet layer */
    public Matchmaker(ConcurrentHashMap<Integer, ServletClientRecord> data) {
        clientData = data;
    }

    public MatchRecord GetMatchRecord(int id) {
        return matchData.get(id);
    }

    public int NumMatches() {
        return matchData.size();
    }

    // Shared with the servlet layer; maps client id -> record.
    private ConcurrentHashMap<Integer, ServletClientRecord> clientData;
    // Maps match id -> record; owned by this class.
    private ConcurrentHashMap<Integer, MatchRecord> matchData = new ConcurrentHashMap<Integer, MatchRecord>();
    private AtomicInteger lastMatchId = new AtomicInteger();
}
package seedu.todolist.model;

import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import javafx.collections.ObservableList;
import seedu.todolist.commons.core.UnmodifiableObservableList;
import seedu.todolist.model.tag.Tag;
import seedu.todolist.model.tag.UniqueTagList;
import seedu.todolist.model.todo.ReadOnlyTodo;
import seedu.todolist.model.todo.Todo;
import seedu.todolist.model.todo.UniqueTodoList;
import seedu.todolist.model.todo.UniqueTodoList.DuplicateTodoException;
import seedu.todolist.model.todo.UniqueTodoList.TodoNotFoundException;

/**
 * Wraps all data at the todolist level.
 * Duplicates are not allowed (by .equals comparison).
 */
public class TodoList implements ReadOnlyTodoList {

    // Master list of todos; rejects duplicates.
    private final UniqueTodoList todos;
    // Master list of every tag referenced by any todo; kept in sync via syncMasterTagListWith.
    private final UniqueTagList tags;

    /*
     * The 'unusual' code block below is an non-static initialization block, sometimes used to avoid duplication
     * between constructors. See https://docs.oracle.com/javase/tutorial/java/javaOO/initial.html
     *
     * Note that non-static init blocks are not recommended to use. There are other ways to avoid duplication
     * among constructors.
     */
    {
        todos = new UniqueTodoList();
        tags = new UniqueTagList();
    }

    public TodoList() {}

    /**
     * Creates an TodoList using the todos and Tags in the {@code toBeCopied}
     */
    public TodoList(ReadOnlyTodoList toBeCopied) {
        this();
        resetData(toBeCopied);
    }

    /**
     * Returns the todo at {@code index} in the internal list.
     *
     * @throws TodoNotFoundException if no todo exists at that index.
     */
    public Todo getTodo(int index) throws TodoNotFoundException {
        return this.todos.getTodo(index);
    }

//// list overwrite operations

    /**
     * Replaces the entire todo list with {@code todos}.
     * NOTE(review): lowercase method name "settodos" breaks naming convention — presumably
     * historical; kept as-is because it is part of the public API.
     */
    public void settodos(List<? extends ReadOnlyTodo> todos)
            throws UniqueTodoList.DuplicateTodoException {
        this.todos.setTodos(todos);
    }

    /** Replaces the entire tag list with {@code tags}. */
    public void setTags(Collection<Tag> tags) throws UniqueTagList.DuplicateTagException {
        this.tags.setTags(tags);
    }

    /**
     * Resets the existing data of this {@code TodoList} with {@code newData}.
     * Duplicates in {@code newData} indicate a programming error (guarded by asserts).
     */
    public void resetData(ReadOnlyTodoList newData) {
        assert newData != null;
        try {
            settodos(newData.getTodoList());
        } catch (UniqueTodoList.DuplicateTodoException e) {
            assert false : "TodoLists should not have duplicate todos";
        }
        try {
            setTags(newData.getTagList());
        } catch (UniqueTagList.DuplicateTagException e) {
            assert false : "TodoLists should not have duplicate tags";
        }
        // Re-point every todo's tags at the master-list Tag objects.
        syncMasterTagListWith(todos);
    }

//// todo-level operations

    /**
     * Adds a todo to the todo list.
     * Also checks the new todo's tags and updates {@link #tags} with any new tags found,
     * and updates the Tag objects in the todo to point to those in {@link #tags}.
     *
     * @throws UniqueTodoList.DuplicateTodoException if an equivalent todo already exists.
     */
    public void addTodo(Todo p) throws UniqueTodoList.DuplicateTodoException {
        syncMasterTagListWith(p);
        todos.add(p);
    }

    //@@author A0165043M
    /**
     * Updates the todo in the list at position {@code index} with {@code editedReadOnlyTodo}.
     * {@code TodoList}'s tag list will be updated with the tags of {@code editedReadOnlyTodo}.
     * @see #syncMasterTagListWith(Todo)
     *
     * @throws DuplicateTodoException if updating the todo's details causes the todo to be equivalent to
     *      another existing todo in the list.
     * @throws IndexOutOfBoundsException if {@code index} < 0 or >= the size of the list.
     */
    public void updateTodo(int index, ReadOnlyTodo editedReadOnlyTodo)
            throws UniqueTodoList.DuplicateTodoException {
        assert editedReadOnlyTodo != null;

        Todo editedTodo = createEditedTodoWithTime(editedReadOnlyTodo, todos.asObservableList().get(index));
        syncMasterTagListWith(editedTodo);
        // Todo: the tags master list will be updated even though the below line fails.
        // This can cause the tags master list to have additional tags that are not tagged to any todo
        // in the todo list.
        todos.updateTodo(index, editedTodo);
    }
    //@@author

    //@@author A0163786N
    /**
     * Completes the todo in the list at position {@code index} with {@code completeTime}.
     */
    public void completeTodo(int index, Date completeTime) {
        todos.completeTodo(index, completeTime);
    }
    //@@author

    //@@author A0163786N
    /**
     * Uncompletes the todo in the list at position {@code index}.
     */
    public void uncompleteTodo(int index) {
        todos.uncompleteTodo(index);
    }
    //@@author

    //@@author A0165043M
    /**
     * Returns a Todo with the proper times carried over.
     * If the original todo already has time(s) and the edited todo does not include any,
     * the original's times are preserved instead of being replaced with empty ones.
     *
     * @param editedReadOnlyTodo the incoming edit (name/tags always win; times may be absent)
     * @param originalTodo the todo currently stored at the edited position
     * @return Todo combining the edit with the original's times where needed
     */
    private Todo createEditedTodoWithTime(ReadOnlyTodo editedReadOnlyTodo, Todo originalTodo) {
        Todo editedTodo = null;
        if (editedReadOnlyTodo.getStartTime() != null && editedReadOnlyTodo.getEndTime() != null) {
            // Edit supplies both start and end time: use them as-is.
            editedTodo = new Todo(editedReadOnlyTodo.getName(), editedReadOnlyTodo.getStartTime(),
                    editedReadOnlyTodo.getEndTime(), editedReadOnlyTodo.getTags());
        } else if (editedReadOnlyTodo.getStartTime() == null && editedReadOnlyTodo.getEndTime() != null) {
            // Edit supplies only an end time.
            editedTodo = new Todo(editedReadOnlyTodo.getName(),
                    editedReadOnlyTodo.getEndTime(), editedReadOnlyTodo.getTags());
        } else if (originalTodo.getStartTime() != null && originalTodo.getEndTime() != null) {
            // Edit has no times: keep the original's start and end time.
            editedTodo = new Todo(editedReadOnlyTodo.getName(), originalTodo.getStartTime(),
                    originalTodo.getEndTime(), editedReadOnlyTodo.getTags());
        } else if (originalTodo.getStartTime() == null && originalTodo.getEndTime() != null) {
            // Edit has no times: keep the original's end time.
            editedTodo = new Todo(editedReadOnlyTodo.getName(),
                    originalTodo.getEndTime(), editedReadOnlyTodo.getTags());
        } else {
            // Neither has any time.
            editedTodo = new Todo(editedReadOnlyTodo);
        }
        return editedTodo;
    }
    //@@author

    /**
     * Ensures that every tag in this todo:
     *  - exists in the master list {@link #tags}
     *  - points to a Tag object in the master list
     */
    private void syncMasterTagListWith(Todo todo) {
        final UniqueTagList todoTags = todo.getTags();
        tags.mergeFrom(todoTags);

        // Create map with values = tag object references in the master list
        // used for checking todo tag references
        final Map<Tag, Tag> masterTagObjects = new HashMap<>();
        tags.forEach(tag -> masterTagObjects.put(tag, tag));

        // Rebuild the list of todo tags to point to the relevant tags in the master tag list.
        final Set<Tag> correctTagReferences = new HashSet<>();
        todoTags.forEach(tag -> correctTagReferences.add(masterTagObjects.get(tag)));
        todo.setTags(new UniqueTagList(correctTagReferences));
    }

    /**
     * Ensures that every tag in these todos:
     *  - exists in the master list {@link #tags}
     *  - points to a Tag object in the master list
     * @see #syncMasterTagListWith(Todo)
     */
    private void syncMasterTagListWith(UniqueTodoList todos) {
        todos.forEach(this::syncMasterTagListWith);
    }

    /**
     * Removes {@code key} from this list.
     *
     * @throws UniqueTodoList.TodoNotFoundException if the todo is not in the list.
     */
    public boolean removeTodo(ReadOnlyTodo key) throws UniqueTodoList.TodoNotFoundException {
        if (todos.remove(key)) {
            return true;
        } else {
            throw new UniqueTodoList.TodoNotFoundException();
        }
    }

//// tag-level operations

    public void addTag(Tag t) throws UniqueTagList.DuplicateTagException {
        tags.add(t);
    }

//// util methods

    @Override
    public String toString() {
        return todos.asObservableList().size() + " todos, " + tags.asObservableList().size() + " tags";
        // Todo: refine later
    }

    @Override
    public ObservableList<ReadOnlyTodo> getTodoList() {
        return new UnmodifiableObservableList<>(todos.asObservableList());
    }

    @Override
    public ObservableList<Tag> getTagList() {
        return new UnmodifiableObservableList<>(tags.asObservableList());
    }

    @Override
    public boolean equals(Object other) {
        return other == this // short circuit if same object
                || (other instanceof TodoList // instanceof handles nulls
                && this.todos.equals(((TodoList) other).todos)
                && this.tags.equalsOrderInsensitive(((TodoList) other).tags));
    }

    @Override
    public int hashCode() {
        // use this method for custom fields hashing instead of implementing your own
        return Objects.hash(todos, tags);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pinot.controller; import com.google.common.base.Preconditions; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Random; import org.apache.commons.configuration.Configuration; import org.apache.helix.controller.rebalancer.strategy.AutoRebalanceStrategy; import org.apache.pinot.common.protocols.SegmentCompletionProtocol; import org.apache.pinot.common.utils.CommonConstants; import org.apache.pinot.spi.env.PinotConfiguration; import org.apache.pinot.spi.filesystem.LocalPinotFS; import org.apache.pinot.spi.utils.TimeUtils; import static org.apache.pinot.common.utils.CommonConstants.Controller.CONFIG_OF_CONTROLLER_METRICS_PREFIX; import static org.apache.pinot.common.utils.CommonConstants.Controller.DEFAULT_METRICS_PREFIX; public class ControllerConf extends PinotConfiguration { public static final List<String> SUPPORTED_PROTOCOLS = Arrays.asList( CommonConstants.HTTP_PROTOCOL, CommonConstants.HTTPS_PROTOCOL); public static final String CONTROLLER_VIP_HOST = "controller.vip.host"; public static final String CONTROLLER_VIP_PORT = "controller.vip.port"; public static final String 
CONTROLLER_VIP_PROTOCOL = "controller.vip.protocol"; public static final String CONTROLLER_BROKER_PROTOCOL = "controller.broker.protocol"; public static final String CONTROLLER_BROKER_PORT_OVERRIDE = "controller.broker.port.override"; public static final String CONTROLLER_BROKER_TLS_PREFIX = "controller.broker.tls"; public static final String CONTROLLER_TLS_PREFIX = "controller.tls"; public static final String CONTROLLER_HOST = "controller.host"; public static final String CONTROLLER_PORT = "controller.port"; public static final String CONTROLLER_ACCESS_PROTOCOLS = "controller.access.protocols"; public static final String DATA_DIR = "controller.data.dir"; // Potentially same as data dir if local public static final String LOCAL_TEMP_DIR = "controller.local.temp.dir"; public static final String ZK_STR = "controller.zk.str"; // boolean: Update the statemodel on boot? public static final String UPDATE_SEGMENT_STATE_MODEL = "controller.update_segment_state_model"; public static final String HELIX_CLUSTER_NAME = "controller.helix.cluster.name"; public static final String CLUSTER_TENANT_ISOLATION_ENABLE = "cluster.tenant.isolation.enable"; public static final String CONSOLE_WEBAPP_ROOT_PATH = "controller.query.console"; public static final String EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT = "controller.upload.onlineToOfflineTimeout"; public static final String CONTROLLER_MODE = "controller.mode"; public static final String LEAD_CONTROLLER_RESOURCE_REBALANCE_STRATEGY = "controller.resource.rebalance.strategy"; public enum ControllerMode { DUAL, PINOT_ONLY, HELIX_ONLY } public static class ControllerPeriodicTasksConf { // frequency configs public static final String RETENTION_MANAGER_FREQUENCY_IN_SECONDS = "controller.retention.frequencyInSeconds"; @Deprecated // The ValidationManager has been split up into 3 separate tasks, each having their own frequency config settings public static final String DEPRECATED_VALIDATION_MANAGER_FREQUENCY_IN_SECONDS = 
"controller.validation.frequencyInSeconds"; public static final String OFFLINE_SEGMENT_INTERVAL_CHECKER_FREQUENCY_IN_SECONDS = "controller.offline.segment.interval.checker.frequencyInSeconds"; public static final String REALTIME_SEGMENT_VALIDATION_FREQUENCY_IN_SECONDS = "controller.realtime.segment.validation.frequencyInSeconds"; public static final String BROKER_RESOURCE_VALIDATION_FREQUENCY_IN_SECONDS = "controller.broker.resource.validation.frequencyInSeconds"; public static final String BROKER_RESOURCE_VALIDATION_INITIAL_DELAY_IN_SECONDS = "controller.broker.resource.validation.initialDelayInSeconds"; public static final String STATUS_CHECKER_FREQUENCY_IN_SECONDS = "controller.statuschecker.frequencyInSeconds"; public static final String STATUS_CHECKER_WAIT_FOR_PUSH_TIME_IN_SECONDS = "controller.statuschecker.waitForPushTimeInSeconds"; public static final String TASK_MANAGER_FREQUENCY_IN_SECONDS = "controller.task.frequencyInSeconds"; public static final String MINION_INSTANCES_CLEANUP_TASK_FREQUENCY_IN_SECONDS = "controller.minion.instances.cleanup.task.frequencyInSeconds"; public static final String MINION_INSTANCES_CLEANUP_TASK_INITIAL_DELAY_SECONDS = "controller.minion.instances.cleanup.task.initialDelaySeconds"; public static final String PINOT_TASK_MANAGER_SCHEDULER_ENABLED = "controller.task.scheduler.enabled"; @Deprecated // RealtimeSegmentRelocator has been rebranded as SegmentRelocator public static final String DEPRECATED_REALTIME_SEGMENT_RELOCATOR_FREQUENCY = "controller.realtime.segment.relocator.frequency"; public static final String SEGMENT_RELOCATOR_FREQUENCY_IN_SECONDS = "controller.segment.relocator.frequencyInSeconds"; // Because segment level validation is expensive and requires heavy ZK access, we run segment level validation with a // separate interval public static final String SEGMENT_LEVEL_VALIDATION_INTERVAL_IN_SECONDS = "controller.segment.level.validation.intervalInSeconds"; // Initial delays public static final String 
STATUS_CHECKER_INITIAL_DELAY_IN_SECONDS = "controller.statusChecker.initialDelayInSeconds"; public static final String RETENTION_MANAGER_INITIAL_DELAY_IN_SECONDS = "controller.retentionManager.initialDelayInSeconds"; public static final String OFFLINE_SEGMENT_INTERVAL_CHECKER_INITIAL_DELAY_IN_SECONDS = "controller.offlineSegmentIntervalChecker.initialDelayInSeconds"; @Deprecated // RealtimeSegmentRelocator has been rebranded as SegmentRelocator public static final String DEPRECATED_REALTIME_SEGMENT_RELOCATION_INITIAL_DELAY_IN_SECONDS = "controller.realtimeSegmentRelocation.initialDelayInSeconds"; public static final String SEGMENT_RELOCATOR_INITIAL_DELAY_IN_SECONDS = "controller.segmentRelocator.initialDelayInSeconds"; public static final int MIN_INITIAL_DELAY_IN_SECONDS = 120; public static final int MAX_INITIAL_DELAY_IN_SECONDS = 300; private static final Random RANDOM = new Random(); private static long getRandomInitialDelayInSeconds() { return MIN_INITIAL_DELAY_IN_SECONDS + RANDOM.nextInt(MAX_INITIAL_DELAY_IN_SECONDS - MIN_INITIAL_DELAY_IN_SECONDS); } // Default values private static final int DEFAULT_RETENTION_CONTROLLER_FREQUENCY_IN_SECONDS = 6 * 60 * 60; // 6 Hours. private static final int DEFAULT_OFFLINE_SEGMENT_INTERVAL_CHECKER_FREQUENCY_IN_SECONDS = 24 * 60 * 60; // 24 Hours. private static final int DEFAULT_REALTIME_SEGMENT_VALIDATION_FREQUENCY_IN_SECONDS = 60 * 60; // 1 Hour. private static final int DEFAULT_BROKER_RESOURCE_VALIDATION_FREQUENCY_IN_SECONDS = 60 * 60; // 1 Hour. private static final int DEFAULT_STATUS_CONTROLLER_FREQUENCY_IN_SECONDS = 5 * 60; // 5 minutes private static final int DEFAULT_STATUS_CONTROLLER_WAIT_FOR_PUSH_TIME_IN_SECONDS = 10 * 60; // 10 minutes private static final int DEFAULT_TASK_MANAGER_FREQUENCY_IN_SECONDS = -1; // Disabled private static final int DEFAULT_MINION_INSTANCES_CLEANUP_TASK_FREQUENCY_IN_SECONDS = 60 * 60; // 1 Hour. 
private static final int DEFAULT_SEGMENT_LEVEL_VALIDATION_INTERVAL_IN_SECONDS = 24 * 60 * 60; private static final int DEFAULT_SEGMENT_RELOCATOR_FREQUENCY_IN_SECONDS = 60 * 60; } private static final String SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS = "server.request.timeoutSeconds"; private static final String SEGMENT_COMMIT_TIMEOUT_SECONDS = "controller.realtime.segment.commit.timeoutSeconds"; private static final String DELETED_SEGMENTS_RETENTION_IN_DAYS = "controller.deleted.segments.retentionInDays"; public static final String TABLE_MIN_REPLICAS = "table.minReplicas"; public static final String ENABLE_SPLIT_COMMIT = "controller.enable.split.commit"; private static final String JERSEY_ADMIN_API_PORT = "jersey.admin.api.port"; private static final String JERSEY_ADMIN_IS_PRIMARY = "jersey.admin.isprimary"; public static final String ACCESS_CONTROL_FACTORY_CLASS = "controller.admin.access.control.factory.class"; // Amount of the time the segment can take from the beginning of upload to the end of upload. Used when parallel push // protection is enabled. If the upload does not finish within the timeout, next upload can override the previous one. private static final String SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS = "controller.segment.upload.timeoutInMillis"; private static final String REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS = "controller.realtime.segment.metadata.commit.numLocks"; private static final String ENABLE_STORAGE_QUOTA_CHECK = "controller.enable.storage.quota.check"; private static final String ENABLE_BATCH_MESSAGE_MODE = "controller.enable.batch.message.mode"; // It is used to disable the HLC realtime segment completion and disallow HLC table in the cluster. True by default. // If it's set to false, existing HLC realtime tables will stop consumption, and creation of new HLC tables will be disallowed. // Please make sure there is no HLC table running in the cluster before disallowing it. 
public static final String ALLOW_HLC_TABLES = "controller.allow.hlc.tables";
public static final String DIM_TABLE_MAX_SIZE = "controller.dimTable.maxSize";
// Defines the kind of storage and the underlying PinotFS implementation
private static final String PINOT_FS_FACTORY_CLASS_LOCAL = "controller.storage.factory.class.file";

// ---- Default values for the keys above ----
private static final long DEFAULT_EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT_MILLIS = 120_000L; // 2 minutes
private static final int DEFAULT_SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS = 30;
private static final int DEFAULT_DELETED_SEGMENTS_RETENTION_IN_DAYS = 7;
private static final int DEFAULT_TABLE_MIN_REPLICAS = 1;
private static final boolean DEFAULT_ENABLE_SPLIT_COMMIT = false;
private static final int DEFAULT_JERSEY_ADMIN_PORT = 21000;
private static final String DEFAULT_ACCESS_CONTROL_FACTORY_CLASS =
    "org.apache.pinot.controller.api.access.AllowAllAccessFactory";
private static final long DEFAULT_SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS = 600_000L; // 10 minutes
private static final int DEFAULT_REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS = 64;
private static final boolean DEFAULT_ENABLE_STORAGE_QUOTA_CHECK = true;
private static final boolean DEFAULT_ENABLE_BATCH_MESSAGE_MODE = false;
private static final boolean DEFAULT_ALLOW_HLC_TABLES = true;
private static final String DEFAULT_CONTROLLER_MODE = ControllerMode.DUAL.name();
private static final String DEFAULT_LEAD_CONTROLLER_RESOURCE_REBALANCE_STRATEGY =
    AutoRebalanceStrategy.class.getName();
private static final String DEFAULT_DIM_TABLE_MAX_SIZE = "200M";
private static final String DEFAULT_PINOT_FS_FACTORY_CLASS_LOCAL = LocalPinotFS.class.getName();

/** Creates an empty configuration backed by a fresh map. */
public ControllerConf() {
  super(new HashMap<>());
}

/** Creates a configuration seeded from the given property map. */
public ControllerConf(Map<String, Object> baseProperties) {
  super(baseProperties);
}

/** Creates a configuration wrapping an existing Commons Configuration. */
public ControllerConf(Configuration baseConfiguration) {
  super(baseConfiguration);
}

public void setLocalTempDir(String localTempDir) {
  setProperty(LOCAL_TEMP_DIR, localTempDir);
}

public String getLocalTempDir() {
  return getProperty(LOCAL_TEMP_DIR);
}

/**
 * Registers PinotFS factory classes. Always installs the local-FS default first, then copies
 * every entry of the supplied configuration (which may override the default).
 */
public void setPinotFSFactoryClasses(Configuration pinotFSFactoryClasses) {
  setProperty(PINOT_FS_FACTORY_CLASS_LOCAL, DEFAULT_PINOT_FS_FACTORY_CLASS_LOCAL);
  if (pinotFSFactoryClasses != null) {
    pinotFSFactoryClasses.getKeys()
        .forEachRemaining(key -> setProperty((String) key, pinotFSFactoryClasses.getProperty((String) key)));
  }
}

public void setSplitCommit(boolean isSplitCommit) {
  setProperty(ENABLE_SPLIT_COMMIT, isSplitCommit);
}

public void setQueryConsolePath(String path) {
  setProperty(CONSOLE_WEBAPP_ROOT_PATH, path);
}

// Falls back to the bundled "webapp" classpath resource when no path is configured.
// NOTE(review): getResource("webapp") may return null if the resource is missing — would NPE here.
public String getQueryConsoleWebappPath() {
  return Optional.ofNullable(getProperty(CONSOLE_WEBAPP_ROOT_PATH))
      .orElseGet(() -> ControllerConf.class.getClassLoader().getResource("webapp").toExternalForm());
}

public void setJerseyAdminPrimary(String jerseyAdminPrimary) {
  setProperty(JERSEY_ADMIN_IS_PRIMARY, jerseyAdminPrimary);
}

public void setHelixClusterName(String clusterName) {
  setProperty(HELIX_CLUSTER_NAME, clusterName);
}

public void setControllerHost(String host) {
  setProperty(CONTROLLER_HOST, host);
}

public void setControllerVipHost(String vipHost) {
  setProperty(CONTROLLER_VIP_HOST, vipHost);
}

public void setControllerVipPort(String vipPort) {
  setProperty(CONTROLLER_VIP_PORT, vipPort);
}

public void setControllerVipProtocol(String vipProtocol) {
  setProperty(CONTROLLER_VIP_PROTOCOL, vipProtocol);
}

public void setControllerBrokerProtocol(String protocol) {
  setProperty(CONTROLLER_BROKER_PROTOCOL, protocol);
}

public void setControllerPort(String port) {
  setProperty(CONTROLLER_PORT, port);
}

public void setDataDir(String dataDir) {
  setProperty(DATA_DIR, dataDir);
}

public void setRealtimeSegmentCommitTimeoutSeconds(int timeoutSec) {
  setProperty(SEGMENT_COMMIT_TIMEOUT_SECONDS, Integer.toString(timeoutSec));
}

public void setUpdateSegmentStateModel(String updateStateModel) {
  setProperty(UPDATE_SEGMENT_STATE_MODEL, updateStateModel);
}

public void setZkStr(String zkStr) {
  setProperty(ZK_STR, zkStr);
}
public void setDimTableMaxSize(String size) {
  setProperty(DIM_TABLE_MAX_SIZE, size);
}

public String getDimTableMaxSize() {
  return getProperty(DIM_TABLE_MAX_SIZE, DEFAULT_DIM_TABLE_MAX_SIZE);
}

// Whether the Jersey API should be the primary admin API.
// NOTE(review): an older comment here claimed the default was false, but the code defaults to true.
public boolean isJerseyAdminPrimary() {
  return getProperty(JERSEY_ADMIN_IS_PRIMARY, true);
}

public String getHelixClusterName() {
  return getProperty(HELIX_CLUSTER_NAME);
}

public String getControllerHost() {
  return getProperty(CONTROLLER_HOST);
}

public String getControllerPort() {
  return getProperty(CONTROLLER_PORT);
}

// Defaults to plain "http" only when the legacy controller.port property is unset.
public List<String> getControllerAccessProtocols() {
  return getProperty(CONTROLLER_ACCESS_PROTOCOLS,
      getControllerPort() == null ? Arrays.asList("http") : Arrays.asList());
}

// Reads "<protocols-key>.<protocol>.<property>"; returns null when absent.
public String getControllerAccessProtocolProperty(String protocol, String property) {
  return getProperty(CONTROLLER_ACCESS_PROTOCOLS + "." + protocol + "." + property);
}

public String getControllerAccessProtocolProperty(String protocol, String property, String defaultValue) {
  return getProperty(CONTROLLER_ACCESS_PROTOCOLS + "." + protocol + "." + property, defaultValue);
}

public boolean getControllerAccessProtocolProperty(String protocol, String property, boolean defaultValue) {
  return getProperty(CONTROLLER_ACCESS_PROTOCOLS + "." + protocol + "." + property, defaultValue);
}

public String getDataDir() {
  return getProperty(DATA_DIR);
}

public int getSegmentCommitTimeoutSeconds() {
  return getProperty(SEGMENT_COMMIT_TIMEOUT_SECONDS,
      SegmentCompletionProtocol.getDefaultMaxSegmentCommitTimeSeconds());
}

public boolean isUpdateSegmentStateModel() {
  return getProperty(UPDATE_SEGMENT_STATE_MODEL, false);
}

/** Builds the VIP URL as "protocol://host:port" from the VIP settings (with their fallbacks). */
public String generateVipUrl() {
  return getControllerVipProtocol() + "://" + getControllerVipHost() + ":" + getControllerVipPort();
}

public String getZkStr() {
  return getProperty(ZK_STR);
}

@Override
public String toString() {
  return super.toString();
}

public boolean getAcceptSplitCommit() {
  return getProperty(ENABLE_SPLIT_COMMIT, DEFAULT_ENABLE_SPLIT_COMMIT);
}

// VIP host falls back to the controller host when unset or empty.
public String getControllerVipHost() {
  return Optional.ofNullable(getProperty(CONTROLLER_VIP_HOST))
      .filter(controllerVipHost -> !controllerVipHost.isEmpty())
      .orElseGet(() -> getProperty(CONTROLLER_HOST));
}

// VIP port resolution order: explicit VIP port -> first access protocol flagged "vip" that
// declares a port -> legacy controller.port.
public String getControllerVipPort() {
  return Optional.ofNullable(getProperty(CONTROLLER_VIP_PORT))
      .filter(controllerVipPort -> !controllerVipPort.isEmpty())
      .orElseGet(() -> getControllerAccessProtocols().stream()
          .filter(protocol -> getControllerAccessProtocolProperty(protocol, "vip", false))
          .map(protocol -> Optional.ofNullable(getControllerAccessProtocolProperty(protocol, "port")))
          .filter(Optional::isPresent)
          .map(Optional::get)
          .findFirst()
          // No protocol defines a port as VIP. Fallback on legacy controller.port property.
          .orElseGet(this::getControllerPort));
}

public String getControllerVipProtocol() {
  return getSupportedProtocol(CONTROLLER_VIP_PROTOCOL);
}

public String getControllerBrokerProtocol() {
  return getSupportedProtocol(CONTROLLER_BROKER_PROTOCOL);
}

public int getRetentionControllerFrequencyInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.RETENTION_MANAGER_FREQUENCY_IN_SECONDS,
      ControllerPeriodicTasksConf.DEFAULT_RETENTION_CONTROLLER_FREQUENCY_IN_SECONDS);
}

public void setRetentionControllerFrequencyInSeconds(int retentionFrequencyInSeconds) {
  setProperty(ControllerPeriodicTasksConf.RETENTION_MANAGER_FREQUENCY_IN_SECONDS,
      Integer.toString(retentionFrequencyInSeconds));
}

/**
 * Returns the config value for controller.offline.segment.interval.checker.frequencyInSeconds if it exists.
 * If it doesn't exist, returns the segment level validation interval. This is done in order to retain the
 * current behavior, wherein the offline validation tasks were done at segment level validation interval
 * frequency. The default value is the new DEFAULT_OFFLINE_SEGMENT_INTERVAL_CHECKER_FREQUENCY_IN_SECONDS.
 * @return the offline segment interval checker frequency, in seconds
 */
public int getOfflineSegmentIntervalCheckerFrequencyInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.OFFLINE_SEGMENT_INTERVAL_CHECKER_FREQUENCY_IN_SECONDS,
      ControllerPeriodicTasksConf.DEFAULT_OFFLINE_SEGMENT_INTERVAL_CHECKER_FREQUENCY_IN_SECONDS);
}

public void setOfflineSegmentIntervalCheckerFrequencyInSeconds(int validationFrequencyInSeconds) {
  setProperty(ControllerPeriodicTasksConf.OFFLINE_SEGMENT_INTERVAL_CHECKER_FREQUENCY_IN_SECONDS,
      Integer.toString(validationFrequencyInSeconds));
}

/**
 * Returns the config value for controller.realtime.segment.validation.frequencyInSeconds if it exists.
 * If it doesn't exist, returns the validation controller frequency. This is done in order to retain the
 * current behavior, wherein the realtime validation tasks were done at validation controller frequency.
 * The default value is the new DEFAULT_REALTIME_SEGMENT_VALIDATION_FREQUENCY_IN_SECONDS.
 * @return the realtime segment validation frequency, in seconds
 */
public int getRealtimeSegmentValidationFrequencyInSeconds() {
  return Optional
      .ofNullable(
          getProperty(ControllerPeriodicTasksConf.REALTIME_SEGMENT_VALIDATION_FREQUENCY_IN_SECONDS, Integer.class))
      .orElseGet(() -> getProperty(ControllerPeriodicTasksConf.DEPRECATED_VALIDATION_MANAGER_FREQUENCY_IN_SECONDS,
          ControllerPeriodicTasksConf.DEFAULT_REALTIME_SEGMENT_VALIDATION_FREQUENCY_IN_SECONDS));
}

public void setRealtimeSegmentValidationFrequencyInSeconds(int validationFrequencyInSeconds) {
  setProperty(ControllerPeriodicTasksConf.REALTIME_SEGMENT_VALIDATION_FREQUENCY_IN_SECONDS,
      Integer.toString(validationFrequencyInSeconds));
}

/**
 * Returns the config value for controller.broker.resource.validation.frequencyInSeconds if it exists.
 * If it doesn't exist, returns the validation controller frequency. This is done in order to retain the
 * current behavior, wherein the broker resource validation tasks were done at validation controller
 * frequency. The default value is the new DEFAULT_BROKER_RESOURCE_VALIDATION_FREQUENCY_IN_SECONDS.
 * @return the broker resource validation frequency, in seconds
 */
public int getBrokerResourceValidationFrequencyInSeconds() {
  return Optional
      .ofNullable(
          getProperty(ControllerPeriodicTasksConf.BROKER_RESOURCE_VALIDATION_FREQUENCY_IN_SECONDS, Integer.class))
      .orElseGet(() -> getProperty(ControllerPeriodicTasksConf.DEPRECATED_VALIDATION_MANAGER_FREQUENCY_IN_SECONDS,
          ControllerPeriodicTasksConf.DEFAULT_BROKER_RESOURCE_VALIDATION_FREQUENCY_IN_SECONDS));
}

public void setBrokerResourceValidationFrequencyInSeconds(int validationFrequencyInSeconds) {
  setProperty(ControllerPeriodicTasksConf.BROKER_RESOURCE_VALIDATION_FREQUENCY_IN_SECONDS,
      Integer.toString(validationFrequencyInSeconds));
}

public long getBrokerResourceValidationInitialDelayInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.BROKER_RESOURCE_VALIDATION_INITIAL_DELAY_IN_SECONDS,
      getPeriodicTaskInitialDelayInSeconds());
}

public int getStatusCheckerFrequencyInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.STATUS_CHECKER_FREQUENCY_IN_SECONDS,
      ControllerPeriodicTasksConf.DEFAULT_STATUS_CONTROLLER_FREQUENCY_IN_SECONDS);
}

public void setStatusCheckerFrequencyInSeconds(int statusCheckerFrequencyInSeconds) {
  setProperty(ControllerPeriodicTasksConf.STATUS_CHECKER_FREQUENCY_IN_SECONDS,
      Integer.toString(statusCheckerFrequencyInSeconds));
}

public int getStatusCheckerWaitForPushTimeInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.STATUS_CHECKER_WAIT_FOR_PUSH_TIME_IN_SECONDS,
      ControllerPeriodicTasksConf.DEFAULT_STATUS_CONTROLLER_WAIT_FOR_PUSH_TIME_IN_SECONDS);
}

public void setStatusCheckerWaitForPushTimeInSeconds(int statusCheckerWaitForPushTimeInSeconds) {
  setProperty(ControllerPeriodicTasksConf.STATUS_CHECKER_WAIT_FOR_PUSH_TIME_IN_SECONDS,
      Integer.toString(statusCheckerWaitForPushTimeInSeconds));
}

/**
 * RealtimeSegmentRelocator has been rebranded to SegmentRelocator.
 * Check for SEGMENT_RELOCATOR_FREQUENCY_IN_SECONDS property, if not found, return
 * REALTIME_SEGMENT_RELOCATOR_FREQUENCY (a period string such as "1h", converted to seconds).
 */
public int getSegmentRelocatorFrequencyInSeconds() {
  Integer segmentRelocatorFreqSeconds =
      getProperty(ControllerPeriodicTasksConf.SEGMENT_RELOCATOR_FREQUENCY_IN_SECONDS, Integer.class);
  if (segmentRelocatorFreqSeconds == null) {
    // Fall back to the deprecated period-string property before using the default.
    String realtimeSegmentRelocatorPeriod =
        getProperty(ControllerPeriodicTasksConf.DEPRECATED_REALTIME_SEGMENT_RELOCATOR_FREQUENCY);
    if (realtimeSegmentRelocatorPeriod != null) {
      segmentRelocatorFreqSeconds = (int) convertPeriodToSeconds(realtimeSegmentRelocatorPeriod);
    } else {
      segmentRelocatorFreqSeconds = ControllerPeriodicTasksConf.DEFAULT_SEGMENT_RELOCATOR_FREQUENCY_IN_SECONDS;
    }
  }
  return segmentRelocatorFreqSeconds;
}

public void setSegmentRelocatorFrequencyInSeconds(int segmentRelocatorFrequencyInSeconds) {
  setProperty(ControllerPeriodicTasksConf.SEGMENT_RELOCATOR_FREQUENCY_IN_SECONDS,
      Integer.toString(segmentRelocatorFrequencyInSeconds));
}

public long getExternalViewOnlineToOfflineTimeout() {
  return getProperty(EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT, DEFAULT_EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT_MILLIS);
}

public void setExternalViewOnlineToOfflineTimeout(long timeout) {
  setProperty(EXTERNAL_VIEW_ONLINE_TO_OFFLINE_TIMEOUT, timeout);
}

public boolean tenantIsolationEnabled() {
  return getProperty(CLUSTER_TENANT_ISOLATION_ENABLE, true);
}

public void setTenantIsolationEnabled(boolean isSingleTenant) {
  setProperty(CLUSTER_TENANT_ISOLATION_ENABLE, isSingleTenant);
}

public void setServerAdminRequestTimeoutSeconds(int timeoutSeconds) {
  setProperty(SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS, timeoutSeconds);
}

public int getServerAdminRequestTimeoutSeconds() {
  return getProperty(SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS, DEFAULT_SERVER_ADMIN_REQUEST_TIMEOUT_SECONDS);
}

public int getDeletedSegmentsRetentionInDays() {
  return getProperty(DELETED_SEGMENTS_RETENTION_IN_DAYS, DEFAULT_DELETED_SEGMENTS_RETENTION_IN_DAYS);
}

public void setDeletedSegmentsRetentionInDays(int retentionInDays) {
  setProperty(DELETED_SEGMENTS_RETENTION_IN_DAYS, retentionInDays);
}

public int getTaskManagerFrequencyInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.TASK_MANAGER_FREQUENCY_IN_SECONDS,
      ControllerPeriodicTasksConf.DEFAULT_TASK_MANAGER_FREQUENCY_IN_SECONDS);
}

public void setTaskManagerFrequencyInSeconds(int frequencyInSeconds) {
  setProperty(ControllerPeriodicTasksConf.TASK_MANAGER_FREQUENCY_IN_SECONDS, Integer.toString(frequencyInSeconds));
}

public long getMinionInstancesCleanupTaskFrequencyInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.MINION_INSTANCES_CLEANUP_TASK_FREQUENCY_IN_SECONDS,
      ControllerPeriodicTasksConf.DEFAULT_MINION_INSTANCES_CLEANUP_TASK_FREQUENCY_IN_SECONDS);
}

public void setMinionInstancesCleanupTaskFrequencyInSeconds(int frequencyInSeconds) {
  setProperty(ControllerPeriodicTasksConf.MINION_INSTANCES_CLEANUP_TASK_FREQUENCY_IN_SECONDS,
      Integer.toString(frequencyInSeconds));
}

public long getMinionInstancesCleanupTaskInitialDelaySeconds() {
  return getProperty(ControllerPeriodicTasksConf.MINION_INSTANCES_CLEANUP_TASK_INITIAL_DELAY_SECONDS,
      ControllerPeriodicTasksConf.getRandomInitialDelayInSeconds());
}

public void setMinionInstancesCleanupTaskInitialDelaySeconds(int initialDelaySeconds) {
  setProperty(ControllerPeriodicTasksConf.MINION_INSTANCES_CLEANUP_TASK_INITIAL_DELAY_SECONDS,
      Integer.toString(initialDelaySeconds));
}

public int getDefaultTableMinReplicas() {
  return getProperty(TABLE_MIN_REPLICAS, DEFAULT_TABLE_MIN_REPLICAS);
}

public void setTableMinReplicas(int minReplicas) {
  setProperty(TABLE_MIN_REPLICAS, minReplicas);
}

public String getJerseyAdminApiPort() {
  return getProperty(JERSEY_ADMIN_API_PORT, String.valueOf(DEFAULT_JERSEY_ADMIN_PORT));
}

public String getAccessControlFactoryClass() {
  return getProperty(ACCESS_CONTROL_FACTORY_CLASS, DEFAULT_ACCESS_CONTROL_FACTORY_CLASS);
}

public void setAccessControlFactoryClass(String accessControlFactoryClass) {
  setProperty(ACCESS_CONTROL_FACTORY_CLASS, accessControlFactoryClass);
}

public long getSegmentUploadTimeoutInMillis() {
  return getProperty(SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS, DEFAULT_SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS);
}

public void setSegmentUploadTimeoutInMillis(long segmentUploadTimeoutInMillis) {
  setProperty(SEGMENT_UPLOAD_TIMEOUT_IN_MILLIS, segmentUploadTimeoutInMillis);
}

public int getRealtimeSegmentMetadataCommitNumLocks() {
  return getProperty(REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS, DEFAULT_REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS);
}

public void setRealtimeSegmentMetadataCommitNumLocks(int realtimeSegmentMetadataCommitNumLocks) {
  setProperty(REALTIME_SEGMENT_METADATA_COMMIT_NUMLOCKS, realtimeSegmentMetadataCommitNumLocks);
}

public boolean getEnableStorageQuotaCheck() {
  return getProperty(ENABLE_STORAGE_QUOTA_CHECK, DEFAULT_ENABLE_STORAGE_QUOTA_CHECK);
}

public boolean getEnableBatchMessageMode() {
  return getProperty(ENABLE_BATCH_MESSAGE_MODE, DEFAULT_ENABLE_BATCH_MESSAGE_MODE);
}

public int getSegmentLevelValidationIntervalInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.SEGMENT_LEVEL_VALIDATION_INTERVAL_IN_SECONDS,
      ControllerPeriodicTasksConf.DEFAULT_SEGMENT_LEVEL_VALIDATION_INTERVAL_IN_SECONDS);
}

// The initial-delay getters below default to a per-call random delay (see
// ControllerPeriodicTasksConf.getRandomInitialDelayInSeconds) when not explicitly configured.
public long getStatusCheckerInitialDelayInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.STATUS_CHECKER_INITIAL_DELAY_IN_SECONDS,
      ControllerPeriodicTasksConf.getRandomInitialDelayInSeconds());
}

public long getRetentionManagerInitialDelayInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.RETENTION_MANAGER_INITIAL_DELAY_IN_SECONDS,
      ControllerPeriodicTasksConf.getRandomInitialDelayInSeconds());
}

public long getOfflineSegmentIntervalCheckerInitialDelayInSeconds() {
  return getProperty(ControllerPeriodicTasksConf.OFFLINE_SEGMENT_INTERVAL_CHECKER_INITIAL_DELAY_IN_SECONDS,
      ControllerPeriodicTasksConf.getRandomInitialDelayInSeconds());
}

public long getRealtimeSegmentValidationManagerInitialDelaySeconds() {
  return getPeriodicTaskInitialDelayInSeconds();
}

public long getPinotTaskManagerInitialDelaySeconds() {
  return getPeriodicTaskInitialDelayInSeconds();
}

public boolean isPinotTaskManagerSchedulerEnabled() {
  return getProperty(ControllerPeriodicTasksConf.PINOT_TASK_MANAGER_SCHEDULER_ENABLED, false);
}

/**
 * RealtimeSegmentRelocator has been rebranded to SegmentRelocator.
 * Check for SEGMENT_RELOCATOR_INITIAL_DELAY_IN_SECONDS property, if not found, return
 * REALTIME_SEGMENT_RELOCATION_INITIAL_DELAY_IN_SECONDS.
 */
public long getSegmentRelocatorInitialDelayInSeconds() {
  Long segmentRelocatorInitialDelaySeconds =
      getProperty(ControllerPeriodicTasksConf.SEGMENT_RELOCATOR_INITIAL_DELAY_IN_SECONDS, Long.class);
  if (segmentRelocatorInitialDelaySeconds == null) {
    segmentRelocatorInitialDelaySeconds =
        getProperty(ControllerPeriodicTasksConf.DEPRECATED_REALTIME_SEGMENT_RELOCATION_INITIAL_DELAY_IN_SECONDS,
            ControllerPeriodicTasksConf.getRandomInitialDelayInSeconds());
  }
  return segmentRelocatorInitialDelaySeconds;
}

public long getPeriodicTaskInitialDelayInSeconds() {
  return ControllerPeriodicTasksConf.getRandomInitialDelayInSeconds();
}

public void setControllerMode(ControllerMode controllerMode) {
  setProperty(CONTROLLER_MODE, controllerMode.name());
}

// NOTE(review): DEFAULT_CONTROLLER_MODE is already a String; .toString() is redundant but harmless.
public ControllerMode getControllerMode() {
  return ControllerMode.valueOf(getProperty(CONTROLLER_MODE, DEFAULT_CONTROLLER_MODE.toString()).toUpperCase());
}

public void setLeadControllerResourceRebalanceStrategy(String rebalanceStrategy) {
  setProperty(LEAD_CONTROLLER_RESOURCE_REBALANCE_STRATEGY, rebalanceStrategy);
}

public String getLeadControllerResourceRebalanceStrategy() {
  return getProperty(LEAD_CONTROLLER_RESOURCE_REBALANCE_STRATEGY, DEFAULT_LEAD_CONTROLLER_RESOURCE_REBALANCE_STRATEGY);
}

public boolean getHLCTablesAllowed() {
  return getProperty(ALLOW_HLC_TABLES, DEFAULT_ALLOW_HLC_TABLES);
}
public void setHLCTablesAllowed(boolean allowHLCTables) { setProperty(ALLOW_HLC_TABLES, allowHLCTables); } public String getMetricsPrefix() { return getProperty(CONFIG_OF_CONTROLLER_METRICS_PREFIX, DEFAULT_METRICS_PREFIX); } public int getControllerBrokerPortOverride() { return getProperty(CONTROLLER_BROKER_PORT_OVERRIDE, -1); } private long convertPeriodToSeconds(String timeStr) { long seconds; try { Long millis = TimeUtils.convertPeriodToMillis(timeStr); seconds = millis / 1000; } catch (Exception e) { throw new RuntimeException("Invalid time spec '" + timeStr + "' (Valid examples: '3h', '4h30m', '30m')", e); } return seconds; } private String getSupportedProtocol(String property) { String value = getProperty(property, CommonConstants.HTTP_PROTOCOL); Preconditions.checkArgument(SUPPORTED_PROTOCOLS.contains(value), "Unsupported %s protocol '%s'", property, value); return value; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.coprocessor;

import java.util.Collections;
import java.util.List;

import org.apache.phoenix.coprocessor.generated.MetaDataProtos;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse;
import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService;
import org.apache.phoenix.hbase.index.util.VersionUtil;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTableImpl;
import org.apache.phoenix.util.ByteUtil;

import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import com.google.protobuf.HBaseZeroCopyByteString;

/**
 *
 * Coprocessor protocol for Phoenix DDL. Phoenix stores the table metadata in
 * an HBase table named SYSTEM.TABLE. Each table is represented by:
 * - one row for the table
 * - one row per column in the table
 * Up to {@link #DEFAULT_MAX_META_DATA_VERSIONS} versions are kept. The time
 * stamp of the metadata must always be increasing. The timestamp of the key
 * values in the data row corresponds to the schema that it's using.
 *
 * TODO: dynamically prune number of schema version kept based on whether or
 * not the data table still uses it (based on the min time stamp of the data
 * table).
 *
 *
 * @since 0.1
 */
public abstract class MetaDataProtocol extends MetaDataService {
    // Phoenix release version, packed into a single int via VersionUtil.encodeVersion.
    public static final int PHOENIX_MAJOR_VERSION = 4;
    public static final int PHOENIX_MINOR_VERSION = 2;
    public static final int PHOENIX_PATCH_NUMBER = 1;
    public static final int PHOENIX_VERSION =
            VersionUtil.encodeVersion(PHOENIX_MAJOR_VERSION, PHOENIX_MINOR_VERSION, PHOENIX_PATCH_NUMBER);

    public static final long MIN_TABLE_TIMESTAMP = 0;
    // Incremented from 5 to 7 with the addition of the STORE_NULLS table option in 4.3
    public static final long MIN_SYSTEM_TABLE_TIMESTAMP = MIN_TABLE_TIMESTAMP + 7;
    public static final int DEFAULT_MAX_META_DATA_VERSIONS = 1000;
    public static final int DEFAULT_MAX_STAT_DATA_VERSIONS = 3;
    public static final boolean DEFAULT_META_DATA_KEEP_DELETED_CELLS = true;

    // Min system table timestamps for every release.
    public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_1_0 = MIN_TABLE_TIMESTAMP + 3;
    public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_2_0 = MIN_TABLE_TIMESTAMP + 4;
    public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_2_1 = MIN_TABLE_TIMESTAMP + 5;
    public static final long MIN_SYSTEM_TABLE_TIMESTAMP_4_3_0 = MIN_TABLE_TIMESTAMP + 7;

    // TODO: pare this down to minimum, as we don't need duplicates for both table and column errors, nor should we need
    // a different code for every type of error.
    // ENTITY_ALREADY_EXISTS, ENTITY_NOT_FOUND, NEWER_ENTITY_FOUND, ENTITY_NOT_IN_REGION, CONCURRENT_MODIFICATION
    // ILLEGAL_MUTATION (+ sql code)
    public enum MutationCode {
        TABLE_ALREADY_EXISTS,
        TABLE_NOT_FOUND,
        COLUMN_NOT_FOUND,
        COLUMN_ALREADY_EXISTS,
        CONCURRENT_TABLE_MUTATION,
        TABLE_NOT_IN_REGION,
        NEWER_TABLE_FOUND,
        UNALLOWED_TABLE_MUTATION,
        NO_PK_COLUMNS,
        PARENT_TABLE_NOT_FOUND,
        NO_OP
    };

    /**
     * Result of a metadata mutation: the outcome code, the server time of the mutation,
     * the (possibly updated) table, and optional column/family/table-deletion details.
     */
    public static class MetaDataMutationResult {
        private MutationCode returnCode;
        private long mutationTime;
        private PTable table;
        private List<byte[]> tableNamesToDelete;
        private byte[] columnName;
        private byte[] familyName;
        private boolean wasUpdated;

        public MetaDataMutationResult() {
        }

        // Column variant: additionally records the column and family names when a column is given.
        public MetaDataMutationResult(MutationCode returnCode, long currentTime, PTable table, PColumn column) {
            this(returnCode, currentTime, table);
            if(column != null){
                this.columnName = column.getName().getBytes();
                this.familyName = column.getFamilyName().getBytes();
            }
        }

        public MetaDataMutationResult(MutationCode returnCode, long currentTime, PTable table) {
            this(returnCode, currentTime, table, Collections.<byte[]> emptyList());
        }

        // For testing, so that connectionless can set wasUpdated so ColumnResolver doesn't complain
        public MetaDataMutationResult(MutationCode returnCode, long currentTime, PTable table, boolean wasUpdated) {
            this(returnCode, currentTime, table, Collections.<byte[]> emptyList());
            this.wasUpdated = wasUpdated;
        }

        public MetaDataMutationResult(MutationCode returnCode, long currentTime, PTable table, List<byte[]> tableNamesToDelete) {
            this.returnCode = returnCode;
            this.mutationTime = currentTime;
            this.table = table;
            this.tableNamesToDelete = tableNamesToDelete;
        }

        public MutationCode getMutationCode() {
            return returnCode;
        }

        public long getMutationTime() {
            return mutationTime;
        }

        public boolean wasUpdated() {
            return wasUpdated;
        }

        public PTable getTable() {
            return table;
        }

        public void setTable(PTable table) {
            this.table = table;
        }

        public List<byte[]> getTableNamesToDelete() {
            return tableNamesToDelete;
        }

        public byte[] getColumnName() {
            return columnName;
        }

        public byte[] getFamilyName() {
            return familyName;
        }

        /**
         * Deserializes a result from its protobuf response.
         * NOTE(review): the MutationCode is mapped by ordinal position, which assumes the Java enum
         * and the proto enum declare their values in exactly the same order — verify when adding codes.
         */
        public static MetaDataMutationResult constructFromProto(MetaDataResponse proto) {
          MetaDataMutationResult result = new MetaDataMutationResult();
          result.returnCode = MutationCode.values()[proto.getReturnCode().ordinal()];
          result.mutationTime = proto.getMutationTime();
          if (proto.hasTable()) {
            result.wasUpdated = true;
            result.table = PTableImpl.createFromProto(proto.getTable());
          }
          if (proto.getTablesToDeleteCount() > 0) {
            result.tableNamesToDelete =
                Lists.newArrayListWithExpectedSize(proto.getTablesToDeleteCount());
            for (ByteString tableName : proto.getTablesToDeleteList()) {
              result.tableNamesToDelete.add(tableName.toByteArray());
            }
          }
          // Defaults to the empty byte array (not null) when the proto carries no column name.
          result.columnName = ByteUtil.EMPTY_BYTE_ARRAY;
          if(proto.hasColumnName()){
            result.columnName = proto.getColumnName().toByteArray();
          }
          if(proto.hasFamilyName()){
            result.familyName = proto.getFamilyName().toByteArray();
          }
          return result;
        }

        /**
         * Serializes a result into its protobuf response. A null input yields an empty response
         * (only builder defaults). Byte arrays are wrapped zero-copy; see the ordinal-mapping
         * caveat on constructFromProto, which applies in reverse here.
         */
        public static MetaDataResponse toProto(MetaDataMutationResult result) {
          MetaDataProtos.MetaDataResponse.Builder builder =
              MetaDataProtos.MetaDataResponse.newBuilder();
          if (result != null) {
            builder.setReturnCode(MetaDataProtos.MutationCode.values()[result.getMutationCode()
                .ordinal()]);
            builder.setMutationTime(result.getMutationTime());
            if (result.table != null) {
              builder.setTable(PTableImpl.toProto(result.table));
            }
            if (result.getTableNamesToDelete() != null) {
              for (byte[] tableName : result.tableNamesToDelete) {
                builder.addTablesToDelete(HBaseZeroCopyByteString.wrap(tableName));
              }
            }
            if(result.getColumnName() != null){
              builder.setColumnName(HBaseZeroCopyByteString.wrap(result.getColumnName()));
            }
            if(result.getFamilyName() != null){
              builder.setFamilyName(HBaseZeroCopyByteString.wrap(result.getFamilyName()));
            }
          }
          return builder.build();
        }
  }
}
package it.unibz.krdb.obda.parser; /* * #%L * ontop-obdalib-core * %% * Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import it.unibz.krdb.obda.model.Function; import it.unibz.krdb.obda.model.CQIE; import it.unibz.krdb.obda.model.DatalogProgram; import it.unibz.krdb.obda.model.Term; import it.unibz.krdb.obda.model.Variable; import it.unibz.krdb.obda.model.impl.FunctionalTermImpl; import it.unibz.krdb.obda.model.impl.URIConstantImpl; import it.unibz.krdb.obda.model.impl.ValueConstantImpl; import java.util.List; import junit.framework.TestCase; import org.antlr.runtime.RecognitionException; public class DatalogParserTest extends TestCase { /** Test inputs */ private static final String[] CQ_STRINGS = { // Scenario 1: Basic input (Datalog syntax) "base <http://base.org/stuff/1.0/> \n" + "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + "abc:p($x, $y) :- :q($x), r($y)", // Scenario 2: Basic input (SWIRL syntax) "base <http://base.org/stuff/1.0/> \n" + "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + ":q($x), r($y) -> abc:p($x, $y)", // Scenario 3: Different types of term. 
"base <http://base.org/stuff/1.0/> \n" + "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + "abc:p($x, $y) :- :q($x, \"Person\"), " + ":r(s($y, \"Student\"), http://example.org/stuff/1.1/FUB)", // Scenario 4: Multiple rules. "base <http://base.org/stuff/1.0/> \n" + "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + "abc:p($x) :- :q($x, \"Person\") \n" + "abc:r($y) :- :s($y, http://example.org/stuff/1.1/FUB) \n" + "abc:t($z) :- :u($z, f(http://example.org/stuff/1.2/Occupation, \"Student\"))", // Scenario 5: Recursive object terms. "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + "abc:p($x) :- :q($x, :r(http://example.org/stuff/1.1/FUB, " + ":s(http://example.org/stuff/1.2/Occupation, " + ":t(http://example.org/stuff/1.3/Degree, \"Master\"))))", // Scenario 6: No head. "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + " :- :q($x)", // Scenario 7: No body. "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + "abc:p($x) :- ", // Scenario 8: Select all. "base <http://base.org/stuff/1.0/> \n" + "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + "abc:p(*) :- :q($x), r($y)", // Scenario 9: Basic input using caret symbol. "base <http://base.org/stuff/1.0/> \n" + "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + "abc:p($x, $y) :- :q($x) ^ r($y)", // Scenario 10: Empty term on the head. "base <http://base.org/stuff/1.0/> \n" + "prefix abc: <http://www.abc.org/1999/02/22-abc-syntax-ns#> \n" + "prefix : <http://example.org/stuff/1.0/> \n" + "abc:p() :- :q($x), r($y)", // Scenario 11: Full name with URI. 
"http://www.abc.org/1999/02/22-abc-syntax-ns#p($x, $y) :- " + "http://example.org/stuff/1.0/q($x), " + "http://base.org/stuff/1.0/r(http://example.org/stuff/1.0/s($y, \"Student\"), " + "http://example.org/stuff/1.1/FUB)" }; /** The oracle */ private static int EXPECTED_RULE_SIZE; private static int EXPECTED_BODY_SIZE; private static int EXPECTED_HEAD_TERM_SIZE; private static int EXPECTED_BODY_TERM_SIZE; private DatalogProgramParser parser; private DatalogProgram datalog; private String uri; private List<Term> terms; private Term term; //@Beforere public void setUp() throws Exception { parser = new DatalogProgramParser(); } /** * Testing Scenario #1 * * @throws RecognitionException */ //@Test public void testBasicInputDatalogSyntax() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[0]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName().toString(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); EXPECTED_HEAD_TERM_SIZE = 2; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); //-- The Body EXPECTED_BODY_SIZE = 2; List<Function> body = rules.get(0).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); 
EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); //---- Body atom #2 uri = ((Function)body.get(1)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://base.org/stuff/1.0/r"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(1)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); } /** * Testing Scenario #2 * * @throws RecognitionException */ //@Test public void testBasicInputSwirlSyntax() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[1]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); EXPECTED_HEAD_TERM_SIZE = 2; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); //-- The Body EXPECTED_BODY_SIZE = 2; List<Function> body = rules.get(0).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = 
((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); //---- Body atom #2 uri = ((Function)body.get(1)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://base.org/stuff/1.0/r"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(1)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); } /** * Testing Scenario #3 * * @throws RecognitionException */ //@Test public void testDifferentTypesOfTerm() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[2]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); EXPECTED_HEAD_TERM_SIZE = 2; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); //-- The Body EXPECTED_BODY_SIZE = 2; List<Function> body = rules.get(0).getBody(); 
assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); EXPECTED_BODY_TERM_SIZE = 2; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof ValueConstantImpl); assertEquals("Mismatch variable name!", ((ValueConstantImpl)term).getValue(), "Person"); //---- Body atom #2 uri = ((Function)body.get(1)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/r"); EXPECTED_BODY_TERM_SIZE = 2; terms = ((Function)body.get(1)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof FunctionalTermImpl); assertEquals("Mismatch variable name!", ((FunctionalTermImpl)term).getFunctionSymbol().toString(), "http://base.org/stuff/1.0/s"); //------ Object term List<Term> objVarTerms = ((FunctionalTermImpl)term).getTerms(); assertEquals("Mismatch term size!", objVarTerms.size(), 2); Term objVarTerm = objVarTerms.get(0); assertTrue("Mismatch term type!", objVarTerm instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)objVarTerm).getName(), "y"); objVarTerm = objVarTerms.get(1); assertTrue("Mismatch term type!", objVarTerm instanceof ValueConstantImpl); assertEquals("Mismatch variable name!", ((ValueConstantImpl)objVarTerm).getValue(), "Student"); //------ Object term ends. 
term = terms.get(1); assertTrue("Mismatch term type!", term instanceof URIConstantImpl); assertEquals("Mismatch variable name!", ((URIConstantImpl)term).getURI().toString(), "http://example.org/stuff/1.1/FUB"); } /** * Testing Scenario #4 * * @throws RecognitionException */ //@Test public void testMultipleRules() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[3]); EXPECTED_RULE_SIZE = 3; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); //----------// // Rule #1 // //----------// //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); EXPECTED_HEAD_TERM_SIZE = 1; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); //-- The Body EXPECTED_BODY_SIZE = 1; List<Function> body = rules.get(0).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); EXPECTED_BODY_TERM_SIZE = 2; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof ValueConstantImpl); assertEquals("Mismatch variable name!", ((ValueConstantImpl)term).getValue(), "Person"); //----------// // Rule #2 // //----------// //-- The Head head = 
rules.get(1).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#r"); EXPECTED_HEAD_TERM_SIZE = 1; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); //-- The Body EXPECTED_BODY_SIZE = 1; body = rules.get(1).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/s"); EXPECTED_BODY_TERM_SIZE = 2; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof URIConstantImpl); assertEquals("Mismatch variable name!", ((URIConstantImpl)term).getURI().toString(), "http://example.org/stuff/1.1/FUB"); //----------// // Rule #3 // //----------// //-- The Head head = rules.get(2).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#t"); EXPECTED_HEAD_TERM_SIZE = 1; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "z"); //-- The Body EXPECTED_BODY_SIZE = 1; body = rules.get(2).getBody(); assertNotNull("Body is null!", body); 
assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/u"); EXPECTED_BODY_TERM_SIZE = 2; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "z"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof FunctionalTermImpl); assertEquals("Mismatch variable name!", ((FunctionalTermImpl)term).getFunctionSymbol().toString(), "http://base.org/stuff/1.0/f"); //------- Object term List<Term> objVarTerms = ((FunctionalTermImpl)term).getTerms(); assertEquals("Mismatch term size!", objVarTerms.size(), 2); Term objVarTerm = objVarTerms.get(0); assertTrue("Mismatch term type!", objVarTerm instanceof URIConstantImpl); assertEquals("Mismatch variable name!", ((URIConstantImpl)objVarTerm).getURI().toString(), "http://example.org/stuff/1.2/Occupation"); objVarTerm = objVarTerms.get(1); assertTrue("Mismatch term type!", objVarTerm instanceof ValueConstantImpl); assertEquals("Mismatch variable name!", ((ValueConstantImpl)objVarTerm).getValue(), "Student"); } /** * Testing Scenario #5 * * @throws RecognitionException */ //@Test public void testIterativeObjectTerms() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[4]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); EXPECTED_HEAD_TERM_SIZE = 1; terms = head.getTerms(); assertEquals("Mismatch term 
size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); //-- The Body EXPECTED_BODY_SIZE = 1; List<Function> body = rules.get(0).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); EXPECTED_BODY_TERM_SIZE = 2; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof FunctionalTermImpl); assertEquals("Mismatch variable name!", ((FunctionalTermImpl)term).getFunctionSymbol().toString(), "http://example.org/stuff/1.0/r"); //------ Object term I1 List<Term> objVarTerms = ((FunctionalTermImpl)term).getTerms(); assertEquals("Mismatch term size!", objVarTerms.size(), 2); Term objVarTerm = objVarTerms.get(0); assertTrue("Mismatch term type!", objVarTerm instanceof URIConstantImpl); assertEquals("Mismatch variable name!", ((URIConstantImpl)objVarTerm).getURI().toString(), "http://example.org/stuff/1.1/FUB"); objVarTerm = objVarTerms.get(1); assertTrue("Mismatch term type!", objVarTerm instanceof FunctionalTermImpl); assertEquals("Mismatch variable name!", ((FunctionalTermImpl)objVarTerm).getFunctionSymbol().toString(), "http://example.org/stuff/1.0/s"); //------ Object term I2 objVarTerms = ((FunctionalTermImpl)objVarTerm).getTerms(); assertEquals("Mismatch term size!", objVarTerms.size(), 2); objVarTerm = objVarTerms.get(0); assertTrue("Mismatch term type!", objVarTerm instanceof URIConstantImpl); assertEquals("Mismatch 
variable name!", ((URIConstantImpl)objVarTerm).getURI().toString(), "http://example.org/stuff/1.2/Occupation"); objVarTerm = objVarTerms.get(1); assertTrue("Mismatch term type!", objVarTerm instanceof FunctionalTermImpl); assertEquals("Mismatch variable name!", ((FunctionalTermImpl)objVarTerm).getFunctionSymbol().toString(), "http://example.org/stuff/1.0/t"); //------ Object term I3 objVarTerms = ((FunctionalTermImpl)objVarTerm).getTerms(); assertEquals("Mismatch term size!", objVarTerms.size(), 2); objVarTerm = objVarTerms.get(0); assertTrue("Mismatch term type!", objVarTerm instanceof URIConstantImpl); assertEquals("Mismatch variable name!", ((URIConstantImpl)objVarTerm).getURI().toString(), "http://example.org/stuff/1.3/Degree"); objVarTerm = objVarTerms.get(1); assertTrue("Mismatch term type!", objVarTerm instanceof ValueConstantImpl); assertEquals("Mismatch variable name!", ((ValueConstantImpl)objVarTerm).getValue(), "Master"); } /** * Testing Scenario #6 * * @throws RecognitionException */ //@Test public void testNoHead() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[5]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNull("Head is not null!", head); //-- The Body EXPECTED_BODY_SIZE = 1; List<Function> body = rules.get(0).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", 
((Variable)term).getName(), "x"); } /** * Testing Scenario #7 * * @throws RecognitionException */ //@Test public void testNoBody() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[6]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); EXPECTED_HEAD_TERM_SIZE = 1; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); //-- The Body EXPECTED_BODY_SIZE = 1; List<Function> body = rules.get(0).getBody(); assertTrue("Body is not empty!", body.size() == 0); } /** * Testing Scenario #8 * * @throws RecognitionException */ //@Test public void testSelectAll() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[7]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); assertEquals(2, head.getFunctionSymbol().getArity()); EXPECTED_HEAD_TERM_SIZE = 2; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable 
name!", ((Variable)term).getName(), "x"); //-- The Body EXPECTED_BODY_SIZE = 2; List<Function> body = rules.get(0).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); //---- Body atom #2 uri = ((Function)body.get(1)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://base.org/stuff/1.0/r"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(1)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); } /** * Testing Scenario #9 * * @throws RecognitionException */ //@Test public void testBasicInputUsingCaretSymbol() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[8]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); EXPECTED_HEAD_TERM_SIZE = 2; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", 
((Variable)term).getName(), "x"); term = terms.get(1); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); //-- The Body EXPECTED_BODY_SIZE = 2; List<Function> body = rules.get(0).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); //---- Body atom #2 uri = ((Function)body.get(1)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://base.org/stuff/1.0/r"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(1)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); } /** * Testing Scenario #10 * * @throws RecognitionException */ //@Test public void testEmptyHeadTerm() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[9]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); EXPECTED_HEAD_TERM_SIZE = 0; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); //-- 
The Body EXPECTED_BODY_SIZE = 2; List<Function> body = rules.get(0).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); //---- Body atom #2 uri = ((Function)body.get(1)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://base.org/stuff/1.0/r"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(1)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); } /** * Testing Scenario #11 * * @throws RecognitionException */ //@Test public void testFullNameWithUri() throws RecognitionException { datalog = parser.parse(CQ_STRINGS[10]); EXPECTED_RULE_SIZE = 1; List<CQIE> rules = datalog.getRules(); assertTrue("Mismatch rule size!", rules.size() == EXPECTED_RULE_SIZE); // Rule #1 //-- The Head Function head = rules.get(0).getHead(); assertNotNull("Head is null!", head); uri = head.getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://www.abc.org/1999/02/22-abc-syntax-ns#p"); EXPECTED_HEAD_TERM_SIZE = 2; terms = head.getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_HEAD_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); term = terms.get(1); assertTrue("Mismatch 
term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "y"); //-- The Body EXPECTED_BODY_SIZE = 2; List<Function> body = rules.get(0).getBody(); assertNotNull("Body is null!", body); assertTrue("Mismatch body size!", body.size() == EXPECTED_BODY_SIZE); //---- Body atom #1 uri = ((Function)body.get(0)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://example.org/stuff/1.0/q"); EXPECTED_BODY_TERM_SIZE = 1; terms = ((Function)body.get(0)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)term).getName(), "x"); //---- Body atom #2 uri = ((Function)body.get(1)).getFunctionSymbol().getName(); assertEquals("Mismatch predicate name!", uri, "http://base.org/stuff/1.0/r"); EXPECTED_BODY_TERM_SIZE = 2; terms = ((Function)body.get(1)).getTerms(); assertEquals("Mismatch term size!", terms.size(), EXPECTED_BODY_TERM_SIZE); term = terms.get(0); assertTrue("Mismatch term type!", term instanceof FunctionalTermImpl); assertEquals("Mismatch variable name!", ((FunctionalTermImpl)term).getFunctionSymbol().toString(), "http://example.org/stuff/1.0/s"); //------ Object term List<Term> objVarTerms = ((FunctionalTermImpl)term).getTerms(); assertEquals("Mismatch term size!", objVarTerms.size(), 2); Term objVarTerm = objVarTerms.get(0); assertTrue("Mismatch term type!", objVarTerm instanceof Variable); assertEquals("Mismatch variable name!", ((Variable)objVarTerm).getName(), "y"); objVarTerm = objVarTerms.get(1); assertTrue("Mismatch term type!", objVarTerm instanceof ValueConstantImpl); assertEquals("Mismatch variable name!", ((ValueConstantImpl)objVarTerm).getValue(), "Student"); //------ Object term ends. 
term = terms.get(1); assertTrue("Mismatch term type!", term instanceof URIConstantImpl); assertEquals("Mismatch variable name!", ((URIConstantImpl)term).getURI().toString(), "http://example.org/stuff/1.1/FUB"); } }
package com.intellij.tasks.gitlab;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.intellij.openapi.util.Comparing;
import com.intellij.tasks.Task;
import com.intellij.tasks.TaskRepositoryType;
import com.intellij.tasks.gitlab.model.GitlabIssue;
import com.intellij.tasks.gitlab.model.GitlabProject;
import com.intellij.tasks.impl.gson.TaskGsonUtil;
import com.intellij.tasks.impl.httpclient.NewBaseRepositoryImpl;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xmlb.annotations.Tag;
import com.intellij.util.xmlb.annotations.Transient;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.protocol.HttpContext;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;

import static com.intellij.tasks.impl.httpclient.TaskResponseUtil.GsonMultipleObjectsDeserializer;
import static com.intellij.tasks.impl.httpclient.TaskResponseUtil.GsonSingleObjectDeserializer;

/**
 * Task repository backed by GitLab's REST API v3; authenticates via the
 * PRIVATE-TOKEN request header and deserializes JSON responses with Gson.
 *
 * @author Mikhail Golubev
 */
@Tag("Gitlab")
public class GitlabRepository extends NewBaseRepositoryImpl {

  @NonNls public static final String REST_API_PATH_PREFIX = "/api/v3/";
  @NonNls private static final String TOKEN_HEADER = "PRIVATE-TOKEN";
  private static final Pattern ID_PATTERN = Pattern.compile("\\d+");
  private static final Gson GSON = TaskGsonUtil.createDefaultBuilder().create();

  // Gson type tokens for deserializing JSON arrays of projects/issues.
  // @formatter:off
  private static final TypeToken<List<GitlabProject>> LIST_OF_PROJECTS_TYPE = new TypeToken<List<GitlabProject>>() {};
  private static final TypeToken<List<GitlabIssue>> LIST_OF_ISSUES_TYPE = new TypeToken<List<GitlabIssue>>() {};
  // @formatter:on

  // Sentinel project (id == -1) meaning "no specific project selected";
  // getIssuesUrl() falls back to the global /issues endpoint for it.
  static final GitlabProject UNSPECIFIED_PROJECT = new GitlabProject() {
    @Override
    public String getName() {
      return "-- all issues created by you --";
    }

    @Override
    public int getId() {
      return -1;
    }
  };

  private GitlabProject myCurrentProject;
  private List<GitlabProject> myProjects = null; // lazily filled cache, see fetchProjects()

  /**
   * Serialization constructor
   */
  @SuppressWarnings("UnusedDeclaration")
  public GitlabRepository() {
  }

  /**
   * Normal instantiation constructor
   */
  public GitlabRepository(TaskRepositoryType type) {
    super(type);
  }

  /**
   * Cloning constructor
   */
  public GitlabRepository(GitlabRepository other) {
    super(other);
    myCurrentProject = other.myCurrentProject;
  }

  // NOTE(review): no hashCode() override is visible in this chunk even though
  // equals() adds a field — confirm the equals/hashCode contract is upheld.
  @Override
  public boolean equals(Object o) {
    if (!super.equals(o)) return false;
    final GitlabRepository repository = (GitlabRepository)o;
    if (!Comparing.equal(myCurrentProject, repository.myCurrentProject)) return false;
    return true;
  }

  @NotNull
  @Override
  public GitlabRepository clone() {
    return new GitlabRepository(this);
  }

  // Translates the platform's offset/limit paging into GitLab's 1-based
  // page/per-page paging: page = (offset / limit) + 1.
  @Override
  public Task[] getIssues(@Nullable String query, int offset, int limit, boolean withClosed) throws Exception {
    final List<GitlabIssue> issues = fetchIssues((offset / limit) + 1, limit, !withClosed);
    return ContainerUtil.map2Array(issues, GitlabTask.class, issue -> new GitlabTask(this, issue));
  }

  @Nullable
  @Override
  public Task findTask(@NotNull String id) throws Exception {
    // doesn't work now, because Gitlab's REST API doesn't provide endpoint to find task
    // using only its global ID, it requires both task's global ID AND task's project ID
    return null;
  }

  // Connection test simply issues a GET against the issues endpoint.
  @Nullable
  @Override
  public CancellableConnection createCancellableConnection() {
    return new HttpTestConnection(new HttpGet(getIssuesUrl()));
  }

  /**
   * Always forcibly attempts to fetch new projects from server.
   * Pages through /projects (30 per page) until an empty page is returned,
   * then replaces the cached project list.
   */
  @NotNull
  public List<GitlabProject> fetchProjects() throws Exception {
    final ResponseHandler<List<GitlabProject>> handler = new GsonMultipleObjectsDeserializer<>(GSON, LIST_OF_PROJECTS_TYPE);
    final String projectUrl = getRestApiUrl("projects");
    final List<GitlabProject> result = new ArrayList<>();
    int pageNum = 1;
    while (true) {
      final URI paginatedProjectsUrl = new URIBuilder(projectUrl)
        .addParameter("page", String.valueOf(pageNum))
        .addParameter("per_page", "30")
        .build();
      final List<GitlabProject> page = getHttpClient().execute(new HttpGet(paginatedProjectsUrl), handler);
      // Gitlab's REST API doesn't allow to know beforehand how many projects are available
      if (page.isEmpty()) {
        break;
      }
      result.addAll(page);
      pageNum++;
    }
    myProjects = result;
    return Collections.unmodifiableList(myProjects);
  }

  // NOTE(review): this uses the singular "project" path segment while
  // fetchProjects() uses "projects"; GitLab API v3 documents GET /projects/:id —
  // confirm this endpoint is correct.
  @SuppressWarnings("UnusedDeclaration")
  @NotNull
  public GitlabProject fetchProject(int id) throws Exception {
    final HttpGet request = new HttpGet(getRestApiUrl("project", id));
    return getHttpClient().execute(request, new GsonSingleObjectDeserializer<>(GSON, GitlabProject.class));
  }

  /**
   * Fetches one page of issues (project-scoped if a concrete project is
   * selected, otherwise global), ordered by update time, optionally
   * restricted to open issues.
   */
  @NotNull
  public List<GitlabIssue> fetchIssues(int pageNumber, int pageSize, boolean openedOnly) throws Exception {
    ensureProjectsDiscovered();
    final URIBuilder uriBuilder = new URIBuilder(getIssuesUrl())
      .addParameter("page", String.valueOf(pageNumber))
      .addParameter("per_page", String.valueOf(pageSize))
      // Ordering was added in v7.8
      .addParameter("order_by", "updated_at");
    if (openedOnly) {
      // Filtering by state was added in v7.3
      uriBuilder.addParameter("state", "opened");
    }
    final ResponseHandler<List<GitlabIssue>> handler = new GsonMultipleObjectsDeserializer<>(GSON, LIST_OF_ISSUES_TYPE);
    return getHttpClient().execute(new HttpGet(uriBuilder.build()), handler);
  }

  // Project-scoped issues endpoint when a concrete project is selected;
  // otherwise the global /issues endpoint (all issues created by the user).
  private String getIssuesUrl() {
    if (myCurrentProject != null && myCurrentProject != UNSPECIFIED_PROJECT) {
      return getRestApiUrl("projects", myCurrentProject.getId(), "issues");
    }
    return getRestApiUrl("issues");
  }

  /**
   * @param issueId
global issue's ID (<tt>id</tt> field, not <tt>iid</tt>) */ @Nullable public GitlabIssue fetchIssue(int projectId, int issueId) throws Exception { ensureProjectsDiscovered(); final HttpGet request = new HttpGet(getRestApiUrl("projects", projectId, "issues", issueId)); final ResponseHandler<GitlabIssue> handler = new GsonSingleObjectDeserializer<>(GSON, GitlabIssue.class, true); return getHttpClient().execute(request, handler); } @Override public String getPresentableName() { String name = getUrl(); if (myCurrentProject != null && myCurrentProject != UNSPECIFIED_PROJECT) { name += "/" + myCurrentProject.getName(); } return name; } @Nullable @Override public String extractId(@NotNull String taskName) { return ID_PATTERN.matcher(taskName).matches() ? taskName : null; } @Override public boolean isConfigured() { return super.isConfigured() && !myPassword.isEmpty(); } @NotNull @Override public String getRestApiPathPrefix() { return REST_API_PATH_PREFIX; } @Nullable @Override protected HttpRequestInterceptor createRequestInterceptor() { return new HttpRequestInterceptor() { @Override public void process(HttpRequest request, HttpContext context) throws HttpException, IOException { request.addHeader(TOKEN_HEADER, myPassword); //request.addHeader("Accept", "application/json"); } }; } public void setCurrentProject(@Nullable GitlabProject project) { myCurrentProject = project != null && project.getId() == -1 ? UNSPECIFIED_PROJECT : project; } public GitlabProject getCurrentProject() { return myCurrentProject; } /** * May return cached projects or make request to receive new ones. 
*/ @NotNull public List<GitlabProject> getProjects() { try { ensureProjectsDiscovered(); } catch (Exception ignored) { return Collections.emptyList(); } return Collections.unmodifiableList(myProjects); } private void ensureProjectsDiscovered() throws Exception { if (myProjects == null) { fetchProjects(); } } @TestOnly @Transient public void setProjects(@NotNull List<GitlabProject> projects) { myProjects = projects; } }
package org.batfish.datamodel;

import static org.batfish.datamodel.Names.generatedTenantVniInterfaceName;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
import java.io.IOException;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import java.util.function.Supplier;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.ParametersAreNonnullByDefault;
import org.batfish.common.BatfishException;
import org.batfish.datamodel.PrefixTrieMultiMap.FoldOperator;
import org.batfish.datamodel.route.nh.NextHopDiscard;
import org.batfish.datamodel.route.nh.NextHopInterface;
import org.batfish.datamodel.route.nh.NextHopIp;
import org.batfish.datamodel.route.nh.NextHopVisitor;
import org.batfish.datamodel.route.nh.NextHopVrf;
import org.batfish.datamodel.route.nh.NextHopVtep;

/**
 * {@link Fib} implementation that eagerly resolves every RIB route down to its forwarding
 * action(s) at construction time and stores the results in a prefix trie for longest-prefix
 * lookups.
 */
@ParametersAreNonnullByDefault
public final class FibImpl implements Fib {

  /** Helps perform recursive route resolution and maintain the route chain */
  private static final class ResolutionTreeNode {
    private final @Nonnull AbstractRoute _route;
    // Non-null only on leaf nodes: the ARP/next-hop IP to use when forwarding
    private final @Nullable Ip _finalNextHopIp;
    private final @Nonnull List<ResolutionTreeNode> _children;

    /** Use static factories for sanity */
    private ResolutionTreeNode(
        AbstractRoute route, @Nullable Ip finalNextHopIp, List<ResolutionTreeNode> children) {
      _route = route;
      _finalNextHopIp = finalNextHopIp;
      _children = children;
    }

    /** Creates a node and, when {@code parent} is non-null, links it as that parent's child. */
    static ResolutionTreeNode withParent(
        AbstractRoute route, @Nullable ResolutionTreeNode parent, @Nullable Ip finalNextHopIp) {
      ResolutionTreeNode child = new ResolutionTreeNode(route, finalNextHopIp, new LinkedList<>());
      if (parent != null) {
        parent.addChild(child);
      }
      return child;
    }

    /** Creates the root of a resolution tree for a top-level RIB route. */
    static ResolutionTreeNode root(@Nonnull AbstractRoute route) {
      return new ResolutionTreeNode(route, null, new LinkedList<>());
    }

    @Nonnull
    public AbstractRoute getRoute() {
      return _route;
    }

    @Nullable
    public Ip getFinalNextHopIp() {
      return _finalNextHopIp;
    }

    @Nonnull
    public List<ResolutionTreeNode> getChildren() {
      return _children;
    }

    private void addChild(ResolutionTreeNode child) {
      _children.add(child);
    }
  }

  // Resolution recursion limit; exceeding it is treated as a probable routing loop
  private static final int MAX_DEPTH = 10;

  /** This trie is the source of truth for all resolved FIB routes */
  @Nonnull private final PrefixTrieMultiMap<FibEntry> _root;

  // Memoized flattened view of the trie; transient and re-initialized after deserialization
  private transient Supplier<Set<FibEntry>> _entries;

  /**
   * Builds the FIB by resolving every route in {@code rib} and inserting the resulting entries
   * under the route's network prefix.
   */
  public <R extends AbstractRouteDecorator> FibImpl(
      @Nonnull GenericRib<R> rib, ResolutionRestriction<R> restriction) {
    _root = new PrefixTrieMultiMap<>(Prefix.ZERO);
    rib.getTypedRoutes()
        .forEach(
            r -> {
              Set<FibEntry> s = resolveRoute(rib, r.getAbstractRoute(), restriction);
              _root.putAll(r.getNetwork(), s);
            });
    initSuppliers();
  }

  private void initSuppliers() {
    _entries = Suppliers.memoize(this::computeEntries);
  }

  private Set<FibEntry> computeEntries() {
    return _root.getAllElements();
  }

  @Nonnull
  @Override
  public Set<FibEntry> allEntries() {
    return _entries.get();
  }

  /**
   * Attempt to resolve a RIB route down to an interface route.
   *
   * @param rib {@link GenericRib} for which to do the resolution.
   * @param route {@link AbstractRoute} with a next hop IP to be resolved.
   * @param restriction A restriction on which routes may be used to recursively resolve next-hop
   *     IPs
   * @return the set of {@link FibEntry} (forwarding action plus the chain of routes taken) that
   *     {@code route} resolves to
   * @throws BatfishException if resolution depth is exceeded (high likelihood of a routing loop) OR
   *     an invalid route in the RIB has been encountered.
   */
  @VisibleForTesting
  <R extends AbstractRouteDecorator> Set<FibEntry> resolveRoute(
      GenericRib<R> rib, AbstractRoute route, ResolutionRestriction<R> restriction) {
    ResolutionTreeNode resolutionRoot = ResolutionTreeNode.root(route);
    buildResolutionTree(
        rib,
        route,
        Route.UNSET_ROUTE_NEXT_HOP_IP,
        new HashSet<>(),
        0,
        Prefix.MAX_PREFIX_LENGTH,
        null,
        resolutionRoot,
        restriction);
    Builder<FibEntry> collector = ImmutableSet.builder();
    collectEntries(resolutionRoot, new Stack<>(), collector);
    return collector.build();
  }

  /**
   * Depth-first walk of the resolution tree. Each leaf with a final next-hop IP becomes one
   * {@link FibEntry} whose action depends on the leaf route's next-hop type; {@code stack} carries
   * the chain of routes from the root to the current node.
   */
  private void collectEntries(
      ResolutionTreeNode node,
      Stack<AbstractRoute> stack,
      ImmutableCollection.Builder<FibEntry> entriesBuilder) {
    AbstractRoute route = node.getRoute();
    if (node.getChildren().isEmpty() && node.getFinalNextHopIp() != null) {
      FibAction fibAction =
          new NextHopVisitor<FibAction>() {

            @Override
            public FibAction visitNextHopIp(NextHopIp nextHopIp) {
              // A leaf must be an interface/discard/vrf/vtep route; a bare next-hop IP here
              // means resolution never bottomed out
              throw new IllegalStateException(
                  String.format("FIB resolution failed to reach an interface route for %s", route));
            }

            @Override
            public FibAction visitNextHopInterface(NextHopInterface nextHopInterface) {
              return new FibForward(node.getFinalNextHopIp(), nextHopInterface.getInterfaceName());
            }

            @Override
            public FibAction visitNextHopDiscard(NextHopDiscard nextHopDiscard) {
              return FibNullRoute.INSTANCE;
            }

            @Override
            public FibAction visitNextHopVrf(NextHopVrf nextHopVrf) {
              return new FibNextVrf(nextHopVrf.getVrfName());
            }

            @Override
            public FibAction visitNextHopVtep(NextHopVtep nextHopVtep) {
              // Forward out the VXLAN "interface", which will send to the correct remote node by
              // "ARPing" for the VTEP IP.
              String forwardingIface = generatedTenantVniInterfaceName(nextHopVtep.getVni());
              return new FibForward(nextHopVtep.getVtepIp(), forwardingIface);
            }
          }.visit(route.getNextHop());
      entriesBuilder.add(new FibEntry(fibAction, stack));
      return;
    }
    stack.push(route);
    for (ResolutionTreeNode child : node.getChildren()) {
      collectEntries(child, stack, entriesBuilder);
    }
    stack.pop();
  }

  /**
   * Tail-recursive method to build a route resolution tree. Each top-level route is mapped to a
   * number of leaf {@link ResolutionTreeNode}. Leaf nodes must contain non-null {@link
   * ResolutionTreeNode#_finalNextHopIp}
   */
  private <R extends AbstractRouteDecorator> void buildResolutionTree(
      GenericRib<R> rib,
      AbstractRoute route,
      Ip mostRecentNextHopIp,
      Set<Prefix> seenNetworks,
      int depth,
      int maxPrefixLength,
      @Nullable AbstractRoute parentRoute,
      ResolutionTreeNode treeNode,
      ResolutionRestriction<R> restriction) {
    Prefix network = route.getNetwork();
    if (seenNetworks.contains(network)) {
      // Don't enter a resolution loop
      return;
    }
    Set<Prefix> newSeenNetworks = new HashSet<>(seenNetworks);
    newSeenNetworks.add(network);
    if (depth > MAX_DEPTH) {
      // TODO: Declare this a loop using some warning mechanism
      // https://github.com/batfish/batfish/issues/1469
      return;
    }

    // For non-forwarding routes, try to find a less specific route
    if (route.getNonForwarding()) {
      if (parentRoute == null) {
        return;
      } else {
        // NOTE(review): this mutates the caller's seenNetworks set (unlike the copy made above)
        // before retrying the parent — presumably intentional to allow re-visiting; confirm
        seenNetworks.remove(parentRoute.getNetwork());
        buildResolutionTree(
            rib,
            parentRoute,
            mostRecentNextHopIp,
            seenNetworks,
            depth + 1,
            maxPrefixLength - 1,
            null,
            treeNode,
            restriction);
        return;
      }
    }

    new NextHopVisitor<Void>() {
      @Override
      public Void visitNextHopIp(NextHopIp nextHopIp) {
        // Recursive case: resolve the next-hop IP via longest-prefix match in the RIB
        Set<AbstractRoute> forwardingRoutes =
            rib
                .longestPrefixMatch(
                    nextHopIp.getIp(),
                    maxPrefixLength,
                    r -> {
                      if (route.getProtocol() == RoutingProtocol.STATIC) {
                        // TODO: factor out common code with
                        // StaticRouteHelper.shouldActivateNextHopIpRoute
                        if (r.getAbstractRoute().getProtocol() == RoutingProtocol.CONNECTED) {
                          // All static routes can be activated by a connected route.
                          return true;
                        }
                        if (!((StaticRoute) route).getRecursive()) {
                          // Non-recursive static routes cannot be activated by non-connected
                          // routes.
                          return false;
                        }
                      }
                      // Recursive routes must pass restriction if present.
                      return restriction.test(r);
                    })
                .stream()
                .map(AbstractRouteDecorator::getAbstractRoute)
                .collect(ImmutableSet.toImmutableSet());
        if (forwardingRoutes.isEmpty()) {
          // Re-resolve *this route* with a less specific prefix match
          seenNetworks.remove(route.getNetwork());
          buildResolutionTree(
              rib,
              route,
              mostRecentNextHopIp,
              seenNetworks,
              depth + 1,
              maxPrefixLength - 1,
              parentRoute,
              treeNode,
              restriction);
        } else {
          // We have at least one valid longest-prefix match
          for (AbstractRoute nextHopLongestPrefixMatchRoute : forwardingRoutes) {
            buildResolutionTree(
                rib,
                nextHopLongestPrefixMatchRoute,
                nextHopIp.getIp(),
                newSeenNetworks,
                depth + 1,
                Prefix.MAX_PREFIX_LENGTH,
                route,
                ResolutionTreeNode.withParent(nextHopLongestPrefixMatchRoute, treeNode, null),
                restriction);
          }
        }
        return null;
      }

      @Override
      public Void visitNextHopInterface(NextHopInterface nextHopInterface) {
        // Base case: an interface route terminates resolution. When the interface route carries
        // an explicit IP, the route's own next-hop IP is used as the final (ARP) IP.
        // NOTE(review): uses route.getNextHopIp() rather than nextHopInterface.getIp() in the
        // non-null branch — presumably equivalent for interface routes; confirm
        Ip finalNextHopIp =
            nextHopInterface.getIp() == null ? mostRecentNextHopIp : route.getNextHopIp();
        ResolutionTreeNode.withParent(route, treeNode, finalNextHopIp);
        return null;
      }

      @Override
      public Void visitNextHopDiscard(NextHopDiscard nextHopDiscard) {
        // Base case: null route
        ResolutionTreeNode.withParent(route, treeNode, Route.UNSET_ROUTE_NEXT_HOP_IP);
        return null;
      }

      @Override
      public Void visitNextHopVrf(NextHopVrf nextHopVrf) {
        // Base case: delegation to another VRF
        ResolutionTreeNode.withParent(route, treeNode, Route.UNSET_ROUTE_NEXT_HOP_IP);
        return null;
      }

      @Override
      public Void visitNextHopVtep(NextHopVtep nextHopVtep) {
        // Base case: VXLAN tunnel next hop
        ResolutionTreeNode.withParent(route, treeNode, Route.UNSET_ROUTE_NEXT_HOP_IP);
        return null;
      }
    }.visit(route.getNextHop());
  }

  @Nonnull
  @Override
  public Set<FibEntry> get(Ip ip) {
    return _root.longestPrefixMatch(ip);
  }

  @Nonnull
  @Override
  public Map<Prefix, IpSpace> getMatchingIps() {
    ImmutableMap.Builder<Prefix, IpSpace> builder = ImmutableMap.builder();

    /* Do a fold over the trie. At each node, create the matching Ips for that prefix (adding it
     * to the builder) and return an IpSpace of IPs matched by any prefix in that subtrie. To create
     * the matching Ips of the prefix, whitelist the prefix and blacklist the IPs matched by
     * subtrie prefixes (i.e. longer prefixes).
     */
    _root.fold(
        new FoldOperator<FibEntry, Set<IpWildcard>>() {
          @Nonnull
          @Override
          public Set<IpWildcard> fold(
              Prefix prefix,
              Set<FibEntry> elems,
              @Nullable Set<IpWildcard> leftPrefixes,
              @Nullable Set<IpWildcard> rightPrefixes) {
            Set<IpWildcard> subTriePrefixes;
            boolean leftEmpty = leftPrefixes == null || leftPrefixes.isEmpty();
            boolean rightEmpty = rightPrefixes == null || rightPrefixes.isEmpty();
            if (leftEmpty && rightEmpty) {
              subTriePrefixes = ImmutableSet.of();
            } else if (leftEmpty) {
              subTriePrefixes = rightPrefixes;
            } else if (rightEmpty) {
              subTriePrefixes = leftPrefixes;
            } else {
              subTriePrefixes =
                  ImmutableSet.<IpWildcard>builder()
                      .addAll(leftPrefixes)
                      .addAll(rightPrefixes)
                      .build();
            }

            if (elems.isEmpty()) {
              return subTriePrefixes;
            }

            IpWildcard wc = IpWildcard.create(prefix);

            if (subTriePrefixes.isEmpty()) {
              builder.put(prefix, prefix.toIpSpace());
            } else {
              // Ips matching prefix are those in prefix and not in any subtrie prefixes.
              builder.put(
                  prefix, IpWildcardSetIpSpace.create(subTriePrefixes, ImmutableSet.of(wc)));
            }

            return ImmutableSet.of(wc);
          }
        });

    return builder.build();
  }

  // Restores the transient memoized supplier after Java deserialization
  private void readObject(java.io.ObjectInputStream stream)
      throws IOException, ClassNotFoundException {
    stream.defaultReadObject();
    initSuppliers();
  }
}
package com.leansoft.luxun.integration; import java.io.File; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import org.apache.thrift.TException; import org.junit.Test; import com.leansoft.luxun.api.generated.ConsumeRequest; import com.leansoft.luxun.api.generated.ConsumeResponse; import com.leansoft.luxun.api.generated.ErrorCode; import com.leansoft.luxun.api.generated.ProduceRequest; import com.leansoft.luxun.api.generated.ResultCode; import com.leansoft.luxun.message.Message; import com.leansoft.luxun.message.MessageList; import com.leansoft.luxun.message.generated.CompressionCodec; import com.leansoft.luxun.server.LuxunServer; import com.leansoft.luxun.server.ServerConfig; import com.leansoft.luxun.utils.TestUtils; import com.leansoft.luxun.utils.Utils; public class LazyInitProducerTest extends ProducerConsumerTestHarness { Properties props; ServerConfig config; List<ServerConfig> configs; List<LuxunServer> servers; @Override public void setUp() throws Exception { port = TestUtils.choosePort(); props = TestUtils.createBrokerConfig(0, port); config = new ServerConfig(props); configs = new ArrayList<ServerConfig>(); configs.add(config); servers = new ArrayList<LuxunServer>(); servers.add(TestUtils.createServer(config)); super.setUp(); } @Override public void tearDown() throws Exception { super.tearDown(); for(LuxunServer server : servers) { server.close(); } for(ServerConfig config : configs) { Utils.deleteDirectory(new File(config.getLogDir())); } } @Test public void testProduceAndFetchByIndex() throws TException { // send some messages String topic = "test"; MessageList messageList = new MessageList(); messageList.add(new Message("hello".getBytes())); messageList.add(new Message("there".getBytes())); producer.send(topic, messageList); List<MessageList> listOfMessageList = consumer.consume(topic, 0, 10000); assertTrue(listOfMessageList.size() == 1); 
assertTrue(isMessageListEqual(messageList, listOfMessageList.get(0))); try { consumer.consume(topic, -1, 10000); fail("excepted IndexOutOfBoundsException was not thrown"); } catch(IndexOutOfBoundsException e) { // expected } } @Test public void testProduceAndFetchByFanoutId() throws TException { // send some messages String topic = "test"; MessageList messageList = new MessageList(); messageList.add(new Message("hello".getBytes())); messageList.add(new Message("there".getBytes())); producer.send(topic, messageList); List<MessageList> listOfMessageList = consumer.consume(topic, "fan1", 10000); assertTrue(listOfMessageList.size() == 1); assertTrue(isMessageListEqual(messageList, listOfMessageList.get(0))); // fan1 is empty now listOfMessageList = consumer.consume(topic, "fan1", 10000); assertTrue(listOfMessageList.isEmpty()); } @Test public void testProduceAndFetchOneByOneByIndex() throws TException { // send some messages String topic = "test"; for(int i = 0; i < 10; i++) { MessageList messageList = new MessageList(); messageList.add(new Message(("hello" + i).getBytes())); producer.send(topic, messageList); } // consume one by one for(int i = 0; i < 5; i++) { List<MessageList> listOfMessageList = consumer.consume(topic, i, -1); assertTrue(listOfMessageList.size() == 1); MessageList msgList = listOfMessageList.get(0); assertTrue(msgList.size() == 1); Message msg = msgList.get(0); assertEquals("hello" + i, new String(msg.getBytes())); } // consume remaining in a batch List<MessageList> listOfMessageList = consumer.consume(topic, 5, 10000); assertTrue(listOfMessageList.size() == 5); for(int i = 5; i < 10; i++) { MessageList msgList = listOfMessageList.get(i - 5); assertTrue(msgList.size() == 1); Message msg = msgList.get(0); assertEquals("hello" + i, new String(msg.getBytes())); } } @Test public void testProduceAndFetchOneByOneByFanoutId() throws TException { // send some messages String topic = "test"; for(int i = 0; i < 10; i++) { MessageList messageList = new 
MessageList(); messageList.add(new Message(("hello" + i).getBytes())); producer.send(topic, messageList); } // consume one by one for(int i = 0; i < 5; i++) { List<MessageList> listOfMessageList = consumer.consume(topic, "fan", 0); assertTrue(listOfMessageList.size() == 1); MessageList msgList = listOfMessageList.get(0); assertTrue(msgList.size() == 1); Message msg = msgList.get(0); assertEquals("hello" + i, new String(msg.getBytes())); } // consume remaining in a batch List<MessageList> listOfMessageList = consumer.consume(topic, "fan", 10000); assertTrue(listOfMessageList.size() == 5); for(int i = 5; i < 10; i++) { MessageList msgList = listOfMessageList.get(i - 5); assertTrue(msgList.size() == 1); Message msg = msgList.get(0); assertEquals("hello" + i, new String(msg.getBytes())); } } public void testProduceAndMultiFetchByIndex() throws TException { Map<String, MessageList> messages = new HashMap<String, MessageList>(); List<String> topics = new ArrayList<String>(); topics.add("test1"); topics.add("test2"); topics.add("test3"); List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>(); for(String topic : topics) { MessageList messageList = new MessageList(); messageList.add(new Message(("a_" + topic).getBytes())); messageList.add(new Message(("b_" + topic).getBytes())); messages.put(topic, messageList); producer.send(topic, messageList); ConsumeRequest consumeRequest = new ConsumeRequest(); consumeRequest.setTopic(topic); consumeRequest.setStartIndex(0); consumeRequest.setMaxFetchSize(10000); fetches.add(consumeRequest); } List<ConsumeResponse> responses = consumer.multiConsume(fetches); int index = 0; for(String topic : topics) { ConsumeResponse response = responses.get(index); List<ByteBuffer> itemList = response.getItemList(); List<MessageList> listOfMessageList = new ArrayList<MessageList>(); for(ByteBuffer buffer : itemList) { listOfMessageList.add(MessageList.fromThriftBuffer(buffer)); } assertTrue(listOfMessageList.size() == 1); 
assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0))); index++; } fetches.clear(); // send some invalid indexes for(String topic : topics) { ConsumeRequest consumeRequest = new ConsumeRequest(); consumeRequest.setTopic(topic); consumeRequest.setStartIndex(-1); consumeRequest.setMaxFetchSize(10000); fetches.add(consumeRequest); } responses = consumer.multiConsume(fetches); for(ConsumeResponse response : responses) { assertTrue(response.getResult().getResultCode() == ResultCode.FAILURE); assertTrue(response.getResult().getErrorCode() == ErrorCode.INDEX_OUT_OF_BOUNDS); } } public void testProduceAndMultiFetchByFanoutId() throws TException { Map<String, MessageList> messages = new HashMap<String, MessageList>(); List<String> topics = new ArrayList<String>(); topics.add("test1"); topics.add("test2"); topics.add("test3"); List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>(); for(String topic : topics) { MessageList messageList = new MessageList(); messageList.add(new Message(("a_" + topic).getBytes())); messageList.add(new Message(("b_" + topic).getBytes())); messages.put(topic, messageList); producer.send(topic, messageList); ConsumeRequest consumeRequest = new ConsumeRequest(); consumeRequest.setTopic(topic); consumeRequest.setFanoutId("fan1"); consumeRequest.setMaxFetchSize(10000); fetches.add(consumeRequest); } List<ConsumeResponse> responses = consumer.multiConsume(fetches); int index = 0; for(String topic : topics) { ConsumeResponse response = responses.get(index); List<ByteBuffer> itemList = response.getItemList(); List<MessageList> listOfMessageList = new ArrayList<MessageList>(); for(ByteBuffer buffer : itemList) { listOfMessageList.add(MessageList.fromThriftBuffer(buffer)); } assertTrue(listOfMessageList.size() == 1); assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0))); index++; } } public void testProduceAndMultiFetchByIndexWithCompression() throws TException { Map<String, MessageList> messages = new 
HashMap<String, MessageList>(); List<String> topics = new ArrayList<String>(); topics.add("test1"); topics.add("test2"); topics.add("test3"); List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>(); for(String topic : topics) { MessageList messageList = new MessageList(CompressionCodec.GZIP); messageList.add(new Message(("a_" + topic).getBytes())); messageList.add(new Message(("b_" + topic).getBytes())); messages.put(topic, messageList); producer.send(topic, messageList); ConsumeRequest consumeRequest = new ConsumeRequest(); consumeRequest.setTopic(topic); consumeRequest.setStartIndex(0); consumeRequest.setMaxFetchSize(10000); fetches.add(consumeRequest); } List<ConsumeResponse> responses = consumer.multiConsume(fetches); int index = 0; for(String topic : topics) { ConsumeResponse response = responses.get(index); List<ByteBuffer> itemList = response.getItemList(); List<MessageList> listOfMessageList = new ArrayList<MessageList>(); for(ByteBuffer buffer : itemList) { listOfMessageList.add(MessageList.fromThriftBuffer(buffer)); } assertTrue(listOfMessageList.size() == 1); assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0))); index++; } } public void testProduceAndMultiFetchByFanoutIdWithCompression() throws TException { Map<String, MessageList> messages = new HashMap<String, MessageList>(); List<String> topics = new ArrayList<String>(); topics.add("test1"); topics.add("test2"); topics.add("test3"); List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>(); for(String topic : topics) { MessageList messageList = new MessageList(CompressionCodec.GZIP); messageList.add(new Message(("a_" + topic).getBytes())); messageList.add(new Message(("b_" + topic).getBytes())); messages.put(topic, messageList); producer.send(topic, messageList); ConsumeRequest consumeRequest = new ConsumeRequest(); consumeRequest.setTopic(topic); consumeRequest.setFanoutId("fan0"); consumeRequest.setMaxFetchSize(10000); fetches.add(consumeRequest); } 
List<ConsumeResponse> responses = consumer.multiConsume(fetches); int index = 0; for(String topic : topics) { ConsumeResponse response = responses.get(index); List<ByteBuffer> itemList = response.getItemList(); List<MessageList> listOfMessageList = new ArrayList<MessageList>(); for(ByteBuffer buffer : itemList) { listOfMessageList.add(MessageList.fromThriftBuffer(buffer)); } assertTrue(listOfMessageList.size() == 1); assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0))); index++; } } public void testMultiProduceThenFetchByIndex() throws TException { // send some messages Map<String, MessageList> messages = new HashMap<String, MessageList>(); List<String> topics = new ArrayList<String>(); topics.add("test1"); topics.add("test2"); topics.add("test3"); List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>(); List<ProduceRequest> produceList = new ArrayList<ProduceRequest>(); for(String topic : topics) { MessageList messageList = new MessageList(); messageList.add(new Message(("a_" + topic).getBytes())); messageList.add(new Message(("b_" + topic).getBytes())); messages.put(topic, messageList); produceList.add(new ProduceRequest(messageList.toThriftBuffer(), topic)); ConsumeRequest consumeRequest = new ConsumeRequest(); consumeRequest.setTopic(topic); consumeRequest.setStartIndex(0); consumeRequest.setMaxFetchSize(10000); fetches.add(consumeRequest); } producer.multiSend(produceList); List<ConsumeResponse> responses = consumer.multiConsume(fetches); int index = 0; for(String topic : topics) { ConsumeResponse response = responses.get(index); List<ByteBuffer> itemList = response.getItemList(); List<MessageList> listOfMessageList = new ArrayList<MessageList>(); for(ByteBuffer buffer : itemList) { listOfMessageList.add(MessageList.fromThriftBuffer(buffer)); } assertTrue(listOfMessageList.size() == 1); assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0))); index++; } } public void testMultiProduceThenFetchByFanoutId() 
throws TException { // send some messages Map<String, MessageList> messages = new HashMap<String, MessageList>(); List<String> topics = new ArrayList<String>(); topics.add("test1"); topics.add("test2"); topics.add("test3"); List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>(); List<ProduceRequest> produceList = new ArrayList<ProduceRequest>(); int i = 0; for(String topic : topics) { MessageList messageList = new MessageList(); messageList.add(new Message(("a_" + topic).getBytes())); messageList.add(new Message(("b_" + topic).getBytes())); messages.put(topic, messageList); produceList.add(new ProduceRequest(messageList.toThriftBuffer(), topic)); ConsumeRequest consumeRequest = new ConsumeRequest(); consumeRequest.setTopic(topic); consumeRequest.setFanoutId("fan" + i); consumeRequest.setMaxFetchSize(10000); fetches.add(consumeRequest); i++; } producer.multiSend(produceList); List<ConsumeResponse> responses = consumer.multiConsume(fetches); int index = 0; for(String topic : topics) { ConsumeResponse response = responses.get(index); List<ByteBuffer> itemList = response.getItemList(); List<MessageList> listOfMessageList = new ArrayList<MessageList>(); for(ByteBuffer buffer : itemList) { listOfMessageList.add(MessageList.fromThriftBuffer(buffer)); } assertTrue(listOfMessageList.size() == 1); assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0))); index++; } } public void testMultiProduceWithCompressionThenFetchByIndex() throws TException { // send some messages Map<String, MessageList> messages = new HashMap<String, MessageList>(); List<String> topics = new ArrayList<String>(); topics.add("test1"); topics.add("test2"); topics.add("test3"); List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>(); List<ProduceRequest> produceList = new ArrayList<ProduceRequest>(); for(String topic : topics) { MessageList messageList = new MessageList(CompressionCodec.GZIP); messageList.add(new Message(("a_" + topic).getBytes())); messageList.add(new 
Message(("b_" + topic).getBytes()));
            messages.put(topic, messageList);
            produceList.add(new ProduceRequest(messageList.toThriftBuffer(), topic));
            ConsumeRequest consumeRequest = new ConsumeRequest();
            consumeRequest.setTopic(topic);
            consumeRequest.setStartIndex(0);
            consumeRequest.setMaxFetchSize(10000);
            fetches.add(consumeRequest);
        }
        producer.multiSend(produceList);
        List<ConsumeResponse> responses = consumer.multiConsume(fetches);
        // Responses come back in the same order as the requests; walk them in parallel.
        int index = 0;
        for(String topic : topics) {
            ConsumeResponse response = responses.get(index);
            List<ByteBuffer> itemList = response.getItemList();
            List<MessageList> listOfMessageList = new ArrayList<MessageList>();
            for(ByteBuffer buffer : itemList) {
                listOfMessageList.add(MessageList.fromThriftBuffer(buffer));
            }
            // Exactly one produced batch per topic, and it must round-trip unchanged.
            assertTrue(listOfMessageList.size() == 1);
            assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0)));
            index++;
        }
    }

    // Produces one GZIP-compressed batch per topic, then consumes each topic by
    // fanout id (rather than by start index) and verifies the batches round-trip.
    public void testMultiProduceWithCompressionThenFetchByFanoutId() throws TException {
        // send some messages
        Map<String, MessageList> messages = new HashMap<String, MessageList>();
        List<String> topics = new ArrayList<String>();
        topics.add("test1");
        topics.add("test2");
        topics.add("test3");
        List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>();
        List<ProduceRequest> produceList = new ArrayList<ProduceRequest>();
        int i = 0;
        for(String topic : topics) {
            MessageList messageList = new MessageList(CompressionCodec.GZIP);
            messageList.add(new Message(("a_" + topic).getBytes()));
            messageList.add(new Message(("b_" + topic).getBytes()));
            messages.put(topic, messageList);
            produceList.add(new ProduceRequest(messageList.toThriftBuffer(), topic));
            ConsumeRequest consumeRequest = new ConsumeRequest();
            consumeRequest.setTopic(topic);
            // Each topic gets its own fanout id ("fan0", "fan1", ...).
            consumeRequest.setFanoutId("fan" + i);
            consumeRequest.setMaxFetchSize(10000);
            fetches.add(consumeRequest);
            i++;
        }
        producer.multiSend(produceList);
        List<ConsumeResponse> responses = consumer.multiConsume(fetches);
        int index = 0;
        for(String topic : topics) {
            ConsumeResponse response = responses.get(index);
            List<ByteBuffer> itemList = response.getItemList();
            List<MessageList> listOfMessageList = new ArrayList<MessageList>();
            for(ByteBuffer buffer : itemList) {
                listOfMessageList.add(MessageList.fromThriftBuffer(buffer));
            }
            assertTrue(listOfMessageList.size() == 1);
            assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0)));
            index++;
        }
    }

    // Sends the same multi-produce batch twice, then fetches by start index and
    // expects both copies of each topic's batch to be returned.
    public void testMultiProduceResendThenFetchByIndex() throws TException {
        // send some messages
        Map<String, MessageList> messages = new HashMap<String, MessageList>();
        List<String> topics = new ArrayList<String>();
        topics.add("test1");
        topics.add("test2");
        topics.add("test3");
        List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>();
        List<ProduceRequest> produceList = new ArrayList<ProduceRequest>();
        for(String topic : topics) {
            MessageList messageList = new MessageList();
            messageList.add(new Message(("a_" + topic).getBytes()));
            messageList.add(new Message(("b_" + topic).getBytes()));
            messages.put(topic, messageList);
            produceList.add(new ProduceRequest(messageList.toThriftBuffer(), topic));
            ConsumeRequest consumeRequest = new ConsumeRequest();
            consumeRequest.setTopic(topic);
            consumeRequest.setStartIndex(0);
            consumeRequest.setMaxFetchSize(10000);
            fetches.add(consumeRequest);
        }
        producer.multiSend(produceList);
        // resend the same multi-send batch (intentional duplicate)
        producer.multiSend(produceList);
        TestUtils.sleepQuietly(1000); // give time to broker to save the messages
        List<ConsumeResponse> responses = consumer.multiConsume(fetches);
        int index = 0;
        for(String topic : topics) {
            ConsumeResponse response = responses.get(index);
            List<ByteBuffer> itemList = response.getItemList();
            List<MessageList> listOfMessageList = new ArrayList<MessageList>();
            for(ByteBuffer buffer : itemList) {
                listOfMessageList.add(MessageList.fromThriftBuffer(buffer));
            }
            // Two identical sends -> two identical batches per topic.
            assertTrue(listOfMessageList.size() == 2);
            assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0)));
            assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(1)));
            index++;
        }
    }

    // Same duplicate-send scenario as above, but consuming by fanout id.
    public void testMultiProduceResendThenFetchByFanoutId() throws TException {
        // send some messages
        Map<String, MessageList> messages = new HashMap<String, MessageList>();
        List<String> topics = new ArrayList<String>();
        topics.add("test1");
        topics.add("test2");
        topics.add("test3");
        List<ConsumeRequest> fetches = new ArrayList<ConsumeRequest>();
        List<ProduceRequest> produceList = new ArrayList<ProduceRequest>();
        int i = 0;
        for(String topic : topics) {
            MessageList messageList = new MessageList();
            messageList.add(new Message(("a_" + topic).getBytes()));
            messageList.add(new Message(("b_" + topic).getBytes()));
            messages.put(topic, messageList);
            produceList.add(new ProduceRequest(messageList.toThriftBuffer(), topic));
            ConsumeRequest consumeRequest = new ConsumeRequest();
            consumeRequest.setTopic(topic);
            consumeRequest.setFanoutId("fan" + i);
            consumeRequest.setMaxFetchSize(10000);
            fetches.add(consumeRequest);
            i++;
        }
        producer.multiSend(produceList);
        // resend the same multi-send batch (intentional duplicate)
        producer.multiSend(produceList);
        TestUtils.sleepQuietly(1000); // give time to broker to save the messages
        List<ConsumeResponse> responses = consumer.multiConsume(fetches);
        int index = 0;
        for(String topic : topics) {
            ConsumeResponse response = responses.get(index);
            List<ByteBuffer> itemList = response.getItemList();
            List<MessageList> listOfMessageList = new ArrayList<MessageList>();
            for(ByteBuffer buffer : itemList) {
                listOfMessageList.add(MessageList.fromThriftBuffer(buffer));
            }
            assertTrue(listOfMessageList.size() == 2);
            assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(0)));
            assertTrue(isMessageListEqual(messages.get(topic), listOfMessageList.get(1)));
            index++;
        }
    }

    // Consuming an unknown topic by index must fail with TOPIC_NOT_EXIST and
    // must NOT create a log file for the topic as a side effect.
    public void testConsumeNotExistTopicByIndex() throws TException {
        String newTopic = "new-topic";
        ConsumeRequest consumeRequest = new ConsumeRequest();
        consumeRequest.setTopic(newTopic);
        consumeRequest.setStartIndex(0);
        consumeRequest.setMaxFetchSize(10000);
        ConsumeResponse consumeResponse = consumer.consume(consumeRequest);
        assertTrue(consumeResponse.getResult().getResultCode() == ResultCode.FAILURE);
        assertTrue(consumeResponse.getResult().getErrorCode() == ErrorCode.TOPIC_NOT_EXIST);
        File logFile = new File(config.getLogDir(), newTopic);
        assertTrue(!logFile.exists());
    }

    // Same unknown-topic check, consuming by fanout id instead of index.
    public void testConsumeNotExistTopicByFanoutId() throws TException {
        String newTopic = "new-topic";
        ConsumeRequest consumeRequest = new ConsumeRequest();
        consumeRequest.setTopic(newTopic);
        consumeRequest.setFanoutId("fan8");
        consumeRequest.setMaxFetchSize(10000);
        ConsumeResponse consumeResponse = consumer.consume(consumeRequest);
        assertTrue(consumeResponse.getResult().getResultCode() == ResultCode.FAILURE);
        assertTrue(consumeResponse.getResult().getErrorCode() == ErrorCode.TOPIC_NOT_EXIST);
        File logFile = new File(config.getLogDir(), newTopic);
        assertTrue(!logFile.exists());
    }

    // Element-wise equality of two message lists: same size and equal messages
    // at every position.
    private boolean isMessageListEqual(MessageList source, MessageList target) {
        if (source.size() != target.size()) {
            return false;
        }
        for(int i = 0; i < source.size(); i++) {
            if (!source.get(i).equals(target.get(i))) {
                return false;
            }
        }
        return true;
    }
}
/*
 * Copyright 2007 Yusuke Yamamoto
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package twitter4j.internal.json;

import twitter4j.RateLimitStatus;
import twitter4j.TwitterException;
import twitter4j.conf.Configuration;
import twitter4j.internal.http.HttpResponse;
import twitter4j.internal.org.json.JSONObject;

import java.util.Date;

import static twitter4j.internal.util.z_T4JInternalParseUtil.getDate;
import static twitter4j.internal.util.z_T4JInternalParseUtil.getInt;

/**
 * A data class representing Twitter REST API's rate limit status
 *
 * @author Yusuke Yamamoto - yusuke at mac.com
 * @see <a href="https://dev.twitter.com/docs/rate-limiting">Rate Limiting | Twitter Developers</a>
 */
/*package*/ final class RateLimitStatusJSONImpl implements RateLimitStatus, java.io.Serializable {
    private int remainingHits;
    private int hourlyLimit;
    // Reset moment as a UTC epoch time in seconds — same unit as the
    // "reset_time_in_seconds" JSON field parsed in init().
    private int resetTimeInSeconds;
    private int secondsUntilReset;
    private Date resetTime;
    private static final long serialVersionUID = 832355052293658614L;

    /**
     * Builds an instance from already-parsed values.
     * {@code secondsUntilReset} is derived from the wall clock at construction time.
     *
     * @param hourlyLimit        total request allowance per window
     * @param remainingHits      requests left in the current window
     * @param resetTimeInSeconds reset moment as UTC epoch seconds
     * @param resetTime          reset moment as a {@link Date} (must not be null)
     */
    private RateLimitStatusJSONImpl(int hourlyLimit, int remainingHits, int resetTimeInSeconds, Date resetTime) {
        this.hourlyLimit = hourlyLimit;
        this.remainingHits = remainingHits;
        this.resetTime = resetTime;
        this.resetTimeInSeconds = resetTimeInSeconds;
        this.secondsUntilReset = (int) ((resetTime.getTime() - System.currentTimeMillis()) / 1000);
    }

    RateLimitStatusJSONImpl(HttpResponse res, Configuration conf) throws TwitterException {
        JSONObject json = res.asJSONObject();
        init(json);
        if (conf.isJSONStoreEnabled()) {
            DataObjectFactoryUtil.clearThreadLocalMap();
            DataObjectFactoryUtil.registerJSONObject(this, json);
        }
    }

    RateLimitStatusJSONImpl(JSONObject json) throws TwitterException {
        init(json);
    }

    /**
     * Populates this instance from a rate-limit JSON payload
     * ({@code hourly_limit}, {@code remaining_hits}, {@code reset_time},
     * {@code reset_time_in_seconds}).
     *
     * @param json rate limit status payload
     * @throws TwitterException when a field cannot be parsed
     */
    void init(JSONObject json) throws TwitterException {
        this.hourlyLimit = getInt("hourly_limit", json);
        this.remainingHits = getInt("remaining_hits", json);
        this.resetTime = getDate("reset_time", json, "EEE MMM d HH:mm:ss Z yyyy");
        this.resetTimeInSeconds = getInt("reset_time_in_seconds", json);
        this.secondsUntilReset = (int) ((resetTime.getTime() - System.currentTimeMillis()) / 1000);
    }

    /**
     * Builds a rate limit status from the standard {@code X-RateLimit-*}
     * response headers, or returns null if the response or any header is missing.
     *
     * @param res HTTP response carrying the headers
     * @return the parsed status, or null when incomplete
     */
    static RateLimitStatus createFromResponseHeader(HttpResponse res) {
        if (null == res) {
            return null;
        }
        int remainingHits;//"X-RateLimit-Remaining"
        int hourlyLimit;//"X-RateLimit-Limit"
        int resetTimeInSeconds;//not included in the response header. Need to be calculated.
        Date resetTime;//new Date("X-RateLimit-Reset")

        String limit = res.getResponseHeader("X-RateLimit-Limit");
        if (limit != null) {
            hourlyLimit = Integer.parseInt(limit);
        } else {
            return null;
        }
        String remaining = res.getResponseHeader("X-RateLimit-Remaining");
        if (remaining != null) {
            remainingHits = Integer.parseInt(remaining);
        } else {
            return null;
        }
        String reset = res.getResponseHeader("X-RateLimit-Reset");
        if (reset != null) {
            long longReset = Long.parseLong(reset);
            // BUG FIX: the header value is a UTC epoch time in *seconds* (the
            // same unit as the "reset_time_in_seconds" JSON field, and the
            // reason resetTime below multiplies by 1000 to get milliseconds).
            // The previous code divided by 1000 here, treating seconds as
            // milliseconds and producing a value inconsistent with resetTime.
            resetTimeInSeconds = (int) longReset;
            resetTime = new Date(longReset * 1000);
        } else {
            return null;
        }
        return new RateLimitStatusJSONImpl(hourlyLimit, remainingHits, resetTimeInSeconds, resetTime);
    }

    /**
     * Builds a rate limit status from the feature-specific
     * {@code X-FeatureRateLimit-*} response headers, or returns null if the
     * response or any header is missing.
     *
     * @param res HTTP response carrying the headers
     * @return the parsed status, or null when incomplete
     */
    static RateLimitStatus createFeatureSpecificRateLimitStatusFromResponseHeader(HttpResponse res) {
        if (null == res) {
            return null;
        }
        int remainingHits;//"X-FeatureRateLimit-Remaining"
        int hourlyLimit;//"X-FeatureRateLimit-Limit"
        int resetTimeInSeconds;//not included in the response header. Need to be calculated.
        Date resetTime;//new Date("X-FeatureRateLimit-Reset")

        String limit = res.getResponseHeader("X-FeatureRateLimit-Limit");
        if (limit != null) {
            hourlyLimit = Integer.parseInt(limit);
        } else {
            return null;
        }
        String remaining = res.getResponseHeader("X-FeatureRateLimit-Remaining");
        if (remaining != null) {
            remainingHits = Integer.parseInt(remaining);
        } else {
            return null;
        }
        String reset = res.getResponseHeader("X-FeatureRateLimit-Reset");
        if (reset != null) {
            long longReset = Long.parseLong(reset);
            // BUG FIX: epoch seconds, not milliseconds — see createFromResponseHeader.
            resetTimeInSeconds = (int) longReset;
            resetTime = new Date(longReset * 1000);
        } else {
            return null;
        }
        return new RateLimitStatusJSONImpl(hourlyLimit, remainingHits, resetTimeInSeconds, resetTime);
    }

    /**
     * {@inheritDoc}
     */
    public int getRemainingHits() {
        return remainingHits;
    }

    /**
     * {@inheritDoc}
     */
    public int getHourlyLimit() {
        return hourlyLimit;
    }

    /**
     * {@inheritDoc}
     */
    public int getResetTimeInSeconds() {
        return resetTimeInSeconds;
    }

    /**
     * {@inheritDoc}
     */
    public int getSecondsUntilReset() {
        return secondsUntilReset;
    }

    /**
     * {@inheritDoc}
     */
    public Date getResetTime() {
        return resetTime;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof RateLimitStatusJSONImpl)) return false;

        RateLimitStatusJSONImpl that = (RateLimitStatusJSONImpl) o;

        if (hourlyLimit != that.hourlyLimit) return false;
        if (remainingHits != that.remainingHits) return false;
        if (resetTimeInSeconds != that.resetTimeInSeconds) return false;
        if (secondsUntilReset != that.secondsUntilReset) return false;
        if (resetTime != null ? !resetTime.equals(that.resetTime) : that.resetTime != null)
            return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = remainingHits;
        result = 31 * result + hourlyLimit;
        result = 31 * result + resetTimeInSeconds;
        result = 31 * result + secondsUntilReset;
        result = 31 * result + (resetTime != null ? resetTime.hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return "RateLimitStatusJSONImpl{" +
                "remainingHits=" + remainingHits +
                ", hourlyLimit=" + hourlyLimit +
                ", resetTimeInSeconds=" + resetTimeInSeconds +
                ", secondsUntilReset=" + secondsUntilReset +
                ", resetTime=" + resetTime +
                '}';
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.query.h2.opt; import java.util.Collection; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.concurrent.ConcurrentSkipListMap; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.query.h2.H2Cursor; import org.apache.ignite.internal.util.GridCursorIteratorWrapper; import org.apache.ignite.internal.util.IgniteTree; import org.apache.ignite.internal.util.lang.GridCursor; import org.apache.ignite.internal.util.offheap.unsafe.GridOffHeapSnapTreeMap; import org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeGuard; import org.apache.ignite.internal.util.snaptree.SnapTreeMap; import org.apache.ignite.internal.util.typedef.internal.SB; import org.apache.ignite.spi.indexing.IndexingQueryFilter; import org.h2.engine.Session; import org.h2.index.Cursor; import org.h2.index.IndexType; import org.h2.index.SingleRowCursor; import org.h2.message.DbException; import org.h2.result.SearchRow; import org.h2.result.SortOrder; import org.h2.table.Column; import org.h2.table.IndexColumn; import org.h2.table.TableFilter; import 
org.h2.value.Value;
import org.jetbrains.annotations.Nullable;

/**
 * Base class for snapshotable segmented tree indexes.
 */
@SuppressWarnings("ComparatorNotSerializable")
public class GridH2TreeIndex extends GridH2IndexBase implements Comparator<GridSearchRowPointer> {
    /** Per-segment backing trees; a row is routed to a segment via segmentForRow(). */
    private final IgniteNavigableMapTree[] segments;

    /** Whether the backing trees support O(1) copy-on-write snapshots (see doTakeSnapshot). */
    private final boolean snapshotEnabled;

    /**
     * Constructor with index initialization. Creates index with single segment.
     *
     * @param name Index name.
     * @param tbl Table.
     * @param pk If this index is primary key.
     * @param colsList Index columns list.
     */
    @SuppressWarnings("unchecked")
    public GridH2TreeIndex(String name, GridH2Table tbl, boolean pk, List<IndexColumn> colsList) {
        this(name, tbl, pk, colsList, 1);
    }

    /**
     * Constructor with index initialization.
     *
     * @param name Index name.
     * @param tbl Table.
     * @param pk If this index is primary key.
     * @param colsList Index columns list.
     * @param segmentsCnt Number of segments.
     */
    @SuppressWarnings("unchecked")
    public GridH2TreeIndex(String name, GridH2Table tbl, boolean pk, List<IndexColumn> colsList, int segmentsCnt) {
        assert segmentsCnt > 0 : segmentsCnt;

        IndexColumn[] cols = colsList.toArray(new IndexColumn[colsList.size()]);

        IndexColumn.mapColumns(cols, tbl);

        initBaseIndex(tbl, 0, name, cols,
            pk ? IndexType.createPrimaryKey(false, false) : IndexType.createNonUnique(false, false, false));

        segments = new IgniteNavigableMapTree[segmentsCnt];

        final GridH2RowDescriptor desc = tbl.rowDescriptor();

        // Three backing-store variants: on-heap SnapTreeMap (snapshotable),
        // plain ConcurrentSkipListMap (no snapshots), or off-heap snap tree.
        if (desc == null || desc.memory() == null) {
            snapshotEnabled = desc == null || desc.snapshotableIndex();

            if (snapshotEnabled) {
                for (int i = 0; i < segmentsCnt; i++) {
                    segments[i] = new IgniteNavigableMapTree(new SnapTreeMap<GridSearchRowPointer, GridH2Row>(this) {
                        @Override protected void afterNodeUpdate_nl(Node<GridSearchRowPointer, GridH2Row> node, Object val) {
                            // Keep the node key pointing at the stored row itself.
                            if (val != null)
                                node.key = (GridSearchRowPointer)val;
                        }

                        @Override protected Comparable<? super GridSearchRowPointer> comparable(Object key) {
                            // ComparableRow carries its own biased comparison; use it directly.
                            if (key instanceof ComparableRow)
                                return (Comparable<? super SearchRow>)key;

                            return super.comparable(key);
                        }
                    });
                }
            }
            else {
                for (int i = 0; i < segmentsCnt; i++) {
                    segments[i] = new IgniteNavigableMapTree(
                        new ConcurrentSkipListMap<GridSearchRowPointer, GridH2Row>(
                            new Comparator<GridSearchRowPointer>() {
                                @Override public int compare(GridSearchRowPointer o1, GridSearchRowPointer o2) {
                                    // Delegate to the biased ComparableRow when either side is one;
                                    // note the sign flip when it is on the right-hand side.
                                    if (o1 instanceof ComparableRow)
                                        return ((ComparableRow)o1).compareTo(o2);

                                    if (o2 instanceof ComparableRow)
                                        return -((ComparableRow)o2).compareTo(o1);

                                    return compareRows(o1, o2);
                                }
                            }
                        ));
                }
            }
        }
        else {
            assert desc.snapshotableIndex() : desc;

            snapshotEnabled = true;

            for (int i = 0; i < segmentsCnt; i++) {
                segments[i] = new IgniteNavigableMapTree(new GridOffHeapSnapTreeMap<GridSearchRowPointer, GridH2Row>(desc, desc, desc.memory(), desc.guard(), this) {
                    @Override protected void afterNodeUpdate_nl(long node, GridH2Row val) {
                        final long oldKey = keyPtr(node);

                        if (val != null) {
                            key(node, val);

                            // Release the replaced off-heap key only after concurrent
                            // readers are done with it (deferred via the guard).
                            guard.finalizeLater(new Runnable() {
                                @Override public void run() {
                                    desc.createPointer(oldKey).decrementRefCount();
                                }
                            });
                        }
                    }

                    @Override protected Comparable<? super GridSearchRowPointer> comparable(Object key) {
                        if (key instanceof ComparableRow)
                            return (Comparable<? super SearchRow>)key;

                        return super.comparable(key);
                    }
                });
            }
        }

        initDistributedJoinMessaging(tbl);
    }

    /** {@inheritDoc} */
    @Override protected IgniteTree doTakeSnapshot() {
        assert snapshotEnabled;

        int seg = threadLocalSegment();

        IgniteNavigableMapTree tree = segments[seg];

        // NOTE(review): clone() delegates to the snap-tree's clone, which is
        // presumably a cheap copy-on-write snapshot — confirm against SnapTreeMap.
        return tree.clone();
    }

    /** {@inheritDoc} */
    @Override protected final IgniteTree treeForRead(int seg) {
        if (!snapshotEnabled)
            return segments[seg];

        IgniteTree res = threadLocalSnapshot();

        // Fall back to the live segment when no snapshot is attached to this thread.
        if (res == null)
            return segments[seg];

        return res;
    }

    /** {@inheritDoc} */
    @Override public void destroy() {
        assert threadLocalSnapshot() == null;

        super.destroy();
    }

    /** {@inheritDoc} */
    @Override public long getRowCount(@Nullable Session ses) {
        IndexingQueryFilter f = threadLocalFilter();

        int seg = threadLocalSegment();

        // Fast path if we don't need to perform any filtering.
        if (f == null || f.forSpace((getTable()).cacheName()) == null)
            try {
                return treeForRead(seg).size();
            }
            catch (IgniteCheckedException e) {
                throw DbException.convert(e);
            }

        // Slow path: iterate the filtered cursor and count.
        GridCursor<GridH2Row> cursor = doFind(null, false, null);

        long size = 0;

        try {
            while (cursor.next())
                size++;
        }
        catch (IgniteCheckedException e) {
            throw DbException.convert(e);
        }

        return size;
    }

    /** {@inheritDoc} */
    @Override public long getRowCountApproximation() {
        return table.getRowCountApproximation();
    }

    /** {@inheritDoc} */
    @Override public int compare(GridSearchRowPointer r1, GridSearchRowPointer r2) {
        // Second row here must be data row if first is a search row.
        return -compareRows(r2, r1);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        SB sb = new SB((indexType.isUnique() ? "Unique index '" : "Index '") + getName() + "' [");

        boolean first = true;

        for (IndexColumn col : getIndexColumns()) {
            if (first)
                first = false;
            else
                sb.a(", ");

            sb.a(col.getSQL());
        }

        sb.a(" ]");

        return sb.toString();
    }

    /** {@inheritDoc} */
    @Override public double getCost(Session ses, int[] masks, TableFilter[] filters, int filter,
        SortOrder sortOrder, HashSet<Column> cols) {
        long rowCnt = getRowCountApproximation();

        double baseCost = getCostRangeIndex(masks, rowCnt, filters, filter, sortOrder, false, cols);

        // Scale by the distributed-join multiplier.
        int mul = getDistributedMultiplier(ses, filters, filter);

        return mul * baseCost;
    }

    /** {@inheritDoc} */
    @Override public boolean canFindNext() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public Cursor find(Session ses, @Nullable SearchRow first, @Nullable SearchRow last) {
        return new H2Cursor(doFind(first, true, last), null);
    }

    /** {@inheritDoc} */
    @Override public Cursor findNext(Session ses, SearchRow higherThan, SearchRow last) {
        return new H2Cursor(doFind(higherThan, false, last), null);
    }

    /**
     * Finds row with key equal one in given search row.
     * WARNING!! Method call must be protected by {@link GridUnsafeGuard#begin()}
     * {@link GridUnsafeGuard#end()} block.
     *
     * @param row Search row.
     * @return Row.
     */
    @Override public GridH2Row findOne(GridH2Row row) {
        int seg = segmentForRow(row);

        return segments[seg].findOne(row);
    }

    /**
     * Returns sub-tree bounded by given values.
     *
     * @param first Lower bound.
     * @param includeFirst Whether lower bound should be inclusive.
     * @param last Upper bound always inclusive.
     * @return Iterator over rows in given range.
     */
    @SuppressWarnings("unchecked")
    private GridCursor<GridH2Row> doFind(@Nullable SearchRow first, boolean includeFirst, @Nullable SearchRow last) {
        int seg = threadLocalSegment();

        IgniteTree t = treeForRead(seg);

        return doFind0(t, first, includeFirst, last, threadLocalFilter());
    }

    /** {@inheritDoc} */
    @Override protected final GridCursor<GridH2Row> doFind0(
        IgniteTree t,
        @Nullable SearchRow first,
        boolean includeFirst,
        @Nullable SearchRow last,
        IndexingQueryFilter filter
    ) {
        includeFirst &= first != null;

        // Bias -1 places the bound just before equal rows (inclusive lower bound),
        // bias +1 just after (exclusive lower / inclusive upper bound).
        GridCursor<GridH2Row> range = subTree(t, comparable(first, includeFirst ? -1 : 1), comparable(last, 1));

        if (range == null)
            return EMPTY_CURSOR;

        return filter(range, filter);
    }

    /**
     * @param row Row.
     * @param bias Bias.
     * @return Comparable row.
     */
    private GridSearchRowPointer comparable(SearchRow row, int bias) {
        if (row == null)
            return null;

        // A data row with zero bias can be used as a tree key directly.
        if (bias == 0 && row instanceof GridH2Row)
            return (GridSearchRowPointer)row;

        return new ComparableRow(row, bias);
    }

    /**
     * Takes sub-map from given one.
     *
     * @param tree Tree.
     * @param first Lower bound.
     * @param last Upper bound.
     * @return Sub-map.
     */
    @SuppressWarnings({"IfMayBeConditional", "TypeMayBeWeakened"})
    private GridCursor<GridH2Row> subTree(IgniteTree tree, @Nullable GridSearchRowPointer first,
        @Nullable GridSearchRowPointer last) {
        // Empty range: lower bound is above the upper bound.
        if (first != null && last != null && compare(first, last) > 0)
            return null;

        try {
            // We take exclusive bounds because it is possible that one search row will be equal to multiple key rows
            // in tree and we must return them all.
            return tree.find(first, last);
        }
        catch (IgniteCheckedException e) {
            throw DbException.convert(e);
        }
    }

    /**
     * Gets iterator over all rows in this index.
     *
     * @return Rows iterator.
     */
    GridCursor<GridH2Row> rows() {
        return doFind(null, false, null);
    }

    /** {@inheritDoc} */
    @Override public boolean canGetFirstOrLast() {
        return true;
    }

    /** {@inheritDoc} */
    @Override public Cursor findFirstOrLast(Session ses, boolean first) {
        try {
            int seg = threadLocalSegment();

            IgniteTree t = treeForRead(seg);

            GridH2Row row = (GridH2Row)(first ? t.findFirst() : t.findLast());

            return new SingleRowCursor(row);
        }
        catch (IgniteCheckedException e) {
            throw DbException.convert(e);
        }
    }

    /** {@inheritDoc} */
    @Override public GridH2Row put(GridH2Row row) {
        int seg = segmentForRow(row);

        return segments[seg].put(row);
    }

    /** {@inheritDoc} */
    @Override public GridH2Row remove(SearchRow row) {
        // Zero bias: we want the exact matching key, not a range boundary.
        GridSearchRowPointer comparable = comparable(row, 0);

        int seg = segmentForRow(row);

        return segments[seg].remove(comparable);
    }

    /** {@inheritDoc} */
    @Override protected int segmentsCount() {
        return segments.length;
    }

    /**
     * Comparable row with bias. Will be used for queries to have correct bounds (in case of multicolumn index
     * and query on only the first few columns we may see multiple equal entries in the tree).
     */
    private final class ComparableRow implements GridSearchRowPointer, Comparable<SearchRow> {
        /** Wrapped search row. */
        private final SearchRow row;

        /** Tie-break direction: non-zero bias makes this row never equal to any tree key. */
        private final int bias;

        /**
         * @param row Row.
         * @param bias Bias.
         */
        private ComparableRow(SearchRow row, int bias) {
            this.row = row;
            this.bias = bias;
        }

        /** {@inheritDoc} */
        @Override public int compareTo(SearchRow o) {
            int res = compareRows(o, row);

            // On a tie the bias decides, so a biased bound sorts strictly
            // before (-1) or after (+1) all equal rows.
            if (res == 0)
                return bias;

            return -res;
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object obj) {
            throw new IllegalStateException("Should never be called.");
        }

        /** {@inheritDoc} */
        @Override public int getColumnCount() {
            return row.getColumnCount();
        }

        /** {@inheritDoc} */
        @Override public Value getValue(int idx) {
            return row.getValue(idx);
        }

        /** {@inheritDoc} */
        @Override public void setValue(int idx, Value v) {
            row.setValue(idx, v);
        }

        /** {@inheritDoc} */
        @Override public void setKeyAndVersion(SearchRow old) {
            row.setKeyAndVersion(old);
        }

        /** {@inheritDoc} */
        @Override public int getVersion() {
            return row.getVersion();
        }

        /** {@inheritDoc} */
        @Override public void setKey(long key) {
            row.setKey(key);
        }

        /** {@inheritDoc} */
        @Override public long getKey() {
            return row.getKey();
        }

        /** {@inheritDoc} */
        @Override public int getMemory() {
            return row.getMemory();
        }

        /** {@inheritDoc} */
        @Override public long pointer() {
            throw new IllegalStateException();
        }

        /** {@inheritDoc} */
        @Override public void incrementRefCount() {
            throw new IllegalStateException();
        }

        /** {@inheritDoc} */
        @Override public void decrementRefCount() {
            throw new IllegalStateException();
        }
    }

    /**
     * Adapter from {@link NavigableMap} to {@link IgniteTree}.
     */
    private static final class IgniteNavigableMapTree implements IgniteTree<GridSearchRowPointer, GridH2Row>, Cloneable {
        /** Tree. */
        private final NavigableMap<GridSearchRowPointer, GridH2Row> tree;

        /**
         * @param tree Tree.
         */
        private IgniteNavigableMapTree(NavigableMap<GridSearchRowPointer, GridH2Row> tree) {
            this.tree = tree;
        }

        /** {@inheritDoc} */
        @Override public void invoke(GridSearchRowPointer key, Object x, InvokeClosure<GridH2Row> c) {
            throw new UnsupportedOperationException();
        }

        /** {@inheritDoc} */
        @Override public GridH2Row put(GridH2Row val) {
            return tree.put(val, val);
        }

        /** {@inheritDoc} */
        @Override public GridH2Row findOne(GridSearchRowPointer key) {
            return tree.get(key);
        }

        /** {@inheritDoc} */
        @Override public GridCursor<GridH2Row> find(GridSearchRowPointer lower, GridSearchRowPointer upper)
            throws IgniteCheckedException {
            Collection<GridH2Row> rows;

            // NOTE(review): tailMap(lower) is inclusive while subMap(..., false, ..., false)
            // is exclusive; this looks safe because bounds arrive as biased ComparableRow
            // instances that never compare equal to a stored key — confirm callers never
            // pass an unbiased bound here.
            if (lower == null && upper == null)
                rows = tree.values();
            else if (lower != null && upper == null)
                rows = tree.tailMap(lower).values();
            else if (lower == null)
                rows = tree.headMap(upper).values();
            else
                rows = tree.subMap(lower, false, upper, false).values();

            return new GridCursorIteratorWrapper<>(rows.iterator());
        }

        /** {@inheritDoc} */
        @Override public GridH2Row findFirst() throws IgniteCheckedException {
            Map.Entry<GridSearchRowPointer, GridH2Row> first = tree.firstEntry();

            return (first == null) ? null : first.getValue();
        }

        /** {@inheritDoc} */
        @Override public GridH2Row findLast() throws IgniteCheckedException {
            Map.Entry<GridSearchRowPointer, GridH2Row> last = tree.lastEntry();

            return (last == null) ? null : last.getValue();
        }

        /** {@inheritDoc} */
        @Override public GridH2Row remove(GridSearchRowPointer key) {
            return tree.remove(key);
        }

        /** {@inheritDoc} */
        @Override public long size() {
            return tree.size();
        }

        /** {@inheritDoc} */
        @Override public IgniteNavigableMapTree clone() {
            IgniteNavigableMapTree cp;

            try {
                cp = (IgniteNavigableMapTree)super.clone();
            }
            catch (final CloneNotSupportedException e) {
                throw DbException.convert(e);
            }

            // Re-wrap so the result is independent of this adapter instance.
            return new IgniteNavigableMapTree(cp.tree);
        }
    }
}
/*
 * Forwarder4j.
 * Copyright (C) 2015-2019 Forwarder4j Team.
 * https://github.com/lolocohen/forwarder4j
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.forwarder4j;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Properties;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Loads and handles the configuration properties.
 * @author Laurent Cohen
 */
public class Config extends Properties {
  /**
   * Logger for this class.
   */
  private static Logger log = LoggerFactory.getLogger(Config.class);
  /**
   * System property indicating the config file path.
   */
  private static final String CONFIG_FILE_PROP = "forwarder4j.config";
  /**
   * Default path for the config file.
   */
  private static final String DEFAULT_CONFIG_FILE = "config/forwarder4j.properties";
  /**
   * Singleton instance of the configuration.
   */
  private static Config instance = null;

  /**
   * Get the configuration properties.
   * @return a {@link Config} singleton instance.
   */
  public static synchronized Config getConfiguration() {
    if (instance == null) {
      instance = new Config();
      final String location = System.getProperty(CONFIG_FILE_PROP);
      // Config loading is best-effort: any failure is logged at debug level and
      // the singleton is returned empty (all getters then fall back to defaults).
      try (BufferedReader reader = new BufferedReader(new FileReader((location != null) ? location : DEFAULT_CONFIG_FILE))) {
        instance.load(reader);
      } catch(Exception e) {
        log.debug(e.getMessage(), e);
      }
    }
    return instance;
  }

  /**
   * Get the string value of a property with a specified name.
   * @param key the name of the property to look for.
   * @return the value of the property as a string, or null if it is not found.
   */
  public String getString(final String key) {
    return getProperty(key, null);
  }

  /**
   * Get the string value of a property with a specified name.
   * @param key the name of the property to look for.
   * @param defValue a default value to return if the property is not found.
   * @return the value of the property as a string, or the default value if it is not found.
   */
  public String getString(final String key, final String defValue) {
    return getProperty(key, defValue);
  }

  /**
   * Set a property with the specified String value.
   * @param key the name of the property to set.
   * @param value the value to set on the property.
   */
  public void setString(final String key, final String value) {
    setProperty(key, value);
  }

  /**
   * Get the integer value of a property with a specified name.
   * @param key the name of the property to look for.
   * @return the value of the property as an int, or zero if it is not found.
   */
  public int getInt(final String key) {
    return getInt(key, 0);
  }

  /**
   * Get the integer value of a property with a specified name.
   * @param key the name of the property to look for.
   * @param defValue a default value to return if the property is not found.
   * @return the value of the property as an int, or the default value if it is not found.
   */
  public int getInt(final String key, final int defValue) {
    int intVal = defValue;
    String val = getProperty(key, null);
    if (val != null) {
      val = val.trim();
      try {
        intVal = Integer.valueOf(val);
      } catch(@SuppressWarnings("unused") NumberFormatException ignore) {
        // Not a plain integer: fall back to parsing as a double and truncating
        // (e.g. "3.7" yields 3); if that also fails, keep the default.
        try {
          intVal = Double.valueOf(val).intValue();
        } catch(@SuppressWarnings("unused") NumberFormatException ignore2) {
        }
      }
    }
    return intVal;
  }

  /**
   * Set a property with the specified int value.
   * @param key the name of the property to set.
   * @param value the value to set on the property.
   */
  public void setInt(final String key, final int value) {
    setProperty(key, Integer.toString(value));
  }

  /**
   * Get the long integer value of a property with a specified name.
   * @param key the name of the property to look for.
   * @return the value of the property as a long, or zero if it is not found.
   */
  public long getLong(final String key) {
    return getLong(key, 0L);
  }

  /**
   * Get the long integer value of a property with a specified name.
   * @param key the name of the property to look for.
   * @param defValue a default value to return if the property is not found.
   * @return the value of the property as a long, or the default value if it is not found.
   */
  public long getLong(final String key, final long defValue) {
    long longVal = defValue;
    String val = getProperty(key, null);
    if (val != null) {
      val = val.trim();
      try {
        longVal = Long.valueOf(val);
      } catch(@SuppressWarnings("unused") NumberFormatException ignore) {
        // Same double-then-truncate fallback as getInt().
        try {
          longVal = Double.valueOf(val).longValue();
        } catch(@SuppressWarnings("unused") NumberFormatException ignore2) {
        }
      }
    }
    return longVal;
  }

  /**
   * Set a property with the specified long value.
   * @param key the name of the property to set.
   * @param value the value to set on the property.
   */
  public void setLong(final String key, final long value) {
    setProperty(key, Long.toString(value));
  }

  /**
   * Get the single precision value of a property with a specified name.
   * @param key the name of the property to look for.
   * @return the value of the property as a float, or zero if it is not found.
   */
  public float getFloat(final String key) {
    return getFloat(key, 0f);
  }

  /**
   * Get the single precision value of a property with a specified name.
   * @param key the name of the property to look for.
   * @param defValue a default value to return if the property is not found.
   * @return the value of the property as a float, or the default value if it is not found.
   */
  public float getFloat(final String key, final float defValue) {
    float floatVal = defValue;
    String val = getProperty(key, null);
    if (val != null) {
      try {
        floatVal = Float.parseFloat(val.trim());
      } catch(@SuppressWarnings("unused") final NumberFormatException e) {
      }
    }
    return floatVal;
  }

  /**
   * Set a property with the specified float value.
   * @param key the name of the property to set.
   * @param value the value to set on the property.
   */
  public void setFloat(final String key, final float value) {
    setProperty(key, Float.toString(value));
  }

  /**
   * Get the double precision value of a property with a specified name.
   * If the key is not found a default value of 0d is returned.
   * @param key the name of the property to look for.
   * @return the value of the property as a double, or zero if it is not found.
   */
  public double getDouble(final String key) {
    return getDouble(key, 0d);
  }

  /**
   * Get the double precision value of a property with a specified name.
   * @param key the name of the property to look for.
   * @param defValue a default value to return if the property is not found.
   * @return the value of the property as a double, or the default value if it is not found.
   */
  public double getDouble(final String key, final double defValue) {
    double doubleVal = defValue;
    String val = getProperty(key, null);
    if (val != null) {
      try {
        doubleVal = Double.parseDouble(val.trim());
      } catch(@SuppressWarnings("unused") final NumberFormatException e) {
      }
    }
    return doubleVal;
  }

  /**
   * Set a property with the specified double value.
   * @param key the name of the property to set.
   * @param value the value to set on the property.
   */
  public void setDouble(final String key, final double value) {
    setProperty(key, Double.toString(value));
  }

  /**
   * Get the boolean value of a property with a specified name.
   * If the key is not found a default value of false is returned.
   * @param key the name of the property to look for.
   * @return the value of the property as a boolean, or <code>false</code> if it is not found.
   */
  public boolean getBoolean(final String key) {
    return getBoolean(key, false);
  }

  /**
   * Get the boolean value of a property with a specified name.
   * @param key the name of the property to look for.
   * @param defValue a default value to return if the property is not found.
   * @return the value of the property as a boolean, or the default value if it is not found.
   */
  public boolean getBoolean(final String key, final boolean defValue) {
    boolean booleanVal = defValue;
    String val = getProperty(key, null);
    // Boolean.valueOf() yields false for any value other than "true" (case-insensitive).
    if (val != null) booleanVal = Boolean.valueOf(val.trim()).booleanValue();
    return booleanVal;
  }

  /**
   * Set a property with the specified boolean value.
   * @param key the name of the property to set.
   * @param value the value to set on the property.
   */
  public void setBoolean(final String key, final boolean value) {
    setProperty(key, Boolean.toString(value));
  }

  /**
   * Get the char value of a property with a specified name.
   * If the key is not found a default value of ' ' is returned.
   * @param key the name of the property to look for.
   * @return the value of the property as a char, or the default value ' ' (space character) if it is not found.
   */
  public char getChar(final String key) {
    return getChar(key, ' ');
  }

  /**
   * Get the char value of a property with a specified name.
   * If the value has more than one character, the first one will be used.
   * @param key the name of the property to look for.
   * @param defValue a default value to return if the property is not found.
   * @return the value of the property as a char, or the default value if it is not found.
   */
  public char getChar(final String key, final char defValue) {
    char charVal = defValue;
    String val = getProperty(key, null);
    if ((val != null) && (val.length() > 0)) charVal = val.charAt(0);
    return charVal;
  }

  /**
   * Set a property with the specified char value.
   * @param key the name of the property to set.
   * @param value the value to set on the property.
   */
  public void setChar(final String key, final char value) {
    setProperty(key, Character.toString(value));
  }

  /**
   * Get the value of the specified property as a {@link File}.
   * @param key the name of the property to look up.
   * @return an abstract file path based on the value of the property, or null if the property is not defined.
   */
  public File getFile(final String key) {
    return getFile(key, null);
  }

  /**
   * Get the value of the specified property as a {@link File}.
   * @param key the name of the property to look up.
   * @param defValue the value to return if the property is not found.
   * @return an abstract file path based on the value of the property, or the default value if the property is not defined.
   */
  public File getFile(final String key, final File defValue) {
    String s = getProperty(key);
    return (s == null) || s.trim().isEmpty() ? defValue : new File(s);
  }

  /**
   * Set the value of the specified property as a {@link File}.
   * @param key the name of the property to look up.
   * @param value the file whose path to set as the property value.
   */
  public void setFile(final String key, final File value) {
    if (value != null) setProperty(key, value.getPath());
  }

  /**
   * Get the value of a property with the specified name as an {@link InetAddress}.
   * @param key the name of the property to retrieve.
   * @return the property as an {@link InetAddress} instance, or null if the property is not defined or the host doesn't exist.
*/ public InetAddress getInetAddress(final String key) { return getInetAddress(key, null); } /** * Get the value of a property with the specified name as an {@link InetAddress}. * @param key the name of the property to retrieve. * @param def the default value to use if the property is not defined. * @return the property as an {@link InetAddress} instance, or the specified default value if the property is not defined. */ public InetAddress getInetAddress(final String key, final InetAddress def) { String val = getString(key); if (val == null) return def; try { return InetAddress.getByName(val); } catch(@SuppressWarnings("unused") final UnknownHostException e) { return def; } } /** * Convert this set of properties into a string. * @return a representation of this object as a string. */ public String asString() { StringBuilder sb = new StringBuilder(); for (String key: stringPropertyNames()) sb.append(key).append(" = ").append(getProperty(key)).append('\n'); return sb.toString(); } /** * Extract the properties that pass the specified filter. * @param filter the filter to use, if <code>null</code> then all properties are retruned. * @return a new <code>TypedProperties</code> object containing only the properties matching the filter. */ public Config filter(final Filter filter) { Config result = new Config(); for (String key: stringPropertyNames()) { String value = getProperty(key); if ((filter == null) || filter.accepts(key, value)) result.put(key, value); } return result; } /** * A filter for <code>TypedProperties</code> objects. */ public interface Filter { /** * Determine whether this filter accepts a property with the specirfied name and value. * @param name the name of the property. * @param value the value of the property. * @return <code>true</code> if the property is accepted, <code>false</code> otherwise. */ boolean accepts(String name, String value); } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.cloudwatchevents.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result object for the ListEventSources operation: the discovered event sources plus an
 * optional pagination token.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/events-2015-10-07/ListEventSources" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListEventSourcesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** The list of event sources; {@code null} until set. */
    private java.util.List<EventSource> eventSources;

    /** Token for retrieving the next page of results, or {@code null} if there is none. */
    private String nextToken;

    /**
     * Returns the list of event sources.
     *
     * @return the list of event sources, possibly {@code null}.
     */
    public java.util.List<EventSource> getEventSources() {
        return eventSources;
    }

    /**
     * Replaces the list of event sources with a defensive copy of the given collection.
     *
     * @param eventSources the event sources to store; {@code null} clears the list.
     */
    public void setEventSources(java.util.Collection<EventSource> eventSources) {
        // Copy so later mutation of the caller's collection cannot affect this object.
        this.eventSources = (eventSources == null) ? null : new java.util.ArrayList<EventSource>(eventSources);
    }

    /**
     * Appends the given event sources to any already present (fluent style).
     * Use {@link #setEventSources(java.util.Collection)} or
     * {@link #withEventSources(java.util.Collection)} to replace the existing values instead.
     *
     * @param eventSources the event sources to append.
     * @return this object, for call chaining.
     */
    public ListEventSourcesResult withEventSources(EventSource... eventSources) {
        if (this.eventSources == null) {
            this.eventSources = new java.util.ArrayList<EventSource>(eventSources.length);
        }
        java.util.Collections.addAll(this.eventSources, eventSources);
        return this;
    }

    /**
     * Replaces the list of event sources (fluent style).
     *
     * @param eventSources the event sources to store; {@code null} clears the list.
     * @return this object, for call chaining.
     */
    public ListEventSourcesResult withEventSources(java.util.Collection<EventSource> eventSources) {
        setEventSources(eventSources);
        return this;
    }

    /**
     * Sets the pagination token for a subsequent request.
     *
     * @param nextToken the token, or {@code null}.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token for a subsequent request.
     *
     * @return the token, or {@code null} if there are no further results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token (fluent style).
     *
     * @param nextToken the token, or {@code null}.
     * @return this object, for call chaining.
     */
    public ListEventSourcesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and debugging.
     * Only non-null fields are included.
     *
     * @return a string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getEventSources() != null) {
            sb.append("EventSources: ").append(getEventSources()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListEventSourcesResult)) {
            // Also covers obj == null: instanceof is false for null.
            return false;
        }
        ListEventSourcesResult other = (ListEventSourcesResult) obj;
        return java.util.Objects.equals(getEventSources(), other.getEventSources())
                && java.util.Objects.equals(getNextToken(), other.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-prime fold as the generated loop,
        // with 0 substituted for null fields, so hash values are unchanged.
        return java.util.Objects.hash(getEventSources(), getNextToken());
    }

    @Override
    public ListEventSourcesResult clone() {
        try {
            return (ListEventSourcesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Copyright 2015-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.intellij.ideabuck.actions.choosetargets;

import com.facebook.buck.intellij.ideabuck.actions.BuckQueryAction;
import com.facebook.buck.intellij.ideabuck.build.BuckBuildTargetAliasParser;
import com.facebook.buck.intellij.ideabuck.file.BuckFileUtil;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.intellij.ide.util.gotoByName.ChooseByNamePopup;
import com.intellij.navigation.ChooseByNameContributor;
import com.intellij.navigation.NavigationItem;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import java.io.File;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.ArrayList;
import java.util.Set;
import javax.annotation.Nullable;

/**
 * Supplies Buck build-target name suggestions for IntelliJ's "choose by name" popup.
 * Candidates come from two places: directory/BUCK-file path completion relative to the
 * project base dir, and aliases parsed by {@link BuckBuildTargetAliasParser}.
 */
public class ChooseTargetContributor implements ChooseByNameContributor {

  // Separates a target name from its comma-joined alias list in a suggestion string.
  private static final String ALIAS_SEPARATOR = "::";
  // Separates the build directory from the rule name inside a target ("//dir:rule").
  private static final String TARGET_NAME_SEPARATOR = ":";
  // Path separator used when composing "//dir/..." style suggestions.
  private static final String BUILD_DIR_SEPARATOR = "/";

  /**
   * Returns every candidate name to show in the popup: path-based suggestions first,
   * then alias-based ones. {@code includeNonProjectItems} is ignored.
   */
  @Override
  public String[] getNames(Project project, boolean includeNonProjectItems) {
    List<String> names = new ArrayList<>();
    names.addAll(getNamesFromPathSuggestions(project));
    names.addAll(getNamesFromBuildTargetAlias(project));
    return names.toArray(new String[names.size()]);
  }

  /**
   * Builds "target::alias1,alias2" entries from the project's parsed target aliases.
   * NOTE(review): relies on the static {@code sTargetAlias} map populated by
   * {@code parseAlias} — presumably refreshed on each call; verify in the parser.
   */
  private List<String> getNamesFromBuildTargetAlias(Project project) {
    List<String> names = new ArrayList<>();
    BuckBuildTargetAliasParser.parseAlias(project.getBasePath());
    for (Map.Entry<String, Set<String>> entry :
        BuckBuildTargetAliasParser.sTargetAlias.entrySet()) {
      String target = entry.getKey();
      Set<String> alias = entry.getValue();
      target += ALIAS_SEPARATOR + Joiner.on(',').join(alias);
      names.add(target);
    }
    return names;
  }

  /**
   * Suggests targets based on the text currently typed in the popup: the "..." wildcard
   * for the typed directory, entries under that directory, and — when no rule name has
   * been typed yet — entries under its parent directory as well.
   */
  private List<String> getNamesFromPathSuggestions(Project project) {
    CurrentInputText currentInputText = new CurrentInputText(project);
    if (currentInputText.buildDir == null) {
      // No popup is open, so there is no input to complete against.
      return Collections.emptyList();
    }
    List<String> names = new ArrayList<>();
    names.addAll(getAllBuildTargetsUnderDirectory(project, currentInputText.buildDir));
    names.addAll(getNameSuggestionUnderPath(project, currentInputText.buildDir));
    if (!currentInputText.hasBuildRule) {
      names.addAll(
          getNameSuggestionUnderPath(project, getParentDir(currentInputText.buildDir)));
    }
    return names;
  }

  /**
   * Returns the single "//buildDir/..." wildcard suggestion, or an empty list when
   * {@code buildDir} does not resolve to a directory under the project base dir.
   */
  private List<String> getAllBuildTargetsUnderDirectory(Project project, String buildDir) {
    List<String> names = new ArrayList<>();
    // Try to get the relative path to the current input folder
    VirtualFile baseDir = project.getBaseDir().findFileByRelativePath(
        appendSuffixIfNotEmpty(buildDir, File.separator));
    if (baseDir == null) {
      return names;
    }
    names.add("//" + appendSuffixIfNotEmpty(buildDir, BUILD_DIR_SEPARATOR) + "...");
    return names;
  }

  /**
   * Snapshot of the text the user has typed in the choose-by-name popup, split into the
   * directory part and a flag for whether a rule name (after ':') has been started.
   * {@code buildDir} is null when no popup is open.
   */
  private static class CurrentInputText {
    public final String buildDir;
    public final boolean hasBuildRule;

    CurrentInputText(Project project) {
      ChooseByNamePopup chooseByNamePopup = project.getUserData(
          ChooseByNamePopup.CHOOSE_BY_NAME_POPUP_IN_PROJECT_KEY);
      if (chooseByNamePopup == null) {
        buildDir = null;
        hasBuildRule = false;
        return;
      }
      String currentText = chooseByNamePopup.getEnteredText()
          // Remove the beginning "//" (any leading slashes)
          .replaceFirst("^/*", "");
      // check if we have as input a proper target (a ':' separating dir from rule)
      int targetSeparatorIndex = currentText.lastIndexOf(TARGET_NAME_SEPARATOR);
      if (targetSeparatorIndex != -1) {
        hasBuildRule = true;
        buildDir = currentText.substring(0, targetSeparatorIndex);
      } else {
        hasBuildRule = false;
        buildDir = currentText;
      }
    }
  }

  /**
   * Returns the parent directory of {@code currentDir}, or "" when it has no separator.
   * NOTE(review): splits on {@code File.separator} while suggestions are composed with
   * '/'; on Windows these differ — confirm which separator the popup text uses.
   */
  private String getParentDir(String currentDir) {
    int lastDirSeparatorPosition = currentDir.lastIndexOf(File.separator);
    if (lastDirSeparatorPosition == -1) {
      return "";
    } else {
      return currentDir.substring(0, lastDirSeparatorPosition);
    }
  }

  /**
   * Lists suggestions for each child of {@code buildDir}: subdirectories become path
   * suggestions and a BUCK file triggers target extraction.
   */
  private List<String> getNameSuggestionUnderPath(Project project, String buildDir) {
    List<String> names = new ArrayList<>();
    // Try to get the relative path to the current input folder
    VirtualFile baseDir = project.getBaseDir().findFileByRelativePath(
        appendSuffixIfNotEmpty(buildDir, File.separator));
    if (baseDir == null) {
      return names;
    }
    // get the files under the base folder
    VirtualFile[] files = baseDir.getChildren();
    for (VirtualFile file : files) {
      names.addAll(getNameSuggestionForVirtualFile(project, file, buildDir));
    }
    return names;
  }

  /** Appends {@code suffix} to {@code source} unless {@code source} is empty. */
  private String appendSuffixIfNotEmpty(String source, String suffix) {
    if (!source.isEmpty()) {
      source += suffix;
    }
    return source;
  }

  /**
   * Maps one child file of {@code buildDir} to suggestions: a directory yields a
   * "//dir/child" entry, the project's BUCK file yields its targets, anything else none.
   */
  private List<String> getNameSuggestionForVirtualFile(
      Project project,
      VirtualFile file,
      String buildDir) {
    // if the file is a directory we add it to the targets
    if (file.isDirectory()) {
      return new ArrayList<String>(Collections.singletonList(
          "//" + appendSuffixIfNotEmpty(buildDir, BUILD_DIR_SEPARATOR) + file.getName()));
    } else if (file.getName().equals(BuckFileUtil.getBuildFileName(project.getBasePath()))) {
      //if the file is a buck file we parse it and add its target names to the list
      return getBuildTargetFromBuildProjectFile(project, buildDir);
    }
    return Collections.emptyList();
  }

  /**
   * Returns the "//dir:" prefix plus whatever targets an asynchronous `buck query`
   * yields; the callback rebuilds the popup list when results arrive later.
   */
  private List<String> getBuildTargetFromBuildProjectFile(Project project, String buildDir) {
    List<String> names = new ArrayList<>();
    names.add(getAllBuildTargetsInSameDirectory(buildDir));
    names.addAll(
        BuckQueryAction.execute(
            project,
            "//" + buildDir + TARGET_NAME_SEPARATOR,
            new Function<List<String>, Void>() {
              @Nullable
              @Override
              public Void apply(@Nullable List<String> strings) {
                // Results may arrive after the synchronous return; refresh on the EDT.
                ApplicationManager.getApplication().invokeLater(
                    new Runnable() {
                      public void run() {
                        ChooseByNamePopup chooseByNamePopup = project.getUserData(
                            ChooseByNamePopup.CHOOSE_BY_NAME_POPUP_IN_PROJECT_KEY);
                        // the user might have closed the window
                        if (chooseByNamePopup != null) {
                          // if we don't have them, just refresh the view when we do, if the
                          // window is still open
                          chooseByNamePopup.rebuildList(true);
                        }
                      }
                    });
                return null;
              }
            }
        )
    );
    return names;
  }

  /** Returns the "//dir:" prefix denoting every target declared in {@code buildDir}. */
  private String getAllBuildTargetsInSameDirectory(String buildDir) {
    return "//" + buildDir + TARGET_NAME_SEPARATOR;
  }

  /**
   * Converts a selected suggestion back into a navigation item, splitting off the
   * "::alias" suffix (if present) so the item carries both target and alias.
   */
  @Override
  public NavigationItem[] getItemsByName(
      String name,
      String pattern,
      Project project,
      boolean includeNonProjectItems) {
    String alias = null;
    int index = name.lastIndexOf(ALIAS_SEPARATOR);
    if (index > 0) {
      alias = name.substring(index + ALIAS_SEPARATOR.length());
      name = name.substring(0, index);
    }
    return new NavigationItem[] { new ChooseTargetItem(name, alias) };
  }
}
/*
 * Copyright 2014 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jbpm.services.ejb.test;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import javax.ejb.EJB;

import org.drools.compiler.kie.builder.impl.InternalKieModule;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.jbpm.kie.services.impl.KModuleDeploymentUnit;
import org.jbpm.services.api.model.DeploymentUnit;
import org.jbpm.services.api.model.ProcessDefinition;
import org.jbpm.services.api.model.UserTaskDefinition;
import org.jbpm.services.ejb.api.DefinitionServiceEJBLocal;
import org.jbpm.services.ejb.api.DeploymentServiceEJBLocal;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.api.KieServices;
import org.kie.api.builder.ReleaseId;
import org.kie.scanner.KieMavenRepository;

import static org.junit.Assert.*;
import static org.kie.scanner.KieMavenRepository.getKieMavenRepository;

/**
 * In-container integration tests for the definition-service EJB: the test kjar is built
 * and installed into the local Kie Maven repository at deployment time, then each test
 * deploys it and queries process/task metadata through {@link DefinitionServiceEJBLocal}.
 */
@RunWith(Arquillian.class)
public class DefinitionServiceEJBIntegrationTest extends AbstractTestSupport {

    /**
     * Builds the test WAR from the pre-built sample archive and installs the test kjar.
     *
     * @return the archive Arquillian deploys into the container.
     * @throws IllegalStateException if the sample WAR has not been built yet.
     */
    @Deployment
    public static WebArchive createDeployment() {
        File archive = new File("target/sample-war-ejb-app.war");
        if (!archive.exists()) {
            throw new IllegalStateException("There is no archive yet generated, run maven build or mvn assembly:assembly");
        }
        WebArchive war = ShrinkWrap.createFromZipFile(WebArchive.class, archive);
        war.addPackage("org.jbpm.services.ejb.test"); // test cases

        // deploy test kjar
        deployKjar();

        return war;
    }

    /**
     * Builds a kjar containing the test processes and installs it (with a generated pom)
     * into the local Kie Maven repository so the deployment service can resolve it.
     */
    protected static void deployKjar() {
        KieServices ks = KieServices.Factory.get();
        ReleaseId releaseId = ks.newReleaseId(GROUP_ID, ARTIFACT_ID, VERSION);
        List<String> processes = new ArrayList<String>();
        processes.add("processes/hiring.bpmn2");
        processes.add("processes/customtask.bpmn");
        processes.add("processes/humanTask.bpmn");
        processes.add("processes/signal.bpmn");
        processes.add("processes/import.bpmn");
        processes.add("processes/callactivity.bpmn");
        processes.add("processes/itemrefissue.bpmn");

        InternalKieModule kJar1 = createKieJar(ks, releaseId, processes);
        File pom = new File("target/kmodule", "pom.xml");
        // mkdirs (not mkdir) so the whole parent chain is created if missing.
        pom.getParentFile().mkdirs();
        // try-with-resources guarantees the stream is closed even on write failure;
        // fail fast instead of silently swallowing — a missing pom breaks the install below.
        try (FileOutputStream fs = new FileOutputStream(pom)) {
            // NOTE(review): getBytes() uses the platform charset — confirm pom content is ASCII.
            fs.write(getPom(releaseId).getBytes());
        } catch (IOException e) {
            throw new IllegalStateException("Unable to write test pom.xml to " + pom, e);
        }
        KieMavenRepository repository = getKieMavenRepository();
        repository.installArtifact(releaseId, kJar1, pom);
    }

    // Deployment units created by the tests, undeployed again in cleanup().
    private List<DeploymentUnit> units = new ArrayList<DeploymentUnit>();

    /** Undeploys every unit a test deployed and resets the singleton session id. */
    @After
    public void cleanup() {
        cleanupSingletonSessionId();
        if (units != null && !units.isEmpty()) {
            for (DeploymentUnit unit : units) {
                deploymentService.undeploy(unit);
            }
            units.clear();
        }
    }

    @EJB
    private DefinitionServiceEJBLocal bpmn2Service;

    @EJB
    private DeploymentServiceEJBLocal deploymentService;

    /** Verifies task, variable, mapping and entity counts for the human-task sample process. */
    @Test
    public void testHumanTaskProcess() throws IOException {
        assertNotNull(deploymentService);
        DeploymentUnit deploymentUnit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);

        deploymentService.deploy(deploymentUnit);
        units.add(deploymentUnit);

        String processId = "org.jbpm.writedocument";

        Collection<UserTaskDefinition> processTasks =
                bpmn2Service.getTasksDefinitions(deploymentUnit.getIdentifier(), processId);
        assertEquals(3, processTasks.size());

        Map<String, String> processData =
                bpmn2Service.getProcessVariables(deploymentUnit.getIdentifier(), processId);
        assertEquals(3, processData.keySet().size());

        Map<String, String> taskInputMappings =
                bpmn2Service.getTaskInputMappings(deploymentUnit.getIdentifier(), processId, "Write a Document");
        assertEquals(3, taskInputMappings.keySet().size());

        Map<String, String> taskOutputMappings =
                bpmn2Service.getTaskOutputMappings(deploymentUnit.getIdentifier(), processId, "Write a Document");
        assertEquals(1, taskOutputMappings.keySet().size());

        Map<String, Collection<String>> associatedEntities =
                bpmn2Service.getAssociatedEntities(deploymentUnit.getIdentifier(), processId);
        assertEquals(3, associatedEntities.keySet().size());
    }

    /** Verifies the hiring process metadata, including exact input/output mapping types. */
    @Test
    public void testHiringProcessData() throws IOException {
        assertNotNull(deploymentService);
        DeploymentUnit deploymentUnit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);

        deploymentService.deploy(deploymentUnit);
        units.add(deploymentUnit);

        String processId = "hiring";

        Collection<UserTaskDefinition> processTasks =
                bpmn2Service.getTasksDefinitions(deploymentUnit.getIdentifier(), processId);
        assertEquals(4, processTasks.size());

        Map<String, String> processData =
                bpmn2Service.getProcessVariables(deploymentUnit.getIdentifier(), processId);
        assertEquals(9, processData.keySet().size());

        Map<String, String> taskInputMappings =
                bpmn2Service.getTaskInputMappings(deploymentUnit.getIdentifier(), processId, "HR Interview");
        assertEquals(4, taskInputMappings.keySet().size());
        assertEquals("java.lang.String", taskInputMappings.get("TaskName"));
        assertEquals("Object", taskInputMappings.get("GroupId"));
        assertEquals("Object", taskInputMappings.get("Comment"));
        assertEquals("String", taskInputMappings.get("in_name"));

        Map<String, String> taskOutputMappings =
                bpmn2Service.getTaskOutputMappings(deploymentUnit.getIdentifier(), processId, "HR Interview");
        assertEquals(4, taskOutputMappings.keySet().size());
        assertEquals("String", taskOutputMappings.get("out_name"));
        assertEquals("Integer", taskOutputMappings.get("out_age"));
        assertEquals("String", taskOutputMappings.get("out_mail"));
        assertEquals("Integer", taskOutputMappings.get("out_score"));

        Map<String, Collection<String>> associatedEntities =
                bpmn2Service.getAssociatedEntities(deploymentUnit.getIdentifier(), processId);
        assertEquals(4, associatedEntities.keySet().size());

        Map<String, String> allServiceTasks =
                bpmn2Service.getServiceTasks(deploymentUnit.getIdentifier(), processId);
        assertEquals(2, allServiceTasks.keySet().size());
    }

    /** Verifies that the parent process reports its single reusable sub-process. */
    @Test
    public void testFindReusableSubProcesses() {
        assertNotNull(deploymentService);
        DeploymentUnit deploymentUnit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);

        deploymentService.deploy(deploymentUnit);
        units.add(deploymentUnit);

        String theString = "ParentProcess";
        assertNotNull(theString);

        Collection<String> reusableProcesses =
                bpmn2Service.getReusableSubProcesses(deploymentUnit.getIdentifier(), theString);
        assertNotNull(reusableProcesses);
        assertEquals(1, reusableProcesses.size());
        assertEquals("signal", reusableProcesses.iterator().next());
    }

    /** Regression test: process variables are resolvable for the itemref-issue process. */
    @Test
    public void itemRefIssue() {
        assertNotNull(deploymentService);
        DeploymentUnit deploymentUnit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);

        deploymentService.deploy(deploymentUnit);
        units.add(deploymentUnit);

        String processId = "itemrefissue";

        Map<String, String> processData =
                bpmn2Service.getProcessVariables(deploymentUnit.getIdentifier(), processId);
        assertNotNull(processData);
    }

    /** Verifies the definition is visible while deployed and gone after undeploy. */
    @Test
    public void testHumanTaskProcessBeforeAndAfterUndeploy() throws IOException {
        assertNotNull(deploymentService);
        DeploymentUnit deploymentUnit = new KModuleDeploymentUnit(GROUP_ID, ARTIFACT_ID, VERSION);

        deploymentService.deploy(deploymentUnit);
        units.add(deploymentUnit);

        String processId = "org.jbpm.writedocument";

        ProcessDefinition procDef =
                bpmn2Service.getProcessDefinition(deploymentUnit.getIdentifier(), processId);
        assertNotNull(procDef);
        // assertEquals takes (expected, actual) — the original had the arguments reversed,
        // which garbles the failure message.
        assertEquals("org.jbpm.writedocument", procDef.getId());
        assertEquals("humanTaskSample", procDef.getName());
        assertEquals("PROCESS", procDef.getKnowledgeType());
        assertEquals("defaultPackage", procDef.getPackageName());
        assertEquals("RuleFlow", procDef.getType());
        assertEquals("1", procDef.getVersion());

        // now let's undeploy the unit
        deploymentService.undeploy(deploymentUnit);

        procDef = bpmn2Service.getProcessDefinition(deploymentUnit.getIdentifier(), processId);
        assertNull(procDef);
    }
}
/** * JBoss, Home of Professional Open Source. * Copyright 2014-2020 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.pnc.integration; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.container.test.api.RunAsClient; import org.jboss.arquillian.junit.Arquillian; import org.jboss.pnc.client.BuildClient; import org.jboss.pnc.client.BuildConfigurationClient; import org.jboss.pnc.client.ClientException; import org.jboss.pnc.client.GroupBuildClient; import org.jboss.pnc.client.GroupConfigurationClient; import org.jboss.pnc.client.RemoteCollection; import org.jboss.pnc.client.RemoteResourceNotFoundException; import org.jboss.pnc.dto.Build; import org.jboss.pnc.dto.BuildConfiguration; import org.jboss.pnc.dto.BuildConfigurationRevision; import org.jboss.pnc.dto.BuildConfigurationRevisionRef; import org.jboss.pnc.dto.GroupBuild; import org.jboss.pnc.dto.GroupConfiguration; import org.jboss.pnc.dto.requests.GroupBuildRequest; import org.jboss.pnc.enums.BuildStatus; import org.jboss.pnc.enums.RebuildMode; import org.jboss.pnc.integration.utils.ResponseUtils; import org.jboss.pnc.integration.setup.Deployments; import org.jboss.pnc.integration.mock.RemoteBuildsCleanerMock; import org.jboss.pnc.rest.api.parameters.BuildParameters; import org.jboss.pnc.rest.api.parameters.BuildsFilterParameters; import org.jboss.pnc.rest.api.parameters.GroupBuildParameters; 
import org.jboss.pnc.test.category.ContainerTest; import org.jboss.shrinkwrap.api.spec.EnterpriseArchive; import org.jboss.shrinkwrap.api.spec.JavaArchive; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.Instant; import java.util.ArrayList; import java.util.EnumSet; import java.util.List; import java.util.Optional; import java.util.concurrent.TimeUnit; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import static org.jboss.pnc.integration.setup.RestClientConfiguration.asUser; @RunAsClient @RunWith(Arquillian.class) @Category(ContainerTest.class) public class BuildTest { private static final Logger logger = LoggerFactory.getLogger(BuildTest.class); private BuildConfigurationClient buildConfigurationClient = new BuildConfigurationClient(asUser()); private GroupConfigurationClient groupConfigurationClient = new GroupConfigurationClient(asUser()); private GroupBuildClient groupBuildClient = new GroupBuildClient(asUser()); private BuildClient buildClient = new BuildClient(asUser()); @Deployment public static EnterpriseArchive deploy() { final EnterpriseArchive ear = Deployments.testEarForInContainerTest(BuildTest.class); Deployments.addBuildExecutorMock(ear); JavaArchive coordinatorJar = ear.getAsType(JavaArchive.class, Deployments.COORDINATOR_JAR); coordinatorJar.addAsManifestResource("beans-use-mock-remote-clients.xml", "beans.xml"); coordinatorJar.addClass(RemoteBuildsCleanerMock.class); return ear; } @Test public void shouldTriggerBuildAndFinishWithoutProblems() throws ClientException { // with BuildConfiguration buildConfiguration = buildConfigurationClient.getAll().iterator().next(); // when Build build = buildConfigurationClient.trigger(buildConfiguration.getId(), getPersistentParameters(true)); assertThat(build).isNotNull().extracting("id").isNotNull().isNotEqualTo(""); 
EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.SUCCESS); ResponseUtils.waitSynchronouslyFor(() -> buildToFinish(build.getId(), isIn, null), 15, TimeUnit.SECONDS); } @Test public void shouldTriggerGroupBuildAndFinishWithoutProblems() throws ClientException { // given GroupConfiguration groupConfig = groupConfigurationClient.getAll().iterator().next(); // when GroupBuildParameters groupBuildParameters = new GroupBuildParameters(); groupBuildParameters.setRebuildMode(RebuildMode.FORCE); GroupBuild groupBuild = groupConfigurationClient.trigger( groupConfig.getId(), groupBuildParameters, GroupBuildRequest.builder().buildConfigurationRevisions(new ArrayList<>()).build()); assertThat(groupBuild).isNotNull().extracting("id").isNotNull().isNotEqualTo(""); // then EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.SUCCESS); EnumSet<BuildStatus> isNotIn = EnumSet.of(BuildStatus.REJECTED); ResponseUtils.waitSynchronouslyFor( () -> groupBuildToFinish(groupBuild.getId(), isIn, isNotIn), 15, TimeUnit.SECONDS); } @Test public void shouldRejectGroupBuildWithNoRebuildsRequired() throws ClientException { // given GroupConfiguration groupConfig = groupConfigurationClient.getAll().iterator().next(); // and after one build is done GroupBuildParameters groupBuildParameters = new GroupBuildParameters(); groupBuildParameters.setRebuildMode(RebuildMode.FORCE); GroupBuild groupBuild1 = groupConfigurationClient.trigger( groupConfig.getId(), groupBuildParameters, GroupBuildRequest.builder().buildConfigurationRevisions(new ArrayList<>()).build()); assertThat(groupBuild1).isNotNull().extracting("id").isNotNull().isNotEqualTo(""); ResponseUtils.waitSynchronouslyFor( () -> groupBuildToFinish(groupBuild1.getId(), EnumSet.of(BuildStatus.SUCCESS), null), 15, TimeUnit.SECONDS); // when next build is triggered GroupBuild groupBuild2 = groupConfigurationClient.trigger( groupConfig.getId(), new GroupBuildParameters(), GroupBuildRequest.builder().buildConfigurationRevisions(new 
ArrayList<>()).build()); // then EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.NO_REBUILD_REQUIRED); EnumSet<BuildStatus> isNotIn = EnumSet.of(BuildStatus.SUCCESS, BuildStatus.REJECTED); ResponseUtils.waitSynchronouslyFor( () -> groupBuildToFinish(groupBuild2.getId(), isIn, isNotIn), 15, TimeUnit.SECONDS); } @Test public void shouldTriggerGroupBuildWithBCInRevisionAndFinishWithoutProblems() throws ClientException { // given GroupConfiguration groupConfiguration = groupConfigurationClient.getAll().iterator().next(); assertThat(groupConfiguration.getBuildConfigs()).isNotEmpty(); List<BuildConfigurationRevisionRef> buildConfigurationRevisions = new ArrayList<>(); BuildConfigurationRevision buildConfigurationRevision = BuildConfigurationRevision.builder() .id(groupConfiguration.getBuildConfigs().keySet().iterator().next()) .rev(1) .name(groupConfiguration.getName()) .build(); buildConfigurationRevisions.add(buildConfigurationRevision); GroupBuildRequest groupConfigWithAuditedBCsRest = GroupBuildRequest.builder() .buildConfigurationRevisions(buildConfigurationRevisions) .build(); GroupBuildParameters groupBuildParameters = new GroupBuildParameters(); groupBuildParameters.setRebuildMode(RebuildMode.FORCE); // when GroupBuild groupBuild = groupConfigurationClient .trigger(groupConfiguration.getId(), groupBuildParameters, groupConfigWithAuditedBCsRest); // then assertThat(groupBuild).isNotNull().extracting("id").isNotNull().isNotEqualTo(""); EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.SUCCESS); EnumSet<BuildStatus> isNotIn = EnumSet.of(BuildStatus.REJECTED); ResponseUtils.waitSynchronouslyFor( () -> groupBuildToFinish(groupBuild.getId(), isIn, isNotIn), 15, TimeUnit.SECONDS); } @Test public void shouldTriggerBuildWithADependencyAndFinishWithoutProblems() throws ClientException { // given - A BC with a dependency on pnc-1.0.0.DR1 BuildConfiguration buildConfigurationParent = buildConfigurationClient .getAll(Optional.empty(), 
Optional.of("name==dependency-analysis-1.3")) .iterator() .next(); // Update dependency BuildConfiguration buildConfigurationChild = buildConfigurationClient .getAll(Optional.empty(), Optional.of("name==pnc-1.0.0.DR1")) .iterator() .next(); BuildConfiguration updatedBuildConfigurationChild = buildConfigurationChild.toBuilder() .description(buildConfigurationChild.getDescription() + ".") .build(); buildConfigurationClient.update(buildConfigurationChild.getId(), updatedBuildConfigurationChild); // The update of the description should not have changed the lastModificationDate assertThat(buildConfigurationChild.getModificationTime()) .isEqualTo(updatedBuildConfigurationChild.getModificationTime()); // when Build build = buildConfigurationClient .trigger(buildConfigurationParent.getId(), getBuildParameters(false, true)); BuildsFilterParameters parameters = new BuildsFilterParameters(); parameters.setRunning(true); RemoteCollection<Build> childBuildCol = buildConfigurationClient .getBuilds(buildConfigurationChild.getId(), parameters); Build childBuild = childBuildCol.getAll().iterator().next(); // then assertThat(childBuildCol.size()).isEqualTo(1); assertThat(buildConfigurationParent.getDependencies().size()).isEqualTo(1); assertThat(build).isNotNull().extracting("id").isNotNull().isNotEqualTo(""); EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.SUCCESS); ResponseUtils.waitSynchronouslyFor(() -> buildToFinish(build.getId(), isIn, null), 15, TimeUnit.SECONDS); ResponseUtils.waitSynchronouslyFor(() -> buildToFinish(childBuild.getId(), isIn, null), 15, TimeUnit.SECONDS); } @Test public void shouldBuildTemporaryBuildAndNotAssignItToMilestone() throws ClientException { // BC pnc-1.0.0.DR1 is assigned to a product version containing an active product milestone see // DatabaseDataInitializer#initiliazeProjectProductData BuildConfiguration buildConfiguration = buildConfigurationClient .getAll(Optional.empty(), Optional.of("name==pnc-1.0.0.DR1")) .iterator() .next(); // when 
Build build = buildConfigurationClient.trigger(buildConfiguration.getId(), getTemporaryParameters(true)); // then assertThat(build).isNotNull().extracting("id").isNotNull().isNotEqualTo(""); ResponseUtils.waitSynchronouslyFor(() -> buildToFinish(build.getId()), 15, TimeUnit.SECONDS); Build updatedBuild = buildClient.getSpecific(build.getId()); assertThat(updatedBuild.getProductMilestone()).isNull(); } @Test public void shouldTriggerPersistentWithoutForceAfterTemporaryOnTheSameRev() throws ClientException { BuildConfiguration buildConfiguration = buildConfigurationClient .getAll(Optional.empty(), Optional.of("name==maven-plugin-test")) .iterator() .next(); // Updating the description only won't create a new revision, as description is not audited anymore Instant oldLastModDate = buildConfiguration.getModificationTime(); BuildConfiguration updatedConfiguration = buildConfiguration.toBuilder() .description( "Random Description to be able to trigger build again so that temporary build will be first on this revision") .buildScript("mvn" + " clean deploy -DskipTests=true") .build(); buildConfigurationClient.update(updatedConfiguration.getId(), updatedConfiguration); updatedConfiguration = buildConfigurationClient.getSpecific(updatedConfiguration.getId()); assertThat(oldLastModDate).isNotEqualTo(updatedConfiguration.getModificationTime()); EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.SUCCESS); EnumSet<BuildStatus> isNotIn = EnumSet.of(BuildStatus.REJECTED, BuildStatus.NO_REBUILD_REQUIRED); Build build = buildConfigurationClient.trigger(buildConfiguration.getId(), getTemporaryParameters()); ResponseUtils.waitSynchronouslyFor(() -> buildToFinish(build.getId(), isIn, isNotIn), 15, TimeUnit.SECONDS); Build afterTempPersistentBuild = buildConfigurationClient .trigger(buildConfiguration.getId(), getPersistentParameters()); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish(afterTempPersistentBuild.getId(), isIn, isNotIn), 15, TimeUnit.SECONDS); } // NCL-5192 // 
Replicates NCL-5192 through explicit dependency instead of implicit @Test public void dontRebuildTemporaryBuildWhenThereIsNewerPersistentOnSameRev() throws ClientException { BuildConfiguration parent = buildConfigurationClient .getAll(Optional.empty(), Optional.of("name==pnc-build-agent-0.4")) .iterator() .next(); BuildConfiguration dependency = buildConfigurationClient.getAll(Optional.empty(), Optional.of("name==termd")) .iterator() .next(); Instant oldLastModDateParent = parent.getModificationTime(); BuildConfiguration updatedParent = parent.toBuilder() .description( "Random Description to be able to trigger build again so that temporary build will be first on this revision") .buildScript("mvn" + " clean deploy -DskipTests=true") .build(); buildConfigurationClient.update(updatedParent.getId(), updatedParent); updatedParent = buildConfigurationClient.getSpecific(updatedParent.getId()); assertThat(oldLastModDateParent).isNotEqualTo(updatedParent.getModificationTime()); Instant oldLastModDateDependency = parent.getModificationTime(); BuildConfiguration updatedDependency = dependency.toBuilder() .description("Random Description so it rebuilds") .buildScript("mvn" + " clean deploy -DskipTests=true") .build(); buildConfigurationClient.update(updatedDependency.getId(), updatedDependency); assertThat(oldLastModDateDependency).isNotEqualTo(updatedDependency.getModificationTime()); EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.SUCCESS); EnumSet<BuildStatus> isNotIn = EnumSet.of(BuildStatus.REJECTED, BuildStatus.NO_REBUILD_REQUIRED); // Build temporary builds (parent and dependency) on new revision Build temporaryBuild = buildConfigurationClient.trigger(parent.getId(), getTemporaryParameters()); ResponseUtils .waitSynchronouslyFor(() -> buildToFinish(temporaryBuild.getId(), isIn, isNotIn), 15, TimeUnit.SECONDS); // Build persistent build of dependency on the same revision Build dependencyPersistentBuild = buildConfigurationClient .trigger(dependency.getId(), 
getPersistentParameters()); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish(dependencyPersistentBuild.getId(), isIn, isNotIn), 15, TimeUnit.SECONDS); // Build temporary build of parent and check it gets REJECTED even if it's dependency has newer record // (in this case temp build should ignore persistent one) Build finalRecord = buildConfigurationClient.trigger(parent.getId(), getTemporaryParameters()); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish(finalRecord.getId(), EnumSet.of(BuildStatus.NO_REBUILD_REQUIRED), null), 15, TimeUnit.SECONDS); } @Test public void shouldRejectAfterBuildingTwoTempBuildsOnSameRevision() throws ClientException { BuildConfiguration buildConfiguration = buildConfigurationClient .getAll(Optional.empty(), Optional.of("name==maven-plugin-test")) .iterator() .next(); BuildConfiguration updatedConfiguration = buildConfiguration.toBuilder() .description( "Random Description to be able to trigger build again so that temporary build will be first on this revision") .buildScript("mvn" + " clean deploy -DskipTests=true") .build(); buildConfigurationClient.update(updatedConfiguration.getId(), updatedConfiguration); Build temporaryBuild = buildConfigurationClient.trigger(updatedConfiguration.getId(), getTemporaryParameters()); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish(temporaryBuild.getId(), EnumSet.of(BuildStatus.SUCCESS), null), 15, TimeUnit.SECONDS); Build secondTempBuild = buildConfigurationClient .trigger(updatedConfiguration.getId(), getTemporaryParameters()); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish( secondTempBuild.getId(), EnumSet.of(BuildStatus.NO_REBUILD_REQUIRED), EnumSet.of(BuildStatus.SUCCESS, BuildStatus.REJECTED)), 15, TimeUnit.SECONDS); } @Test public void shouldNotTriggerANewPersistentBuildWithoutForceIfOnlyDescriptionChanged() throws ClientException { BuildConfiguration buildConfiguration = buildConfigurationClient .getAll(Optional.empty(), Optional.of("name==maven-plugin-test")) 
.iterator() .next(); EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.SUCCESS); EnumSet<BuildStatus> isNotIn = EnumSet.of(BuildStatus.REJECTED, BuildStatus.NO_REBUILD_REQUIRED); // Build persistent builds (parent and dependency) on new revision Build persistentBuild = buildConfigurationClient .trigger(buildConfiguration.getId(), getPersistentParameters(true)); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish(persistentBuild.getId(), isIn, isNotIn), 15, TimeUnit.SECONDS); // Updating the description only won't create a new revision, as description is not audited anymore Instant oldLastModDate = buildConfiguration.getModificationTime(); BuildConfiguration updatedConfiguration = buildConfiguration.toBuilder() .description( "Random Description to be able to trigger build again so that persistent build will be first on this revision") .build(); buildConfigurationClient.update(updatedConfiguration.getId(), updatedConfiguration); assertThat(oldLastModDate).isEqualTo(updatedConfiguration.getModificationTime()); Build build2 = buildConfigurationClient.trigger(updatedConfiguration.getId(), getPersistentParameters()); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish(build2.getId(), EnumSet.of(BuildStatus.NO_REBUILD_REQUIRED), null), 15, TimeUnit.SECONDS); } @Test public void shouldNotTriggerANewTemporaryBuildWithoutForceIfOnlyDescriptionChanged() throws ClientException { BuildConfiguration buildConfiguration = buildConfigurationClient .getAll(Optional.empty(), Optional.of("name==maven-plugin-test")) .iterator() .next(); EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.SUCCESS); EnumSet<BuildStatus> isNotIn = EnumSet.of(BuildStatus.REJECTED, BuildStatus.NO_REBUILD_REQUIRED); // Build temporary builds (parent and dependency) on new revision Build persistentBuild = buildConfigurationClient .trigger(buildConfiguration.getId(), getTemporaryParameters(true)); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish(persistentBuild.getId(), isIn, isNotIn), 15, 
TimeUnit.SECONDS); // Updating the description only won't create a new revision, as description is not audited anymore Instant oldLastModDate = buildConfiguration.getModificationTime(); BuildConfiguration updatedConfiguration = buildConfiguration.toBuilder() .description( "Random Description to be able to trigger build again so that temporary build will be first on this revision") .build(); buildConfigurationClient.update(updatedConfiguration.getId(), updatedConfiguration); assertThat(oldLastModDate).isEqualTo(updatedConfiguration.getModificationTime()); Build build2 = buildConfigurationClient.trigger(updatedConfiguration.getId(), getTemporaryParameters()); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish(build2.getId(), EnumSet.of(BuildStatus.NO_REBUILD_REQUIRED), null), 15, TimeUnit.SECONDS); } @Test public void shouldHaveNoRebuildCauseFilled() throws Exception { // with BuildConfiguration buildConfiguration = buildConfigurationClient.getAll().iterator().next(); // when #1 Build build = buildConfigurationClient.trigger(buildConfiguration.getId(), getPersistentParameters(true)); assertThat(build).isNotNull().extracting("id").isNotNull().isNotEqualTo(""); EnumSet<BuildStatus> isIn = EnumSet.of(BuildStatus.SUCCESS); ResponseUtils.waitSynchronouslyFor(() -> buildToFinish(build.getId(), isIn, null), 15, TimeUnit.SECONDS); // when #2 EnumSet<BuildStatus> isNotIn = EnumSet.of(BuildStatus.SUCCESS, BuildStatus.FAILED); Build rebuild = buildConfigurationClient.trigger(buildConfiguration.getId(), getBuildParameters(false, false)); ResponseUtils.waitSynchronouslyFor( () -> buildToFinish(rebuild.getId(), EnumSet.of(BuildStatus.NO_REBUILD_REQUIRED), isNotIn), 15, TimeUnit.SECONDS); // then Build refresh = buildClient.getSpecific(rebuild.getId()); assertThat(refresh.getNoRebuildCause()).isNotNull().extracting("id").isEqualTo(build.getId()); } private BuildParameters getTemporaryParameters() { return getBuildParameters(true, false); } private BuildParameters 
getPersistentParameters() { return getBuildParameters(false, false); } private BuildParameters getTemporaryParameters(boolean force) { return getBuildParameters(true, force); } private BuildParameters getPersistentParameters(boolean force) { return getBuildParameters(false, force); } private BuildParameters getBuildParameters(boolean temporary, boolean force) { BuildParameters buildParameters = new BuildParameters(); buildParameters.setTemporaryBuild(temporary); buildParameters.setBuildDependencies(true); if (force) buildParameters.setRebuildMode(RebuildMode.FORCE); return buildParameters; } private Boolean buildToFinish(String id) { return buildToFinish(id, null, null); } private Boolean groupBuildToFinish(String id) { return groupBuildToFinish(id, null, null); } private Boolean buildToFinish(String buildId, EnumSet<BuildStatus> isIn, EnumSet<BuildStatus> isNotIn) { Build build = null; logger.debug("Waiting for build {} to finish", buildId); try { build = buildClient.getSpecific(buildId); assertThat(build).isNotNull(); logger.debug("Gotten build with status: {}", build.getStatus()); if (!build.getStatus().isFinal()) return false; } catch (RemoteResourceNotFoundException e) { fail(String.format("Build with id:%s not present", buildId), e); } catch (ClientException e) { fail("Client has failed in an unexpected way.", e); } assertThat(build).isNotNull(); assertThat(build.getStatus()).isNotNull(); if (isIn != null && !isIn.isEmpty()) assertThat(build.getStatus()).isIn(isIn); if (isNotIn != null && !isNotIn.isEmpty()) assertThat(build.getStatus()).isNotIn(isNotIn); return true; } private Boolean groupBuildToFinish(String groupBuildId, EnumSet<BuildStatus> isIn, EnumSet<BuildStatus> isNotIn) { if (isIn == null) isIn = EnumSet.noneOf(BuildStatus.class); if (isNotIn == null) isNotIn = EnumSet.noneOf(BuildStatus.class); GroupBuild build = null; logger.debug("Waiting for build {} to finish", groupBuildId); try { build = groupBuildClient.getSpecific(groupBuildId); 
assertThat(build).isNotNull(); logger.debug("Gotten build with status: {}", build.getStatus()); if (!build.getStatus().isFinal()) return false; } catch (RemoteResourceNotFoundException e) { fail(String.format("Group Build with id:%s not present", groupBuildId), e); } catch (ClientException e) { fail("Client has failed in an unexpected way.", e); } assertThat(build.getStatus()).isNotIn(isNotIn).isIn(isIn); return true; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hive.service.cli;

import java.math.BigDecimal;
import java.sql.Date;
import java.sql.Timestamp;

import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hive.service.cli.thrift.TBoolValue;
import org.apache.hive.service.cli.thrift.TByteValue;
import org.apache.hive.service.cli.thrift.TColumnValue;
import org.apache.hive.service.cli.thrift.TDoubleValue;
import org.apache.hive.service.cli.thrift.TI16Value;
import org.apache.hive.service.cli.thrift.TI32Value;
import org.apache.hive.service.cli.thrift.TI64Value;
import org.apache.hive.service.cli.thrift.TStringValue;

/**
 * Converts between Java column values and their Thrift {@link TColumnValue}
 * wire representation. A null Java value maps to a Thrift union member with
 * its value field unset, and back to null on the way in.
 *
 * Protocols before HIVE_CLI_SERVICE_PROTOCOL_V6 (used by RowBasedSet)
 */
public class ColumnValue {

  private static TColumnValue booleanValue(Boolean value) {
    TBoolValue tBoolValue = new TBoolValue();
    if (value != null) {
      tBoolValue.setValue(value);
    }
    return TColumnValue.boolVal(tBoolValue);
  }

  private static TColumnValue byteValue(Byte value) {
    TByteValue tByteValue = new TByteValue();
    if (value != null) {
      tByteValue.setValue(value);
    }
    return TColumnValue.byteVal(tByteValue);
  }

  private static TColumnValue shortValue(Short value) {
    TI16Value tI16Value = new TI16Value();
    if (value != null) {
      tI16Value.setValue(value);
    }
    return TColumnValue.i16Val(tI16Value);
  }

  private static TColumnValue intValue(Integer value) {
    TI32Value tI32Value = new TI32Value();
    if (value != null) {
      tI32Value.setValue(value);
    }
    return TColumnValue.i32Val(tI32Value);
  }

  private static TColumnValue longValue(Long value) {
    TI64Value tI64Value = new TI64Value();
    if (value != null) {
      tI64Value.setValue(value);
    }
    return TColumnValue.i64Val(tI64Value);
  }

  // Thrift has no 32-bit float member, so floats are widened to double.
  private static TColumnValue floatValue(Float value) {
    TDoubleValue tDoubleValue = new TDoubleValue();
    if (value != null) {
      tDoubleValue.setValue(value);
    }
    return TColumnValue.doubleVal(tDoubleValue);
  }

  private static TColumnValue doubleValue(Double value) {
    TDoubleValue tDoubleValue = new TDoubleValue();
    if (value != null) {
      tDoubleValue.setValue(value);
    }
    return TColumnValue.doubleVal(tDoubleValue);
  }

  private static TColumnValue stringValue(String value) {
    TStringValue tStringValue = new TStringValue();
    if (value != null) {
      tStringValue.setValue(value);
    }
    return TColumnValue.stringVal(tStringValue);
  }

  private static TColumnValue stringValue(HiveChar value) {
    TStringValue tStringValue = new TStringValue();
    if (value != null) {
      tStringValue.setValue(value.toString());
    }
    return TColumnValue.stringVal(tStringValue);
  }

  private static TColumnValue stringValue(HiveVarchar value) {
    TStringValue tStringValue = new TStringValue();
    if (value != null) {
      tStringValue.setValue(value.toString());
    }
    return TColumnValue.stringVal(tStringValue);
  }

  private static TColumnValue dateValue(Date value) {
    TStringValue tStringValue = new TStringValue();
    if (value != null) {
      tStringValue.setValue(value.toString());
    }
    // FIX: previously wrapped in a redundant copy via new TColumnValue(...);
    // return the union directly, consistent with every other factory here.
    return TColumnValue.stringVal(tStringValue);
  }

  private static TColumnValue timestampValue(Timestamp value) {
    TStringValue tStringValue = new TStringValue();
    if (value != null) {
      tStringValue.setValue(value.toString());
    }
    return TColumnValue.stringVal(tStringValue);
  }

  private static TColumnValue stringValue(HiveDecimal value) {
    TStringValue tStrValue = new TStringValue();
    if (value != null) {
      tStrValue.setValue(value.toString());
    }
    return TColumnValue.stringVal(tStrValue);
  }

  private static TColumnValue stringValue(HiveIntervalYearMonth value) {
    TStringValue tStrValue = new TStringValue();
    if (value != null) {
      tStrValue.setValue(value.toString());
    }
    return TColumnValue.stringVal(tStrValue);
  }

  private static TColumnValue stringValue(HiveIntervalDayTime value) {
    TStringValue tStrValue = new TStringValue();
    if (value != null) {
      tStrValue.setValue(value.toString());
    }
    return TColumnValue.stringVal(tStrValue);
  }

  /**
   * Wraps a Java value of the given column type into a TColumnValue.
   * Non-primitive types (binary, array, map, struct, union, user-defined)
   * are transported as their string form; unknown types yield null.
   */
  public static TColumnValue toTColumnValue(Type type, Object value) {
    switch (type) {
    case BOOLEAN_TYPE:
      return booleanValue((Boolean)value);
    case TINYINT_TYPE:
      return byteValue((Byte)value);
    case SMALLINT_TYPE:
      return shortValue((Short)value);
    case INT_TYPE:
      return intValue((Integer)value);
    case BIGINT_TYPE:
      return longValue((Long)value);
    case FLOAT_TYPE:
      return floatValue((Float)value);
    case DOUBLE_TYPE:
      return doubleValue((Double)value);
    case STRING_TYPE:
      return stringValue((String)value);
    case CHAR_TYPE:
      return stringValue((HiveChar)value);
    case VARCHAR_TYPE:
      return stringValue((HiveVarchar)value);
    case DATE_TYPE:
      return dateValue((Date)value);
    case TIMESTAMP_TYPE:
      return timestampValue((Timestamp)value);
    case INTERVAL_YEAR_MONTH_TYPE:
      return stringValue((HiveIntervalYearMonth) value);
    case INTERVAL_DAY_TIME_TYPE:
      return stringValue((HiveIntervalDayTime) value);
    case DECIMAL_TYPE:
      return stringValue(((HiveDecimal)value));
    case BINARY_TYPE:
      return stringValue((String)value);
    case ARRAY_TYPE:
    case MAP_TYPE:
    case STRUCT_TYPE:
    case UNION_TYPE:
    case USER_DEFINED_TYPE:
      return stringValue((String)value);
    default:
      return null;
    }
  }

  private static Boolean getBooleanValue(TBoolValue tBoolValue) {
    if (tBoolValue.isSetValue()) {
      return tBoolValue.isValue();
    }
    return null;
  }

  private static Byte getByteValue(TByteValue tByteValue) {
    if (tByteValue.isSetValue()) {
      return tByteValue.getValue();
    }
    return null;
  }

  private static Short getShortValue(TI16Value tI16Value) {
    if (tI16Value.isSetValue()) {
      return tI16Value.getValue();
    }
    return null;
  }

  private static Integer getIntegerValue(TI32Value tI32Value) {
    if (tI32Value.isSetValue()) {
      return tI32Value.getValue();
    }
    return null;
  }

  private static Long getLongValue(TI64Value tI64Value) {
    if (tI64Value.isSetValue()) {
      return tI64Value.getValue();
    }
    return null;
  }

  private static Double getDoubleValue(TDoubleValue tDoubleValue) {
    if (tDoubleValue.isSetValue()) {
      return tDoubleValue.getValue();
    }
    return null;
  }

  private static String getStringValue(TStringValue tStringValue) {
    if (tStringValue.isSetValue()) {
      return tStringValue.getValue();
    }
    return null;
  }

  private static Date getDateValue(TStringValue tStringValue) {
    if (tStringValue.isSetValue()) {
      return Date.valueOf(tStringValue.getValue());
    }
    return null;
  }

  private static Timestamp getTimestampValue(TStringValue tStringValue) {
    if (tStringValue.isSetValue()) {
      return Timestamp.valueOf(tStringValue.getValue());
    }
    return null;
  }

  private static byte[] getBinaryValue(TStringValue tString) {
    if (tString.isSetValue()) {
      // NOTE(review): uses the platform default charset; presumably the peer
      // encodes with the same charset — confirm, or pass an explicit Charset.
      return tString.getValue().getBytes();
    }
    return null;
  }

  private static BigDecimal getBigDecimalValue(TStringValue tStringValue) {
    if (tStringValue.isSetValue()) {
      return new BigDecimal(tStringValue.getValue());
    }
    return null;
  }

  /**
   * Unwraps a TColumnValue back to a Java object; an unset Thrift value
   * yields null.
   *
   * @throws IllegalArgumentException if the union carries a field this
   *         method does not handle
   */
  public static Object toColumnValue(TColumnValue value) {
    TColumnValue._Fields field = value.getSetField();
    switch (field) {
    case BOOL_VAL:
      return getBooleanValue(value.getBoolVal());
    case BYTE_VAL:
      return getByteValue(value.getByteVal());
    case I16_VAL:
      return getShortValue(value.getI16Val());
    case I32_VAL:
      return getIntegerValue(value.getI32Val());
    case I64_VAL:
      return getLongValue(value.getI64Val());
    case DOUBLE_VAL:
      return getDoubleValue(value.getDoubleVal());
    case STRING_VAL:
      return getStringValue(value.getStringVal());
    }
    // FIX: the previous message ("never") gave no diagnostic value.
    throw new IllegalArgumentException("Unrecognized TColumnValue field: " + field);
  }
}
/* Copyright 2005 The Apache Software Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.bridge.svg12; import java.awt.Point; import java.awt.event.KeyEvent; import java.awt.geom.Point2D; import org.apache.batik.bridge.BridgeContext; import org.apache.batik.bridge.BridgeEventSupport; import org.apache.batik.bridge.FocusManager; import org.apache.batik.bridge.UserAgent; import org.apache.batik.dom.events.AbstractEvent; import org.apache.batik.dom.events.DOMKeyboardEvent; import org.apache.batik.dom.events.DOMMouseEvent; import org.apache.batik.dom.events.DOMTextEvent; import org.apache.batik.dom.events.NodeEventTarget; import org.apache.batik.dom.svg12.SVGOMWheelEvent; import org.apache.batik.dom.util.DOMUtilities; import org.apache.batik.gvt.GraphicsNode; import org.apache.batik.gvt.event.EventDispatcher; import org.apache.batik.gvt.event.GraphicsNodeKeyEvent; import org.apache.batik.gvt.event.GraphicsNodeMouseEvent; import org.apache.batik.gvt.event.GraphicsNodeMouseWheelEvent; import org.apache.batik.gvt.event.GraphicsNodeMouseWheelListener; import org.apache.batik.util.XMLConstants; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.events.DocumentEvent; import org.w3c.dom.events.EventListener; import org.w3c.dom.events.EventTarget; /** * This class is responsible for tracking GraphicsNodeMouseEvents and * forwarding them to the DOM as regular DOM MouseEvents. 
This SVG 1.2 * specific class handles DOM Level 3 keyboard events and also ensures * that mouse events under sXBL have appropriate bubble limits. * * @author <a href="mailto:cam%40mcc%2eid%2eau">Cameron McCormack</a> * @version $Id$ */ public abstract class SVG12BridgeEventSupport extends BridgeEventSupport { protected SVG12BridgeEventSupport() {} /** * Is called only for the root element in order to dispatch GVT * events to the DOM. */ public static void addGVTListener(BridgeContext ctx, Document doc) { UserAgent ua = ctx.getUserAgent(); if (ua != null) { EventDispatcher dispatcher = ua.getEventDispatcher(); if (dispatcher != null) { final Listener listener = new Listener(ctx, ua); dispatcher.addGraphicsNodeMouseListener(listener); dispatcher.addGraphicsNodeMouseWheelListener(listener); dispatcher.addGraphicsNodeKeyListener(listener); // add an unload listener on the SVGDocument to remove // that listener for dispatching events EventListener l = new GVTUnloadListener(dispatcher, listener); NodeEventTarget target = (NodeEventTarget) doc; target.addEventListenerNS (XMLConstants.XML_EVENTS_NAMESPACE_URI, "SVGUnload", l, false, null); storeEventListenerNS (ctx, target, XMLConstants.XML_EVENTS_NAMESPACE_URI, "SVGUnload", l, false); } } } /** * A GraphicsNodeMouseListener that dispatch DOM events accordingly. */ protected static class Listener extends BridgeEventSupport.Listener implements GraphicsNodeMouseWheelListener { /** * The BridgeContext downcasted to an SVG12BridgeContext. */ protected SVG12BridgeContext ctx12; public Listener(BridgeContext ctx, UserAgent u) { super(ctx, u); ctx12 = (SVG12BridgeContext) ctx; } // Key ------------------------------------------------------------- /** * Invoked when a key has been pressed. 
* @param evt the graphics node key event */ public void keyPressed(GraphicsNodeKeyEvent evt) { // XXX isDown is not preventing key repeats if (!isDown) { isDown = true; dispatchKeyboardEvent("keydown", evt); } if (evt.getKeyChar() == KeyEvent.CHAR_UNDEFINED) { // We will not get a KEY_TYPED event for this char // so generate a keypress event here. dispatchTextEvent(evt); } } /** * Invoked when a key has been released. * @param evt the graphics node key event */ public void keyReleased(GraphicsNodeKeyEvent evt) { dispatchKeyboardEvent("keyup", evt); isDown = false; } /** * Invoked when a key has been typed. * @param evt the graphics node key event */ public void keyTyped(GraphicsNodeKeyEvent evt) { dispatchTextEvent(evt); } /** * Dispatch a DOM 3 Keyboard event. */ protected void dispatchKeyboardEvent(String eventType, GraphicsNodeKeyEvent evt) { FocusManager fmgr = context.getFocusManager(); if (fmgr == null) { return; } Element targetElement = (Element) fmgr.getCurrentEventTarget(); if (targetElement == null) { return; } DocumentEvent d = (DocumentEvent) targetElement.getOwnerDocument(); DOMKeyboardEvent keyEvt = (DOMKeyboardEvent) d.createEvent("KeyboardEvent"); String modifiers = DOMUtilities.getModifiersList(evt.getLockState(), evt.getModifiers()); keyEvt.initKeyboardEventNS(XMLConstants.XML_EVENTS_NAMESPACE_URI, eventType, true, true, null, mapKeyCodeToIdentifier(evt.getKeyCode()), mapKeyLocation(evt.getKeyLocation()), modifiers); try { ((EventTarget)targetElement).dispatchEvent(keyEvt); } catch (RuntimeException e) { ua.displayError(e); } } /** * Dispatch a DOM 3 Text event. 
*/ protected void dispatchTextEvent(GraphicsNodeKeyEvent evt) { FocusManager fmgr = context.getFocusManager(); if (fmgr == null) { return; } Element targetElement = (Element) fmgr.getCurrentEventTarget(); if (targetElement == null) { return; } DocumentEvent d = (DocumentEvent) targetElement.getOwnerDocument(); DOMTextEvent textEvt = (DOMTextEvent) d.createEvent("TextEvent"); textEvt.initTextEventNS(XMLConstants.XML_EVENTS_NAMESPACE_URI, "textInput", true, true, null, String.valueOf(evt.getKeyChar())); try { ((EventTarget) targetElement).dispatchEvent(textEvt); } catch (RuntimeException e) { ua.displayError(e); } } /** * Maps Java KeyEvent location numbers to DOM 3 location numbers. */ protected int mapKeyLocation(int location) { return location - 1; } /** * Array to hold the map of Java keycodes to DOM 3 key strings. */ protected static String[][] IDENTIFIER_KEY_CODES = new String[256][]; static { putIdentifierKeyCode(DOMKeyboardEvent.KEY_0, KeyEvent.VK_0); putIdentifierKeyCode(DOMKeyboardEvent.KEY_1, KeyEvent.VK_1); putIdentifierKeyCode(DOMKeyboardEvent.KEY_2, KeyEvent.VK_2); putIdentifierKeyCode(DOMKeyboardEvent.KEY_3, KeyEvent.VK_3); putIdentifierKeyCode(DOMKeyboardEvent.KEY_4, KeyEvent.VK_4); putIdentifierKeyCode(DOMKeyboardEvent.KEY_5, KeyEvent.VK_5); putIdentifierKeyCode(DOMKeyboardEvent.KEY_6, KeyEvent.VK_6); putIdentifierKeyCode(DOMKeyboardEvent.KEY_7, KeyEvent.VK_7); putIdentifierKeyCode(DOMKeyboardEvent.KEY_8, KeyEvent.VK_8); putIdentifierKeyCode(DOMKeyboardEvent.KEY_9, KeyEvent.VK_9); putIdentifierKeyCode(DOMKeyboardEvent.KEY_ACCEPT, KeyEvent.VK_ACCEPT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_AGAIN, KeyEvent.VK_AGAIN); putIdentifierKeyCode(DOMKeyboardEvent.KEY_A, KeyEvent.VK_A); putIdentifierKeyCode(DOMKeyboardEvent.KEY_ALL_CANDIDATES, KeyEvent.VK_ALL_CANDIDATES); putIdentifierKeyCode(DOMKeyboardEvent.KEY_ALPHANUMERIC, KeyEvent.VK_ALPHANUMERIC); putIdentifierKeyCode(DOMKeyboardEvent.KEY_ALT_GRAPH, KeyEvent.VK_ALT_GRAPH); 
putIdentifierKeyCode(DOMKeyboardEvent.KEY_ALT, KeyEvent.VK_ALT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_AMPERSAND, KeyEvent.VK_AMPERSAND); putIdentifierKeyCode(DOMKeyboardEvent.KEY_APOSTROPHE, KeyEvent.VK_QUOTE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_ASTERISK, KeyEvent.VK_ASTERISK); putIdentifierKeyCode(DOMKeyboardEvent.KEY_AT, KeyEvent.VK_AT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_BACKSLASH, KeyEvent.VK_BACK_SLASH); putIdentifierKeyCode(DOMKeyboardEvent.KEY_BACKSPACE, KeyEvent.VK_BACK_SPACE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_B, KeyEvent.VK_B); putIdentifierKeyCode(DOMKeyboardEvent.KEY_CANCEL, KeyEvent.VK_CANCEL); putIdentifierKeyCode(DOMKeyboardEvent.KEY_CAPS_LOCK, KeyEvent.VK_CAPS_LOCK); putIdentifierKeyCode(DOMKeyboardEvent.KEY_CIRCUMFLEX, KeyEvent.VK_CIRCUMFLEX); putIdentifierKeyCode(DOMKeyboardEvent.KEY_C, KeyEvent.VK_C); putIdentifierKeyCode(DOMKeyboardEvent.KEY_CLEAR, KeyEvent.VK_CLEAR); putIdentifierKeyCode(DOMKeyboardEvent.KEY_CODE_INPUT, KeyEvent.VK_CODE_INPUT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COLON, KeyEvent.VK_COLON); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_ACUTE, KeyEvent.VK_DEAD_ACUTE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_BREVE, KeyEvent.VK_DEAD_BREVE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_CARON, KeyEvent.VK_DEAD_CARON); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_CEDILLA, KeyEvent.VK_DEAD_CEDILLA); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_CIRCUMFLEX, KeyEvent.VK_DEAD_CIRCUMFLEX); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_DIERESIS, KeyEvent.VK_DEAD_DIAERESIS); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_DOT_ABOVE, KeyEvent.VK_DEAD_ABOVEDOT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_DOUBLE_ACUTE, KeyEvent.VK_DEAD_DOUBLEACUTE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_GRAVE, KeyEvent.VK_DEAD_GRAVE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_IOTA, KeyEvent.VK_DEAD_IOTA); 
putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_MACRON, KeyEvent.VK_DEAD_MACRON); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_OGONEK, KeyEvent.VK_DEAD_OGONEK); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_RING_ABOVE, KeyEvent.VK_DEAD_ABOVERING); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMBINING_TILDE, KeyEvent.VK_DEAD_TILDE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMMA, KeyEvent.VK_COMMA); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COMPOSE, KeyEvent.VK_COMPOSE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_CONTROL, KeyEvent.VK_CONTROL); putIdentifierKeyCode(DOMKeyboardEvent.KEY_CONVERT, KeyEvent.VK_CONVERT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_COPY, KeyEvent.VK_COPY); putIdentifierKeyCode(DOMKeyboardEvent.KEY_CUT, KeyEvent.VK_CUT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_DELETE, KeyEvent.VK_DELETE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_D, KeyEvent.VK_D); putIdentifierKeyCode(DOMKeyboardEvent.KEY_DOLLAR, KeyEvent.VK_DOLLAR); putIdentifierKeyCode(DOMKeyboardEvent.KEY_DOWN, KeyEvent.VK_DOWN); putIdentifierKeyCode(DOMKeyboardEvent.KEY_E, KeyEvent.VK_E); putIdentifierKeyCode(DOMKeyboardEvent.KEY_END, KeyEvent.VK_END); putIdentifierKeyCode(DOMKeyboardEvent.KEY_ENTER, KeyEvent.VK_ENTER); putIdentifierKeyCode(DOMKeyboardEvent.KEY_EQUALS, KeyEvent.VK_EQUALS); putIdentifierKeyCode(DOMKeyboardEvent.KEY_ESCAPE, KeyEvent.VK_ESCAPE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_EURO, KeyEvent.VK_EURO_SIGN); putIdentifierKeyCode(DOMKeyboardEvent.KEY_EXCLAMATION, KeyEvent.VK_EXCLAMATION_MARK); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F10, KeyEvent.VK_F10); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F11, KeyEvent.VK_F11); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F12, KeyEvent.VK_F12); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F13, KeyEvent.VK_F13); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F14, KeyEvent.VK_F14); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F15, KeyEvent.VK_F15); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F16, KeyEvent.VK_F16); 
putIdentifierKeyCode(DOMKeyboardEvent.KEY_F17, KeyEvent.VK_F17); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F18, KeyEvent.VK_F18); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F19, KeyEvent.VK_F19); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F1, KeyEvent.VK_F1); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F20, KeyEvent.VK_F20); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F21, KeyEvent.VK_F21); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F22, KeyEvent.VK_F22); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F23, KeyEvent.VK_F23); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F24, KeyEvent.VK_F24); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F2, KeyEvent.VK_F2); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F3, KeyEvent.VK_F3); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F4, KeyEvent.VK_F4); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F5, KeyEvent.VK_F5); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F6, KeyEvent.VK_F6); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F7, KeyEvent.VK_F7); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F8, KeyEvent.VK_F8); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F9, KeyEvent.VK_F9); putIdentifierKeyCode(DOMKeyboardEvent.KEY_FINAL_MODE, KeyEvent.VK_FINAL); putIdentifierKeyCode(DOMKeyboardEvent.KEY_FIND, KeyEvent.VK_FIND); putIdentifierKeyCode(DOMKeyboardEvent.KEY_F, KeyEvent.VK_F); putIdentifierKeyCode(DOMKeyboardEvent.KEY_FULL_STOP, KeyEvent.VK_PERIOD); putIdentifierKeyCode(DOMKeyboardEvent.KEY_FULL_WIDTH, KeyEvent.VK_FULL_WIDTH); putIdentifierKeyCode(DOMKeyboardEvent.KEY_G, KeyEvent.VK_G); putIdentifierKeyCode(DOMKeyboardEvent.KEY_GRAVE, KeyEvent.VK_BACK_QUOTE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_GREATER_THAN, KeyEvent.VK_GREATER); putIdentifierKeyCode(DOMKeyboardEvent.KEY_HALF_WIDTH, KeyEvent.VK_HALF_WIDTH); putIdentifierKeyCode(DOMKeyboardEvent.KEY_HASH, KeyEvent.VK_NUMBER_SIGN); putIdentifierKeyCode(DOMKeyboardEvent.KEY_HELP, KeyEvent.VK_HELP); putIdentifierKeyCode(DOMKeyboardEvent.KEY_HIRAGANA, KeyEvent.VK_HIRAGANA); putIdentifierKeyCode(DOMKeyboardEvent.KEY_H, 
KeyEvent.VK_H); putIdentifierKeyCode(DOMKeyboardEvent.KEY_HOME, KeyEvent.VK_HOME); putIdentifierKeyCode(DOMKeyboardEvent.KEY_I, KeyEvent.VK_I); putIdentifierKeyCode(DOMKeyboardEvent.KEY_INSERT, KeyEvent.VK_INSERT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_INVERTED_EXCLAMATION, KeyEvent.VK_INVERTED_EXCLAMATION_MARK); putIdentifierKeyCode(DOMKeyboardEvent.KEY_JAPANESE_HIRAGANA, KeyEvent.VK_JAPANESE_HIRAGANA); putIdentifierKeyCode(DOMKeyboardEvent.KEY_JAPANESE_KATAKANA, KeyEvent.VK_JAPANESE_KATAKANA); putIdentifierKeyCode(DOMKeyboardEvent.KEY_JAPANESE_ROMAJI, KeyEvent.VK_JAPANESE_ROMAN); putIdentifierKeyCode(DOMKeyboardEvent.KEY_J, KeyEvent.VK_J); putIdentifierKeyCode(DOMKeyboardEvent.KEY_KANA_MODE, KeyEvent.VK_KANA_LOCK); putIdentifierKeyCode(DOMKeyboardEvent.KEY_KANJI_MODE, KeyEvent.VK_KANJI); putIdentifierKeyCode(DOMKeyboardEvent.KEY_KATAKANA, KeyEvent.VK_KATAKANA); putIdentifierKeyCode(DOMKeyboardEvent.KEY_K, KeyEvent.VK_K); putIdentifierKeyCode(DOMKeyboardEvent.KEY_LEFT_BRACE, KeyEvent.VK_BRACELEFT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_LEFT, KeyEvent.VK_LEFT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_LEFT_PARENTHESIS, KeyEvent.VK_LEFT_PARENTHESIS); putIdentifierKeyCode(DOMKeyboardEvent.KEY_LEFT_SQUARE_BRACKET, KeyEvent.VK_OPEN_BRACKET); putIdentifierKeyCode(DOMKeyboardEvent.KEY_LESS_THAN, KeyEvent.VK_LESS); putIdentifierKeyCode(DOMKeyboardEvent.KEY_L, KeyEvent.VK_L); putIdentifierKeyCode(DOMKeyboardEvent.KEY_META, KeyEvent.VK_META); putIdentifierKeyCode(DOMKeyboardEvent.KEY_META, KeyEvent.VK_META); putIdentifierKeyCode(DOMKeyboardEvent.KEY_MINUS, KeyEvent.VK_MINUS); putIdentifierKeyCode(DOMKeyboardEvent.KEY_M, KeyEvent.VK_M); putIdentifierKeyCode(DOMKeyboardEvent.KEY_MODE_CHANGE, KeyEvent.VK_MODECHANGE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_N, KeyEvent.VK_N); putIdentifierKeyCode(DOMKeyboardEvent.KEY_NONCONVERT, KeyEvent.VK_NONCONVERT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_NUM_LOCK, KeyEvent.VK_NUM_LOCK); 
putIdentifierKeyCode(DOMKeyboardEvent.KEY_NUM_LOCK, KeyEvent.VK_NUM_LOCK); putIdentifierKeyCode(DOMKeyboardEvent.KEY_O, KeyEvent.VK_O); putIdentifierKeyCode(DOMKeyboardEvent.KEY_PAGE_DOWN, KeyEvent.VK_PAGE_DOWN); putIdentifierKeyCode(DOMKeyboardEvent.KEY_PAGE_UP, KeyEvent.VK_PAGE_UP); putIdentifierKeyCode(DOMKeyboardEvent.KEY_PASTE, KeyEvent.VK_PASTE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_PAUSE, KeyEvent.VK_PAUSE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_P, KeyEvent.VK_P); putIdentifierKeyCode(DOMKeyboardEvent.KEY_PLUS, KeyEvent.VK_PLUS); putIdentifierKeyCode(DOMKeyboardEvent.KEY_PREVIOUS_CANDIDATE, KeyEvent.VK_PREVIOUS_CANDIDATE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_PRINT_SCREEN, KeyEvent.VK_PRINTSCREEN); putIdentifierKeyCode(DOMKeyboardEvent.KEY_PROPS, KeyEvent.VK_PROPS); putIdentifierKeyCode(DOMKeyboardEvent.KEY_Q, KeyEvent.VK_Q); putIdentifierKeyCode(DOMKeyboardEvent.KEY_QUOTE, KeyEvent.VK_QUOTEDBL); putIdentifierKeyCode(DOMKeyboardEvent.KEY_RIGHT_BRACE, KeyEvent.VK_BRACERIGHT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_RIGHT, KeyEvent.VK_RIGHT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_RIGHT_PARENTHESIS, KeyEvent.VK_RIGHT_PARENTHESIS); putIdentifierKeyCode(DOMKeyboardEvent.KEY_RIGHT_SQUARE_BRACKET, KeyEvent.VK_CLOSE_BRACKET); putIdentifierKeyCode(DOMKeyboardEvent.KEY_R, KeyEvent.VK_R); putIdentifierKeyCode(DOMKeyboardEvent.KEY_ROMAN_CHARACTERS, KeyEvent.VK_ROMAN_CHARACTERS); putIdentifierKeyCode(DOMKeyboardEvent.KEY_SCROLL, KeyEvent.VK_SCROLL_LOCK); putIdentifierKeyCode(DOMKeyboardEvent.KEY_SCROLL, KeyEvent.VK_SCROLL_LOCK); putIdentifierKeyCode(DOMKeyboardEvent.KEY_SEMICOLON, KeyEvent.VK_SEMICOLON); putIdentifierKeyCode(DOMKeyboardEvent.KEY_SEMIVOICED_SOUND, KeyEvent.VK_DEAD_SEMIVOICED_SOUND); putIdentifierKeyCode(DOMKeyboardEvent.KEY_SHIFT, KeyEvent.VK_SHIFT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_SHIFT, KeyEvent.VK_SHIFT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_S, KeyEvent.VK_S); putIdentifierKeyCode(DOMKeyboardEvent.KEY_SLASH, 
KeyEvent.VK_SLASH); putIdentifierKeyCode(DOMKeyboardEvent.KEY_SPACE, KeyEvent.VK_SPACE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_STOP, KeyEvent.VK_STOP); putIdentifierKeyCode(DOMKeyboardEvent.KEY_TAB, KeyEvent.VK_TAB); putIdentifierKeyCode(DOMKeyboardEvent.KEY_T, KeyEvent.VK_T); putIdentifierKeyCode(DOMKeyboardEvent.KEY_U, KeyEvent.VK_U); putIdentifierKeyCode(DOMKeyboardEvent.KEY_UNDERSCORE, KeyEvent.VK_UNDERSCORE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_UNDO, KeyEvent.VK_UNDO); putIdentifierKeyCode(DOMKeyboardEvent.KEY_UNIDENTIFIED, KeyEvent.VK_UNDEFINED); putIdentifierKeyCode(DOMKeyboardEvent.KEY_UP, KeyEvent.VK_UP); putIdentifierKeyCode(DOMKeyboardEvent.KEY_V, KeyEvent.VK_V); putIdentifierKeyCode(DOMKeyboardEvent.KEY_VOICED_SOUND, KeyEvent.VK_DEAD_VOICED_SOUND); putIdentifierKeyCode(DOMKeyboardEvent.KEY_W, KeyEvent.VK_W); putIdentifierKeyCode(DOMKeyboardEvent.KEY_X, KeyEvent.VK_X); putIdentifierKeyCode(DOMKeyboardEvent.KEY_Y, KeyEvent.VK_Y); putIdentifierKeyCode(DOMKeyboardEvent.KEY_Z, KeyEvent.VK_Z); // Java keycodes for duplicate keys putIdentifierKeyCode(DOMKeyboardEvent.KEY_0, KeyEvent.VK_NUMPAD0); putIdentifierKeyCode(DOMKeyboardEvent.KEY_1, KeyEvent.VK_NUMPAD1); putIdentifierKeyCode(DOMKeyboardEvent.KEY_2, KeyEvent.VK_NUMPAD2); putIdentifierKeyCode(DOMKeyboardEvent.KEY_3, KeyEvent.VK_NUMPAD3); putIdentifierKeyCode(DOMKeyboardEvent.KEY_4, KeyEvent.VK_NUMPAD4); putIdentifierKeyCode(DOMKeyboardEvent.KEY_5, KeyEvent.VK_NUMPAD5); putIdentifierKeyCode(DOMKeyboardEvent.KEY_6, KeyEvent.VK_NUMPAD6); putIdentifierKeyCode(DOMKeyboardEvent.KEY_7, KeyEvent.VK_NUMPAD7); putIdentifierKeyCode(DOMKeyboardEvent.KEY_8, KeyEvent.VK_NUMPAD8); putIdentifierKeyCode(DOMKeyboardEvent.KEY_9, KeyEvent.VK_NUMPAD9); putIdentifierKeyCode(DOMKeyboardEvent.KEY_ASTERISK, KeyEvent.VK_MULTIPLY); putIdentifierKeyCode(DOMKeyboardEvent.KEY_DOWN, KeyEvent.VK_KP_DOWN); putIdentifierKeyCode(DOMKeyboardEvent.KEY_FULL_STOP, KeyEvent.VK_DECIMAL); putIdentifierKeyCode(DOMKeyboardEvent.KEY_LEFT, 
KeyEvent.VK_KP_LEFT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_MINUS, KeyEvent.VK_SUBTRACT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_PLUS, KeyEvent.VK_ADD); putIdentifierKeyCode(DOMKeyboardEvent.KEY_RIGHT, KeyEvent.VK_KP_RIGHT); putIdentifierKeyCode(DOMKeyboardEvent.KEY_SLASH, KeyEvent.VK_DIVIDE); putIdentifierKeyCode(DOMKeyboardEvent.KEY_UP, KeyEvent.VK_KP_UP); } /** * Put a key code to key identifier mapping into the * IDENTIFIER_KEY_CODES table. */ protected static void putIdentifierKeyCode(String keyIdentifier, int keyCode) { if (IDENTIFIER_KEY_CODES[keyCode / 256] == null) { IDENTIFIER_KEY_CODES[keyCode / 256] = new String[256]; } IDENTIFIER_KEY_CODES[keyCode / 256][keyCode % 256] = keyIdentifier; } /** * Convert a Java key code to a DOM 3 key string. */ protected String mapKeyCodeToIdentifier(int keyCode) { String[] a = IDENTIFIER_KEY_CODES[keyCode / 256]; if (a == null) { return DOMKeyboardEvent.KEY_UNIDENTIFIED; } return a[keyCode % 256]; } // MouseWheel ------------------------------------------------------ public void mouseWheelMoved(GraphicsNodeMouseWheelEvent evt) { Document doc = context.getPrimaryBridgeContext().getDocument(); Element targetElement = doc.getDocumentElement(); DocumentEvent d = (DocumentEvent) doc; SVGOMWheelEvent wheelEvt = (SVGOMWheelEvent) d.createEvent("WheelEvent"); wheelEvt.initWheelEventNS(XMLConstants.XML_EVENTS_NAMESPACE_URI, "wheel", true, true, null, evt.getWheelDelta()); try { ((EventTarget)targetElement).dispatchEvent(wheelEvt); } catch (RuntimeException e) { ua.displayError(e); } } // Mouse ----------------------------------------------------------- public void mouseEntered(GraphicsNodeMouseEvent evt) { Point clientXY = evt.getClientPoint(); GraphicsNode node = evt.getGraphicsNode(); Element targetElement = getEventTarget (node, new Point2D.Float(evt.getX(), evt.getY())); Element relatedElement = getRelatedElement(evt); int n = 0; if (relatedElement != null && targetElement != null) { n = 
DefaultXBLManager.computeBubbleLimit(targetElement, relatedElement); } dispatchMouseEvent("mouseover", targetElement, relatedElement, clientXY, evt, true, n); } public void mouseExited(GraphicsNodeMouseEvent evt) { Point clientXY = evt.getClientPoint(); // Get the 'new' node for the DOM event. GraphicsNode node = evt.getRelatedNode(); Element targetElement = getEventTarget(node, clientXY); if (lastTargetElement != null) { int n = 0; if (targetElement != null) { // moving from one element to another n = DefaultXBLManager.computeBubbleLimit(lastTargetElement, targetElement); } dispatchMouseEvent("mouseout", lastTargetElement, // target targetElement, // relatedTarget clientXY, evt, true, n); lastTargetElement = null; } } public void mouseMoved(GraphicsNodeMouseEvent evt) { Point clientXY = evt.getClientPoint(); GraphicsNode node = evt.getGraphicsNode(); Element targetElement = getEventTarget(node, clientXY); Element holdLTE = lastTargetElement; if (holdLTE != targetElement) { if (holdLTE != null) { int n = 0; if (targetElement != null) { n = DefaultXBLManager.computeBubbleLimit(holdLTE, targetElement); } dispatchMouseEvent("mouseout", holdLTE, // target targetElement, // relatedTarget clientXY, evt, true, n); } if (targetElement != null) { int n = 0; if (holdLTE != null) { n = DefaultXBLManager.computeBubbleLimit(targetElement, holdLTE); } dispatchMouseEvent("mouseover", targetElement, // target holdLTE, // relatedTarget clientXY, evt, true, n); } } dispatchMouseEvent("mousemove", targetElement, // target null, // relatedTarget clientXY, evt, false, 0); } /** * Dispatches a DOM MouseEvent according to the specified * parameters. 
* * @param eventType the event type * @param targetElement the target of the event * @param relatedElement the related target if any * @param clientXY the mouse coordinates in the client space * @param evt the GVT GraphicsNodeMouseEvent * @param cancelable true means the event is cancelable */ protected void dispatchMouseEvent(String eventType, Element targetElement, Element relatedElement, Point clientXY, GraphicsNodeMouseEvent evt, boolean cancelable) { dispatchMouseEvent(eventType, targetElement, relatedElement, clientXY, evt, cancelable, 0); } /** * Dispatches a DOM MouseEvent according to the specified * parameters. * * @param eventType the event type * @param targetElement the target of the event * @param relatedElement the related target if any * @param clientXY the mouse coordinates in the client space * @param evt the GVT GraphicsNodeMouseEvent * @param cancelable true means the event is cancelable * @param bubbleLimit the limit to the number of nodes the event * will bubble to */ protected void dispatchMouseEvent(String eventType, Element targetElement, Element relatedElement, Point clientXY, GraphicsNodeMouseEvent evt, boolean cancelable, int bubbleLimit) { if (ctx12.mouseCaptureTarget != null) { NodeEventTarget net = null; if (targetElement != null) { net = (NodeEventTarget) targetElement; while (net != null && net != ctx12.mouseCaptureTarget) { net = net.getParentNodeEventTarget(); } } if (net == null) { if (ctx12.mouseCaptureSendAll) { targetElement = (Element) ctx12.mouseCaptureTarget; } else { targetElement = null; } } } if (targetElement != null) { short button = getButton(evt); Point screenXY = evt.getScreenPoint(); // create the coresponding DOM MouseEvent DocumentEvent d = (DocumentEvent) targetElement.getOwnerDocument(); DOMMouseEvent mouseEvt = (DOMMouseEvent) d.createEvent("MouseEvents"); String modifiers = DOMUtilities.getModifiersList(evt.getLockState(), evt.getModifiers()); mouseEvt.initMouseEventNS(XMLConstants.XML_EVENTS_NAMESPACE_URI, 
eventType, true, cancelable, null, evt.getClickCount(), screenXY.x, screenXY.y, clientXY.x, clientXY.y, button, (EventTarget)relatedElement, modifiers); ((AbstractEvent) mouseEvt).setBubbleLimit(bubbleLimit); try { ((EventTarget) targetElement).dispatchEvent(mouseEvt); } catch (RuntimeException e) { ua.displayError(e); } finally { lastTargetElement = targetElement; } } if (ctx12.mouseCaptureTarget != null && ctx12.mouseCaptureAutoRelease && "mouseup".equals(eventType)) { ctx12.stopMouseCapture(); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.vsphere.functions; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.base.Splitter; import com.google.common.base.Strings; import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.collect.Lists; import com.google.common.net.InetAddresses; import com.google.inject.Inject; import com.google.inject.Singleton; import com.vmware.vim25.CustomFieldDef; import com.vmware.vim25.CustomFieldStringValue; import com.vmware.vim25.CustomFieldValue; import com.vmware.vim25.DistributedVirtualSwitchPortConnection; import com.vmware.vim25.GuestNicInfo; import com.vmware.vim25.GuestProcessInfo; import com.vmware.vim25.GuestProgramSpec; import com.vmware.vim25.NamePasswordAuthentication; import com.vmware.vim25.VirtualDevice; import com.vmware.vim25.VirtualDeviceBackingInfo; import com.vmware.vim25.VirtualDeviceConfigSpec; import com.vmware.vim25.VirtualDeviceConfigSpecOperation; import com.vmware.vim25.VirtualEthernetCard; import com.vmware.vim25.VirtualEthernetCardDistributedVirtualPortBackingInfo; import com.vmware.vim25.VirtualEthernetCardNetworkBackingInfo; import 
com.vmware.vim25.VirtualMachineConfigSpec; import com.vmware.vim25.VirtualMachinePowerState; import com.vmware.vim25.VirtualMachineToolsStatus; import com.vmware.vim25.mo.DistributedVirtualPortgroup; import com.vmware.vim25.mo.GuestAuthManager; import com.vmware.vim25.mo.GuestOperationsManager; import com.vmware.vim25.mo.GuestProcessManager; import com.vmware.vim25.mo.InventoryNavigator; import com.vmware.vim25.mo.ManagedEntity; import com.vmware.vim25.mo.Task; import com.vmware.vim25.mo.VirtualMachine; import org.jclouds.compute.domain.NodeMetadata; import org.jclouds.compute.domain.NodeMetadata.Status; import org.jclouds.compute.domain.NodeMetadataBuilder; import org.jclouds.compute.reference.ComputeServiceConstants; import org.jclouds.domain.LocationBuilder; import org.jclouds.domain.LocationScope; import org.jclouds.logging.Logger; import org.jclouds.util.InetAddresses2; import org.jclouds.util.Predicates2; import org.jclouds.vsphere.config.VSphereConstants; import org.jclouds.vsphere.domain.VSphereServiceInstance; import org.jclouds.vsphere.predicates.VSpherePredicate; import javax.annotation.Resource; import javax.inject.Named; import java.net.Inet4Address; import java.net.Inet6Address; import java.net.URI; import java.net.URISyntaxException; import java.rmi.RemoteException; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Predicates.not; import static com.google.common.collect.Iterables.filter; import static com.google.common.collect.Sets.newHashSet; @Singleton public class VirtualMachineToNodeMetadata implements Function<VirtualMachine, NodeMetadata> { public static final Splitter COMMA_SPLITTER = Splitter.on(","); @Resource @Named(ComputeServiceConstants.COMPUTE_LOGGER) protected Logger logger = Logger.NULL; private final Map<VirtualMachinePowerState, Status> toPortableNodeStatus; private final 
Supplier<Map<String, CustomFieldDef>> customFields; private final Supplier<VSphereServiceInstance> serviceInstanceSupplier; private final Function<String, DistributedVirtualPortgroup> distributedVirtualPortgroupFunction; protected String vmInitPassword = null; @Inject public VirtualMachineToNodeMetadata(Map<VirtualMachinePowerState, NodeMetadata.Status> toPortableNodeStatus, Supplier<Map<String, CustomFieldDef>> customFields, Supplier<VSphereServiceInstance> serviceInstanceSupplier, Function<String, DistributedVirtualPortgroup> distributedVirtualPortgroupFunction, @Named(VSphereConstants.JCLOUDS_VSPHERE_VM_PASSWORD) String vmInitPassword) { this.toPortableNodeStatus = checkNotNull(toPortableNodeStatus, "PortableNodeStatus"); this.customFields = checkNotNull(customFields, "customFields"); this.serviceInstanceSupplier = checkNotNull(serviceInstanceSupplier, "serviceInstanceSupplier"); this.distributedVirtualPortgroupFunction = checkNotNull(distributedVirtualPortgroupFunction, "distributedVirtualPortgroupFunction"); this.vmInitPassword = vmInitPassword; } @Override public NodeMetadata apply(VirtualMachine vm) { VirtualMachine freshVm = null; String virtualMachineName = ""; NodeMetadataBuilder nodeMetadataBuilder = new NodeMetadataBuilder(); try (VSphereServiceInstance instance = serviceInstanceSupplier.get();) { String vmMORId = vm.getMOR().get_value(); ManagedEntity[] vms = new InventoryNavigator(instance.getInstance().getRootFolder()).searchManagedEntities("VirtualMachine"); for (ManagedEntity machine : vms) { if (machine.getMOR().getVal().equals(vmMORId)) { freshVm = (VirtualMachine) machine; break; } } LocationBuilder locationBuilder = new LocationBuilder(); locationBuilder.description(""); locationBuilder.id(""); locationBuilder.scope(LocationScope.HOST); if (freshVm == null) { nodeMetadataBuilder.status(Status.ERROR).id(""); return nodeMetadataBuilder.build(); } virtualMachineName = freshVm.getName(); logger.trace("<< converting vm (" + virtualMachineName + ") 
to NodeMetadata"); VirtualMachinePowerState vmState = freshVm.getRuntime().getPowerState(); NodeMetadata.Status nodeState = toPortableNodeStatus.get(vmState); if (nodeState == null) nodeState = Status.UNRECOGNIZED; nodeMetadataBuilder.name(virtualMachineName).ids(virtualMachineName) .location(locationBuilder.build()) .hostname(virtualMachineName); String host = freshVm.getServerConnection().getUrl().getHost(); try { nodeMetadataBuilder.uri(new URI("https://" + host + ":9443/vsphere-client/vmrc/vmrc.jsp?vm=urn:vmomi:VirtualMachine:" + vmMORId + ":" + freshVm.getSummary().getConfig().getUuid())); } catch (URISyntaxException e) { } Set<String> ipv4Addresses = newHashSet(); Set<String> ipv6Addresses = newHashSet(); if (nodeState == Status.RUNNING && !freshVm.getConfig().isTemplate() && VSpherePredicate.IsToolsStatusEquals(VirtualMachineToolsStatus.toolsOk).apply(freshVm) && VSpherePredicate.isNicConnected.apply(freshVm)) { Predicates2.retry(new Predicate<VirtualMachine>() { @Override public boolean apply(VirtualMachine vm) { try { return !Strings.isNullOrEmpty(vm.getGuest().getIpAddress()); } catch (Exception e) { return false; } } }, 60 * 1000 * 10, 10 * 1000, TimeUnit.MILLISECONDS).apply(freshVm); } if (VSpherePredicate.IsToolsStatusIsIn(Lists.newArrayList(VirtualMachineToolsStatus.toolsNotInstalled, VirtualMachineToolsStatus.toolsNotRunning)).apply(freshVm)) logger.trace("<< No VMware tools installed or not running ( " + virtualMachineName + " )"); else if (nodeState == Status.RUNNING && not(VSpherePredicate.isTemplatePredicate).apply(freshVm)) { int retries = 0; while (ipv4Addresses.size() < 1) { ipv4Addresses.clear(); ipv6Addresses.clear(); GuestNicInfo[] nics = freshVm.getGuest().getNet(); boolean nicConnected = false; if (null != nics) { for (GuestNicInfo nic : nics) { nicConnected = nicConnected || nic.connected; String[] addresses = nic.getIpAddress(); if (null != addresses) { for (String address : addresses) { if (logger.isTraceEnabled()) logger.trace("<< 
find IP addresses " + address + " for " + virtualMachineName); if (isInet4Address.apply(address)) { ipv4Addresses.add(address); } else if (isInet6Address.apply(address)) { ipv6Addresses.add(address); } } } } } if (toPortableNodeStatus.get(freshVm.getRuntime().getPowerState()) != Status.RUNNING) { logger.trace(">> Node is not running. EXIT IP search."); break; } if (freshVm.getGuest().getToolsVersionStatus2().equals("guestToolsUnmanaged") && nics == null) { String ip = freshVm.getGuest().getIpAddress(); if (!Strings.isNullOrEmpty(ip)) { if (isInet4Address.apply(ip)) { ipv4Addresses.add(ip); } else if (isInet6Address.apply(ip)) { ipv6Addresses.add(ip); } } break; } if (!nicConnected && retries == 5) { logger.trace("<< VM does NOT have any NIC connected."); break; } if (ipv4Addresses.size() < 1 && null != nics) { //nicConfigurationRecovery(instance, freshVm); logger.warn("<< can't find IPv4 address for vm: " + virtualMachineName); retries++; Thread.sleep(6000); } if (ipv4Addresses.size() < 1 && retries == 15) { logger.error("<< can't find IPv4 address after " + retries + " retries for vm: " + virtualMachineName); break; } } nodeMetadataBuilder.publicAddresses(filter(ipv4Addresses, not(isPrivateAddress))); nodeMetadataBuilder.privateAddresses(filter(ipv4Addresses, isPrivateAddress)); } CustomFieldValue[] customFieldValues = freshVm.getCustomValue(); if (customFieldValues != null) { for (CustomFieldValue customFieldValue : customFieldValues) { if (customFieldValue.getKey() == customFields.get().get(VSphereConstants.JCLOUDS_TAGS).getKey()) { nodeMetadataBuilder.tags(COMMA_SPLITTER.split(((CustomFieldStringValue) customFieldValue).getValue())); } else if (customFieldValue.getKey() == customFields.get().get(VSphereConstants.JCLOUDS_GROUP).getKey()) { nodeMetadataBuilder.group(((CustomFieldStringValue) customFieldValue).getValue()); } } } nodeMetadataBuilder.status(nodeState); return nodeMetadataBuilder.build(); } catch (Throwable t) { logger.error("Got an exception for 
virtual machine name : " + virtualMachineName);
         logger.error("The exception is : " + t.toString());
         // propagate() rethrows, so the return below is effectively unreachable; it only satisfies the compiler.
         Throwables.propagate(t);
         return nodeMetadataBuilder.build();
      }
   }

   /**
    * Best-effort recovery for a VM whose NICs are connected but not picking up an IPv4 address.
    * <p>
    * Phase 1: flips every ethernet card onto a *different* backing (another distributed
    * portgroup, or the literal "VM Network" for plain network backings) and reconfigures the VM.
    * Phase 2: if that reconfigure succeeded, flips every card back to the original backing and
    * then runs {@code service network restart} inside the guest as root so the OS re-requests
    * its addresses, polling up to ~20s for the restart process to exit.
    * <p>
    * NOTE(review): the only visible call site of this method is commented out
    * ({@code //nicConfigurationRecovery(instance, freshVm)}), so this path appears dormant.
    * NOTE(review): {@code originalKey} holds only the key of the <em>last</em> ethernet card
    * examined in phase 1; with multiple NICs on different portgroups, phase 2 restores them all
    * to that single key — confirm whether multi-NIC VMs are expected here.
    *
    * @param instance vSphere connection wrapper used to enumerate distributed portgroups
    * @param freshVm  the virtual machine whose NICs should be recovered
    * @throws RemoteException      on any vSphere API failure
    * @throws InterruptedException if interrupted while waiting on a reconfigure task
    */
   private void nicConfigurationRecovery(VSphereServiceInstance instance, VirtualMachine freshVm) throws RemoteException, InterruptedException {
      List<VirtualDeviceConfigSpec> updates = Lists.newArrayList();
      String originalKey = "";
      for (VirtualDevice device : freshVm.getConfig().getHardware().getDevice()) {
         if (device instanceof VirtualEthernetCard) {
            VirtualDeviceConfigSpec nicSpec = new VirtualDeviceConfigSpec();
            VirtualEthernetCard ethernetCard = (VirtualEthernetCard) device;
            ethernetCard.getConnectable().setConnected(true);
            VirtualDeviceBackingInfo backingInfo = ethernetCard.getBacking();
            logger.trace(">> VirtualDeviceBackingInfo: " + backingInfo.getClass().getName());
            if (backingInfo instanceof VirtualEthernetCardDistributedVirtualPortBackingInfo) {
               ManagedEntity[] virtualPortgroups = new InventoryNavigator(instance.getInstance().getRootFolder()).searchManagedEntities("DistributedVirtualPortgroup");
               VirtualEthernetCardDistributedVirtualPortBackingInfo virtualPortBackingInfo = (VirtualEthernetCardDistributedVirtualPortBackingInfo) backingInfo;
               DistributedVirtualPortgroup virtualPortgroup = null;
               originalKey = virtualPortBackingInfo.getPort().getPortgroupKey();
               // Pick the first portgroup whose key differs from the card's current one.
               // NOTE(review): if the first entity already differs the loop breaks immediately;
               // if it matches, iteration continues and the loop ends holding the LAST entity —
               // which could still equal originalKey when only one portgroup exists. Confirm intent.
               for (ManagedEntity entity : virtualPortgroups) {
                  virtualPortgroup = (DistributedVirtualPortgroup) entity;
                  if (!virtualPortgroup.getKey().equals(originalKey)) {
                     break;
                  }
               }
               // Re-point the card at the temporary portgroup, keeping the same switch UUID.
               DistributedVirtualSwitchPortConnection port = new DistributedVirtualSwitchPortConnection();
               DistributedVirtualSwitchPortConnection origPort = virtualPortBackingInfo.getPort();
               port.setPortgroupKey(virtualPortgroup.getKey());
               port.setSwitchUuid(origPort.getSwitchUuid());
               virtualPortBackingInfo.setPort(port);
            } else {
               // Plain (non-distributed) network backing: remember the device name and park the
               // card on the standard "VM Network".
               VirtualEthernetCardNetworkBackingInfo networkBackingInfo = (VirtualEthernetCardNetworkBackingInfo) backingInfo;
               originalKey = networkBackingInfo.getDeviceName();
               networkBackingInfo.setDeviceName("VM Network");
            }
            nicSpec.setOperation(VirtualDeviceConfigSpecOperation.edit);
            nicSpec.setDevice(device);
            updates.add(nicSpec);
         }
      }
      VirtualMachineConfigSpec spec = new VirtualMachineConfigSpec();
      spec.setDeviceChange(updates.toArray(new VirtualDeviceConfigSpec[updates.size()]));
      Task task = freshVm.reconfigVM_Task(spec);
      String result = task.waitForTask();
      if (result.equals(Task.SUCCESS)) {
         // Phase 2: restore every card to the remembered original backing.
         updates.clear();
         for (VirtualDevice device : freshVm.getConfig().getHardware().getDevice()) {
            if (device instanceof VirtualEthernetCard) {
               VirtualDeviceConfigSpec nicSpec = new VirtualDeviceConfigSpec();
               VirtualEthernetCard ethernetCard = (VirtualEthernetCard) device;
               ethernetCard.getConnectable().setConnected(true);
               VirtualDeviceBackingInfo backingInfo = ethernetCard.getBacking();
               if (backingInfo instanceof VirtualEthernetCardDistributedVirtualPortBackingInfo) {
                  VirtualEthernetCardDistributedVirtualPortBackingInfo virtualPortBackingInfo = (VirtualEthernetCardDistributedVirtualPortBackingInfo) backingInfo;
                  DistributedVirtualSwitchPortConnection port = new DistributedVirtualSwitchPortConnection();
                  DistributedVirtualSwitchPortConnection origPort = virtualPortBackingInfo.getPort();
                  port.setPortgroupKey(originalKey);
                  port.setSwitchUuid(origPort.getSwitchUuid());
                  virtualPortBackingInfo.setPort(port);
               } else {
                  VirtualEthernetCardNetworkBackingInfo networkBackingInfo = (VirtualEthernetCardNetworkBackingInfo) backingInfo;
                  networkBackingInfo.setDeviceName(originalKey);
               }
               nicSpec.setOperation(VirtualDeviceConfigSpecOperation.edit);
               nicSpec.setDevice(device);
               updates.add(nicSpec);
            }
         }
         spec = new VirtualMachineConfigSpec();
         spec.setDeviceChange(updates.toArray(new VirtualDeviceConfigSpec[updates.size()]));
         task = freshVm.reconfigVM_Task(spec);
         result = task.waitForTask();
         if (result.equals(Task.SUCCESS)) {
            // Restart networking inside the guest so it re-acquires addresses on the restored NICs.
            GuestOperationsManager gom = serviceInstanceSupplier.get().getInstance().getGuestOperationsManager();
            GuestAuthManager gam = gom.getAuthManager(freshVm);
            final NamePasswordAuthentication npa = new NamePasswordAuthentication();
            npa.setUsername("root");
            npa.setPassword(vmInitPassword);
            GuestProgramSpec gps = new GuestProgramSpec();
            gps.programPath = "/bin/sh";
            gps.arguments = "-c \"service network restart\"";
            List<String> env = Lists.newArrayList("PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin", "SHELL=/bin/bash");
            gps.setEnvVariables(env.toArray(new String[env.size()]));
            GuestProcessManager gpm = gom.getProcessManager(freshVm);
            final long pid = gpm.startProgramInGuest(npa, gps);
            // Poll (1s period, 20s ceiling) until the restart process is no longer listed,
            // i.e. it has exited.
            Predicates2.retry(new Predicate<GuestProcessManager>() {
               @Override
               public boolean apply(GuestProcessManager o) {
                  try {
                     GuestProcessInfo[] guestProcessInfos = o.listProcessesInGuest(npa, new long[]{pid});
                     return guestProcessInfos == null || guestProcessInfos.length == 0;
                  } catch (RemoteException e) {
                     // Transient API failure: keep polling until the deadline.
                     return false;
                  }
               }
            }, 20 * 1000, 1000, TimeUnit.MILLISECONDS).apply(gpm);
         }
      }
   }

   // True for any non-empty string; does not validate that the string is an IP address.
   Predicate<String> ipAddressTester = new Predicate<String>() {
      @Override
      public boolean apply(String input) {
         return !input.isEmpty();
      }
   };

   // RFC1918-style private-range check, delegated to jclouds' InetAddresses2.
   private static final Predicate<String> isPrivateAddress = new Predicate<String>() {
      public boolean apply(String in) {
         return InetAddresses2.IsPrivateIPAddress.INSTANCE.apply(in);
      }
   };

   // True when the string parses as an IPv4 literal; hostnames return false.
   public static final Predicate<String> isInet4Address = new Predicate<String>() {
      @Override
      public boolean apply(String input) {
         try {
            // Note we can do this, as InetAddress is now on the white list
            return InetAddresses.forString(input) instanceof Inet4Address;
         } catch (IllegalArgumentException e) {
            // could be a hostname
            return false;
         }
      }
   };

   // True when the string parses as an IPv6 literal; hostnames return false.
   public static final Predicate<String> isInet6Address = new Predicate<String>() {
      @Override
      public boolean apply(String input) {
         try {
            // Note we can do this, as InetAddress is now on the white list
            return InetAddresses.forString(input) instanceof Inet6Address;
         } catch (IllegalArgumentException e) {
            // could be a hostname
            return false;
         }
      }
   };
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package groovy.util; import groovy.lang.GroovyObjectSupport; import groovy.lang.GroovyRuntimeException; import groovy.lang.Writable; import org.apache.groovy.io.StringBuilderWriter; import org.codehaus.groovy.runtime.DefaultGroovyMethods; import org.codehaus.groovy.runtime.InvokerHelper; import org.codehaus.groovy.runtime.StringGroovyMethods; import org.codehaus.groovy.syntax.Types; import java.io.BufferedWriter; import java.io.IOException; import java.io.Writer; import java.net.URL; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.Properties; import java.util.Set; /** * A ConfigObject at a simple level is a Map that creates configuration entries (other ConfigObjects) when referencing them. * This means that navigating to foo.bar.stuff will not return null but nested ConfigObjects which are of course empty maps * The Groovy truth can be used to check for the existence of "real" entries. 
*
 * @author Graeme Rocher
 * @author Guillaume Laforge (rewrite in Java related to security constraints on Google App Engine)
 * @since 1.5
 */
public class ConfigObject extends GroovyObjectSupport implements Writable, Map, Cloneable {

    // Groovy language keywords; keys that collide with one of these are quoted when serialized.
    static final Collection<String> KEYWORDS = Types.getKeywords();

    // Single indentation step used when pretty-printing nested config blocks.
    static final String TAB_CHARACTER = "\t";

    /**
     * The config file that was used when parsing this ConfigObject
     */
    private URL configFile;

    // Backing store. A LinkedHashMap so iteration preserves insertion order.
    // NOTE(review): declared as HashMap rather than Map; clone() depends on the runtime
    // type being LinkedHashMap (it casts the result of delegateMap.clone()).
    private HashMap delegateMap = new LinkedHashMap();

    public ConfigObject(URL file) {
        this.configFile = file;
    }

    public ConfigObject() {
        this(null);
    }

    public URL getConfigFile() {
        return configFile;
    }

    public void setConfigFile(URL configFile) {
        this.configFile = configFile;
    }

    /**
     * Writes this config object into a String serialized representation which can later be parsed back using the parse()
     * method
     *
     * @see groovy.lang.Writable#writeTo(java.io.Writer)
     */
    public Writer writeTo(Writer outArg) throws IOException {
        BufferedWriter out = new BufferedWriter(outArg);
        try {
            writeConfig("", this, out, 0, false);
        } finally {
            // Flush but do not close: the caller owns outArg.
            out.flush();
        }

        return outArg;
    }

    /**
     * Overrides the default getProperty implementation to create nested ConfigObject instances on demand
     * for non-existent keys
     */
    public Object getProperty(String name) {
        if ("configFile".equals(name))
            return this.configFile;

        if (!containsKey(name)) {
            // Auto-vivify: referencing a missing key creates (and stores) an empty child node,
            // so navigation like foo.bar.stuff never yields null.
            ConfigObject prop = new ConfigObject(this.configFile);
            put(name, prop);

            return prop;
        }

        return get(name);
    }

    /**
     * A ConfigObject is a tree structure consisting of nested maps. This flattens the maps into
     * a single level structure like a properties file
     */
    public Map flatten() {
        return flatten(null);
    }

    /**
     * Flattens this ConfigObject populating the results into the target Map
     *
     * @see ConfigObject#flatten()
     */
    public Map flatten(Map target) {
        if (target == null)
            target = new ConfigObject();
        populate("", target, this);

        return target;
    }

    /**
     * Merges the given map with this ConfigObject overriding any matching configuration entries in this ConfigObject
     *
     * @param other The ConfigObject to merge with
     * @return The result of the merge
     */
    public Map merge(ConfigObject other) {
        return doMerge(this, other);
    }

    /**
     * Converts this ConfigObject into a the java.util.Properties format, flattening the tree structure beforehand
     *
     * @return A java.util.Properties instance
     */
    public Properties toProperties() {
        Properties props = new Properties();
        flatten(props);

        props = convertValuesToString(props);

        return props;
    }

    /**
     * Converts this ConfigObject ino the java.util.Properties format, flatten the tree and prefixing all entries with the given prefix
     *
     * @param prefix The prefix to append before property entries
     * @return A java.util.Properties instance
     */
    public Properties toProperties(String prefix) {
        Properties props = new Properties();

        populate(prefix + ".", props, this);

        props = convertValuesToString(props);

        return props;
    }

    // Recursive merge: when both sides hold a non-empty map for the same key, merge them;
    // in every other case the entry from "other" wins.
    private Map doMerge(Map config, Map other) {
        for (Object o : other.entrySet()) {
            Map.Entry next = (Map.Entry) o;
            Object key = next.getKey();
            Object value = next.getValue();

            Object configEntry = config.get(key);

            if (configEntry == null) {
                config.put(key, value);
            } else {
                if (configEntry instanceof Map && !((Map) configEntry).isEmpty() && value instanceof Map) {
                    // recur
                    doMerge((Map) configEntry, (Map) value);
                } else {
                    config.put(key, value);
                }
            }
        }
        return config;
    }

    // Serializes one level of the tree. "prefix" is a dotted path already emitted for this node,
    // "tab" is the current nesting depth, and "apply" controls whether indentation is written.
    private void writeConfig(String prefix, ConfigObject map, BufferedWriter out, int tab, boolean apply) throws IOException {
        String space = apply ? StringGroovyMethods.multiply(TAB_CHARACTER, tab) : "";

        for (Object o1 : map.keySet()) {
            String key = (String) o1;
            Object v = map.get(key);

            if (v instanceof ConfigObject) {
                ConfigObject value = (ConfigObject) v;

                if (!value.isEmpty()) {

                    // A child key containing '.' forces dotted-path output instead of a block.
                    Object dotsInKeys = null;
                    for (Object o : value.entrySet()) {
                        Entry e = (Entry) o;
                        String k = (String) e.getKey();
                        if (k.indexOf('.') > -1) {
                            dotsInKeys = e;
                            break;
                        }
                    }

                    int configSize = value.size();
                    Object firstKey = value.keySet().iterator().next();
                    Object firstValue = value.values().iterator().next();
                    int firstSize;
                    if (firstValue instanceof ConfigObject) {
                        firstSize = ((ConfigObject) firstValue).size();
                    } else {
                        firstSize = 1;
                    }

                    if (configSize == 1 || DefaultGroovyMethods.asBoolean(dotsInKeys)) {

                        if (firstSize == 1 && firstValue instanceof ConfigObject) {
                            // Single chain of single-entry maps: collapse to a dotted prefix.
                            key = KEYWORDS.contains(key) ? InvokerHelper.inspect(key) : key;
                            String writePrefix = prefix + key + "." + firstKey + ".";
                            writeConfig(writePrefix, (ConfigObject) firstValue, out, tab, true);
                        } else if (!DefaultGroovyMethods.asBoolean(dotsInKeys) && firstValue instanceof ConfigObject) {
                            writeNode(key, space, tab, value, out);
                        } else {
                            for (Object j : value.keySet()) {
                                Object v2 = value.get(j);
                                Object k2 = ((String) j).indexOf('.') > -1 ? InvokerHelper.inspect(j) : j;
                                if (v2 instanceof ConfigObject) {
                                    key = KEYWORDS.contains(key) ? InvokerHelper.inspect(key) : key;
                                    writeConfig(prefix + key, (ConfigObject) v2, out, tab, false);
                                } else {
                                    writeValue(key + "." + k2, space, prefix, v2, out);
                                }
                            }
                        }
                    } else {
                        writeNode(key, space, tab, value, out);
                    }
                }
            } else {
                writeValue(key, space, prefix, v, out);
            }
        }
    }

    // Emits a single "prefixkey=value" assignment line.
    private static void writeValue(String key, String space, String prefix, Object value, BufferedWriter out) throws IOException {
//        key = key.indexOf('.') > -1 ? InvokerHelper.inspect(key) : key;
        boolean isKeyword = KEYWORDS.contains(key);
        key = isKeyword ? InvokerHelper.inspect(key) : key;

        // A bare quoted keyword would not parse as an assignment target, so scope it with "this.".
        if (!StringGroovyMethods.asBoolean(prefix) && isKeyword) prefix = "this.";
        out.append(space).append(prefix).append(key).append('=').append(InvokerHelper.inspect(value));
        out.newLine();
    }

    // Emits "key { ... }" with the child entries serialized one indentation level deeper.
    private void writeNode(String key, String space, int tab, ConfigObject value, BufferedWriter out) throws IOException {
        key = KEYWORDS.contains(key) ? InvokerHelper.inspect(key) : key;
        out.append(space).append(key).append(" {");
        out.newLine();
        writeConfig("", value, out, tab + 1, true);
        out.append(space).append('}');
        out.newLine();
    }

    // Copies entries into a fresh Properties, stringifying each value.
    // NOTE(review): Properties (a Hashtable) rejects null values — put(key, null) would throw
    // an NPE here if a flattened entry's value is null. Confirm whether that can occur.
    private static Properties convertValuesToString(Map props) {
        Properties newProps = new Properties();

        for (Object o : props.entrySet()) {
            Map.Entry next = (Map.Entry) o;
            Object key = next.getKey();
            Object value = next.getValue();

            newProps.put(key, value != null ? value.toString() : null);
        }

        return newProps;
    }

    // Depth-first walk that records leaf values under dotted keys ("suffix" is the path so far).
    private void populate(String suffix, Map config, Map map) {
        for (Object o : map.entrySet()) {
            Map.Entry next = (Map.Entry) o;
            Object key = next.getKey();
            Object value = next.getValue();

            if (value instanceof Map) {
                populate(suffix + key + ".", config, (Map) value);
            } else {
                try {
                    config.put(suffix + key, value);
                } catch (NullPointerException e) {
                    // it is idiotic story but if config map doesn't allow null values (like Hashtable)
                    // we can't do too much
                }
            }
        }
    }

    // --- Map interface: straight delegation to the backing LinkedHashMap. ---

    public int size() {
        return delegateMap.size();
    }

    public boolean isEmpty() {
        return delegateMap.isEmpty();
    }

    public boolean containsKey(Object key) {
        return delegateMap.containsKey(key);
    }

    public boolean containsValue(Object value) {
        return delegateMap.containsValue(value);
    }

    public Object get(Object key) {
        return delegateMap.get(key);
    }

    public Object put(Object key, Object value) {
        return delegateMap.put(key, value);
    }

    public Object remove(Object key) {
        return delegateMap.remove(key);
    }

    public void putAll(Map m) {
        delegateMap.putAll(m);
    }

    public void clear() {
        delegateMap.clear();
    }

    public Set keySet() {
        return delegateMap.keySet();
    }

    public Collection values() {
        return delegateMap.values();
    }

    public Set entrySet() {
        return delegateMap.entrySet();
    }

    /**
     * Returns a shallow copy of this ConfigObject, keys and configuration entries are not cloned.
     * @return a shallow copy of this ConfigObject
     */
    public ConfigObject clone() {
        try {
            ConfigObject clone = (ConfigObject) super.clone();
            clone.configFile = configFile;
            // Shallow copy of the backing map: entries themselves are shared with the original.
            clone.delegateMap = (LinkedHashMap) delegateMap.clone();
            return clone;
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new AssertionError();
        }
    }

    /**
     * Checks if a config option is set. Example usage:
     * <pre class="groovyTestCase">
     * def config = new ConfigSlurper().parse("foo { password='' }")
     * assert config.foo.isSet('password')
     * assert config.foo.isSet('username') == false
     * </pre>
     *
     * The check works <b>only</b> for options <b>one</b> block below the current block.
     * E.g. <code>config.isSet('foo.password')</code> will always return false.
     *
     * @param option The name of the option
     * @return <code>true</code> if the option is set <code>false</code> otherwise
     * @since 2.3.0
     */
    public Boolean isSet(String option) {
        if (delegateMap.containsKey(option)) {
            Object entry = delegateMap.get(option);
            // An empty ConfigObject child is an auto-vivified placeholder, not a real value.
            if (!(entry instanceof ConfigObject) || !((ConfigObject) entry).isEmpty()) {
                return Boolean.TRUE;
            }
        }
        return Boolean.FALSE;
    }

    public String prettyPrint() {
        Writer sw = new StringBuilderWriter();
        try {
            writeTo(sw);
        } catch (IOException e) {
            throw new GroovyRuntimeException(e);
        }

        return sw.toString();
    }

    @Override
    public String toString() {
        Writer sw = new StringBuilderWriter();
        try {
            InvokerHelper.write(sw, this);
        } catch (IOException e) {
            throw new GroovyRuntimeException(e);
        }

        return sw.toString();
    }
}
package com.wuest.prefab.structures.events; import com.wuest.prefab.ModRegistry; import com.wuest.prefab.Prefab; import com.wuest.prefab.Tuple; import com.wuest.prefab.blocks.BlockFlags; import com.wuest.prefab.config.EntityPlayerConfiguration; import com.wuest.prefab.proxy.CommonProxy; import com.wuest.prefab.proxy.messages.PlayerEntityTagMessage; import com.wuest.prefab.structures.base.BuildBlock; import com.wuest.prefab.structures.base.BuildEntity; import com.wuest.prefab.structures.base.BuildingMethods; import com.wuest.prefab.structures.base.Structure; import net.minecraft.core.BlockPos; import net.minecraft.core.Direction; import net.minecraft.nbt.CompoundTag; import net.minecraft.nbt.DoubleTag; import net.minecraft.nbt.ListTag; import net.minecraft.server.level.ServerPlayer; import net.minecraft.world.entity.Entity; import net.minecraft.world.entity.EntityType; import net.minecraft.world.entity.LivingEntity; import net.minecraft.world.entity.decoration.HangingEntity; import net.minecraft.world.entity.decoration.ItemFrame; import net.minecraft.world.entity.decoration.Painting; import net.minecraft.world.entity.player.Player; import net.minecraft.world.entity.vehicle.AbstractMinecart; import net.minecraft.world.item.ItemStack; import net.minecraft.world.level.block.*; import net.minecraft.world.level.block.state.BlockState; import net.minecraft.world.level.block.state.properties.BlockStateProperties; import net.minecraft.world.level.block.state.properties.DoubleBlockHalf; import net.minecraft.world.level.chunk.ChunkAccess; import net.minecraft.world.level.material.Material; import net.minecraft.world.phys.AABB; import net.minecraft.world.phys.shapes.Shapes; import net.minecraftforge.event.TickEvent; import net.minecraftforge.event.TickEvent.ServerTickEvent; import net.minecraftforge.event.entity.player.PlayerEvent; import net.minecraftforge.event.entity.player.PlayerEvent.PlayerLoggedInEvent; import net.minecraftforge.eventbus.api.SubscribeEvent; import 
net.minecraftforge.fml.common.Mod.EventBusSubscriber;
import net.minecraftforge.network.NetworkDirection;

import java.util.*;
import java.util.Map.Entry;

/**
 * This is the structure event handler.
 *
 * @author WuestMan
 */
@SuppressWarnings({"ConstantConditions", "UnusedAssignment", "unused"})
@EventBusSubscriber(modid = Prefab.MODID)
public final class StructureEventHandler {
    /**
     * Contains a hashmap for the structures to build and for whom.
     */
    public static HashMap<Player, ArrayList<Structure>> structuresToBuild = new HashMap<>();

    // Entities queued for spawning once their structure's blocks are in place.
    public static ArrayList<Tuple<Structure, BuildEntity>> entitiesToGenerate = new ArrayList<>();

    // Tick counter used to delay entity generation (processed when it exceeds 40 ticks).
    public static int ticksSinceLastEntitiesGenerated = 0;

    /**
     * This event is used to determine if the player should be given the starting house item when they log in.
     *
     * @param event The event object.
     */
    @SubscribeEvent
    public static void PlayerLoggedIn(PlayerLoggedInEvent event) {
        if (!event.getPlayer().level.isClientSide() && event.getPlayer() instanceof ServerPlayer) {
            ServerPlayer player = (ServerPlayer) event.getPlayer();
            EntityPlayerConfiguration playerConfig = EntityPlayerConfiguration.loadFromEntityData(player);
            String startingItem = CommonProxy.proxyConfiguration.serverConfiguration.startingItem;

            if (!playerConfig.givenHouseBuilder && startingItem != null) {
                ItemStack stack = ItemStack.EMPTY;

                // Map the configured item name to the registered item; anything else yields EMPTY.
                switch (startingItem.toLowerCase()) {
                    case "starting house": {
                        stack = new ItemStack(ModRegistry.StartHouse.get());
                        break;
                    }

                    case "moderate house": {
                        stack = new ItemStack(ModRegistry.ModerateHouse.get());
                        break;
                    }
                }

                if (!stack.isEmpty()) {
                    System.out.println(player.getDisplayName().getString() + " joined the game for the first time. Giving them starting item.");

                    player.getInventory().add(stack);
                    player.containerMenu.broadcastChanges();

                    // Make sure to set the tag for this player so they don't get the item again.
                    playerConfig.givenHouseBuilder = true;
                    playerConfig.saveToPlayer(player);
                }
            }

            // Send the tag to the client.
            Prefab.network.sendTo(
                    new PlayerEntityTagMessage(playerConfig.getModIsPlayerNewTag(player)),
                    ((ServerPlayer) event.getPlayer()).connection.connection,
                    NetworkDirection.PLAY_TO_CLIENT);
        }
    }

    /**
     * This event is primarily used to build a batch of blocks each tick for any queued structures for all players.
     *
     * @param event The event object.
     */
    @SubscribeEvent
    public static void onServerTick(ServerTickEvent event) {
        if (event.phase == TickEvent.Phase.START) {
            ArrayList<Player> playersToRemove = new ArrayList<>();

            if (StructureEventHandler.entitiesToGenerate.size() > 0) {
                StructureEventHandler.ticksSinceLastEntitiesGenerated++;

                if (StructureEventHandler.ticksSinceLastEntitiesGenerated > 40) {
                    // Process any entities.
                    StructureEventHandler.processStructureEntities();

                    StructureEventHandler.ticksSinceLastEntitiesGenerated = 0;
                }
            }

            if (StructureEventHandler.structuresToBuild.size() > 0) {
                for (Entry<Player, ArrayList<Structure>> entry : StructureEventHandler.structuresToBuild.entrySet()) {
                    ArrayList<Structure> structuresToRemove = new ArrayList<>();

                    // Build the next batch of blocks of each structure for this player.
                    // NOTE(review): the batch size below is 10 per tick (air blocks don't count
                    // against the batch because setBlock decrements i for them).
                    for (Structure structure : entry.getValue()) {
                        if (!structure.entitiesRemoved) {
                            // Go through each block and find any entities there. If there are any; kill them if they aren't players.
                            // If there is a player there...they will probably die anyways.....
                            for (BlockPos clearedPos : structure.clearedBlockPos) {
                                AABB axisPos = Shapes.block().bounds().move(clearedPos);

                                List<Entity> list = structure.world.getEntities(null, axisPos);

                                if (!list.isEmpty()) {
                                    for (Entity entity : list) {
                                        // Don't kill living entities.
                                        if (!(entity instanceof LivingEntity)) {
                                            if (entity instanceof HangingEntity) {
                                                structure.BeforeHangingEntityRemoved((HangingEntity) entity);
                                            }

                                            entity.setRemoved(Entity.RemovalReason.DISCARDED);
                                        }
                                    }
                                }
                            }

                            structure.entitiesRemoved = true;
                        }

                        if (structure.airBlocks.size() > 0) {
                            structure.hasAirBlocks = true;
                        }

                        for (int i = 0; i < 10; i++) {
                            i = StructureEventHandler.setBlock(i, structure, structuresToRemove);
                        }

                        // After building the blocks for this tick, find waterlogged blocks and remove them.
                        StructureEventHandler.removeWaterLogging(structure);
                    }

                    // Update the list of structures to remove this structure since it's done building.
                    StructureEventHandler.removeStructuresFromList(structuresToRemove, entry);

                    if (entry.getValue().size() == 0) {
                        playersToRemove.add(entry.getKey());
                    }
                }
            }

            // Remove each player that has their structure's built.
            for (Player player : playersToRemove) {
                StructureEventHandler.structuresToBuild.remove(player);
            }
        }
    }

    /**
     * This occurs when a player dies and is used to make sure that a player does not get a duplicate starting house.
     *
     * @param event The player clone event.
     */
    @SubscribeEvent
    public static void onClone(PlayerEvent.Clone event) {
        if (event.getPlayer() instanceof ServerPlayer) {
            // Don't add the tag unless the house item was added. This way it can be added if the feature is turned on.
            // When the player is cloned, make sure to copy the tag. If this is not done the item can be given to the
            // player again if they die before the log out and log back in.
            CompoundTag originalTag = event.getOriginal().getPersistentData();

            // Use the server configuration to determine if the house should be added for this player.
            String startingItem = CommonProxy.proxyConfiguration.serverConfiguration.startingItem;
            if (startingItem != null && !startingItem.equalsIgnoreCase("Nothing")) {
                if (originalTag.contains(EntityPlayerConfiguration.PLAYER_ENTITY_TAG)) {
                    CompoundTag newPlayerTag = event.getPlayer().getPersistentData();
                    newPlayerTag.put(EntityPlayerConfiguration.PLAYER_ENTITY_TAG, originalTag.get(EntityPlayerConfiguration.PLAYER_ENTITY_TAG));

                    // Send the persist tag to the client.
                    Prefab.network.sendTo(
                            new PlayerEntityTagMessage(originalTag.getCompound(EntityPlayerConfiguration.PLAYER_ENTITY_TAG)),
                            ((ServerPlayer) event.getPlayer()).connection.connection,
                            NetworkDirection.PLAY_TO_CLIENT);
                }
            }
        }
    }

    /**
     * Places the next block (or clears the next position) for a structure being built.
     * Returns the (possibly adjusted) loop counter: decremented when the cleared position was
     * already air (so air doesn't consume the per-tick batch) and 999 when the structure has no
     * blocks left (a sentinel that terminates the caller's batch loop).
     */
    private static int setBlock(int i, Structure structure, ArrayList<Structure> structuresToRemove) {
        // Structure clearing happens before anything else.
        // Don't bother clearing the area for water-based structures
        // Anything which should be air will be air
        if (structure.clearedBlockPos.size() > 0 && !structure.hasAirBlocks) {
            BlockPos currentPos = structure.clearedBlockPos.get(0);
            structure.clearedBlockPos.remove(0);

            BlockState clearBlockState = structure.world.getBlockState(currentPos);

            // If this block is not specifically air then set it to air.
            // This will also break other mod's logic blocks but they would probably be broken due to structure
            // generation anyways.
            if (clearBlockState.getMaterial() != Material.AIR) {
                structure.BeforeClearSpaceBlockReplaced(currentPos);

                for (Direction adjacentBlock : Direction.values()) {
                    BlockPos tempPos = currentPos.relative(adjacentBlock);
                    BlockState foundState = structure.world.getBlockState(tempPos);
                    Block foundBlock = foundState.getBlock();

                    // Check if this block is one that is attached to a facing, if it is, remove it first.
                    if (foundBlock instanceof TorchBlock
                            || foundBlock instanceof SignBlock
                            || foundBlock instanceof LeverBlock
                            || foundBlock instanceof ButtonBlock
                            || foundBlock instanceof BedBlock
                            || foundBlock instanceof CarpetBlock
                            || foundBlock instanceof FlowerPotBlock
                            || foundBlock instanceof SugarCaneBlock
                            || foundBlock instanceof BasePressurePlateBlock
                            || foundBlock instanceof DoorBlock
                            || foundBlock instanceof LadderBlock
                            || foundBlock instanceof VineBlock
                            || foundBlock instanceof RedStoneWireBlock
                            || foundBlock instanceof DiodeBlock
                            || foundBlock instanceof AbstractBannerBlock
                            || foundBlock instanceof LanternBlock
                            || foundBlock instanceof BaseRailBlock) {
                        structure.BeforeClearSpaceBlockReplaced(currentPos);

                        // NOTE(review): the "else if (foundBlock instanceof DoorBlock)" branch below is
                        // unreachable — it is only evaluated when foundBlock IS a BedBlock, and a BedBlock
                        // is never a DoorBlock. Doors therefore take the removeBlock branch above, which
                        // removes only one half. Confirm whether the bed/door conditions were swapped.
                        if (!(foundBlock instanceof BedBlock)) {
                            structure.world.removeBlock(tempPos, false);
                        } else if (foundBlock instanceof DoorBlock) {
                            // Make sure to remove both parts before going on.
                            DoubleBlockHalf currentHalf = foundState.getValue(BlockStateProperties.DOUBLE_BLOCK_HALF);

                            BlockPos otherHalfPos = currentHalf == DoubleBlockHalf.LOWER ? tempPos.above() : tempPos.below();

                            structure.world.setBlock(tempPos, Blocks.AIR.defaultBlockState(), 35);
                            structure.world.setBlock(otherHalfPos, Blocks.AIR.defaultBlockState(), 35);
                        } else {
                            // Found a bed, try to find the other part of the bed and remove it.
                            for (Direction currentDirection : Direction.values()) {
                                BlockPos bedPos = tempPos.relative(currentDirection);
                                BlockState bedState = structure.world.getBlockState(bedPos);

                                if (bedState.getBlock() instanceof BedBlock) {
                                    // found the other part of the bed. Remove the current block and this one.
                                    structure.world.setBlock(tempPos, Blocks.AIR.defaultBlockState(), 35);
                                    structure.world.setBlock(bedPos, Blocks.AIR.defaultBlockState(), 35);
                                    break;
                                }
                            }
                        }
                    }
                }

                structure.world.removeBlock(currentPos, false);
            } else {
                // This is just an air block, move onto the next block don't need to wait for the next tick.
                i--;
            }

            return i;
        }

        BuildBlock currentBlock = null;

        if (structure.priorityOneBlocks.size() > 0) {
            currentBlock = structure.priorityOneBlocks.get(0);
            structure.priorityOneBlocks.remove(0);
        } else {
            // There are no more blocks to set.
            structuresToRemove.add(structure);
            return 999;
        }

        BlockState state = currentBlock.getBlockState();

        BlockPos setBlockPos = currentBlock.getStartingPosition().getRelativePosition(structure.originalPos, structure.getClearSpace().getShape().getDirection(), structure.configuration.houseFacing);

        BuildingMethods.ReplaceBlock(structure.world, setBlockPos, state, 2);

        // After placing the initial block, set the sub-block. This needs to happen as the list isn't always in the
        // correct order.
        if (currentBlock.getSubBlock() != null) {
            BuildBlock subBlock = currentBlock.getSubBlock();

            BuildingMethods.ReplaceBlock(structure.world, subBlock.getStartingPosition().getRelativePosition(structure.originalPos, structure.getClearSpace().getShape().getDirection(), structure.configuration.houseFacing), subBlock.getBlockState(), BlockFlags.DEFAULT_AND_RERENDER);
        }

        return i;
    }

    // Finishes completed structures: queues their entities for spawning, strips waterlogging,
    // and removes each from the player's build list.
    private static void removeStructuresFromList(ArrayList<Structure> structuresToRemove, Entry<Player, ArrayList<Structure>> entry) {
        for (Structure structure : structuresToRemove) {
            StructureEventHandler.removeWaterLogging(structure);

            for (BuildEntity buildEntity : structure.entities) {
                Optional<EntityType<?>> entityType = EntityType.byString(buildEntity.getEntityResourceString());

                if (entityType.isPresent()) {
                    StructureEventHandler.entitiesToGenerate.add(new Tuple<>(structure, buildEntity));
                }
            }

            // This structure is done building.
            entry.getValue().remove(structure);
        }
    }

    // Spawns every queued entity at its structure-relative position, with facing/rotation
    // adjusted for the structure's orientation, then clears the queue.
    private static void processStructureEntities() {
        for (Tuple<Structure, BuildEntity> entityRecords : StructureEventHandler.entitiesToGenerate) {
            BuildEntity buildEntity = entityRecords.getSecond();
            Structure structure = entityRecords.getFirst();
            Optional<EntityType<?>> entityType = EntityType.byString(buildEntity.getEntityResourceString());

            if (entityType.isPresent()) {
                Entity entity = entityType.get().create(structure.world);

                if (entity != null) {
                    CompoundTag tagCompound = buildEntity.getEntityDataTag();

                    BlockPos entityPos = buildEntity.getStartingPosition().getRelativePosition(structure.originalPos, structure.getClearSpace().getShape().getDirection(), structure.configuration.houseFacing);

                    if (tagCompound != null) {
                        // Re-roll the UUID so the spawned entity never collides with an existing one.
                        if (tagCompound.hasUUID("UUID")) {
                            tagCompound.putUUID("UUID", UUID.randomUUID());
                        }

                        ListTag nbttaglist = new ListTag();
                        nbttaglist.add(DoubleTag.valueOf(entityPos.getX()));
                        nbttaglist.add(DoubleTag.valueOf(entityPos.getY()));
                        nbttaglist.add(DoubleTag.valueOf(entityPos.getZ()));
                        tagCompound.put("Pos", nbttaglist);

                        entity.load(tagCompound);
                    }

                    // Set item frame facing and rotation here.
                    if (entity instanceof ItemFrame) {
                        entity = StructureEventHandler.setItemFrameFacingAndRotation((ItemFrame) entity, buildEntity, entityPos, structure);
                    } else if (entity instanceof Painting) {
                        entity = StructureEventHandler.setPaintingFacingAndRotation((Painting) entity, buildEntity, entityPos, structure);
                    } else if (entity instanceof AbstractMinecart) {
                        // Minecarts need to be slightly higher to account for the rails; otherwise
                        // they will fall through the rail and the block below the rail.
                        buildEntity.entityYAxisOffset = buildEntity.entityYAxisOffset + .2;
                        entity = StructureEventHandler.setEntityFacingAndRotation(entity, buildEntity, entityPos, structure);
                    } else {
                        // Other entities
                        entity = StructureEventHandler.setEntityFacingAndRotation(entity, buildEntity, entityPos, structure);
                    }

                    structure.world.addFreshEntity(entity);
                }
            }
        }

        // All entities generated; clear out the list.
        StructureEventHandler.entitiesToGenerate.clear();
    }

    // For structures with air blocks: un-waterlogs every waterloggable block and converts
    // free-standing water inside the structure footprint to air.
    private static void removeWaterLogging(Structure structure) {
        if (structure.hasAirBlocks) {
            for (BlockPos currentPos : structure.allBlockPositions) {
                BlockState currentState = structure.world.getBlockState(currentPos);

                if (currentState.hasProperty(BlockStateProperties.WATERLOGGED)) {
                    // This is a water loggable block and there were air blocks, make sure that it's no longer water logged.
                    currentState = currentState.setValue((BlockStateProperties.WATERLOGGED), false);
                    structure.world.setBlock(currentPos, currentState, 3);
                } else if (currentState.getMaterial() == Material.WATER) {
                    structure.world.setBlock(currentPos, Blocks.AIR.defaultBlockState(), 3);
                }
            }
        }
    }

    // Rotates a painting to match the structure's orientation, shifts tall paintings down one
    // block, re-applies the computed facing via save/load NBT, and marks the chunk dirty.
    private static Entity setPaintingFacingAndRotation(Painting entity, BuildEntity buildEntity, BlockPos entityPos, Structure structure) {
        float yaw = entity.getYRot();
        Rotation rotation = Rotation.NONE;
        double x_axis_offset = 0;
        double z_axis_offset = 0;
        Direction facing = entity.getDirection();
        double y_axis_offset = buildEntity.entityYAxisOffset * -1;

        Direction structureDirection = structure.getClearSpace().getShape().getDirection();
        Direction configurationDirection = structure.configuration.houseFacing.getOpposite();

        if (configurationDirection == structureDirection.getOpposite()) {
            rotation = Rotation.CLOCKWISE_180;
            facing = facing.getOpposite();
        } else if (configurationDirection == structureDirection.getClockWise()) {
            rotation = Rotation.CLOCKWISE_90;
            facing = facing.getClockWise();
        } else if (configurationDirection == structureDirection.getCounterClockWise()) {
            rotation = Rotation.COUNTERCLOCKWISE_90;
            facing = facing.getCounterClockWise();
        }

        // Painting motive sizes are in pixels; 16 pixels per block.
        int paintingBlockWidth = entity.motive.getWidth() / 16;
        int paintingBlockHeight = entity.motive.getHeight() / 16;

        // Tall paintings anchor differently; drop them one block (except the 4x3 special case).
        if ((paintingBlockHeight > paintingBlockWidth || paintingBlockHeight > 1) && !(paintingBlockWidth == 4 && paintingBlockHeight == 3)) {
            y_axis_offset--;
        }

        yaw = entity.rotate(rotation);

        // Round-trip through NBT to force the new facing onto the hanging entity.
        CompoundTag compound = new CompoundTag();
        ((HangingEntity) entity).addAdditionalSaveData(compound);
        compound.putByte("Facing", (byte) facing.get2DDataValue());
        ((HangingEntity) entity).readAdditionalSaveData(compound);

        StructureEventHandler.updateEntityHangingBoundingBox(entity);
        entity.moveTo(entityPos.getX() + x_axis_offset, entityPos.getY() + y_axis_offset, entityPos.getZ() + z_axis_offset, yaw, entity.getXRot());

        StructureEventHandler.updateEntityHangingBoundingBox(entity);
        ChunkAccess chunk = structure.world.getChunkAt(entityPos);

        chunk.setUnsaved(true);

        return entity;
    }

    // Rotates an item frame to match the structure's orientation (wall frames only; up/down
    // frames keep their facing), re-applies facing via NBT, and marks the chunk dirty.
    private static Entity setItemFrameFacingAndRotation(ItemFrame frame, BuildEntity buildEntity, BlockPos entityPos, Structure structure) {
        float yaw = frame.getYRot();
        Rotation rotation = Rotation.NONE;
        double x_axis_offset = buildEntity.entityXAxisOffset;
        double z_axis_offset = buildEntity.entityZAxisOffset;
        Direction facing = frame.getDirection();
        double y_axis_offset = buildEntity.entityYAxisOffset;
        x_axis_offset = x_axis_offset * -1;
        z_axis_offset = z_axis_offset * -1;

        Direction structureDirection = structure.getClearSpace().getShape().getDirection();
        Direction configurationDirection = structure.configuration.houseFacing.getOpposite();

        if (facing != Direction.UP && facing != Direction.DOWN) {
            if (configurationDirection == structureDirection.getOpposite()) {
                rotation = Rotation.CLOCKWISE_180;
                facing = facing.getOpposite();
            } else if (configurationDirection == structureDirection.getClockWise()) {
                rotation = Rotation.CLOCKWISE_90;
                facing = facing.getClockWise();
            } else if (configurationDirection == structureDirection.getCounterClockWise()) {
                rotation = Rotation.COUNTERCLOCKWISE_90;
                facing = facing.getCounterClockWise();
            } else {
                x_axis_offset = 0;
                z_axis_offset = 0;
            }
        }

        yaw = frame.rotate(rotation);

        // Round-trip through NBT to force the new facing onto the hanging entity.
        CompoundTag compound = new CompoundTag();
        ((HangingEntity) frame).addAdditionalSaveData(compound);
        compound.putByte("Facing", (byte) facing.get3DDataValue());
        ((HangingEntity) frame).readAdditionalSaveData(compound);

        StructureEventHandler.updateEntityHangingBoundingBox(frame);
        frame.moveTo(entityPos.getX() + x_axis_offset, entityPos.getY() + y_axis_offset, entityPos.getZ() + z_axis_offset, yaw, frame.getXRot());

        StructureEventHandler.updateEntityHangingBoundingBox(frame);
        ChunkAccess chunk = structure.world.getChunkAt(entityPos);

        chunk.setUnsaved(true);

        return frame;
    }

    // Rotates any other entity to match the structure's orientation; offsets are negated for
    // rotated placements and zeroed when the structure is not rotated.
    private static Entity setEntityFacingAndRotation(Entity entity, BuildEntity buildEntity, BlockPos entityPos, Structure structure) {
        float yaw = entity.getYRot();
        Rotation rotation = Rotation.NONE;
        double x_axis_offset = buildEntity.entityXAxisOffset;
        double z_axis_offset = buildEntity.entityZAxisOffset;
        Direction facing = structure.getClearSpace().getShape().getDirection();
        double y_axis_offset = buildEntity.entityYAxisOffset;

        Direction configurationDirection = structure.configuration.houseFacing.getOpposite();

        if (configurationDirection == facing.getOpposite()) {
            rotation = Rotation.CLOCKWISE_180;
            x_axis_offset = x_axis_offset * -1;
            z_axis_offset = z_axis_offset * -1;
        } else if (configurationDirection == facing.getClockWise()) {
            rotation = Rotation.CLOCKWISE_90;
            x_axis_offset = x_axis_offset * -1;
            z_axis_offset = z_axis_offset * -1;
        } else if (configurationDirection == facing.getCounterClockWise()) {
            rotation = Rotation.COUNTERCLOCKWISE_90;
            x_axis_offset = x_axis_offset * -1;
            z_axis_offset = z_axis_offset * -1;
        } else {
            x_axis_offset = 0;
            z_axis_offset = 0;
        }

        yaw = entity.rotate(rotation);

        entity.moveTo(entityPos.getX() + x_axis_offset, entityPos.getY() + y_axis_offset, entityPos.getZ() + z_axis_offset, yaw, entity.getXRot());
        return entity;
    }

    // Re-centers a hanging entity and recomputes its AABB. This mirrors vanilla hanging-entity
    // bounding-box math (widths/heights in pixels, 32 per two-block span) — verify against the
    // current mappings when updating Minecraft versions.
    private static void updateEntityHangingBoundingBox(HangingEntity entity) {
        double d0 = (double) entity.getPos().getX() + 0.5D;
        double d1 = (double) entity.getPos().getY() + 0.5D;
        double d2 = (double) entity.getPos().getZ() + 0.5D;
        double d3 = 0.46875D;
        // Even multiples of 32 pixels need a half-block shift to align with block edges.
        double d4 = entity.getWidth() % 32 == 0 ? 0.5D : 0.0D;
        double d5 = entity.getHeight() % 32 == 0 ? 0.5D : 0.0D;
        Direction horizontal = entity.getDirection();
        d0 = d0 - (double) horizontal.getStepX() * 0.46875D;
        d2 = d2 - (double) horizontal.getStepZ() * 0.46875D;
        d1 = d1 + d5;
        Direction direction = horizontal == Direction.DOWN || horizontal == Direction.UP ? horizontal.getOpposite() : horizontal.getCounterClockWise();
        d0 = d0 + d4 * (double) direction.getStepX();
        d2 = d2 + d4 * (double) direction.getStepZ();

        // The function call below set the following fields from the "entity" class. posX, posY, posZ.
        // This will probably have to change when the mappings get updated.
        entity.setPosRaw(d0, d1, d2);
        double d6 = entity.getWidth();
        double d7 = entity.getHeight();
        double d8 = entity.getWidth();

        if (horizontal.getAxis() == Direction.Axis.Z) {
            d8 = 1.0D;
        } else {
            d6 = 1.0D;
        }

        d6 = d6 / 32.0D;
        d7 = d7 / 32.0D;
        d8 = d8 / 32.0D;
        entity.setBoundingBox(new AABB(d0 - d6, d1 - d7, d2 - d8, d0 + d6, d1 + d7, d2 + d8));
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import junit.framework.Assert;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.namenode.BlockPlacementPolicyRaid.CachedFullPathNames;
import org.apache.hadoop.hdfs.server.namenode.BlockPlacementPolicyRaid.CachedLocatedBlocks;
import org.apache.hadoop.hdfs.server.namenode.BlockPlacementPolicyRaid.FileType;
import org.apache.hadoop.raid.Codec;
import org.apache.hadoop.raid.RaidNode;
import org.apache.hadoop.raid.Utils;
import org.junit.Test;

/**
 * Integration tests for {@link BlockPlacementPolicyRaid}. Each test spins up a
 * single-datanode {@link MiniDFSCluster} (see {@link #setupCluster()}), runs its
 * scenario, and shuts the cluster down in a {@code finally} block.
 *
 * The tests cover: the policy's internal caches (block locations and full path
 * names), companion-block lookup for raided and unraided files, and replica
 * deletion choices when replication is lowered.
 */
public class TestBlockPlacementPolicyRaid {
  private Configuration conf = null;
  private MiniDFSCluster cluster = null;
  private FSNamesystem namesystem = null;
  private BlockPlacementPolicyRaid policy = null;
  private FileSystem fs = null;

  // One rack/host pair per datanode; tests start with host1 only and may add
  // host2 later (see testDeleteReplica).
  String[] rack1 = {"/rack1"};
  String[] rack2 = {"/rack2"};
  String[] host1 = {"host1.rack1.com"};
  String[] host2 = {"host2.rack2.com"};

  // Parity-directory prefixes resolved from the codecs loaded in setupCluster().
  String xorPrefix = null;
  String raidTempPrefix = null;
  String raidrsTempPrefix = null;
  String raidrsHarTempPrefix = null;

  final static Log LOG =
      LogFactory.getLog(TestBlockPlacementPolicyRaid.class);

  /**
   * Starts a one-datanode cluster configured to use BlockPlacementPolicyRaid,
   * with 1-byte blocks, a 1s block-report interval, and a 2s
   * pending-replication timeout so the cache/replication tests run quickly.
   *
   * @throws IOException if the mini cluster fails to start
   */
  protected void setupCluster() throws IOException {
    conf = new Configuration();
    conf.setLong("dfs.blockreport.intervalMsec", 1000L);
    conf.set("dfs.replication.pending.timeout.sec", "2");
    // 1-byte blocks: each byte written becomes its own block, so file sizes
    // below translate directly into block counts.
    conf.setLong("dfs.block.size", 1L);
    conf.set("dfs.block.replicator.classname",
        "org.apache.hadoop.hdfs.server.namenode.BlockPlacementPolicyRaid");
    // xor codec with parity dir /raid, rs codec with parity dir /raidrs.
    Utils.loadTestCodecs(conf, 2, 1, 3, "/raid", "/raidrs");
    conf.setInt("io.bytes.per.checksum", 1);
    // start the cluster with one datanode
    cluster = new MiniDFSCluster(conf, 1, true, rack1, host1);
    cluster.waitActive();
    namesystem = cluster.getNameNode().getNamesystem();
    Assert.assertTrue("BlockPlacementPolicy type is not correct.",
        namesystem.replicator instanceof BlockPlacementPolicyRaid);
    policy = (BlockPlacementPolicyRaid) namesystem.replicator;
    fs = cluster.getFileSystem();
    xorPrefix = Codec.getCodec("xor").parityDirectory;
    raidTempPrefix = Codec.getCodec("xor").tmpParityDirectory;
    // NOTE(review): raidrsTempPrefix is assigned the rs *parity* directory
    // while raidrsHarTempPrefix gets the rs *tmp* parity directory; the naming
    // suggests these two may be swapped — confirm, though neither field is
    // read in the visible tests.
    raidrsTempPrefix = Codec.getCodec("rs").parityDirectory;
    raidrsHarTempPrefix = Codec.getCodec("rs").tmpParityDirectory;
  }

  /**
   * Test BlockPlacementPolicyRaid.CachedLocatedBlocks and
   * BlockPlacementPolicyRaid.CachedFullPathNames
   * Verify that the results obtained from cache is the same as
   * the results obtained directly
   */
  @Test
  public void testCachedResults() throws IOException {
    setupCluster();
    try {
      // test blocks cache
      CachedLocatedBlocks cachedBlocks = new CachedLocatedBlocks(conf, namesystem);
      String file1 = "/dir/file1";
      String file2 = "/dir/file2";
      DFSTestUtil.createFile(fs, new Path(file1), 3, (short)1, 0L);
      DFSTestUtil.createFile(fs, new Path(file2), 4, (short)1, 0L);
      // Query each file twice: first call populates the cache, second call
      // must hit it and still agree with the namesystem.
      verifyCachedBlocksResult(cachedBlocks, namesystem, file1);
      verifyCachedBlocksResult(cachedBlocks, namesystem, file1);
      verifyCachedBlocksResult(cachedBlocks, namesystem, file2);
      verifyCachedBlocksResult(cachedBlocks, namesystem, file2);
      // Sleep past the cache expiry window, then verify again so the reload
      // path is exercised too.
      try {
        Thread.sleep(1200L);
      } catch (InterruptedException e) {
        // best-effort sleep; an early wakeup only makes the expiry check
        // less strict. NOTE(review): the interrupt status is swallowed here.
      }
      verifyCachedBlocksResult(cachedBlocks, namesystem, file2);
      verifyCachedBlocksResult(cachedBlocks, namesystem, file1);

      // test full path cache
      CachedFullPathNames cachedFullPathNames =
          new CachedFullPathNames(conf, namesystem);
      FSInodeInfo inode1 = null;
      FSInodeInfo inode2 = null;
      // The directory read lock guards the rootDir.getNode() lookups.
      namesystem.dir.readLock();
      try {
        inode1 = namesystem.dir.rootDir.getNode(file1);
        inode2 = namesystem.dir.rootDir.getNode(file2);
      } finally {
        namesystem.dir.readUnlock();
      }
      verifyCachedFullPathNameResult(cachedFullPathNames, inode1);
      verifyCachedFullPathNameResult(cachedFullPathNames, inode1);
      verifyCachedFullPathNameResult(cachedFullPathNames, inode2);
      verifyCachedFullPathNameResult(cachedFullPathNames, inode2);
      try {
        Thread.sleep(1200L);
      } catch (InterruptedException e) {
        // see note above — sleep is best-effort
      }
      verifyCachedFullPathNameResult(cachedFullPathNames, inode2);
      verifyCachedFullPathNameResult(cachedFullPathNames, inode1);
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Test the result of getCompanionBlocks() on the unraided files
   */
  @Test
  public void testGetCompanionBLocks() throws IOException {
    setupCluster();
    try {
      String file1 = "/dir/file1";          // plain file — no companions
      String file2 = "/raid/dir/file2";     // xor parity file
      String file3 = "/raidrs/dir/file3";   // rs parity file
      // Set the policy to default policy to place the block in the default way
      setBlockPlacementPolicy(namesystem, new BlockPlacementPolicyDefault(
          conf, namesystem, namesystem.clusterMap));
      DFSTestUtil.createFile(fs, new Path(file1), 3, (short)1, 0L);
      DFSTestUtil.createFile(fs, new Path(file2), 4, (short)1, 0L);
      DFSTestUtil.createFile(fs, new Path(file3), 8, (short)1, 0L);
      Collection<LocatedBlock> companionBlocks;

      // Unraided file: no companion blocks at either end of the file.
      companionBlocks = getCompanionBlocks(
          namesystem, policy, getBlocks(namesystem, file1).get(0).getBlock());
      Assert.assertTrue(companionBlocks == null || companionBlocks.size() == 0);
      companionBlocks = getCompanionBlocks(
          namesystem, policy, getBlocks(namesystem, file1).get(2).getBlock());
      Assert.assertTrue(companionBlocks == null || companionBlocks.size() == 0);

      // xor parity file: each parity block is its own companion group.
      companionBlocks = getCompanionBlocks(
          namesystem, policy, getBlocks(namesystem, file2).get(0).getBlock());
      Assert.assertEquals(1, companionBlocks.size());
      companionBlocks = getCompanionBlocks(
          namesystem, policy, getBlocks(namesystem, file2).get(3).getBlock());
      Assert.assertEquals(1, companionBlocks.size());

      // rs parity file: full stripes have parityLength companions; the final
      // partial stripe (block 6 of 8 with stripes of 3) has only 2.
      int rsParityLength = Codec.getCodec("rs").parityLength;
      companionBlocks = getCompanionBlocks(
          namesystem, policy, getBlocks(namesystem, file3).get(0).getBlock());
      Assert.assertEquals(rsParityLength, companionBlocks.size());
      companionBlocks = getCompanionBlocks(
          namesystem, policy, getBlocks(namesystem, file3).get(4).getBlock());
      Assert.assertEquals(rsParityLength, companionBlocks.size());
      companionBlocks = getCompanionBlocks(
          namesystem, policy, getBlocks(namesystem, file3).get(6).getBlock());
      Assert.assertEquals(2, companionBlocks.size());
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Swaps the namesystem's active block placement policy under the
   * namesystem write lock.
   */
  static void setBlockPlacementPolicy(
      FSNamesystem namesystem, BlockPlacementPolicy policy) {
    namesystem.writeLock();
    try {
      namesystem.replicator = policy;
    } finally {
      namesystem.writeUnlock();
    }
  }

  /**
   * Test BlockPlacementPolicyRaid actually deletes the correct replica.
   * Start 2 datanodes and create 1 source file and its parity file.
   * 1) Start host1, create the parity file with replication 1
   * 2) Start host2, create the source file with replication 2
   * 3) Set repliation of source file to 1
   * Verify that the policy should delete the block with more companion blocks.
   */
  @Test
  public void testDeleteReplica() throws IOException {
    setupCluster();
    try {
      // Set the policy to default policy to place the block in the default way
      setBlockPlacementPolicy(namesystem, new BlockPlacementPolicyDefault(
          conf, namesystem, namesystem.clusterMap));
      DatanodeDescriptor datanode1 =
          namesystem.datanodeMap.values().iterator().next();
      String source = "/dir/file";
      String parity = xorPrefix + source;

      // Parity first, while only datanode1 exists — pins parity to host1.
      final Path parityPath = new Path(parity);
      DFSTestUtil.createFile(fs, parityPath, 3, (short)1, 0L);
      DFSTestUtil.waitReplication(fs, parityPath, (short)1);

      // start one more datanode
      cluster.startDataNodes(conf, 1, true, null, rack2, host2, null);
      DatanodeDescriptor datanode2 = null;
      for (DatanodeDescriptor d : namesystem.datanodeMap.values()) {
        if (!d.getName().equals(datanode1.getName())) {
          datanode2 = d;
        }
      }
      Assert.assertTrue(datanode2 != null);
      cluster.waitActive();
      final Path sourcePath = new Path(source);
      DFSTestUtil.createFile(fs, sourcePath, 5, (short)2, 0L);
      DFSTestUtil.waitReplication(fs, sourcePath, (short)2);

      refreshPolicy();
      Assert.assertEquals(source,
          policy.getSourceFile(parity, xorPrefix));

      List<LocatedBlock> sourceBlocks = getBlocks(namesystem, source);
      List<LocatedBlock> parityBlocks = getBlocks(namesystem, parity);
      Assert.assertEquals(5, sourceBlocks.size());
      Assert.assertEquals(3, parityBlocks.size());

      // verify the result of getCompanionBlocks(): with stripe length 2, the
      // 5 source blocks group as {0,1}->parity 0, {2,3}->parity 1, {4}->parity 2.
      Collection<LocatedBlock> companionBlocks;
      companionBlocks = getCompanionBlocks(
          namesystem, policy, sourceBlocks.get(0).getBlock());
      verifyCompanionBlocks(companionBlocks, sourceBlocks, parityBlocks,
          new int[]{0, 1}, new int[]{0});
      companionBlocks = getCompanionBlocks(
          namesystem, policy, sourceBlocks.get(1).getBlock());
      verifyCompanionBlocks(companionBlocks, sourceBlocks, parityBlocks,
          new int[]{0, 1}, new int[]{0});
      companionBlocks = getCompanionBlocks(
          namesystem, policy, sourceBlocks.get(2).getBlock());
      verifyCompanionBlocks(companionBlocks, sourceBlocks, parityBlocks,
          new int[]{2, 3}, new int[]{1});
      companionBlocks = getCompanionBlocks(
          namesystem, policy, sourceBlocks.get(3).getBlock());
      verifyCompanionBlocks(companionBlocks, sourceBlocks, parityBlocks,
          new int[]{2, 3}, new int[]{1});
      companionBlocks = getCompanionBlocks(
          namesystem, policy, sourceBlocks.get(4).getBlock());
      verifyCompanionBlocks(companionBlocks, sourceBlocks, parityBlocks,
          new int[]{4}, new int[]{2});
      companionBlocks = getCompanionBlocks(
          namesystem, policy, parityBlocks.get(0).getBlock());
      verifyCompanionBlocks(companionBlocks, sourceBlocks, parityBlocks,
          new int[]{0, 1}, new int[]{0});
      companionBlocks = getCompanionBlocks(
          namesystem, policy, parityBlocks.get(1).getBlock());
      verifyCompanionBlocks(companionBlocks, sourceBlocks, parityBlocks,
          new int[]{2, 3}, new int[]{1});
      companionBlocks = getCompanionBlocks(
          namesystem, policy, parityBlocks.get(2).getBlock());
      verifyCompanionBlocks(companionBlocks, sourceBlocks, parityBlocks,
          new int[]{4}, new int[]{2});

      // Set the policy back to raid policy. We have to create a new object
      // here to clear the block location cache
      refreshPolicy();
      setBlockPlacementPolicy(namesystem, policy);
      // verify policy deletes the correct blocks. companion blocks should be
      // evenly distributed.
      fs.setReplication(sourcePath, (short)1);
      DFSTestUtil.waitReplication(fs, sourcePath, (short)1);
      Map<String, Integer> counters = new HashMap<String, Integer>();
      refreshPolicy();
      for (int i = 0; i < parityBlocks.size(); i++) {
        companionBlocks = getCompanionBlocks(
            namesystem, policy, parityBlocks.get(i).getBlock());
        // Per-datanode counts: datanode1 must hold 1-2 of each group, and the
        // two nodes together must account for every companion block.
        counters = BlockPlacementPolicyRaid.countCompanionBlocks(
            companionBlocks, false);
        Assert.assertTrue(counters.get(datanode1.getName()) >= 1 &&
                          counters.get(datanode1.getName()) <= 2);
        Assert.assertTrue(counters.get(datanode1.getName()) +
                          counters.get(datanode2.getName()) ==
                          companionBlocks.size());
        // Same invariant at rack granularity.
        counters = BlockPlacementPolicyRaid.countCompanionBlocks(
            companionBlocks, true);
        Assert.assertTrue(counters.get(datanode1.getParent().getName()) >= 1 &&
                          counters.get(datanode1.getParent().getName()) <= 2);
        Assert.assertTrue(counters.get(datanode1.getParent().getName()) +
                          counters.get(datanode2.getParent().getName()) ==
                          companionBlocks.size());
      }
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  // create a new BlockPlacementPolicyRaid to clear the cache
  private void refreshPolicy() {
    policy = new BlockPlacementPolicyRaid();
    policy.initialize(conf, namesystem, namesystem.clusterMap,
        null, null, namesystem);
  }

  /**
   * Asserts that {@code companionBlocks} is exactly the union of the source
   * blocks at {@code sourceBlockIndexes} and the parity blocks at
   * {@code parityBlockIndexes} (compared as a set of Blocks).
   */
  private void verifyCompanionBlocks(Collection<LocatedBlock> companionBlocks,
      List<LocatedBlock> sourceBlocks, List<LocatedBlock> parityBlocks,
      int[] sourceBlockIndexes, int[] parityBlockIndexes) {
    Set<Block> blockSet = new HashSet<Block>();
    for (LocatedBlock b : companionBlocks) {
      blockSet.add(b.getBlock());
    }
    Assert.assertEquals(sourceBlockIndexes.length + parityBlockIndexes.length,
                        blockSet.size());
    for (int index : sourceBlockIndexes) {
      Assert.assertTrue(blockSet.contains(sourceBlocks.get(index).getBlock()));
    }
    for (int index : parityBlockIndexes) {
      Assert.assertTrue(blockSet.contains(parityBlocks.get(index).getBlock()));
    }
  }

  /** Asserts the cached full path name matches the inode's direct lookup. */
  private void verifyCachedFullPathNameResult(
      CachedFullPathNames cachedFullPathNames, FSInodeInfo inode)
  throws IOException {
    Assert.assertEquals(cachedFullPathNames.get(inode),
                        inode.getFullPathName());
  }

  /**
   * Asserts the cached located blocks for {@code file} match a direct
   * namesystem query, block by block.
   * NOTE(review): only the first res1.size() entries are compared — the two
   * lists are assumed (not asserted) to have equal length.
   */
  private void verifyCachedBlocksResult(CachedLocatedBlocks cachedBlocks,
      FSNamesystem namesystem, String file) throws IOException{
    long len = namesystem.getFileInfo(file).getLen();
    List<LocatedBlock> res1 = namesystem.getBlockLocations(
        file, 0L, len).getLocatedBlocks();
    List<LocatedBlock> res2 = cachedBlocks.get(file);
    for (int i = 0; i < res1.size(); i++) {
      Assert.assertEquals(res1.get(i).getBlock(), res2.get(i).getBlock());
    }
  }

  /** Resolves {@code block} to its file and asks the policy for companions. */
  private Collection<LocatedBlock> getCompanionBlocks(
      FSNamesystem namesystem, BlockPlacementPolicyRaid policy,
      Block block) throws IOException {
    INodeFile inode = namesystem.blocksMap.getINode(block);
    BlockPlacementPolicyRaid.FileInfo info =
        policy.getFileInfo(inode.getFullPathName());
    return policy.getCompanionBlocks(inode.getFullPathName(), info, block);
  }

  /** Returns all located blocks of {@code file} straight from the namesystem. */
  private List<LocatedBlock> getBlocks(FSNamesystem namesystem, String file)
    throws IOException {
    FileStatus stat = namesystem.getFileInfo(file);
    return namesystem.getBlockLocations(
               file, 0, stat.getLen()).getLocatedBlocks();
  }
}
/* * (C) Copyright 2014 mjahnen <jahnen@in.tum.de> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.github.mjdev.libaums.fs.fat32; import java.nio.ByteBuffer; import java.nio.ByteOrder; /** * This class represents the FAT32 boot sector which is always located at the * beginning of every FAT32 file system. It holds important information about * the file system such as the cluster size and the start cluster of the root * directory. * * @author mjahnen * */ /* package */class Fat32BootSector { private static final int BYTES_PER_SECTOR_OFF = 11; private static final int SECTORS_PER_CLUSTER_OFF = 13; private static final int RESERVED_COUNT_OFF = 14; private static final int FAT_COUNT_OFF = 16; private static final int TOTAL_SECTORS_OFF = 32; private static final int SECTORS_PER_FAT_OFF = 36; private static final int FLAGS_OFF = 40; private static final int ROOT_DIR_CLUSTER_OFF = 44; private static final int FS_INFO_SECTOR_OFF = 48; private static final int VOLUME_LABEL_OFF = 48; private short bytesPerSector; private short sectorsPerCluster; private short reservedSectors; private byte fatCount; private long totalNumberOfSectors; private long sectorsPerFat; private long rootDirStartCluster; private short fsInfoStartSector; private boolean fatMirrored; private byte validFat; private String volumeLabel; private Fat32BootSector() { } /** * Reads a FAT32 boot sector from the given buffer. The buffer has to be 512 * (the size of a boot sector) bytes. 
* * @param buffer * The data where the boot sector is located. * @return A newly created boot sector. */ /* package */static Fat32BootSector read(ByteBuffer buffer) { Fat32BootSector result = new Fat32BootSector(); buffer.order(ByteOrder.LITTLE_ENDIAN); result.bytesPerSector = buffer.getShort(BYTES_PER_SECTOR_OFF); result.sectorsPerCluster = (short) (buffer.get(SECTORS_PER_CLUSTER_OFF) & 0xff); result.reservedSectors = buffer.getShort(RESERVED_COUNT_OFF); result.fatCount = buffer.get(FAT_COUNT_OFF); result.totalNumberOfSectors = buffer.getInt(TOTAL_SECTORS_OFF) & 0xffffffffl; result.sectorsPerFat = buffer.getInt(SECTORS_PER_FAT_OFF) & 0xffffffffl; result.rootDirStartCluster = buffer.getInt(ROOT_DIR_CLUSTER_OFF) & 0xffffffffl; result.fsInfoStartSector = buffer.getShort(FS_INFO_SECTOR_OFF); short flag = buffer.getShort(FLAGS_OFF); result.fatMirrored = ((byte) flag & 0x80) == 0; result.validFat = (byte) ((byte) flag & 0x7); StringBuilder builder = new StringBuilder(); for (int i = 0; i < 11; i++) { byte b = buffer.get(VOLUME_LABEL_OFF + i); if (b == 0) break; builder.append((char) b); } result.volumeLabel = builder.toString(); return result; } /** * Returns the number of bytes in one single sector of a FAT32 file system. * * @return Number of bytes. */ /* package */short getBytesPerSector() { return bytesPerSector; } /** * Returns the number of sectors in one single cluster of a FAT32 file * system. * * @return Number of bytes. */ /* package */short getSectorsPerCluster() { return sectorsPerCluster; } /** * Returns the number of reserved sectors at the beginning of the FAT32 file * system. This includes one sector for the boot sector. * * @return Number of sectors. */ /* package */short getReservedSectors() { return reservedSectors; } /** * Returns the number of the FATs in the FAT32 file system. This is mostly * 2. * * @return Number of FATs. */ /* package */byte getFatCount() { return fatCount; } /** * Returns the total number of sectors in the file system. 
* * @return Total number of sectors. */ /* package */long getTotalNumberOfSectors() { return totalNumberOfSectors; } /** * Returns the total number of sectors in one file allocation table. The * FATs have a fixed size. * * @return Number of sectors in one FAT. */ /* package */long getSectorsPerFat() { return sectorsPerFat; } /** * Returns the start cluster of the root directory in the FAT32 file system. * * @return Root directory start cluster. */ /* package */long getRootDirStartCluster() { return rootDirStartCluster; } /** * Returns the start sector of the file system info structure. * * @return FSInfo Structure start sector. */ /* package */short getFsInfoStartSector() { return fsInfoStartSector; } /** * Returns if the different FATs in the file system are mirrored, ie. all of * them are holding the same data. This is used for backup purposes. * * @return True if the FAT is mirrored. * @see #getValidFat() * @see #getFatCount() */ /* package */boolean isFatMirrored() { return fatMirrored; } /** * Returns the valid FATs which shall be used if the FATs are not mirrored. * * @return Number of the valid FAT. * @see #isFatMirrored() * @see #getFatCount() */ /* package */byte getValidFat() { return validFat; } /** * Returns the amount in bytes in one cluster. * * @return Amount of bytes. */ /* package */int getBytesPerCluster() { return sectorsPerCluster * bytesPerSector; } /** * Returns the FAT offset in bytes from the beginning of the file system for * the given FAT number. * * @param fatNumber * The number of the FAT. * @return Offset in bytes. * @see #isFatMirrored() * @see #fatCount() * @see #getValidFat() */ /* package */long getFatOffset(int fatNumber) { return getBytesPerSector() * (getReservedSectors() + fatNumber * getSectorsPerFat()); } /** * Returns the offset in bytes from the beginning of the file system of the * data area. The data area is the area where the contents of directories * and files are saved. * * @return Offset in bytes. 
*/ /* package */long getDataAreaOffset() { return getFatOffset(0) + getFatCount() * getSectorsPerFat() * getBytesPerSector(); } /** * This returns the volume label stored in the boot sector. This is mostly * not used and you should instead use {@link FatDirectory#getVolumeLabel()} * of the root directory. * * @return The volume label. */ /* package */String getVolumeLabel() { return volumeLabel; } }
package app.bennsandoval.com.woodmin.fragments;

import android.app.Activity;
import android.app.SearchManager;
import android.app.SearchableInfo;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SearchView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;

import com.google.android.gms.actions.SearchIntents;

import java.util.List;

import app.bennsandoval.com.woodmin.R;
import app.bennsandoval.com.woodmin.activities.MainActivity;
import app.bennsandoval.com.woodmin.activities.OrderDetail;
import app.bennsandoval.com.woodmin.adapters.OrderAdapter;
import app.bennsandoval.com.woodmin.data.WoodminContract;
import app.bennsandoval.com.woodmin.sync.WoodminSyncAdapter;

/**
 * Fragment that lists WooCommerce orders backed by the Woodmin content
 * provider. Supports swipe-to-refresh (delegating to the sync adapter),
 * text/voice search via the action-bar SearchView, and opening
 * {@link OrderDetail} when a row is tapped.
 */
public class OrdersFragment extends Fragment
        implements LoaderManager.LoaderCallbacks<Cursor>, SearchView.OnQueryTextListener {

    private final String LOG_TAG = OrdersFragment.class.getSimpleName();
    private static final String ARG_SECTION_NUMBER = "section_number";

    private OrderAdapter mAdapter;
    private SwipeRefreshLayout mSwipeLayout;
    private RecyclerView mRecyclerView;

    private static final int ORDER_LOADER = 100;
    // Only the row id, order id, and raw JSON are needed by the adapter.
    private static final String[] ORDER_PROJECTION = {
            WoodminContract.OrdersEntry._ID,
            WoodminContract.OrdersEntry.COLUMN_ID,
            WoodminContract.OrdersEntry.COLUMN_JSON,
    };

    private SearchView mSearchView;
    private String mQuery; // current search text; null/empty means "show all"

    /**
     * Factory method; stores the navigation-drawer section number in the
     * fragment arguments.
     */
    public static OrdersFragment newInstance(int sectionNumber) {
        OrdersFragment fragment = new OrdersFragment();
        Bundle args = new Bundle();
        args.putInt(ARG_SECTION_NUMBER, sectionNumber);
        fragment.setArguments(args);
        return fragment;
    }

    public OrdersFragment() {
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        setHasOptionsMenu(true);
        // Pick up a search query if the hosting activity was launched via a
        // (voice) search intent.
        onNewIntent(getActivity().getIntent());
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_orders, container, false);

        // Row tap: resolve the clicked position to an order id and open the detail screen.
        View.OnClickListener onClickListener = new View.OnClickListener() {
            @Override
            public void onClick(final View view) {
                // NOTE(review): getChildPosition() is deprecated in newer
                // support libraries in favor of getChildAdapterPosition().
                int position = mRecyclerView.getChildPosition(view);
                mAdapter.getCursor().moveToPosition(position);
                int idSelected = mAdapter.getCursor().getInt(
                        mAdapter.getCursor().getColumnIndex(WoodminContract.OrdersEntry.COLUMN_ID));
                Intent orderIntent = new Intent(getActivity(), OrderDetail.class);
                orderIntent.putExtra("order", idSelected);
                startActivity(orderIntent);
            }
        };
        mAdapter = new OrderAdapter(getActivity().getApplicationContext(),
                R.layout.fragment_order_list_item, null, onClickListener);

        mRecyclerView = (RecyclerView) rootView.findViewById(R.id.list_orders);
        LinearLayoutManager layoutManager = new LinearLayoutManager(getActivity());
        layoutManager.setOrientation(LinearLayoutManager.VERTICAL);
        mRecyclerView.setLayoutManager(layoutManager);
        mRecyclerView.setAdapter(mAdapter);
        getActivity().getSupportLoaderManager().initLoader(ORDER_LOADER, null, this);

        mSwipeLayout = (SwipeRefreshLayout) rootView.findViewById(R.id.swipe_container);
        mSwipeLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                WoodminSyncAdapter.syncImmediately(getActivity());
            }
        });
        mSwipeLayout.setColorSchemeResources(android.R.color.holo_blue_bright,
                android.R.color.holo_green_light,
                android.R.color.holo_orange_light,
                android.R.color.holo_red_light);

        // Only allow pull-to-refresh when the list is scrolled to the very top,
        // otherwise the two gestures conflict.
        mRecyclerView.setOnScrollListener(new RecyclerView.OnScrollListener() {
            @Override
            public void onScrollStateChanged(RecyclerView view, int scrollState) {
            }

            @Override
            public void onScrolled(RecyclerView view, int dx, int dy) {
                boolean enable = false;
                if (view != null && view.getChildCount() > 0) {
                    boolean topOfFirstItemVisible = view.getChildAt(0).getTop() == 0;
                    enable = topOfFirstItemVisible;
                }
                mSwipeLayout.setEnabled(enable);
            }
        });

        return rootView;
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        ((MainActivity) activity).onSectionAttached(
                getArguments().getInt(ARG_SECTION_NUMBER));
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        //menu.clear();
        super.onCreateOptionsMenu(menu, inflater);
        inflater.inflate(R.menu.order_fragment_menu, menu);
        MenuItem searchItem = menu.findItem(R.id.action_search);
        mSearchView = (SearchView) MenuItemCompat.getActionView(searchItem);
        SearchManager searchManager =
                (SearchManager) getActivity().getSystemService(Context.SEARCH_SERVICE);
        if (mSearchView != null) {
            // Prefer an "applications" searchable if one is registered globally;
            // otherwise fall back to this activity's own searchable info.
            List<SearchableInfo> searchables = searchManager.getSearchablesInGlobalSearch();
            SearchableInfo info =
                    searchManager.getSearchableInfo(getActivity().getComponentName());
            for (SearchableInfo inf : searchables) {
                if (inf.getSuggestAuthority() != null
                        && inf.getSuggestAuthority().startsWith("applications")) {
                    info = inf;
                }
            }
            mSearchView.setSearchableInfo(info);
            mSearchView.setOnQueryTextListener(this);
            mSearchView.setQueryHint(getActivity().getString(R.string.order_title_search));
            // Restore a pending query (e.g. delivered through a search intent).
            if (mQuery != null && mQuery.length() > 0) {
                mSearchView.setQuery(mQuery, true);
                mSearchView.setIconifiedByDefault(false);
                mSearchView.performClick();
                mSearchView.requestFocus();
            } else {
                mSearchView.setIconifiedByDefault(true);
            }
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return super.onOptionsItemSelected(item);
    }

    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle args) {
        Log.d(LOG_TAG, "onCreateLoader");
        String sortOrder = WoodminContract.OrdersEntry.COLUMN_ORDER_NUMBER + " DESC";
        CursorLoader cursorLoader;
        Uri ordersUri = WoodminContract.OrdersEntry.CONTENT_URI;
        switch (id) {
            case ORDER_LOADER:
                if (mQuery != null && mQuery.length() > 0) {
                    // BUG FIX: the original selection contained six "LIKE ?"
                    // placeholders (three of them duplicating
                    // COLUMN_BILLING_FIRST_NAME) but supplied only five
                    // arguments, so SQLite rejected the query at bind time and
                    // searching crashed. The redundant duplicate clauses were
                    // removed so each searched column appears once with a
                    // matching argument.
                    // NOTE(review): the duplicates were probably meant to be
                    // other billing columns (e.g. last name) — confirm against
                    // WoodminContract and extend the selection if so.
                    String query =
                            WoodminContract.OrdersEntry.COLUMN_ORDER_NUMBER + " LIKE ? OR "
                            + WoodminContract.OrdersEntry.COLUMN_CUSTOMER_FIRST_NAME + " LIKE ? OR "
                            + WoodminContract.OrdersEntry.COLUMN_CUSTOMER_LAST_NAME + " LIKE ? OR "
                            + WoodminContract.OrdersEntry.COLUMN_BILLING_FIRST_NAME + " LIKE ?";
                    String like = "%" + mQuery + "%";
                    String[] parameters = new String[]{ like, like, like, like };
                    cursorLoader = new CursorLoader(
                            getActivity().getApplicationContext(),
                            ordersUri,
                            ORDER_PROJECTION,
                            query,
                            parameters,
                            sortOrder);
                } else {
                    cursorLoader = new CursorLoader(
                            getActivity().getApplicationContext(),
                            ordersUri,
                            ORDER_PROJECTION,
                            null,
                            null,
                            sortOrder);
                }
                break;
            default:
                cursorLoader = null;
                break;
        }
        return cursorLoader;
    }

    @Override
    public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor) {
        switch (cursorLoader.getId()) {
            case ORDER_LOADER:
                // Data arrived (possibly after a sync) — stop the spinner.
                if (mSwipeLayout != null) {
                    mSwipeLayout.setRefreshing(false);
                }
                mAdapter.changeCursor(cursor);
                break;
            default:
                break;
        }
    }

    @Override
    public void onLoaderReset(Loader<Cursor> cursorLoader) {
        Log.d(LOG_TAG, "onLoaderReset");
        switch (cursorLoader.getId()) {
            case ORDER_LOADER:
                mAdapter.notifyDataSetChanged();
                break;
            default:
                break;
        }
    }

    @Override
    public boolean onQueryTextSubmit(String query) {
        mQuery = query;
        doSearch();
        return true;
    }

    @Override
    public boolean onQueryTextChange(String newText) {
        mQuery = newText;
        doSearch();
        return true;
    }

    /**
     * Extracts the query from a (voice) search intent, stripping the spoken
     * search prefix defined in resources.
     */
    protected void onNewIntent(Intent intent) {
        String action = intent.getAction();
        if (action != null && (action.equals(Intent.ACTION_SEARCH)
                || action.equals(SearchIntents.ACTION_SEARCH))) {
            mQuery = intent.getStringExtra(SearchManager.QUERY);
            mQuery = mQuery.replace(getString(R.string.order_voice_search) + " ", "");
        }
    }

    /** Re-runs the order loader so the current {@link #mQuery} takes effect. */
    private void doSearch() {
        getActivity().getSupportLoaderManager().restartLoader(ORDER_LOADER, null, this);
        getActivity().getSupportLoaderManager().getLoader(ORDER_LOADER).forceLoad();
    }
}
/* * Copyright 2013-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.gateway.filter.factory; import java.net.URI; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; import org.springframework.cloud.gateway.filter.GatewayFilter; import org.springframework.cloud.gateway.filter.factory.RewriteLocationResponseHeaderGatewayFilterFactory.Config; import org.springframework.cloud.gateway.filter.factory.RewriteLocationResponseHeaderGatewayFilterFactory.StripVersion; import org.springframework.http.HttpHeaders; import org.springframework.http.server.reactive.ServerHttpRequest; import org.springframework.http.server.reactive.ServerHttpResponse; import org.springframework.web.server.ServerWebExchange; import static org.assertj.core.api.Assertions.assertThat; @RunWith(MockitoJUnitRunner.class) public class RewriteLocationResponseHeaderGatewayFilterFactoryUnitTests { @InjectMocks private RewriteLocationResponseHeaderGatewayFilterFactory filter; @Mock private ServerWebExchange exchange; @Mock private ServerHttpRequest request; @Mock private ServerHttpResponse response; @Mock private HttpHeaders requestHeaders; @Mock private HttpHeaders responseHeaders; private URI uri; private Config config; @Before public void setUp() { filter = new 
RewriteLocationResponseHeaderGatewayFilterFactory(); Mockito.when(exchange.getRequest()).thenReturn(request); Mockito.when(exchange.getResponse()).thenReturn(response); Mockito.when(request.getHeaders()).thenReturn(requestHeaders); Mockito.when(response.getHeaders()).thenReturn(responseHeaders); config = new Config(); } private void setupTest(String location, String host, String path) { Mockito.when(responseHeaders.getFirst(HttpHeaders.LOCATION)).thenReturn(location); Mockito.when(requestHeaders.getFirst(HttpHeaders.HOST)).thenReturn(host); uri = URI.create("http://" + host + path); Mockito.when(request.getURI()).thenReturn(uri); } @Test public void rewriteLocationNullLocation() { setupTest(null, "host", "/path"); filter.rewriteLocation(exchange, config); Mockito.verify(responseHeaders, Mockito.never()).set(Mockito.anyString(), Mockito.anyString()); } @Test public void rewriteLocationNullHost() { setupTest("location", null, "/path"); filter.rewriteLocation(exchange, config); Mockito.verify(responseHeaders, Mockito.never()).set(Mockito.anyString(), Mockito.anyString()); } @Test public void rewriteLocation() { setupTest("location", "host", "/path"); filter.rewriteLocation(exchange, config); Mockito.verify(responseHeaders).set(Mockito.eq("Location"), Mockito.eq("location")); } @Test public void rewriteLocationCustomHeaderName() { setupTest("location", "host", "/path"); Mockito.when(responseHeaders.getFirst("Link")).thenReturn("link"); config.setLocationHeaderName("Link"); filter.rewriteLocation(exchange, config); Mockito.verify(responseHeaders).set(Mockito.eq("Link"), Mockito.eq("link")); } @Test public void rewriteLocationCustomHostValue() { setupTest("https://replaceme/some/path", "host", "/some/path"); config.setHostValue("different.host"); filter.rewriteLocation(exchange, config); Mockito.verify(responseHeaders).set(Mockito.eq("Location"), Mockito.eq("https://different.host/some/path")); } @Test public void rewriteLocationCustomProtocols() { 
setupTest("https://replaceme/some/path", "host", "/some/path"); config.setProtocols("gopher|whatever"); filter.rewriteLocation(exchange, config); Mockito.verify(responseHeaders).set(Mockito.eq("Location"), Mockito.eq("https://replaceme/some/path")); } @Test public void fixedLocationVersionedAlwaysStrip() { String location = "https://backend-url.example.com:443/v1/path/to/riches"; String host = "example.com:443"; String path = "/v1/path/to/riches"; setupTest(location, host, path); assertThat(filter.fixedLocation(location, host, path, StripVersion.ALWAYS_STRIP, config.getHostPortPattern(), config.getHostPortVersionPattern())).isEqualTo("https://example.com:443/path/to/riches"); } @Test public void fixedLocationVersionedStripAsInRequest() { String location = "https://backend-url.example.com:443/v1/path/to/riches"; String host = "example.com:443"; String path = "/v1/path/to/riches"; setupTest(location, host, path); assertThat(filter.fixedLocation(location, host, path, StripVersion.AS_IN_REQUEST, config.getHostPortPattern(), config.getHostPortVersionPattern())).isEqualTo("https://example.com:443/v1/path/to/riches"); } @Test public void fixedLocationVersionedDontStrip() { String location = "https://backend-url.example.com:443/v1/path/to/riches"; String host = "example.com:443"; String path = "/v1/path/to/riches"; setupTest(location, host, path); assertThat(filter.fixedLocation(location, host, path, StripVersion.NEVER_STRIP, config.getHostPortPattern(), config.getHostPortVersionPattern())).isEqualTo("https://example.com:443/v1/path/to/riches"); } @Test public void fixedLocationUnversionedAlwaysStrip() { String location = "https://backend-url.example.com:443/v2/path/to/riches"; String host = "api.example.com:443"; String path = "/path/to/riches"; setupTest(location, host, path); assertThat(filter.fixedLocation(location, host, path, StripVersion.ALWAYS_STRIP, config.getHostPortPattern(), 
config.getHostPortVersionPattern())).isEqualTo("https://api.example.com:443/path/to/riches"); } @Test public void fixedLocationUnversionedStripAsInRequest() { String location = "https://backend-url.example.com:443/v2/path/to/riches"; String host = "api.example.com:443"; String path = "/path/to/riches"; setupTest(location, host, path); assertThat(filter.fixedLocation(location, host, path, StripVersion.AS_IN_REQUEST, config.getHostPortPattern(), config.getHostPortVersionPattern())).isEqualTo("https://api.example.com:443/path/to/riches"); } @Test public void fixedLocationUnversionedDontStrip() { String location = "https://backend-url.example.com:443/v2/path/to/riches"; String host = "api.example.com:443"; String path = "/path/to/riches"; setupTest(location, host, path); assertThat(filter.fixedLocation(location, host, path, StripVersion.NEVER_STRIP, config.getHostPortPattern(), config.getHostPortVersionPattern())).isEqualTo("https://api.example.com:443/v2/path/to/riches"); } @Test public void fixedLocationNoPort() { String location = "https://backend-url.example.com/v2/path/to/riches"; String host = "api.example.com:443"; String path = "/path/to/riches"; setupTest(location, host, path); assertThat(filter.fixedLocation(location, host, path, StripVersion.AS_IN_REQUEST, config.getHostPortPattern(), config.getHostPortVersionPattern())).isEqualTo("https://api.example.com:443/path/to/riches"); } @Test public void toStringFormat() { // @formatter:off Config config = new Config().setStripVersion(StripVersion.ALWAYS_STRIP) .setLocationHeaderName("mylocation") .setHostValue("myhost") .setProtocols("myproto"); GatewayFilter filter = new RewriteLocationResponseHeaderGatewayFilterFactory() .apply(config); assertThat(filter.toString()) .contains("ALWAYS_STRIP") .contains("mylocation") .contains("myhost") .contains("myproto"); // @formatter:on } }
/**
 * jetbrick-template
 * http://subchen.github.io/jetbrick-template/
 *
 * Copyright 2010-2013 Guoqiang Chen. All rights reserved.
 * Email: subchen@gmail.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jetbrick.template.runtime;

import java.io.*;

/**
 * Output sink used by rendered templates. Two concrete flavors exist:
 * a character-based one backed by a {@link Writer} and a byte-based one
 * backed by an {@link OutputStream}; both are obtained via {@code create}.
 */
public abstract class JetWriter {

    /** Wraps a character {@link Writer}; {@code encoding} is used to decode byte[] output. */
    public static JetWriter create(Writer os, String encoding) {
        return new JetWriterImpl(os, encoding);
    }

    /** Wraps a byte {@link OutputStream}; {@code encoding} is used to encode character output. */
    public static JetWriter create(OutputStream os, String encoding) {
        return new JetOutputStreamImpl(os, encoding);
    }

    /** Returns {@code true} when this writer emits raw bytes rather than characters. */
    public abstract boolean isStreaming();

    /**
     * Writes pre-rendered content supplied in both forms; each implementation
     * picks whichever form matches its underlying sink.
     */
    public abstract void print(String text, byte[] bytes) throws IOException;

    public void print(boolean x) throws IOException {
        // String.valueOf(boolean) yields exactly "true" / "false".
        print(String.valueOf(x));
    }

    public void print(byte x) throws IOException {
        print(String.valueOf(x));
    }

    public void print(char x) throws IOException {
        print(String.valueOf(x));
    }

    public void print(short x) throws IOException {
        print(String.valueOf(x));
    }

    public void print(int x) throws IOException {
        print(String.valueOf(x));
    }

    public void print(long x) throws IOException {
        print(String.valueOf(x));
    }

    public void print(float x) throws IOException {
        print(String.valueOf(x));
    }

    public void print(double x) throws IOException {
        print(String.valueOf(x));
    }

    public abstract void print(byte[] x) throws IOException;

    public abstract void print(char[] x) throws IOException;

    public abstract void print(CharSequence x) throws IOException;

    /** Prints an arbitrary value; arrays are dispatched to their typed overloads, null is a no-op. */
    public void print(Object x) throws IOException {
        if (x == null) {
            return;
        }
        if (x instanceof byte[]) {
            print((byte[]) x);
        } else if (x instanceof char[]) {
            print((char[]) x);
        } else {
            print(x.toString());
        }
    }

    /** Writes a CRLF line terminator. */
    public abstract void println() throws IOException;

    public void println(boolean x) throws IOException {
        print(x);
        println();
    }

    public void println(byte x) throws IOException {
        print(x);
        println();
    }

    public void println(char x) throws IOException {
        print(x);
        println();
    }

    public void println(short x) throws IOException {
        print(x);
        println();
    }

    public void println(int x) throws IOException {
        print(x);
        println();
    }

    public void println(long x) throws IOException {
        print(x);
        println();
    }

    public void println(float x) throws IOException {
        print(x);
        println();
    }

    public void println(double x) throws IOException {
        print(x);
        println();
    }

    public void println(byte[] x) throws IOException {
        if (x != null) {
            print(x);
            println();
        }
    }

    public void println(char[] x) throws IOException {
        if (x != null) {
            print(x);
            println();
        }
    }

    public void println(CharSequence x) throws IOException {
        if (x != null) {
            print(x);
            println();
        }
    }

    public void println(Object x) throws IOException {
        if (x != null) {
            print(x);
            println();
        }
    }

    public abstract void flush() throws IOException;

    public abstract void close() throws IOException;

    /** Character-based implementation: everything funnels into a {@link Writer}. */
    static class JetWriterImpl extends JetWriter {
        private static final String NEWLINE = "\r\n";
        private final Writer out;
        private final String charset;

        public JetWriterImpl(Writer os, String encoding) {
            this.out = os;
            this.charset = encoding;
        }

        @Override
        public boolean isStreaming() {
            return false;
        }

        @Override
        public void print(String text, byte[] bytes) throws IOException {
            // Character sink: use the text form, ignore the byte form.
            out.write(text);
        }

        @Override
        public void print(byte[] x) throws IOException {
            if (x == null) {
                return;
            }
            out.write(new String(x, charset));
        }

        @Override
        public void print(char[] x) throws IOException {
            if (x == null) {
                return;
            }
            out.write(x);
        }

        @Override
        public void print(CharSequence x) throws IOException {
            if (x == null) {
                return;
            }
            out.write(x.toString());
        }

        @Override
        public void println() throws IOException {
            out.write(NEWLINE);
        }

        @Override
        public void flush() throws IOException {
            out.flush();
        }

        @Override
        public void close() throws IOException {
            out.close();
        }
    }

    /** Byte-based implementation: everything funnels into an {@link OutputStream}. */
    static class JetOutputStreamImpl extends JetWriter {
        private static final byte[] NEWLINE = new byte[] { '\r', '\n' };
        private final OutputStream out;
        private final String charset;

        public JetOutputStreamImpl(OutputStream os, String encoding) {
            this.out = os;
            this.charset = encoding;
        }

        @Override
        public boolean isStreaming() {
            return true;
        }

        @Override
        public void print(String text, byte[] bytes) throws IOException {
            // Byte sink: use the pre-encoded byte form, ignore the text form.
            out.write(bytes);
        }

        @Override
        public void print(byte[] x) throws IOException {
            if (x == null) {
                return;
            }
            out.write(x);
        }

        @Override
        public void print(char[] x) throws IOException {
            if (x == null) {
                return;
            }
            out.write(new String(x).getBytes(charset));
        }

        @Override
        public void print(CharSequence x) throws IOException {
            if (x == null) {
                return;
            }
            out.write(x.toString().getBytes(charset));
        }

        @Override
        public void println() throws IOException {
            out.write(NEWLINE);
        }

        @Override
        public void flush() throws IOException {
            out.flush();
        }

        @Override
        public void close() throws IOException {
            out.close();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.query; import java.io.FileOutputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.text.ParseException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.jackrabbit.JcrConstants; import org.apache.jackrabbit.oak.api.ContentRepository; import org.apache.jackrabbit.oak.api.ContentSession; import org.apache.jackrabbit.oak.api.PropertyValue; import org.apache.jackrabbit.oak.api.Result; import org.apache.jackrabbit.oak.api.ResultRow; import org.apache.jackrabbit.oak.api.Root; import org.apache.jackrabbit.oak.api.QueryEngine; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.api.Type; import org.apache.jackrabbit.oak.spi.query.PropertyValues; import org.junit.Before; import org.junit.Test; import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME; import static 
org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NODE_TYPE; import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.REINDEX_PROPERTY_NAME; import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.TYPE_PROPERTY_NAME; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * AbstractQueryTest... */ public abstract class AbstractQueryTest { protected static final String TEST_INDEX_NAME = "test-index"; protected QueryEngine qe; protected ContentSession session; protected Root root; @Before public void before() throws Exception { session = createRepository().login(null, null); root = session.getLatestRoot(); qe = root.getQueryEngine(); createTestIndexNode(); } protected abstract ContentRepository createRepository(); /** * Override this method to add your default index definition * * {@link #createTestIndexNode(Tree, String)} for a helper method */ protected void createTestIndexNode() throws Exception { Tree index = root.getTree("/"); createTestIndexNode(index, "unknown"); root.commit(); } protected static Tree createTestIndexNode(Tree index, String type) throws Exception { Tree indexDef = index.addChild(INDEX_DEFINITIONS_NAME).addChild( TEST_INDEX_NAME); indexDef.setProperty(JcrConstants.JCR_PRIMARYTYPE, INDEX_DEFINITIONS_NODE_TYPE, Type.NAME); indexDef.setProperty(TYPE_PROPERTY_NAME, type); indexDef.setProperty(REINDEX_PROPERTY_NAME, true); return indexDef; } protected Result executeQuery(String statement, String language, Map<String, PropertyValue> sv) throws ParseException { return qe.executeQuery(statement, language, Long.MAX_VALUE, 0, sv, null); } @Test public void sql1() throws Exception { test("sql1.txt"); } @Test public void sql2() throws Exception { test("sql2.txt"); } @Test public void xpath() throws Exception { test("xpath.txt"); } @Test public void sql2Measure() throws Exception { test("sql2_measure.txt"); } @Test public void 
bindVariableTest() throws Exception { JsopUtil.apply( root, "/ + \"test\": { \"hello\": {\"id\": \"1\"}, \"world\": {\"id\": \"2\"}}"); root.commit(); Map<String, PropertyValue> sv = new HashMap<String, PropertyValue>(); sv.put("id", PropertyValues.newString("1")); Iterator<? extends ResultRow> result; result = executeQuery("select * from [nt:base] where id = $id", QueryEngineImpl.SQL2, sv).getRows().iterator(); assertTrue(result.hasNext()); assertEquals("/test/hello", result.next().getPath()); sv.put("id", PropertyValues.newString("2")); result = executeQuery("select * from [nt:base] where id = $id", QueryEngineImpl.SQL2, sv).getRows().iterator(); assertTrue(result.hasNext()); assertEquals("/test/world", result.next().getPath()); } protected void test(String file) throws Exception { InputStream in = AbstractQueryTest.class.getResourceAsStream(file); LineNumberReader r = new LineNumberReader(new InputStreamReader(in)); PrintWriter w = new PrintWriter(new OutputStreamWriter( new FileOutputStream("target/" + getClass().getName() + "_" + file))); HashSet<String> knownQueries = new HashSet<String>(); boolean errors = false; try { while (true) { String line = r.readLine(); if (line == null) { break; } line = line.trim(); if (line.startsWith("#") || line.length() == 0) { w.println(line); } else if (line.startsWith("xpath2sql")) { line = line.substring("xpath2sql".length()).trim(); w.println("xpath2sql " + line); XPathToSQL2Converter c = new XPathToSQL2Converter(); String got; try { got = c.convert(line); executeQuery(got, QueryEngineImpl.SQL2, null); } catch (ParseException e) { got = "invalid: " + e.getMessage().replace('\n', ' '); } catch (Exception e) { // e.printStackTrace(); got = "error: " + e.toString().replace('\n', ' '); } if (!knownQueries.add(line)) { got = "duplicate xpath2sql query"; } line = r.readLine().trim(); w.println(got); if (!line.equals(got)) { errors = true; } } else if (line.startsWith("select") || line.startsWith("explain") || 
line.startsWith("measure") || line.startsWith("sql1") || line.startsWith("xpath")) { w.println(line); String language = QueryEngineImpl.SQL2; if (line.startsWith("sql1 ")) { language = QueryEngineImpl.SQL; line = line.substring("sql1 ".length()); } else if (line.startsWith("xpath ")) { language = QueryEngineImpl.XPATH; line = line.substring("xpath ".length()); } boolean readEnd = true; for (String resultLine : executeQuery(line, language)) { w.println(resultLine); if (readEnd) { line = r.readLine(); if (line == null) { errors = true; readEnd = false; } else { line = line.trim(); if (line.length() == 0) { errors = true; readEnd = false; } else { if (!line.equals(resultLine)) { errors = true; } } } } } w.println(""); if (readEnd) { while (true) { line = r.readLine(); if (line == null) { break; } line = line.trim(); if (line.length() == 0) { break; } errors = true; } } } else if (line.startsWith("commit")) { w.println(line); line = line.substring("commit".length()).trim(); JsopUtil.apply(root, line); root.commit(); } w.flush(); } } finally { w.close(); r.close(); } if (errors) { throw new Exception("Results in target/" + file + " don't match expected " + "results in src/test/resources/" + file + "; compare the files for details"); } } protected List<String> executeQuery(String query, String language) { long time = System.currentTimeMillis(); List<String> lines = new ArrayList<String>(); try { Result result = executeQuery(query, language, null); for (ResultRow row : result.getRows()) { lines.add(readRow(row)); } if (!query.contains("order by")) { Collections.sort(lines); } } catch (ParseException e) { lines.add(e.toString()); } catch (IllegalArgumentException e) { lines.add(e.toString()); } time = System.currentTimeMillis() - time; if (time > 3000 && !isDebugModeEnabled()) { fail("Query took too long: " + query + " took " + time + " ms"); } return lines; } protected static String readRow(ResultRow row) { StringBuilder buff = new StringBuilder(); PropertyValue[] values 
= row.getValues(); for (int i = 0; i < values.length; i++) { if (i > 0) { buff.append(", "); } PropertyValue v = values[i]; buff.append(v == null ? "null" : v.getValue(Type.STRING)); } return buff.toString(); } /** * Check whether the test is running in debug mode. * * @return true if debug most is (most likely) enabled */ protected static boolean isDebugModeEnabled() { return java.lang.management.ManagementFactory.getRuntimeMXBean() .getInputArguments().toString().indexOf("-agentlib:jdwp") > 0; } }
/*
 * Generated by the Jasper component of Apache Tomcat
 * Version: JspC/ApacheTomcat8
 * Generated at: 2016-08-23 16:29:30 UTC
 * Note: The last modified time of this file was set to
 * the last modified time of the source file after
 * generation to assist with modification tracking.
 */
// NOTE(review): this is Jasper-GENERATED servlet code for the
// component-session-summary.jsp admin page. Do not hand-edit; change the
// source JSP and regenerate instead.
package org.jivesoftware.openfire.admin;

import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import org.jivesoftware.openfire.SessionManager;
import org.jivesoftware.openfire.session.ComponentSession;
import org.jivesoftware.openfire.session.Session;
import org.jivesoftware.util.JiveGlobals;
import org.jivesoftware.util.StringUtils;
import org.jivesoftware.util.ParamUtils;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;

public final class component_002dsession_002dsummary_jsp extends org.apache.jasper.runtime.HttpJspBase
    implements org.apache.jasper.runtime.JspSourceDependent {

  // Paging defaults for the session table (rows per page and the selectable presets).
  final int DEFAULT_RANGE = 15;
  final int[] RANGE_PRESETS = {15, 25, 50, 75, 100};

  private static final javax.servlet.jsp.JspFactory _jspxFactory =
          javax.servlet.jsp.JspFactory.getDefaultFactory();

  private static java.util.Map<java.lang.String,java.lang.Long> _jspx_dependants;

  // Pooled JSTL fmt tag handlers (names are Jasper-mangled from the tag/attribute combinations).
  private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody;
  private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey;
  private org.apache.jasper.runtime.TagHandlerPool _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody;

  private javax.el.ExpressionFactory _el_expressionfactory;
  private org.apache.tomcat.InstanceManager _jsp_instancemanager;

  // JspSourceDependent: source files this page depends on (for recompilation checks).
  public java.util.Map<java.lang.String,java.lang.Long> getDependants() {
    return _jspx_dependants;
  }

  // Acquires tag handler pools, the EL expression factory and the instance manager.
  public void _jspInit() {
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody = org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(getServletConfig());
    _el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory();
    _jsp_instancemanager = org.apache.jasper.runtime.InstanceManagerFactory.getInstanceManager(getServletConfig());
  }

  // Releases the pooled tag handlers.
  public void _jspDestroy() {
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.release();
    _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.release();
    _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.release();
  }

  // Renders the component session summary page (continues beyond this excerpt).
  public void _jspService(final javax.servlet.http.HttpServletRequest request, final javax.servlet.http.HttpServletResponse response)
        throws java.io.IOException, javax.servlet.ServletException {

    final javax.servlet.jsp.PageContext pageContext;
    javax.servlet.http.HttpSession session = null;
    final javax.servlet.ServletContext application;
    final javax.servlet.ServletConfig config;
    javax.servlet.jsp.JspWriter out = null;
    final java.lang.Object page = this;
    javax.servlet.jsp.JspWriter _jspx_out = null;
    javax.servlet.jsp.PageContext _jspx_page_context = null;

    try {
      response.setContentType("text/html");
      pageContext = _jspxFactory.getPageContext(this, request, response, "error.jsp", true, 8192, true);
      _jspx_page_context = pageContext;
      application = pageContext.getServletContext();
      config = pageContext.getServletConfig();
      session = pageContext.getSession();
      out = pageContext.getOut();
      _jspx_out = out;
      out.write("\n\n\n\n\n\n\n\n\n\n");
      out.write('\n');
      out.write('\n');
      // <jsp:useBean>-style lookup: reuse the page-scoped WebManager or create one.
      org.jivesoftware.util.WebManager admin = null;
      admin = (org.jivesoftware.util.WebManager) _jspx_page_context.getAttribute("admin", javax.servlet.jsp.PageContext.PAGE_SCOPE);
      if (admin == null){
        admin = new org.jivesoftware.util.WebManager();
_jspx_page_context.setAttribute("admin", admin, javax.servlet.jsp.PageContext.PAGE_SCOPE); } out.write('\n'); admin.init(request, response, session, application, out ); out.write('\n'); out.write('\n'); // Get parameters int start = ParamUtils.getIntParameter(request, "start", 0); int range = ParamUtils .getIntParameter(request, "range", admin.getRowsPerPage("component-session-summary", DEFAULT_RANGE)); boolean close = ParamUtils.getBooleanParameter(request, "close"); String jid = ParamUtils.getParameter(request, "jid"); if (request.getParameter("range") != null) { admin.setRowsPerPage("component-session-summary", range); } // Get the session manager SessionManager sessionManager = admin.getSessionManager(); Collection<ComponentSession> sessions = sessionManager.getComponentSessions(); // Get the session count int sessionCount = sessions.size(); // Close the external component connection if (close) { try { Session sess = sessionManager.getComponentSession(jid); if (sess != null) { sess.close(); } // Log the event admin.logEvent("closed component session for "+jid, null); // wait one second Thread.sleep(1000L); } catch (Exception ignored) { // Session might have disappeared on its own } // redirect back to this page response.sendRedirect("component-session-summary.jsp?close=success"); return; } // paginator vars int numPages = (int) Math.ceil((double) sessionCount / (double) range); int curPage = (start / range) + 1; int maxIndex = (start + range <= sessionCount ? 
start + range : sessionCount); out.write("\n\n<html>\n <head>\n <title>"); if (_jspx_meth_fmt_005fmessage_005f0(_jspx_page_context)) return; out.write("</title>\n <meta name=\"pageID\" content=\"component-session-summary\"/>\n </head>\n <body>\n\n"); if ("success".equals(request.getParameter("close"))) { out.write("\n\n <p class=\"jive-success-text\">\n "); if (_jspx_meth_fmt_005fmessage_005f1(_jspx_page_context)) return; out.write("\n </p>\n\n"); } out.write("\n\n<p>\n"); if (_jspx_meth_fmt_005fmessage_005f2(_jspx_page_context)) return; out.write(": <b>"); out.print( sessions.size() ); out.write("</b>\n\n"); if (numPages > 1) { out.write("\n\n - "); if (_jspx_meth_fmt_005fmessage_005f3(_jspx_page_context)) return; out.write(' '); out.print( (start+1) ); out.write('-'); out.print( (start+range) ); out.write('\n'); out.write('\n'); } out.write("\n - "); if (_jspx_meth_fmt_005fmessage_005f4(_jspx_page_context)) return; out.write(":\n<select size=\"1\" onchange=\"location.href='component-session-summary.jsp?start=0&range=' + this.options[this.selectedIndex].value;\">\n\n "); for (int aRANGE_PRESETS : RANGE_PRESETS) { out.write("\n\n <option value=\""); out.print( aRANGE_PRESETS ); out.write("\"\n "); out.print( (aRANGE_PRESETS == range ? "selected" : "") ); out.write('>'); out.print( aRANGE_PRESETS ); out.write("\n </option>\n\n "); } out.write("\n\n</select>\n</p>\n\n"); if (numPages > 1) { out.write("\n\n <p>\n "); if (_jspx_meth_fmt_005fmessage_005f5(_jspx_page_context)) return; out.write(":\n [\n "); for (int i=0; i<numPages; i++) { String sep = ((i+1)<numPages) ? " " : ""; boolean isCurrent = (i+1) == curPage; out.write("\n <a href=\"component-session-summary.jsp?start="); out.print( (i*range) ); out.write("\"\n class=\""); out.print( ((isCurrent) ? 
"jive-current" : "") ); out.write("\"\n >"); out.print( (i+1) ); out.write("</a>"); out.print( sep ); out.write("\n\n "); } out.write("\n ]\n </p>\n\n"); } out.write("\n\n<p>\n"); if (_jspx_meth_fmt_005fmessage_005f6(_jspx_page_context)) return; out.write("\n</p>\n\n<div class=\"jive-table\">\n<table cellpadding=\"0\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n<thead>\n <tr>\n <th>&nbsp;</th>\n <th nowrap>"); if (_jspx_meth_fmt_005fmessage_005f7(_jspx_page_context)) return; out.write("</th>\n <th>&nbsp;</th>\n <th nowrap>"); if (_jspx_meth_fmt_005fmessage_005f8(_jspx_page_context)) return; out.write("</th>\n <th nowrap>"); if (_jspx_meth_fmt_005fmessage_005f9(_jspx_page_context)) return; out.write("</th>\n <th nowrap>"); if (_jspx_meth_fmt_005fmessage_005f10(_jspx_page_context)) return; out.write("</th>\n <th nowrap>"); if (_jspx_meth_fmt_005fmessage_005f11(_jspx_page_context)) return; out.write("</th>\n <th nowrap>"); if (_jspx_meth_fmt_005fmessage_005f12(_jspx_page_context)) return; out.write("</th>\n <th nowrap>"); if (_jspx_meth_fmt_005fmessage_005f13(_jspx_page_context)) return; out.write("</th>\n </tr>\n</thead>\n<tbody>\n "); // Check if no out/in connection to/from a remote server exists if (sessions.isEmpty()) { out.write("\n <tr>\n <td colspan=\"9\">\n\n "); if (_jspx_meth_fmt_005fmessage_005f14(_jspx_page_context)) return; out.write("\n\n </td>\n </tr>\n\n "); } out.write("\n\n "); int count = 0; sessions = new ArrayList<ComponentSession>(sessions).subList(start, maxIndex); for (ComponentSession componentSession : sessions) { count++; out.write("\n <tr class=\"jive-"); out.print( (((count % 2) == 0) ? 
"even" : "odd") ); out.write("\">\n <td width=\"1%\" nowrap>"); out.print( count ); out.write("</td>\n <td width=\"43%\" nowrap>\n <a href=\"component-session-details.jsp?jid="); out.print( URLEncoder.encode(componentSession.getAddress().toString(), "UTF-8") ); out.write("\" title=\""); if (_jspx_meth_fmt_005fmessage_005f15(_jspx_page_context)) return; out.write('"'); out.write('>'); out.print( componentSession.getAddress() ); out.write("</a>\n </td>\n <td width=\"1%\">\n "); if (componentSession.isSecure()) { if (componentSession.getPeerCertificates() != null && componentSession.getPeerCertificates().length > 0) { out.write("\n <img src=\"images/lock_both.gif\" width=\"16\" height=\"16\" border=\"0\" title=\""); if (_jspx_meth_fmt_005fmessage_005f16(_jspx_page_context)) return; out.write(" (mutual authentication)\" alt=\""); if (_jspx_meth_fmt_005fmessage_005f17(_jspx_page_context)) return; out.write(" (mutual authentication)\">\n "); } else { out.write("\n <img src=\"images/lock.gif\" width=\"16\" height=\"16\" border=\"0\" title=\""); if (_jspx_meth_fmt_005fmessage_005f18(_jspx_page_context)) return; out.write("\" alt=\""); if (_jspx_meth_fmt_005fmessage_005f19(_jspx_page_context)) return; out.write("\">\n "); } } else { out.write("\n <img src=\"images/blank.gif\" width=\"1\" height=\"1\" alt=\"\">\n "); } out.write("\n </td>\n <td width=\"15%\" nowrap>\n "); out.print( StringUtils.escapeHTMLTags(componentSession.getExternalComponent().getName()) ); out.write("\n </td>\n <td width=\"10%\" nowrap>\n "); out.print( StringUtils.escapeHTMLTags(componentSession.getExternalComponent().getCategory()) ); out.write("\n </td>\n <td width=\"10%\" nowrap>\n <table border=\"0\">\n <tr valign=\"center\">\n "); if ("gateway".equals(componentSession.getExternalComponent().getCategory())) { if ("msn".equals(componentSession.getExternalComponent().getType())) { out.write("\n <td><img src=\"images/msn.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"MSN\"></td>\n "); } else if 
("aim".equals(componentSession.getExternalComponent().getType())) { out.write("\n <td><img src=\"images/aim.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"AIM\"></td>\n "); } else if ("yahoo".equals(componentSession.getExternalComponent().getType())) { out.write("\n <td><img src=\"images/yahoo.gif\" width=\"22\" height=\"16\" border=\"0\" alt=\"Yahoo!\"></td>\n "); } else if ("icq".equals(componentSession.getExternalComponent().getType())) { out.write("\n <td><img src=\"images/icq.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"ICQ\"></td>\n "); } else if ("irc".equals(componentSession.getExternalComponent().getType())) { out.write("\n <td><img src=\"images/irc.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"IRC\"></td>\n "); } } out.write("\n <td>"); out.print( StringUtils.escapeHTMLTags(componentSession.getExternalComponent().getType()) ); out.write("</td>\n </tr></table>\n </td>\n "); Date creationDate = componentSession.getCreationDate(); Calendar creationCal = Calendar.getInstance(); creationCal.setTime(creationDate); Date lastActiveDate = componentSession.getLastActiveDate(); Calendar lastActiveCal = Calendar.getInstance(); lastActiveCal.setTime(lastActiveDate); Calendar nowCal = Calendar.getInstance(); boolean sameCreationDay = nowCal.get(Calendar.DAY_OF_YEAR) == creationCal.get(Calendar.DAY_OF_YEAR) && nowCal.get(Calendar.YEAR) == creationCal.get(Calendar.YEAR); boolean sameActiveDay = nowCal.get(Calendar.DAY_OF_YEAR) == lastActiveCal.get(Calendar.DAY_OF_YEAR) && nowCal.get(Calendar.YEAR) == lastActiveCal.get(Calendar.YEAR); out.write("\n <td width=\"9%\" nowrap>\n "); out.print( sameCreationDay ? JiveGlobals.formatTime(creationDate) : JiveGlobals.formatDateTime(creationDate) ); out.write("\n </td>\n <td width=\"9%\" nowrap>\n "); out.print( sameActiveDay ? 
JiveGlobals.formatTime(lastActiveDate) : JiveGlobals.formatDateTime(lastActiveDate) ); out.write("\n </td>\n\n <td width=\"1%\" nowrap align=\"center\" style=\"border-right:1px #ccc solid;\">\n <a href=\"component-session-summary.jsp?jid="); out.print( URLEncoder.encode(componentSession.getAddress().toString(), "UTF-8") ); out.write("&close=true\"\n title=\""); if (_jspx_meth_fmt_005fmessage_005f20(_jspx_page_context)) return; out.write("\"\n onclick=\"return confirm('"); if (_jspx_meth_fmt_005fmessage_005f21(_jspx_page_context)) return; out.write("');\"\n ><img src=\"images/delete-16x16.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"\"></a>\n </td>\n </tr>\n "); } out.write("\n\n</tbody>\n</table>\n</div>\n\n"); if (numPages > 1) { out.write("\n\n <p>\n "); if (_jspx_meth_fmt_005fmessage_005f22(_jspx_page_context)) return; out.write(":\n [\n "); for (int i=0; i<numPages; i++) { String sep = ((i+1)<numPages) ? " " : ""; boolean isCurrent = (i+1) == curPage; out.write("\n <a href=\"component-session-summary.jsp?start="); out.print( (i*range) ); out.write("\"\n class=\""); out.print( ((isCurrent) ? 
"jive-current" : "") ); out.write("\"\n >"); out.print( (i+1) ); out.write("</a>"); out.print( sep ); out.write("\n\n "); } out.write("\n ]\n </p>\n\n"); } out.write("\n\n<br>\n<p>\n"); if (_jspx_meth_fmt_005fmessage_005f23(_jspx_page_context)) return; out.write(':'); out.write(' '); out.print( JiveGlobals.formatDateTime(new Date()) ); out.write("\n</p>\n\n </body>\n</html>\n"); } catch (java.lang.Throwable t) { if (!(t instanceof javax.servlet.jsp.SkipPageException)){ out = _jspx_out; if (out != null && out.getBufferSize() != 0) try { if (response.isCommitted()) { out.flush(); } else { out.clearBuffer(); } } catch (java.io.IOException e) {} if (_jspx_page_context != null) _jspx_page_context.handlePageException(t); else throw new ServletException(t); } } finally { _jspxFactory.releasePageContext(_jspx_page_context); } } private boolean _jspx_meth_fmt_005fmessage_005f0(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f0 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f0.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f0.setParent(null); // /component-session-summary.jsp(91,15) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f0.setKey("component.session.summary.title"); int _jspx_eval_fmt_005fmessage_005f0 = _jspx_th_fmt_005fmessage_005f0.doStartTag(); if (_jspx_th_fmt_005fmessage_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { 
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f0); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f0); return false; } private boolean _jspx_meth_fmt_005fmessage_005f1(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f1 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f1.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f1.setParent(null); // /component-session-summary.jsp(99,4) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f1.setKey("component.session.summary.close"); int _jspx_eval_fmt_005fmessage_005f1 = _jspx_th_fmt_005fmessage_005f1.doStartTag(); if (_jspx_th_fmt_005fmessage_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f1); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f1); return false; } private boolean _jspx_meth_fmt_005fmessage_005f2(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f2 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) 
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f2.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f2.setParent(null); // /component-session-summary.jsp(105,0) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f2.setKey("component.session.summary.active"); int _jspx_eval_fmt_005fmessage_005f2 = _jspx_th_fmt_005fmessage_005f2.doStartTag(); if (_jspx_th_fmt_005fmessage_005f2.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f2); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f2); return false; } private boolean _jspx_meth_fmt_005fmessage_005f3(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f3 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f3.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f3.setParent(null); // /component-session-summary.jsp(109,6) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f3.setKey("global.showing"); int _jspx_eval_fmt_005fmessage_005f3 = _jspx_th_fmt_005fmessage_005f3.doStartTag(); if (_jspx_th_fmt_005fmessage_005f3.doEndTag() == 
javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f3); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f3); return false; } private boolean _jspx_meth_fmt_005fmessage_005f4(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f4 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f4.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f4.setParent(null); // /component-session-summary.jsp(112,3) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f4.setKey("component.session.summary.sessions_per_page"); int _jspx_eval_fmt_005fmessage_005f4 = _jspx_th_fmt_005fmessage_005f4.doStartTag(); if (_jspx_th_fmt_005fmessage_005f4.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f4); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f4); return false; } private boolean _jspx_meth_fmt_005fmessage_005f5(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f5 = 
(org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f5.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f5.setParent(null); // /component-session-summary.jsp(129,4) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f5.setKey("global.pages"); int _jspx_eval_fmt_005fmessage_005f5 = _jspx_th_fmt_005fmessage_005f5.doStartTag(); if (_jspx_th_fmt_005fmessage_005f5.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f5); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f5); return false; } private boolean _jspx_meth_fmt_005fmessage_005f6(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f6 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f6.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f6.setParent(null); // /component-session-summary.jsp(146,0) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f6.setKey("component.session.summary.info"); int _jspx_eval_fmt_005fmessage_005f6 = _jspx_th_fmt_005fmessage_005f6.doStartTag(); if 
(_jspx_eval_fmt_005fmessage_005f6 != javax.servlet.jsp.tagext.Tag.SKIP_BODY) { if (_jspx_eval_fmt_005fmessage_005f6 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) { out = _jspx_page_context.pushBody(); _jspx_th_fmt_005fmessage_005f6.setBodyContent((javax.servlet.jsp.tagext.BodyContent) out); _jspx_th_fmt_005fmessage_005f6.doInitBody(); } do { out.write("\n "); if (_jspx_meth_fmt_005fparam_005f0(_jspx_th_fmt_005fmessage_005f6, _jspx_page_context)) return true; out.write("\n "); if (_jspx_meth_fmt_005fparam_005f1(_jspx_th_fmt_005fmessage_005f6, _jspx_page_context)) return true; out.write('\n'); int evalDoAfterBody = _jspx_th_fmt_005fmessage_005f6.doAfterBody(); if (evalDoAfterBody != javax.servlet.jsp.tagext.BodyTag.EVAL_BODY_AGAIN) break; } while (true); if (_jspx_eval_fmt_005fmessage_005f6 != javax.servlet.jsp.tagext.Tag.EVAL_BODY_INCLUDE) { out = _jspx_page_context.popBody(); } } if (_jspx_th_fmt_005fmessage_005f6.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.reuse(_jspx_th_fmt_005fmessage_005f6); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey.reuse(_jspx_th_fmt_005fmessage_005f6); return false; } private boolean _jspx_meth_fmt_005fparam_005f0(javax.servlet.jsp.tagext.JspTag _jspx_th_fmt_005fmessage_005f6, javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:param org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_005fparam_005f0 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class); _jspx_th_fmt_005fparam_005f0.setPageContext(_jspx_page_context); _jspx_th_fmt_005fparam_005f0.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_005fmessage_005f6); // 
/component-session-summary.jsp(147,4) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fparam_005f0.setValue("<a href=\"connection-settings-external-components.jsp\">"); int _jspx_eval_fmt_005fparam_005f0 = _jspx_th_fmt_005fparam_005f0.doStartTag(); if (_jspx_th_fmt_005fparam_005f0.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.reuse(_jspx_th_fmt_005fparam_005f0); return true; } _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.reuse(_jspx_th_fmt_005fparam_005f0); return false; } private boolean _jspx_meth_fmt_005fparam_005f1(javax.servlet.jsp.tagext.JspTag _jspx_th_fmt_005fmessage_005f6, javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:param org.apache.taglibs.standard.tag.rt.fmt.ParamTag _jspx_th_fmt_005fparam_005f1 = (org.apache.taglibs.standard.tag.rt.fmt.ParamTag) _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.ParamTag.class); _jspx_th_fmt_005fparam_005f1.setPageContext(_jspx_page_context); _jspx_th_fmt_005fparam_005f1.setParent((javax.servlet.jsp.tagext.Tag) _jspx_th_fmt_005fmessage_005f6); // /component-session-summary.jsp(148,4) name = value type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fparam_005f1.setValue("</a>"); int _jspx_eval_fmt_005fparam_005f1 = _jspx_th_fmt_005fparam_005f1.doStartTag(); if (_jspx_th_fmt_005fparam_005f1.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.reuse(_jspx_th_fmt_005fparam_005f1); return 
true; } _005fjspx_005ftagPool_005ffmt_005fparam_0026_005fvalue_005fnobody.reuse(_jspx_th_fmt_005fparam_005f1); return false; } private boolean _jspx_meth_fmt_005fmessage_005f7(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f7 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f7.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f7.setParent(null); // /component-session-summary.jsp(157,19) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f7.setKey("component.session.label.domain"); int _jspx_eval_fmt_005fmessage_005f7 = _jspx_th_fmt_005fmessage_005f7.doStartTag(); if (_jspx_th_fmt_005fmessage_005f7.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f7); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f7); return false; } private boolean _jspx_meth_fmt_005fmessage_005f8(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f8 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) 
_005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f8.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f8.setParent(null); // /component-session-summary.jsp(159,19) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f8.setKey("component.session.label.name"); int _jspx_eval_fmt_005fmessage_005f8 = _jspx_th_fmt_005fmessage_005f8.doStartTag(); if (_jspx_th_fmt_005fmessage_005f8.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f8); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f8); return false; } private boolean _jspx_meth_fmt_005fmessage_005f9(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f9 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f9.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f9.setParent(null); // /component-session-summary.jsp(160,19) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f9.setKey("component.session.label.category"); int _jspx_eval_fmt_005fmessage_005f9 = _jspx_th_fmt_005fmessage_005f9.doStartTag(); if (_jspx_th_fmt_005fmessage_005f9.doEndTag() == 
javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f9); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f9); return false; } private boolean _jspx_meth_fmt_005fmessage_005f10(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f10 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f10.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f10.setParent(null); // /component-session-summary.jsp(161,19) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f10.setKey("component.session.label.type"); int _jspx_eval_fmt_005fmessage_005f10 = _jspx_th_fmt_005fmessage_005f10.doStartTag(); if (_jspx_th_fmt_005fmessage_005f10.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f10); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f10); return false; } private boolean _jspx_meth_fmt_005fmessage_005f11(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f11 = 
(org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f11.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f11.setParent(null); // /component-session-summary.jsp(162,19) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f11.setKey("component.session.label.creation"); int _jspx_eval_fmt_005fmessage_005f11 = _jspx_th_fmt_005fmessage_005f11.doStartTag(); if (_jspx_th_fmt_005fmessage_005f11.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f11); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f11); return false; } private boolean _jspx_meth_fmt_005fmessage_005f12(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f12 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f12.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f12.setParent(null); // /component-session-summary.jsp(163,19) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f12.setKey("component.session.label.last_active"); int _jspx_eval_fmt_005fmessage_005f12 = 
_jspx_th_fmt_005fmessage_005f12.doStartTag(); if (_jspx_th_fmt_005fmessage_005f12.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f12); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f12); return false; } private boolean _jspx_meth_fmt_005fmessage_005f13(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f13 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f13.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f13.setParent(null); // /component-session-summary.jsp(164,19) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f13.setKey("component.session.label.close_connect"); int _jspx_eval_fmt_005fmessage_005f13 = _jspx_th_fmt_005fmessage_005f13.doStartTag(); if (_jspx_th_fmt_005fmessage_005f13.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f13); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f13); return false; } private boolean _jspx_meth_fmt_005fmessage_005f14(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // 
fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f14 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f14.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f14.setParent(null); // /component-session-summary.jsp(174,16) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f14.setKey("component.session.summary.not_session"); int _jspx_eval_fmt_005fmessage_005f14 = _jspx_th_fmt_005fmessage_005f14.doStartTag(); if (_jspx_th_fmt_005fmessage_005f14.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f14); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f14); return false; } private boolean _jspx_meth_fmt_005fmessage_005f15(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f15 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f15.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f15.setParent(null); // /component-session-summary.jsp(189,139) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null 
_jspx_th_fmt_005fmessage_005f15.setKey("session.row.cliked"); int _jspx_eval_fmt_005fmessage_005f15 = _jspx_th_fmt_005fmessage_005f15.doStartTag(); if (_jspx_th_fmt_005fmessage_005f15.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f15); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f15); return false; } private boolean _jspx_meth_fmt_005fmessage_005f16(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f16 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f16.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f16.setParent(null); // /component-session-summary.jsp(194,85) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f16.setKey("session.row.cliked_ssl"); int _jspx_eval_fmt_005fmessage_005f16 = _jspx_th_fmt_005fmessage_005f16.doStartTag(); if (_jspx_th_fmt_005fmessage_005f16.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f16); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f16); return false; } private boolean _jspx_meth_fmt_005fmessage_005f17(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext 
= _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f17 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f17.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f17.setParent(null); // /component-session-summary.jsp(194,160) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f17.setKey("session.row.cliked_ssl"); int _jspx_eval_fmt_005fmessage_005f17 = _jspx_th_fmt_005fmessage_005f17.doStartTag(); if (_jspx_th_fmt_005fmessage_005f17.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f17); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f17); return false; } private boolean _jspx_meth_fmt_005fmessage_005f18(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f18 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f18.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f18.setParent(null); // /component-session-summary.jsp(196,80) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null 
deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f18.setKey("session.row.cliked_ssl"); int _jspx_eval_fmt_005fmessage_005f18 = _jspx_th_fmt_005fmessage_005f18.doStartTag(); if (_jspx_th_fmt_005fmessage_005f18.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f18); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f18); return false; } private boolean _jspx_meth_fmt_005fmessage_005f19(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f19 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f19.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f19.setParent(null); // /component-session-summary.jsp(196,131) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f19.setKey("session.row.cliked_ssl"); int _jspx_eval_fmt_005fmessage_005f19 = _jspx_th_fmt_005fmessage_005f19.doStartTag(); if (_jspx_th_fmt_005fmessage_005f19.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f19); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f19); return false; } private boolean _jspx_meth_fmt_005fmessage_005f20(javax.servlet.jsp.PageContext _jspx_page_context) throws 
java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f20 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f20.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f20.setParent(null); // /component-session-summary.jsp(254,20) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f20.setKey("session.row.cliked_kill_session"); int _jspx_eval_fmt_005fmessage_005f20 = _jspx_th_fmt_005fmessage_005f20.doStartTag(); if (_jspx_th_fmt_005fmessage_005f20.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f20); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f20); return false; } private boolean _jspx_meth_fmt_005fmessage_005f21(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f21 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f21.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f21.setParent(null); // /component-session-summary.jsp(255,38) name = key type = null reqTime = true required = false 
fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f21.setKey("session.row.confirm_close"); int _jspx_eval_fmt_005fmessage_005f21 = _jspx_th_fmt_005fmessage_005f21.doStartTag(); if (_jspx_th_fmt_005fmessage_005f21.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f21); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f21); return false; } private boolean _jspx_meth_fmt_005fmessage_005f22(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f22 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f22.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f22.setParent(null); // /component-session-summary.jsp(268,4) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f22.setKey("global.pages"); int _jspx_eval_fmt_005fmessage_005f22 = _jspx_th_fmt_005fmessage_005f22.doStartTag(); if (_jspx_th_fmt_005fmessage_005f22.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f22); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f22); return false; } private boolean 
_jspx_meth_fmt_005fmessage_005f23(javax.servlet.jsp.PageContext _jspx_page_context) throws java.lang.Throwable { javax.servlet.jsp.PageContext pageContext = _jspx_page_context; javax.servlet.jsp.JspWriter out = _jspx_page_context.getOut(); // fmt:message org.apache.taglibs.standard.tag.rt.fmt.MessageTag _jspx_th_fmt_005fmessage_005f23 = (org.apache.taglibs.standard.tag.rt.fmt.MessageTag) _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.get(org.apache.taglibs.standard.tag.rt.fmt.MessageTag.class); _jspx_th_fmt_005fmessage_005f23.setPageContext(_jspx_page_context); _jspx_th_fmt_005fmessage_005f23.setParent(null); // /component-session-summary.jsp(286,0) name = key type = null reqTime = true required = false fragment = false deferredValue = false expectedTypeName = null deferredMethod = false methodSignature = null _jspx_th_fmt_005fmessage_005f23.setKey("component.session.summary.last_update"); int _jspx_eval_fmt_005fmessage_005f23 = _jspx_th_fmt_005fmessage_005f23.doStartTag(); if (_jspx_th_fmt_005fmessage_005f23.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE) { _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f23); return true; } _005fjspx_005ftagPool_005ffmt_005fmessage_0026_005fkey_005fnobody.reuse(_jspx_th_fmt_005fmessage_005f23); return false; } }