gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.locator; import java.net.InetAddress; import java.util.*; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.*; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; public class TokenMetadata { private static Logger logger = LoggerFactory.getLogger(TokenMetadata.class); /* Maintains token to endpoint map of every node in the cluster. */ private BiMap<Token, InetAddress> tokenToEndpointMap; // Suppose that there is a ring of nodes A, C and E, with replication factor 3. // Node D bootstraps between C and E, so its pending ranges will be E-A, A-C and C-D. // Now suppose node B bootstraps between A and C at the same time. Its pending ranges would be C-E, E-A and A-B. 
// Now both nodes have pending range E-A in their list, which will cause pending range collision // even though we're only talking about replica range, not even primary range. The same thing happens // for any nodes that boot simultaneously between same two nodes. For this we cannot simply make pending ranges a <tt>Multimap</tt>, // since that would make us unable to notice the real problem of two nodes trying to boot using the same token. // In order to do this properly, we need to know what tokens are booting at any time. private BiMap<Token, InetAddress> bootstrapTokens; // we will need to know at all times what nodes are leaving and calculate ranges accordingly. // An anonymous pending ranges list is not enough, as that does not tell which node is leaving // and/or if the ranges are there because of bootstrap or leave operation. // (See CASSANDRA-603 for more detail + examples). private Set<InetAddress> leavingEndpoints; private ConcurrentMap<String, Multimap<Range, InetAddress>> pendingRanges; /* Use this lock for manipulating the token map */ private final ReadWriteLock lock = new ReentrantReadWriteLock(true); private ArrayList<Token> sortedTokens; /* list of subscribers that are notified when the tokenToEndpointMap changed */ private final CopyOnWriteArrayList<AbstractReplicationStrategy> subscribers; public TokenMetadata() { this(null); } public TokenMetadata(BiMap<Token, InetAddress> tokenToEndpointMap) { if (tokenToEndpointMap == null) tokenToEndpointMap = HashBiMap.create(); this.tokenToEndpointMap = tokenToEndpointMap; bootstrapTokens = HashBiMap.create(); leavingEndpoints = new HashSet<InetAddress>(); pendingRanges = new ConcurrentHashMap<String, Multimap<Range, InetAddress>>(); sortedTokens = sortTokens(); subscribers = new CopyOnWriteArrayList<AbstractReplicationStrategy>(); } private ArrayList<Token> sortTokens() { ArrayList<Token> tokens = new ArrayList<Token>(tokenToEndpointMap.keySet()); Collections.sort(tokens); return tokens; } /** @return the 
number of nodes bootstrapping into source's primary range */ public int pendingRangeChanges(InetAddress source) { int n = 0; Range sourceRange = getPrimaryRangeFor(getToken(source)); for (Token token : bootstrapTokens.keySet()) if (sourceRange.contains(token)) n++; return n; } public void updateNormalToken(Token token, InetAddress endpoint) { assert token != null; assert endpoint != null; lock.writeLock().lock(); try { bootstrapTokens.inverse().remove(endpoint); tokenToEndpointMap.inverse().remove(endpoint); InetAddress prev = tokenToEndpointMap.put(token, endpoint); if (!endpoint.equals(prev)) { if (prev != null) logger.warn("Token " + token + " changing ownership from " + prev + " to " + endpoint); sortedTokens = sortTokens(); } leavingEndpoints.remove(endpoint); fireTokenToEndpointMapChanged(); } finally { lock.writeLock().unlock(); } } public void addBootstrapToken(Token token, InetAddress endpoint) { assert token != null; assert endpoint != null; lock.writeLock().lock(); try { InetAddress oldEndpoint; oldEndpoint = bootstrapTokens.get(token); if (oldEndpoint != null && !oldEndpoint.equals(endpoint)) throw new RuntimeException("Bootstrap Token collision between " + oldEndpoint + " and " + endpoint + " (token " + token); oldEndpoint = tokenToEndpointMap.get(token); if (oldEndpoint != null && !oldEndpoint.equals(endpoint)) throw new RuntimeException("Bootstrap Token collision between " + oldEndpoint + " and " + endpoint + " (token " + token); bootstrapTokens.inverse().remove(endpoint); bootstrapTokens.put(token, endpoint); } finally { lock.writeLock().unlock(); } } public void removeBootstrapToken(Token token) { assert token != null; lock.writeLock().lock(); try { bootstrapTokens.remove(token); } finally { lock.writeLock().unlock(); } } public void addLeavingEndpoint(InetAddress endpoint) { assert endpoint != null; lock.writeLock().lock(); try { leavingEndpoints.add(endpoint); } finally { lock.writeLock().unlock(); } } public void removeEndpoint(InetAddress 
endpoint) { assert endpoint != null; lock.writeLock().lock(); try { bootstrapTokens.inverse().remove(endpoint); tokenToEndpointMap.inverse().remove(endpoint); leavingEndpoints.remove(endpoint); sortedTokens = sortTokens(); fireTokenToEndpointMapChanged(); } finally { lock.writeLock().unlock(); } } public Token getToken(InetAddress endpoint) { assert endpoint != null; assert isMember(endpoint); // don't want to return nulls lock.readLock().lock(); try { return tokenToEndpointMap.inverse().get(endpoint); } finally { lock.readLock().unlock(); } } public boolean isMember(InetAddress endpoint) { assert endpoint != null; lock.readLock().lock(); try { return tokenToEndpointMap.inverse().containsKey(endpoint); } finally { lock.readLock().unlock(); } } public boolean isLeaving(InetAddress endpoint) { assert endpoint != null; lock.readLock().lock(); try { return leavingEndpoints.contains(endpoint); } finally { lock.readLock().unlock(); } } /** * Create a copy of TokenMetadata with only tokenToEndpointMap. That is, pending ranges, * bootstrap tokens and leaving endpoints are not included in the copy. */ public TokenMetadata cloneOnlyTokenMap() { lock.readLock().lock(); try { return new TokenMetadata(HashBiMap.create(tokenToEndpointMap)); } finally { lock.readLock().unlock(); } } /** * Create a copy of TokenMetadata with tokenToEndpointMap reflecting situation after all * current leave operations have finished. 
*/ public TokenMetadata cloneAfterAllLeft() { lock.readLock().lock(); try { TokenMetadata allLeftMetadata = cloneOnlyTokenMap(); for (InetAddress endpoint : leavingEndpoints) allLeftMetadata.removeEndpoint(endpoint); return allLeftMetadata; } finally { lock.readLock().unlock(); } } public InetAddress getEndpoint(Token token) { lock.readLock().lock(); try { return tokenToEndpointMap.get(token); } finally { lock.readLock().unlock(); } } public Range getPrimaryRangeFor(Token right) { return new Range(getPredecessor(right), right); } public ArrayList<Token> sortedTokens() { lock.readLock().lock(); try { return sortedTokens; } finally { lock.readLock().unlock(); } } private synchronized Multimap<Range, InetAddress> getPendingRangesMM(String table) { Multimap<Range, InetAddress> map = pendingRanges.get(table); if (map == null) { map = HashMultimap.create(); pendingRanges.put(table, map); } return map; } /** a mutable map may be returned but caller should not modify it */ public Map<Range, Collection<InetAddress>> getPendingRanges(String table) { return getPendingRangesMM(table).asMap(); } public List<Range> getPendingRanges(String table, InetAddress endpoint) { List<Range> ranges = new ArrayList<Range>(); for (Map.Entry<Range, InetAddress> entry : getPendingRangesMM(table).entries()) { if (entry.getValue().equals(endpoint)) { ranges.add(entry.getKey()); } } return ranges; } public void setPendingRanges(String table, Multimap<Range, InetAddress> rangeMap) { pendingRanges.put(table, rangeMap); } public Token getPredecessor(Token token) { List tokens = sortedTokens(); int index = Collections.binarySearch(tokens, token); assert index >= 0 : token + " not found in " + StringUtils.join(tokenToEndpointMap.keySet(), ", "); return (Token) (index == 0 ? 
tokens.get(tokens.size() - 1) : tokens.get(index - 1)); } public Token getSuccessor(Token token) { List tokens = sortedTokens(); int index = Collections.binarySearch(tokens, token); assert index >= 0 : token + " not found in " + StringUtils.join(tokenToEndpointMap.keySet(), ", "); return (Token) ((index == (tokens.size() - 1)) ? tokens.get(0) : tokens.get(index + 1)); } /** caller should not modify bootstrapTokens */ public Map<Token, InetAddress> getBootstrapTokens() { return bootstrapTokens; } /** caller should not modify leavingEndpoints */ public Set<InetAddress> getLeavingEndpoints() { return leavingEndpoints; } public static int firstTokenIndex(final ArrayList ring, Token start) { assert ring.size() > 0; int i = Collections.binarySearch(ring, start); if (i < 0) { i = (i + 1) * (-1); if (i >= ring.size()) { i = 0; } } return i; } public static Token firstToken(final ArrayList<Token> ring, Token start) { return ring.get(firstTokenIndex(ring, start)); } /** * <tt>Iterator</tt> over the <tt>Token</tt>s in the given ring, starting with the token for the node owning start * (which does not have to be a <tt>Token</tt> in the ring) */ public static Iterator<Token> ringIterator(final ArrayList<Token> ring, Token start) { final int startIndex = firstTokenIndex(ring, start); return new AbstractIterator<Token>() { int j = startIndex; protected Token computeNext() { if (j < 0) return endOfData(); try { return ring.get(j); } finally { j = (j + 1) % ring.size(); if (j == startIndex) j = -1; } } }; } /** used by tests */ public void clearUnsafe() { bootstrapTokens.clear(); tokenToEndpointMap.clear(); leavingEndpoints.clear(); pendingRanges.clear(); fireTokenToEndpointMapChanged(); } public String toString() { StringBuilder sb = new StringBuilder(); lock.readLock().lock(); try { Set<InetAddress> eps = tokenToEndpointMap.inverse().keySet(); if (!eps.isEmpty()) { sb.append("Normal Tokens:"); sb.append(System.getProperty("line.separator")); for (InetAddress ep : eps) { 
sb.append(ep); sb.append(":"); sb.append(tokenToEndpointMap.inverse().get(ep)); sb.append(System.getProperty("line.separator")); } } if (!bootstrapTokens.isEmpty()) { sb.append("Bootstrapping Tokens:" ); sb.append(System.getProperty("line.separator")); for (Map.Entry<Token, InetAddress> entry : bootstrapTokens.entrySet()) { sb.append(entry.getValue() + ":" + entry.getKey()); sb.append(System.getProperty("line.separator")); } } if (!leavingEndpoints.isEmpty()) { sb.append("Leaving Endpoints:"); sb.append(System.getProperty("line.separator")); for (InetAddress ep : leavingEndpoints) { sb.append(ep); sb.append(System.getProperty("line.separator")); } } if (!pendingRanges.isEmpty()) { sb.append("Pending Ranges:"); sb.append(System.getProperty("line.separator")); sb.append(printPendingRanges()); } } finally { lock.readLock().unlock(); } return sb.toString(); } public String printPendingRanges() { StringBuilder sb = new StringBuilder(); for (Map.Entry<String, Multimap<Range, InetAddress>> entry : pendingRanges.entrySet()) { for (Map.Entry<Range, InetAddress> rmap : entry.getValue().entries()) { sb.append(rmap.getValue() + ":" + rmap.getKey()); sb.append(System.getProperty("line.separator")); } } return sb.toString(); } protected void fireTokenToEndpointMapChanged() { for (AbstractReplicationStrategy subscriber : subscribers) { subscriber.invalidateCachedTokenEndpointValues(); } } public void register(AbstractReplicationStrategy subscriber) { subscribers.add(subscriber); } /** * write endpoints may be different from read endpoints, because read endpoints only need care about the * "natural" nodes for a token, but write endpoints also need to account for nodes that are bootstrapping * into the ring, and write data there too so that they stay up to date during the bootstrap process. * Thus, this method may return more nodes than the Replication Factor. * * If possible, will return the same collection it was passed, for efficiency. 
* * Only ReplicationStrategy should care about this method (higher level users should only ask for Hinted). */ public Collection<InetAddress> getWriteEndpoints(Token token, String table, Collection<InetAddress> naturalEndpoints) { if (getPendingRanges(table).isEmpty()) return naturalEndpoints; List<InetAddress> endpoints = new ArrayList<InetAddress>(naturalEndpoints); for (Map.Entry<Range, Collection<InetAddress>> entry : getPendingRanges(table).entrySet()) { if (entry.getKey().contains(token)) { endpoints.addAll(entry.getValue()); } } return endpoints; } /** * Return the Token to Endpoint map for all the node in the cluster, including bootstrapping ones. */ public Map<Token, InetAddress> getTokenToEndpointMap() { Map<Token, InetAddress> map = new HashMap<Token, InetAddress>(tokenToEndpointMap.size() + bootstrapTokens.size()); map.putAll(tokenToEndpointMap); map.putAll(bootstrapTokens); return map; } }
/* * Copyright 2014 Guidewire Software, Inc. */ package gw.internal.gosu.parser; import gw.config.CommonServices; import gw.fs.IDirectory; import gw.fs.IDirectoryUtil; import gw.fs.IFile; import gw.fs.IResource; import gw.fs.IncludeModuleDirectory; import gw.lang.parser.FileSource; import gw.lang.parser.ISource; import gw.lang.reflect.ITypeLoader; import gw.lang.reflect.RefreshKind; import gw.lang.reflect.RefreshRequest; import gw.lang.reflect.gs.ClassType; import gw.lang.reflect.gs.GosuClassTypeLoader; import gw.lang.reflect.gs.IFileSystemGosuClassRepository; import gw.lang.reflect.gs.IGosuProgram; import gw.lang.reflect.gs.ISourceFileHandle; import gw.lang.reflect.gs.TypeName; import gw.lang.reflect.module.IModule; import gw.util.DynamicArray; import gw.util.StreamUtil; import gw.util.cache.FqnCache; import java.io.BufferedReader; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.Reader; import java.io.UncheckedIOException; import java.lang.ref.SoftReference; import java.net.URL; import java.nio.file.Files; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.stream.Stream; /** */ public class FileSystemGosuClassRepository implements IFileSystemGosuClassRepository { private final Map<String, FqnCache> _missCaches = new HashMap<String, FqnCache>(); public static final String RESOURCE_LOCATED_W_CLASSES = "gw/config/default.xml"; private final IModule _module; // Source paths private List<ClassPathEntry> _sourcePath = new CopyOnWriteArrayList<ClassPathEntry>(); // Excluded paths private Set<IDirectory> _excludedPath = new HashSet<IDirectory>(); private String[] _extensions = new String[0]; // Types and packages in the source paths private PackageToClassPathEntryTreeMap _rootNode; private Set<String> _allTypeNames; public 
FileSystemGosuClassRepository(IModule module) { _module = module; _extensions = GosuClassTypeLoader.ALL_EXTS; } @Override public IModule getModule() { return _module; } @Override public IDirectory[] getSourcePath() { IDirectory[] sourcepath = new IDirectory[_sourcePath.size()]; int i = 0; for( ClassPathEntry e : _sourcePath) { sourcepath[i++] = e.getPath(); } return sourcepath; } @Override public void setSourcePath(IDirectory[] sourcePath) { if( areDifferent( sourcePath, _sourcePath) ) { _sourcePath.clear(); Set<String> extensions = new HashSet<String>(); extensions.add(".java"); extensions.add(".xsd"); extensions.addAll(Arrays.asList(GosuClassTypeLoader.ALL_EXTS)); // Scan for potential extensions for( IDirectory file : sourcePath ) { _sourcePath.add( new ClassPathEntry( file, isTestFolder(file)) ); } _extensions = extensions.toArray(new String[extensions.size()]); reset(); } } @Override public IDirectory[] getExcludedPath() { return _excludedPath.toArray(new IDirectory[_excludedPath.size()]); } @Override public void setExcludedPath(IDirectory[] excludedPath) { _excludedPath = new HashSet<IDirectory>(Arrays.asList(excludedPath)); reset(); } @Override public ISourceFileHandle findClass(String strQualifiedClassName, String[] extensions) { IClassFileInfo fileInfo = findFileInfoOnDisk( strQualifiedClassName, extensions ); return fileInfo != null ? fileInfo.getSourceFileHandle() : null; } @Override public URL findResource( String resourceName ) { if( resourceName == null ) { return null; } resourceName = resourceName.replace( '/', '.' ); if( inMissCache( resourceName, _extensions) ) { return null; } if( resourceName.endsWith( "." ) ) { // Handle case where someone types "com.abc.Foo.". Otherwise it would be // treated as if the last dot was not there using the tokenizer. return null; } PackageToClassPathEntryTreeMap aPackage = getCachedPackage( resourceName ); URL resource = aPackage == null ? 
null : aPackage.resolveToResource( resourceName ); if( resource == null ) { addToMissCache( resourceName, _extensions); } return resource; } @Override public Set<String> getAllTypeNames() { if (_allTypeNames == null) { Set<String> classNames = new HashSet<String>(); for( ClassPathEntry path : _sourcePath) { addTypeNames(path.getPath(), path.getPath(), classNames, _extensions); } _allTypeNames = classNames; } return _allTypeNames; } @Override public Set<String> getAllTypeNames(String... extensions) { Set<String> enhancementNames = new HashSet<String>(); for( ClassPathEntry path : _sourcePath) { addTypeNames(path.getPath(), path.getPath(), enhancementNames, extensions); } return enhancementNames; } @Override public String getClassNameFromFile( IDirectory root, IFile file, String[] fileExts ) { String strClassPath = root.getPath().getFileSystemPathString() + File.separatorChar; String strQualifiedClassName = file.getPath().getFileSystemPathString().substring( strClassPath.length() ); if( !Util.isClassFileName( strQualifiedClassName, fileExts ) ) { String strExts = ""; for( String strExt : fileExts ) { strExts += " " + strExt; } throw new IllegalArgumentException( file.getPath().getName() + " is not a legal Gosu class name. It does not end with [" + strExts.trim() + "]" ); } strQualifiedClassName = strQualifiedClassName.substring( 0, strQualifiedClassName.lastIndexOf( '.' ) ); strQualifiedClassName = strQualifiedClassName.replace( '/', '.' ).replace( '\\', '.' ); if( strQualifiedClassName.startsWith( "." 
) ) { strQualifiedClassName = strQualifiedClassName.substring( 1 ); } return strQualifiedClassName; } @Override public void typesRefreshed(RefreshRequest request) { synchronized (_missCaches) { if (request == null) { reset(); } else { for (FqnCache cache : _missCaches.values()) { cache.remove(request.types); } if (request.kind == RefreshKind.CREATION) { for (String type : request.types) { // cannot make this call because we have no way of finding out the right package for inner types // addToPackageCache(type, request.file); if (_allTypeNames != null) { _allTypeNames.add(type); } } } else if (request.kind == RefreshKind.DELETION) { if (_allTypeNames != null) { for (String type : request.types) { _allTypeNames.remove(type); } } } } } } private void reset() { synchronized (_missCaches) { _missCaches.clear(); _rootNode = null; _allTypeNames = null; } } private void addToPackageCache(String fqn, IResource file) { final ClassPathEntry classPathEntry = findClassPathEntry(file); if (_rootNode != null && classPathEntry != null) { PackageToClassPathEntryTreeMap node = _rootNode; while (fqn != null) { int i = fqn.indexOf('.'); String segment = fqn.substring(0, i < 0 ? fqn.length() : i); PackageToClassPathEntryTreeMap child = node.getChild(segment); if (child != null) { child.addClassPathEntry(classPathEntry); node = child; } else { node = node.createChildForDir(classPathEntry, segment); } if (i < 0) { break; } fqn = i + 1 < fqn.length() ? 
fqn.substring(i + 1) : null; } for( FqnCache cache : _missCaches.values() ) { cache.remove( fqn ); } } } private ClassPathEntry findClassPathEntry(IResource file) { for (ClassPathEntry classPathEntry : _sourcePath) { if (file.isDescendantOf(classPathEntry.getPath())) { return classPathEntry; } } return null; } private void removeFromPackageCache( String fqn, IDirectory dir ) { PackageToClassPathEntryTreeMap thePackage = getCachedPackageCorrectly(fqn); if (thePackage != null) { //## todo: the package could be split, we need to remove the directory // and only if its the last one them remove the package thePackage.delete( dir ); } } private synchronized PackageToClassPathEntryTreeMap getCachedPackageCorrectly(String fullyQualifiedName) { if (_rootNode == null) { _rootNode = loadPackageRoots(); } PackageToClassPathEntryTreeMap currNode = _rootNode; int iRelativeNameIndex = 0; while (iRelativeNameIndex != -1) { int iNextDot = fullyQualifiedName.indexOf('.', iRelativeNameIndex); String strRelativeName = fullyQualifiedName.substring(iRelativeNameIndex, iNextDot == -1 ? fullyQualifiedName.length() : iNextDot); iRelativeNameIndex = iNextDot == -1 ? -1 : iNextDot + 1; PackageToClassPathEntryTreeMap newNode = getChildPackage(currNode, strRelativeName); if (newNode == null) { return null; } currNode = newNode; } return currNode == _rootNode ? null : currNode; } private synchronized PackageToClassPathEntryTreeMap getCachedPackage( String fullyQualifiedName ) { if( _rootNode == null ) { _rootNode = loadPackageRoots(); } if( fullyQualifiedName.equals( "" ) ) { return _rootNode; } PackageToClassPathEntryTreeMap currNode = _rootNode; int iRelativeNameIndex = 0; while( iRelativeNameIndex != -1 ) { int iNextDot = fullyQualifiedName.indexOf( '.', iRelativeNameIndex ); String strRelativeName = fullyQualifiedName.substring( iRelativeNameIndex, iNextDot == -1 ? fullyQualifiedName.length() : iNextDot ); iRelativeNameIndex = iNextDot == -1 ? 
-1 : iNextDot + 1; PackageToClassPathEntryTreeMap newNode = getChildPackage( currNode, strRelativeName ); if( newNode == null ) { break; } currNode = newNode; } return currNode == _rootNode ? null : currNode; } private PackageToClassPathEntryTreeMap getChildPackage( PackageToClassPathEntryTreeMap parent, String strRelativeName ) { PackageToClassPathEntryTreeMap child; if( parent == _rootNode && strRelativeName.equals( "Libraries" ) ) { // Hack to support mixed case access to the "libraries" package. // Libaries used to be a global symbol with name "Libraries", so // there used to be a lot of access by that name. child = parent.getChild( "libraries" ); if( child == null ) { return null; } } else { child = parent.getChild( strRelativeName ); } return child; } private PackageToClassPathEntryTreeMap loadPackageRoots() { PackageToClassPathEntryTreeMap root = new PackageToClassPathEntryTreeMap( null, "", _module ); PackageToClassPathEntryTreeMap gw = root.createChildForDir( null, "gw" ); gw.createChildForDir( null, "lang" ); gw.createChildForDir( null, "util" ); root.createChildForDir(null, IGosuProgram.PACKAGE); for( ClassPathEntry dir : _sourcePath) { root.addClassPathEntry( dir ); processDirectory( root, dir, dir.getPath() ); } return root; } private void processDirectory(PackageToClassPathEntryTreeMap node, IFileSystemGosuClassRepository.ClassPathEntry entry, IDirectory path) { if (_excludedPath.contains(path)) { return; } IDirectory entryPath = entry.getPath(); if (entryPath.equals(path) || !CommonServices.getPlatformHelper().isPathIgnored(entryPath.relativePath(path))) { List<? 
extends IDirectory> dirs = path.listDirs(); for (IDirectory dir : dirs) { if (isValidDirectory(dir)) { PackageToClassPathEntryTreeMap child = node.createChildForDir(entry, dir.getName()); processDirectory(child, entry, dir); } } } } private boolean isValidDirectory(IDirectory dir) { return !dir.getName().equals("META-INF"); } private ClassFileInfo findFileInfoOnDisk( String strQualifiedClassName, String[] extensions ) { if( inMissCache( strQualifiedClassName, extensions ) ) { return null; } if( strQualifiedClassName == null ) { return null; } if( strQualifiedClassName.endsWith( "." ) ) { // Handle case where someone types "com.abc.Foo.". Otherwise it would be // treated as if the last dot was not there using the tokenizer. return null; } ClassFileInfo info = null; String packageName = getPackageName(strQualifiedClassName); PackageToClassPathEntryTreeMap aPackage = getCachedPackage(packageName); if( aPackage != null ) { info = aPackage.resolveToClassFileInfo( strQualifiedClassName, extensions ); } if( info == null ) { addToMissCache( strQualifiedClassName, extensions ); } return info; } private String getPackageName(String strQualifiedClassName) { int i = strQualifiedClassName.lastIndexOf('.'); return i < 0 ? 
"" : strQualifiedClassName.substring(0, i); } private boolean inMissCache(String strQualifiedClassName, String[] extensions) { synchronized( _missCaches ) { // Note we check for TRUE because it can happen that a subordinate type like Foo<BadType> is a miss, while Foo is not a miss for (String extension : extensions) { Object value = getMissCacheForExtension(extension).get(strQualifiedClassName); if (value != Boolean.TRUE) { return false; } } return true; } } private FqnCache getMissCacheForExtension(String extension) { FqnCache cache = _missCaches.get(extension); if (cache == null) { cache = new FqnCache(); _missCaches.put(extension, cache); } return cache; } private void addToMissCache(String strQualifiedClassName, String[] extensions) { synchronized( _missCaches ) { for (String extension : extensions) { getMissCacheForExtension(extension).add(strQualifiedClassName, Boolean.TRUE); } } } private void addTypeNames( final IDirectory root, IDirectory path, final Set<String> classNames, final String[] fileExts ) { DynamicArray<? 
extends IFile> iFiles = IDirectoryUtil.allContainedFilesExcludingIgnored(path); for (int i = 0; i < iFiles.size; i++) { IFile file = (IFile) iFiles.data[i]; if (Util.isClassFileName(file.getName(), fileExts)) { String className = getClassNameFromFile(root, file, fileExts); classNames.add(className); } } } public static final class FileSystemSourceFileHandle implements ISourceFileHandle { ISource _source; boolean _isTestClass; private int _classPathLength; private ClassFileInfo _fileInfo; private int _iOffset; private int _iEnd; public FileSystemSourceFileHandle( ClassFileInfo fileInfo, boolean isTestClass ) { _isTestClass = isTestClass; _fileInfo = fileInfo; _classPathLength = fileInfo.getClassPathLength(); } @Override public ISource getSource() { if( _source == null ) { _source = new FileSource( _fileInfo ); } return _source; } @Override public String getParentType() { return null; } @Override public String getNamespace() { String namespace = _fileInfo.getFilePath().substring(_fileInfo.getEntry().getPath().toString().length() + 1); int fileSeparatorIndex = namespace.lastIndexOf(File.separatorChar); if (fileSeparatorIndex >= 0) { namespace = namespace.substring(0, fileSeparatorIndex); namespace = namespace.replace(File.separatorChar, '.'); } else { namespace = "default"; } return namespace; } @Override public String getFilePath() { return _fileInfo.getFilePath(); } @Override public IFile getFile() { return _fileInfo.getFile(); } @Override public boolean isTestClass() { return _isTestClass; } @Override public boolean isValid() { return true; } @Override public boolean isStandardPath() { return !_fileInfo.getEntry().getPath().isAdditional(); } @Override public boolean isIncludeModulePath() { return _fileInfo.getEntry().getPath() instanceof IncludeModuleDirectory; } @Override public void cleanAfterCompile() { _source = null; } public ClassType getClassType() { String name = _fileInfo.getNonCanonicalFileName(); return ClassType.getFromFileName(name); } @Override public 
// NOTE(review): this excerpt begins inside an enclosing source-file-handle class whose
// header and fields (_fileInfo, _classPathLength, _iOffset, _iEnd) are declared above
// this chunk; the final closing brace below ends the outer repository class.

/**
 * Derives the dot-separated namespace of this type from the handle's file path:
 * path separators become dots, the class-path prefix is stripped from the front
 * and the file name itself from the back.
 */
String getTypeNamespace()
{
  String path = _fileInfo.getFilePath().replace( '/', '.' ).replace( '\\', '.' );
  int startPos = _classPathLength;
  // this is a hack to support files inside jars in IJ
  if( path.charAt( startPos ) == '!' )
  {
    ++startPos;
  }
  if( path.charAt( startPos ) == '.' )
  {
    ++startPos;
  }
  // drop the "<separator><fileName>" tail; a file directly at the class-path
  // root yields an empty namespace
  int endPos = path.length() - (_fileInfo.getFileName().length() + 1);
  return endPos < startPos ? "" : path.substring( startPos, endPos );
}

/** Returns the file name without its extension (assumes the name contains a '.'). */
@Override
public String getRelativeName()
{
  String name = _fileInfo.getFileName();
  return name.substring( 0, name.lastIndexOf( '.' ) );
}

// @Override
// public IDirectory getParentFile()
// {
//   return _fileInfo.getParentFile();
// }
//
// @Override
// public ClassFileInfo getFileInfo()
// {
//   return _fileInfo;
// }

@Override
public void setOffset( int iOffset )
{
  _iOffset = iOffset;
}

@Override
public int getOffset()
{
  return _iOffset;
}

@Override
public void setEnd( int iEnd )
{
  _iEnd = iEnd;
}

@Override
public int getEnd()
{
  return _iEnd;
}

/** Returns just the file-name component of {@link #getFilePath()}. */
@Override
public String getFileName()
{
  String strFile = this.getFilePath();
  int iIndex = strFile.lastIndexOf( File.separatorChar );
  if( iIndex >= 0 )
  {
    strFile = strFile.substring( iIndex + 1 );
  }
  return strFile;
}

@Override
public String toString()
{
  return _fileInfo.getFilePath();
}
}

/**
 * Describes one class source file found on a class-path entry: its location,
 * whether it is a Gosu or Java source, any inner-class name parts, and a
 * soft-cached copy of its text content.
 */
public static class ClassFileInfo implements IFileSystemGosuClassRepository.IClassFileInfo
{
  private ClassPathEntry _entry;
  private ClassType _classType;
  private String _fileType;
  private List<String> _innerClassParts;
  private boolean _isTestClass;
  // soft-cached file content; remains null until getContent() is first called
  private SoftReference<String> _content;
  private IFile _file;

  public ClassFileInfo( ClassPathEntry entry, IFile file, boolean isTestClass )
  {
    _entry = entry;
    _file = file;
    // ".java" files are Java sources; everything else is treated as a Gosu class
    _classType = _file.getExtension().equalsIgnoreCase("java") ? ClassType.JavaClass : ClassType.Class;
    _innerClassParts = Collections.emptyList();
    _isTestClass = isTestClass;
    _file = file; // NOTE(review): redundant re-assignment; _file is already set above
  }

  /** Constructor used for inner classes of an already-resolved outer source file. */
  public ClassFileInfo( ISourceFileHandle outerSfh, ClassType classType, String fileType, List<String> innerClassParts, boolean isTestClass )
  {
    _classType = classType;
    _fileType = fileType;
    _innerClassParts = innerClassParts;
    _isTestClass = isTestClass;
    _file = outerSfh.getFile();
  }

  @Override
  public IFile getFile()
  {
    return _file;
  }

  @Override
  public boolean hasInnerClass()
  {
    return _innerClassParts.size() > 0;
  }

  /**
   * Builds a source file handle for this type. For inner classes, the enclosing
   * type name is reconstructed from _fileType plus all but the last inner-class
   * part; the last part names the inner class itself.
   */
  @Override
  public ISourceFileHandle getSourceFileHandle()
  {
    if( hasInnerClass() )
    {
      String enclosingType = _fileType;
      for( int i = 0; i < _innerClassParts.size() - 1; i++ )
      {
        enclosingType += "." + _innerClassParts.get( i );
      }
      return new InnerClassFileSystemSourceFileHandle(_classType, enclosingType, _innerClassParts.get( _innerClassParts.size() - 1 ), _isTestClass );
    }
    return new FileSystemSourceFileHandle( this, _isTestClass );
  }

  @Override
  public ClassPathEntry getEntry()
  {
    return _entry;
  }

  @Override
  public IDirectory getParentFile()
  {
    return _file.getParent();
  }

  /** Opens a fresh reader over the file; I/O failures surface as RuntimeException. */
  @Override
  public Reader getReader()
  {
    try
    {
      return StreamUtil.getInputStreamReader(_file.openInputStream());
    }
    catch (IOException e)
    {
      throw new RuntimeException(e);
    }
  }

  @Override
  public String getFileName()
  {
    return _file.getName();
  }

  @Override
  public String getNonCanonicalFileName()
  {
    return _file.getName();
  }

  @Override
  public String getFilePath()
  {
    return _file.getPath().getFileSystemPathString();
  }

  /** Length of the class-path root this file came from, used to strip path prefixes. */
  @Override
  public int getClassPathLength()
  {
    return getEntry().getPath().getPath().getFileSystemPathString().length();
  }

  /**
   * Returns the file's text, reading it line-by-line and caching the result in
   * a SoftReference. Java sources are deliberately not cached (a SoftReference
   * to null is stored instead), so they are re-read on every call.
   */
  @Override
  public String getContent()
  {
    String content = null;
    if( _content != null )
    {
      content = _content.get();
    }
    if( content == null )
    {
      Stream<String> lines = null;
      try
      {
        if( _file.isJavaFile() && _classType != ClassType.JavaClass )
        {
          lines = Files.lines( _file.toJavaFile().toPath() );
        }
        else
        {
          BufferedReader reader = new BufferedReader( getReader() );
          // ensure the underlying reader is closed when the stream is closed
          lines = reader.lines().onClose( callClose( reader ) );
        }
        StringBuilder sb = new StringBuilder();
        lines.forEach( line -> sb.append( line ).append( '\n') );
        if( sb.length() > 0 )
        {
          sb.setLength( sb.length() - 1 ); // remove last \n
        }
        content = sb.toString();
      }
      catch( Exception e )
      {
        throw new RuntimeException( e );
      }
      finally
      {
        // NOTE(review): throws NPE (masking the original exception) if an
        // exception occurs before 'lines' is assigned; a null check or
        // try-with-resources would be safer
        lines.close();
      }
      _content = _classType == ClassType.JavaClass ? new SoftReference<>( null ) : new SoftReference<>(content);
    }
    return content;
  }

  @Override
  public void stopCachingContent()
  {
    // NOTE(review): NPEs if getContent() has never been called (_content still null)
    _content.clear();
  }

  /** Adapts Closeable#close to a Runnable for Stream#onClose, rethrowing IOExceptions unchecked. */
  private static Runnable callClose( Closeable c )
  {
    return ()-> {
      try
      {
        c.close();
      }
      catch( IOException e )
      {
        throw new RuntimeException( e );
      }
    };
  }

  public String toString()
  {
    return _file.toString();
  }
}

/**
 * Returns the type names under the given namespace, or every known type name
 * when namespace is null.
 */
@Override
public Set<TypeName> getTypeNames(String namespace, Set<String> extensions, ITypeLoader loader)
{
  Set<TypeName> setNames = new HashSet<TypeName>();
  if (namespace == null)
  {
    for (String name : getAllTypeNames())
    {
      setNames.add(new TypeName(name, loader, TypeName.Kind.TYPE, TypeName.Visibility.PUBLIC));
    }
  }
  else
  {
    PackageToClassPathEntryTreeMap cachedPackage = getCachedPackageCorrectly(namespace);
    if (cachedPackage != null)
    {
      return cachedPackage.getTypeNames(extensions, loader);
    }
    else
    {
      // NOTE(review): raw Collections.EMPTY_SET; Collections.<TypeName>emptySet()
      // would avoid the unchecked conversion
      return Collections.EMPTY_SET;
    }
  }
  return setNames;
}

/** Returns the number of source roots contributing to the namespace, or 0 when unknown. */
@Override
public int hasNamespace(String namespace)
{
  PackageToClassPathEntryTreeMap cachedNamespace = getCachedPackageCorrectly( namespace );
  return cachedNamespace != null ? cachedNamespace.getSourceRootCount() : 0;
}

/** Keeps the package cache in sync with file-system create/delete refresh events. */
@Override
public void namespaceRefreshed(String namespace, IDirectory dir, RefreshKind kind)
{
  if (kind == RefreshKind.CREATION)
  {
    addToPackageCache(namespace, dir);
  }
  else if (kind == RefreshKind.DELETION)
  {
    removeFromPackageCache(namespace, dir);
  }
}

/** True when the directories differ (in size or element order) from the cached class-path entries. */
private boolean areDifferent(IDirectory[] cp1, List<ClassPathEntry> cp2)
{
  if (cp1.length != cp2.size())
  {
    return true;
  }
  for (int i = 0; i < cp1.length; i++)
  {
    if (!cp1[i].equals(cp2.get(i).getPath()))
    {
      return true;
    }
  }
  return false;
}

// a source folder is considered a test folder when its path ends with "gtest"
private static boolean isTestFolder(IDirectory file)
{
  return file.toString().endsWith( "gtest" );
}

@Override
public IFile findFirstFile(String resourceName)
{
  return findFirstFile(resourceName, _module.getSourcePath());
}

/** Returns the first existing file with the given name on the search path, or null if none. */
private IFile findFirstFile(String resourceName, List<? extends IDirectory> searchPath)
{
  for (IDirectory dir : searchPath)
  {
    IFile file = dir.file(resourceName);
    if (file != null && file.exists())
    {
      return file;
    }
  }
  return null;
}

public String toString()
{
  return _module.getName();
}
}
/** * Bobo Browse Engine - High performance faceted/parametric search implementation * that handles various types of semi-structured data. Written in Java. * * Copyright (C) 2005-2006 John Wang * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * To contact the project administrators for the bobo-browse project, * please go to https://sourceforge.net/projects/bobo-browse/, or * send mail to owner@browseengine.com. 
*/

package com.browseengine.bobo.api;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.FilterAtomicReader;
import org.apache.lucene.index.StoredFieldVisitor;

import com.browseengine.bobo.facets.FacetHandler;
import com.browseengine.bobo.facets.RuntimeFacetHandler;
import com.browseengine.bobo.facets.RuntimeFacetHandlerFactory;

/**
 * An {@link AtomicReader} decorator that attaches Bobo facet handlers (and the
 * facet data they load) to a single Lucene segment. Runtime (per-request) facet
 * handlers and their data are kept in {@link ThreadLocal}s so concurrent
 * searches on the same reader do not interfere with each other.
 */
public class BoboSegmentReader extends FilterAtomicReader {
  private static Logger logger = Logger.getLogger(BoboSegmentReader.class);

  protected Map<String, FacetHandler<?>> _facetHandlerMap;
  protected Collection<FacetHandler<?>> _facetHandlers;
  protected Collection<RuntimeFacetHandlerFactory<?, ?>> _runtimeFacetHandlerFactories;
  protected Map<String, RuntimeFacetHandlerFactory<?, ?>> _runtimeFacetHandlerFactoryMap;
  protected WorkArea _workArea;

  // facet data shared across threads, keyed by facet handler name
  private final Map<String, Object> _facetDataMap = new HashMap<String, Object>();

  // per-thread facet data for runtime (per-request) facet handlers
  private final ThreadLocal<Map<String, Object>> _runtimeFacetDataMap = new ThreadLocal<Map<String, Object>>() {
    @Override
    protected Map<String, Object> initialValue() {
      return new HashMap<String, Object>();
    }
  };

  // per-thread runtime facet handlers, keyed by handler name
  private final ThreadLocal<Map<String, RuntimeFacetHandler<?>>> _runtimeFacetHandlerMap = new ThreadLocal<Map<String, RuntimeFacetHandler<?>>>() {
    @Override
    protected Map<String, RuntimeFacetHandler<?>> initialValue() {
      return new HashMap<String, RuntimeFacetHandler<?>>();
    }
  };

  /**
   * Creates a BoboSegmentReader over the given segment reader and eagerly loads
   * the facet data of every facet handler for it.
   *
   * @param reader the Lucene segment reader to decorate
   * @param facetHandlers static facet handlers to load
   * @param facetHandlerFactories factories for runtime (per-request) facet handlers
   * @return an initialized BoboSegmentReader
   * @throws IOException if a facet handler fails to load
   */
  public static BoboSegmentReader getInstance(AtomicReader reader,
      Collection<FacetHandler<?>> facetHandlers,
      Collection<RuntimeFacetHandlerFactory<?, ?>> facetHandlerFactories) throws IOException {
    return getInstance(reader, facetHandlers, facetHandlerFactories, new WorkArea());
  }

  private static BoboSegmentReader getInstance(AtomicReader reader,
      Collection<FacetHandler<?>> facetHandlers,
      Collection<RuntimeFacetHandlerFactory<?, ?>> facetHandlerFactories, WorkArea workArea)
      throws IOException {
    BoboSegmentReader boboReader = new BoboSegmentReader(reader, facetHandlers,
        facetHandlerFactories, workArea);
    boboReader.facetInit();
    return boboReader;
  }

  /** Returns the shared facet data registered under the given name, or null. */
  public Object getFacetData(String name) {
    return _facetDataMap.get(name);
  }

  /** Registers shared facet data under the given name; returns the previous value, if any. */
  public Object putFacetData(String name, Object data) {
    return _facetDataMap.put(name, data);
  }

  /** Returns this thread's runtime facet data for the given name, or null. */
  public Object getRuntimeFacetData(String name) {
    Map<String, Object> map = _runtimeFacetDataMap.get();
    if (map == null) return null;

    return map.get(name);
  }

  /** Stores runtime facet data for the current thread; returns the previous value, if any. */
  public Object putRuntimeFacetData(String name, Object data) {
    Map<String, Object> map = _runtimeFacetDataMap.get();
    if (map == null) {
      map = new HashMap<String, Object>();
      _runtimeFacetDataMap.set(map);
    }
    return map.put(name, data);
  }

  /** Drops all runtime facet data for the current thread. */
  public void clearRuntimeFacetData() {
    // NOTE(review): set(null) rather than remove(); getters guard against the
    // null map, but remove() would also let initialValue() run again
    _runtimeFacetDataMap.set(null);
  }

  /** Returns this thread's runtime facet handler with the given name, or null. */
  public RuntimeFacetHandler<?> getRuntimeFacetHandler(String name) {
    Map<String, RuntimeFacetHandler<?>> map = _runtimeFacetHandlerMap.get();
    if (map == null) return null;

    return map.get(name);
  }

  /** Registers a runtime facet handler for the current thread. */
  public void putRuntimeFacetHandler(String name, RuntimeFacetHandler<?> data) {
    Map<String, RuntimeFacetHandler<?>> map = _runtimeFacetHandlerMap.get();
    if (map == null) {
      map = new HashMap<String, RuntimeFacetHandler<?>>();
      _runtimeFacetHandlerMap.set(map);
    }
    map.put(name, data);
  }

  /** Drops all runtime facet handlers for the current thread. */
  public void clearRuntimeFacetHandler() {
    _runtimeFacetHandlerMap.set(null);
  }

  @Override
  protected void doClose() throws IOException {
    // do nothing — the wrapped reader's lifecycle is presumably managed by the
    // caller; NOTE(review): confirm the inner reader is closed elsewhere
  }

  /**
   * Loads one facet handler, recursively loading its dependencies first.
   * The visited set detects dependency cycles; loaded tracks handlers whose
   * facet data is already in place.
   *
   * @throws IOException on a dependency cycle or when a dependency cannot be loaded
   */
  private void loadFacetHandler(String name, Set<String> loaded, Set<String> visited,
      WorkArea workArea) throws IOException {
    FacetHandler<?> facetHandler = _facetHandlerMap.get(name);
    if (facetHandler != null && !loaded.contains(name)) {
      visited.add(name);
      Set<String> dependsOn = facetHandler.getDependsOn();
      if (dependsOn.size() > 0) {
        Iterator<String> iter = dependsOn.iterator();
        while (iter.hasNext()) {
          String f = iter.next();
          // a handler may list itself; skip the self-dependency
          if (name.equals(f)) continue;
          if (!loaded.contains(f)) {
            if (visited.contains(f)) {
              throw new IOException("Facet handler dependency cycle detected, facet handler: "
                  + name + " not loaded");
            }
            loadFacetHandler(f, loaded, visited, workArea);
          }
          if (!loaded.contains(f)) {
            throw new IOException("unable to load facet handler: " + f);
          }
          facetHandler.putDependedFacetHandler(_facetHandlerMap.get(f));
        }
      }

      long start = System.currentTimeMillis();
      facetHandler.loadFacetData(this, workArea);
      long end = System.currentTimeMillis();
      if (logger.isDebugEnabled()) {
        StringBuffer buf = new StringBuffer();
        buf.append("facetHandler loaded: ").append(name).append(", took: ").append(end - start)
            .append(" ms");
        logger.debug(buf.toString());
      }
      loaded.add(name);
    }
  }

  /** Loads every registered facet handler, honoring declared dependencies. */
  private void loadFacetHandlers(WorkArea workArea) throws IOException {
    Set<String> loaded = new HashSet<String>();
    Set<String> visited = new HashSet<String>();

    for (String name : _facetHandlerMap.keySet()) {
      loadFacetHandler(name, loaded, visited, workArea);
    }
  }

  /** Builds the name-to-handler map from the handler collection. */
  protected void initialize(Collection<FacetHandler<?>> facetHandlers) throws IOException {
    _facetHandlers = facetHandlers;
    _facetHandlerMap = new HashMap<String, FacetHandler<?>>();
    for (FacetHandler<?> facetHandler : facetHandlers) {
      _facetHandlerMap.put(facetHandler.getName(), facetHandler);
    }
  }

  /**
   * @param reader the Lucene segment reader to decorate
   * @param facetHandlers static facet handlers for this segment
   * @param facetHandlerFactories factories for runtime facet handlers (may be null)
   * @param workArea scratch space shared by handlers while loading
   * @throws IOException declared for subclasses; this constructor does no I/O itself
   */
  protected BoboSegmentReader(AtomicReader reader, Collection<FacetHandler<?>> facetHandlers,
      Collection<RuntimeFacetHandlerFactory<?, ?>> facetHandlerFactories, WorkArea workArea)
      throws IOException {
    super(reader);
    _runtimeFacetHandlerFactories = facetHandlerFactories;
    _runtimeFacetHandlerFactoryMap = new HashMap<String, RuntimeFacetHandlerFactory<?, ?>>();
    if (_runtimeFacetHandlerFactories != null) {
      for (RuntimeFacetHandlerFactory<?, ?> factory : _runtimeFacetHandlerFactories) {
        _runtimeFacetHandlerFactoryMap.put(factory.getName(), factory);
      }
    }
    _facetHandlers = facetHandlers;
    _workArea = workArea;
  }

  /** Initializes the handler map and loads all facet data; called once after construction. */
  protected void facetInit() throws IOException {
    initialize(_facetHandlers);
    loadFacetHandlers(_workArea);
  }

  /**
   * Gets all the facet field names
   *
   * @return Set of facet field names
   */
  public Set<String> getFacetNames() {
    return _facetHandlerMap.keySet();
  }

  /**
   * Gets a facet handler, falling back to this thread's runtime handlers.
   *
   * @param fieldname
   *          name
   * @return facet handler
   */
  public FacetHandler<?> getFacetHandler(String fieldname) {
    FacetHandler<?> f = _facetHandlerMap.get(fieldname);
    if (f == null) f = getRuntimeFacetHandler(fieldname);
    return f;
  }

  /**
   * Gets the facet handler map
   *
   * @return facet handler map
   */
  public Map<String, FacetHandler<?>> getFacetHandlerMap() {
    return _facetHandlerMap;
  }

  /**
   * @return the map of RuntimeFacetHandlerFactories
   */
  public Map<String, RuntimeFacetHandlerFactory<?, ?>> getRuntimeFacetHandlerFactoryMap() {
    return _runtimeFacetHandlerFactoryMap;
  }

  /**
   * @return the map of RuntimeFacetHandlers for the current thread
   */
  public Map<String, RuntimeFacetHandler<?>> getRuntimeFacetHandlerMap() {
    return _runtimeFacetHandlerMap.get();
  }

  /**
   * @return the map of RuntimeFacetData for the current thread
   */
  public Map<String, Object> getRuntimeFacetDataMap() {
    return _runtimeFacetDataMap.get();
  }

  public void setRuntimeFacetHandlerMap(Map<String, RuntimeFacetHandler<?>> map) {
    _runtimeFacetHandlerMap.set(map);
  }

  public void setRuntimeFacetDataMap(Map<String, Object> map) {
    _runtimeFacetDataMap.set(map);
  }

  /**
   * Loads the stored document, then merges in the values produced by every
   * facet handler for that document (replacing fields of the same name with
   * the union of stored and facet values). Only takes effect when the visitor
   * is a {@link DocumentStoredFieldVisitor}.
   */
  @Override
  public void document(int docID, StoredFieldVisitor visitor) throws IOException {
    super.document(docID, visitor);
    if (!(visitor instanceof DocumentStoredFieldVisitor)) {
      return;
    }

    Document doc = ((DocumentStoredFieldVisitor) visitor).getDocument();
    Collection<FacetHandler<?>> facetHandlers = _facetHandlerMap.values();
    for (FacetHandler<?> facetHandler : facetHandlers) {
      String[] vals = facetHandler.getFieldValues(this, docID);
      if (vals != null) {
        String[] values = doc.getValues(facetHandler.getName());
        Set<String> storedVals = new HashSet<String>(Arrays.asList(values));

        for (String val : vals) {
          storedVals.add(val);
        }
        doc.removeField(facetHandler.getName());

        for (String val : storedVals) {
          doc.add(new StringField(facetHandler.getName(), val, Field.Store.NO));
        }
      }
    }
  }

  /** Returns the stored values of a single field for the given document. */
  public String[] getStoredFieldValue(int docid, final String fieldname) throws IOException {
    DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor(fieldname);
    super.document(docid, visitor);
    Document doc = visitor.getDocument();

    return doc.getValues(fieldname);
  }

  /**
   * Work area for loading — scratch objects shared between facet handlers,
   * keyed by their concrete class.
   */
  public static class WorkArea {
    HashMap<Class<?>, Object> map = new HashMap<Class<?>, Object>();

    @SuppressWarnings("unchecked")
    public <T> T get(Class<T> cls) {
      T obj = (T) map.get(cls);
      return obj;
    }

    public void put(Object obj) {
      map.put(obj.getClass(), obj);
    }

    public void clear() {
      map.clear();
    }

    @Override
    public String toString() {
      return map.toString();
    }
  }

  private BoboSegmentReader(AtomicReader in) {
    super(in);
  }

  /**
   * Creates a copy of this reader over a different inner reader, sharing the
   * already-loaded facet handlers, factories and facet data.
   */
  public BoboSegmentReader copy(AtomicReader in) {
    BoboSegmentReader copy = new BoboSegmentReader(in);
    copy._facetHandlerMap = this._facetHandlerMap;
    copy._facetHandlers = this._facetHandlers;
    copy._runtimeFacetHandlerFactories = this._runtimeFacetHandlerFactories;
    copy._runtimeFacetHandlerFactoryMap = this._runtimeFacetHandlerFactoryMap;
    copy._workArea = this._workArea;
    copy._facetDataMap.putAll(this._facetDataMap);
    return copy;
  }

  /** Returns the wrapped (inner) segment reader. */
  public AtomicReader getInnerReader() {
    return in;
  }
}
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package rr.org.objectweb.asm;

// NOTE(review): redundant import — ClassReader is in the same package; kept only
// because it is referenced from the F_NEW javadoc below
import rr.org.objectweb.asm.ClassReader;

/**
 * Defines the JVM opcodes, access flags and array type codes. This interface
 * does not define all the JVM opcodes because some opcodes are automatically
 * handled. For example, the xLOAD and xSTORE opcodes are automatically replaced
 * by xLOAD_n and xSTORE_n opcodes when possible. The xLOAD_n and xSTORE_n
 * opcodes are therefore not defined in this interface. Likewise for LDC,
 * automatically replaced by LDC_W or LDC2_W when necessary, WIDE, GOTO_W and
 * JSR_W.
 *
 * @author Eric Bruneton
 * @author Eugene Kuleshov
 */
public interface Opcodes {

    // ASM API versions

    int ASM4 = 4 << 16 | 0 << 8 | 0;
    int ASM5 = 5 << 16 | 0 << 8 | 0;

    // versions (class-file version numbers: minor << 16 | major)

    int V1_1 = 3 << 16 | 45;
    int V1_2 = 0 << 16 | 46;
    int V1_3 = 0 << 16 | 47;
    int V1_4 = 0 << 16 | 48;
    int V1_5 = 0 << 16 | 49;
    int V1_6 = 0 << 16 | 50;
    int V1_7 = 0 << 16 | 51;
    int V1_8 = 0 << 16 | 52;

    // access flags (trailing comment = which program elements the flag applies to)

    int ACC_PUBLIC = 0x0001; // class, field, method
    int ACC_PRIVATE = 0x0002; // class, field, method
    int ACC_PROTECTED = 0x0004; // class, field, method
    int ACC_STATIC = 0x0008; // field, method
    int ACC_FINAL = 0x0010; // class, field, method, parameter
    int ACC_SUPER = 0x0020; // class
    int ACC_SYNCHRONIZED = 0x0020; // method
    int ACC_VOLATILE = 0x0040; // field
    int ACC_BRIDGE = 0x0040; // method
    int ACC_VARARGS = 0x0080; // method
    int ACC_TRANSIENT = 0x0080; // field
    int ACC_NATIVE = 0x0100; // method
    int ACC_INTERFACE = 0x0200; // class
    int ACC_ABSTRACT = 0x0400; // class, method
    int ACC_STRICT = 0x0800; // method
    int ACC_SYNTHETIC = 0x1000; // class, field, method, parameter
    int ACC_ANNOTATION = 0x2000; // class
    int ACC_ENUM = 0x4000; // class(?) field inner
    int ACC_MANDATED = 0x8000; // parameter

    // ASM specific pseudo access flags

    int ACC_DEPRECATED = 0x20000; // class, field, method

    // types for NEWARRAY

    int T_BOOLEAN = 4;
    int T_CHAR = 5;
    int T_FLOAT = 6;
    int T_DOUBLE = 7;
    int T_BYTE = 8;
    int T_SHORT = 9;
    int T_INT = 10;
    int T_LONG = 11;

    // tags for Handle

    int H_GETFIELD = 1;
    int H_GETSTATIC = 2;
    int H_PUTFIELD = 3;
    int H_PUTSTATIC = 4;
    int H_INVOKEVIRTUAL = 5;
    int H_INVOKESTATIC = 6;
    int H_INVOKESPECIAL = 7;
    int H_NEWINVOKESPECIAL = 8;
    int H_INVOKEINTERFACE = 9;

    // stack map frame types

    /**
     * Represents an expanded frame. See {@link ClassReader#EXPAND_FRAMES}.
     */
    int F_NEW = -1;

    /**
     * Represents a compressed frame with complete frame data.
     */
    int F_FULL = 0;

    /**
     * Represents a compressed frame where locals are the same as the locals in
     * the previous frame, except that additional 1-3 locals are defined, and
     * with an empty stack.
     */
    int F_APPEND = 1;

    /**
     * Represents a compressed frame where locals are the same as the locals in
     * the previous frame, except that the last 1-3 locals are absent and with
     * an empty stack.
     */
    int F_CHOP = 2;

    /**
     * Represents a compressed frame with exactly the same locals as the
     * previous frame and with an empty stack.
     */
    int F_SAME = 3;

    /**
     * Represents a compressed frame with exactly the same locals as the
     * previous frame and with a single value on the stack.
     */
    int F_SAME1 = 4;

    // verification-type sentinels used in stack map frames
    // NOTE(review): 'new Integer' is deprecated, but these presumably need to be
    // distinct instances — confirm no identity comparison before switching to valueOf
    Integer TOP = new Integer(0);
    Integer INTEGER = new Integer(1);
    Integer FLOAT = new Integer(2);
    Integer DOUBLE = new Integer(3);
    Integer LONG = new Integer(4);
    Integer NULL = new Integer(5);
    Integer UNINITIALIZED_THIS = new Integer(6);

    // opcodes // visit method (- = idem)

    int NOP = 0; // visitInsn
    int ACONST_NULL = 1; // -
    int ICONST_M1 = 2; // -
    int ICONST_0 = 3; // -
    int ICONST_1 = 4; // -
    int ICONST_2 = 5; // -
    int ICONST_3 = 6; // -
    int ICONST_4 = 7; // -
    int ICONST_5 = 8; // -
    int LCONST_0 = 9; // -
    int LCONST_1 = 10; // -
    int FCONST_0 = 11; // -
    int FCONST_1 = 12; // -
    int FCONST_2 = 13; // -
    int DCONST_0 = 14; // -
    int DCONST_1 = 15; // -
    int BIPUSH = 16; // visitIntInsn
    int SIPUSH = 17; // -
    int LDC = 18; // visitLdcInsn
    // int LDC_W = 19; // -
    // int LDC2_W = 20; // -
    int ILOAD = 21; // visitVarInsn
    int LLOAD = 22; // -
    int FLOAD = 23; // -
    int DLOAD = 24; // -
    int ALOAD = 25; // -
    // int ILOAD_0 = 26; // -
    // int ILOAD_1 = 27; // -
    // int ILOAD_2 = 28; // -
    // int ILOAD_3 = 29; // -
    // int LLOAD_0 = 30; // -
    // int LLOAD_1 = 31; // -
    // int LLOAD_2 = 32; // -
    // int LLOAD_3 = 33; // -
    // int FLOAD_0 = 34; // -
    // int FLOAD_1 = 35; // -
    // int FLOAD_2 = 36; // -
    // int FLOAD_3 = 37; // -
    // int DLOAD_0 = 38; // -
    // int DLOAD_1 = 39; // -
    // int DLOAD_2 = 40; // -
    // int DLOAD_3 = 41; // -
    // int ALOAD_0 = 42; // -
    // int ALOAD_1 = 43; // -
    // int ALOAD_2 = 44; // -
    // int ALOAD_3 = 45; // -
    int IALOAD = 46; // visitInsn
    int LALOAD = 47; // -
    int FALOAD = 48; // -
    int DALOAD = 49; // -
    int AALOAD = 50; // -
    int BALOAD = 51; // -
    int CALOAD = 52; // -
    int SALOAD = 53; // -
    int ISTORE = 54; // visitVarInsn
    int LSTORE = 55; // -
    int FSTORE = 56; // -
    int DSTORE = 57; // -
    int ASTORE = 58; // -
    // int ISTORE_0 = 59; // -
    // int ISTORE_1 = 60; // -
    // int ISTORE_2 = 61; // -
    // int ISTORE_3 = 62; // -
    // int LSTORE_0 = 63; // -
    // int LSTORE_1 = 64; // -
    // int LSTORE_2 = 65; // -
    // int LSTORE_3 = 66; // -
    // int FSTORE_0 = 67; // -
    // int FSTORE_1 = 68; // -
    // int FSTORE_2 = 69; // -
    // int FSTORE_3 = 70; // -
    // int DSTORE_0 = 71; // -
    // int DSTORE_1 = 72; // -
    // int DSTORE_2 = 73; // -
    // int DSTORE_3 = 74; // -
    // int ASTORE_0 = 75; // -
    // int ASTORE_1 = 76; // -
    // int ASTORE_2 = 77; // -
    // int ASTORE_3 = 78; // -
    int IASTORE = 79; // visitInsn
    int LASTORE = 80; // -
    int FASTORE = 81; // -
    int DASTORE = 82; // -
    int AASTORE = 83; // -
    int BASTORE = 84; // -
    int CASTORE = 85; // -
    int SASTORE = 86; // -
    int POP = 87; // -
    int POP2 = 88; // -
    int DUP = 89; // -
    int DUP_X1 = 90; // -
    int DUP_X2 = 91; // -
    int DUP2 = 92; // -
    int DUP2_X1 = 93; // -
    int DUP2_X2 = 94; // -
    int SWAP = 95; // -
    int IADD = 96; // -
    int LADD = 97; // -
    int FADD = 98; // -
    int DADD = 99; // -
    int ISUB = 100; // -
    int LSUB = 101; // -
    int FSUB = 102; // -
    int DSUB = 103; // -
    int IMUL = 104; // -
    int LMUL = 105; // -
    int FMUL = 106; // -
    int DMUL = 107; // -
    int IDIV = 108; // -
    int LDIV = 109; // -
    int FDIV = 110; // -
    int DDIV = 111; // -
    int IREM = 112; // -
    int LREM = 113; // -
    int FREM = 114; // -
    int DREM = 115; // -
    int INEG = 116; // -
    int LNEG = 117; // -
    int FNEG = 118; // -
    int DNEG = 119; // -
    int ISHL = 120; // -
    int LSHL = 121; // -
    int ISHR = 122; // -
    int LSHR = 123; // -
    int IUSHR = 124; // -
    int LUSHR = 125; // -
    int IAND = 126; // -
    int LAND = 127; // -
    int IOR = 128; // -
    int LOR = 129; // -
    int IXOR = 130; // -
    int LXOR = 131; // -
    int IINC = 132; // visitIincInsn
    int I2L = 133; // visitInsn
    int I2F = 134; // -
    int I2D = 135; // -
    int L2I = 136; // -
    int L2F = 137; // -
    int L2D = 138; // -
    int F2I = 139; // -
    int F2L = 140; // -
    int F2D = 141; // -
    int D2I = 142; // -
    int D2L = 143; // -
    int D2F = 144; // -
    int I2B = 145; // -
    int I2C = 146; // -
    int I2S = 147; // -
    int LCMP = 148; // -
    int FCMPL = 149; // -
    int FCMPG = 150; // -
    int DCMPL = 151; // -
    int DCMPG = 152; // -
    int IFEQ = 153; // visitJumpInsn
    int IFNE = 154; // -
    int IFLT = 155; // -
    int IFGE = 156; // -
    int IFGT = 157; // -
    int IFLE = 158; // -
    int IF_ICMPEQ = 159; // -
    int IF_ICMPNE = 160; // -
    int IF_ICMPLT = 161; // -
    int IF_ICMPGE = 162; // -
    int IF_ICMPGT = 163; // -
    int IF_ICMPLE = 164; // -
    int IF_ACMPEQ = 165; // -
    int IF_ACMPNE = 166; // -
    int GOTO = 167; // -
    int JSR = 168; // -
    int RET = 169; // visitVarInsn
    int TABLESWITCH = 170; // visitTableSwitchInsn
    int LOOKUPSWITCH = 171; // visitLookupSwitch
    int IRETURN = 172; // visitInsn
    int LRETURN = 173; // -
    int FRETURN = 174; // -
    int DRETURN = 175; // -
    int ARETURN = 176; // -
    int RETURN = 177; // -
    int GETSTATIC = 178; // visitFieldInsn
    int PUTSTATIC = 179; // -
    int GETFIELD = 180; // -
    int PUTFIELD = 181; // -
    int INVOKEVIRTUAL = 182; // visitMethodInsn
    int INVOKESPECIAL = 183; // -
    int INVOKESTATIC = 184; // -
    int INVOKEINTERFACE = 185; // -
    int INVOKEDYNAMIC = 186; // visitInvokeDynamicInsn
    int NEW = 187; // visitTypeInsn
    int NEWARRAY = 188; // visitIntInsn
    int ANEWARRAY = 189; // visitTypeInsn
    int ARRAYLENGTH = 190; // visitInsn
    int ATHROW = 191; // -
    int CHECKCAST = 192; // visitTypeInsn
    int INSTANCEOF = 193; // -
    int MONITORENTER = 194; // visitInsn
    int MONITOREXIT = 195; // -
    // int WIDE = 196; // NOT VISITED
    int MULTIANEWARRAY = 197; // visitMultiANewArrayInsn
    int IFNULL = 198; // visitJumpInsn
    int IFNONNULL = 199; // -
    // int GOTO_W = 200; // -
    // int JSR_W = 201; // -
}
/*
 * This file is part of TimeWarp, licensed under the MIT License (MIT).
 *
 * Copyright (c) InspireNXE <http://github.com/InspireNXE/>
 * Copyright (c) contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.inspirenxe.timewarp.world;

import org.inspirenxe.timewarp.TimeWarp;
import org.inspirenxe.timewarp.daypart.DayPart;
import org.inspirenxe.timewarp.daypart.DayPartType;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.world.storage.WorldProperties;

import java.util.Optional;

/**
 * Holds the per-world day configuration: the length of each daypart, the
 * daypart players wake up at, and how many in-game days have passed.
 */
public class WorldDay {

    /**
     * The name of the world linked to this group.
     */
    public final String worldName;

    /**
     * Dayparts in the storage order fixed by {@link #setDayPart}:
     * 0 = default (any type not listed below, i.e. MORNING)
     * 1 = DAY
     * 2 = EVENING
     * 3 = DUSK
     * 4 = NIGHT
     * (NOTE(review): the previous comment listed only four indices and omitted DUSK.)
     */
    private final DayPart[] dayparts = new DayPart[5];
    private DayPartType wakeAtDayPart;
    private long daysPassed = 0;

    public WorldDay(String worldName) {
        this.worldName = worldName;
    }

    /**
     * Initializes all dayparts from configuration. Configured lengths below the
     * vanilla default are rejected (a warning is logged and the default is used);
     * a length of 0 explicitly skips the daypart.
     * @return {@link WorldDay} for chaining.
     */
    public WorldDay init() {
        final Optional<WorldProperties> optProperties = Sponge.getServer().getWorldProperties(worldName);
        if (optProperties.isPresent()) {
            final String rootPath = "sync.worlds." + worldName.toLowerCase();
            for (DayPartType type : DayPartType.values()) {
                long daypartValue = TimeWarp.instance.storage.getChildNode(rootPath + ".dayparts." + type.name.toLowerCase()).getLong();
                // 0 means "skip this daypart"; any other value must be at least the vanilla length
                if (daypartValue != 0 && daypartValue < type.defaultLength) {
                    TimeWarp.instance.logger.warn(String.format("Unable to use value [%1s] in [%2s] for DayPart [%3s] as the value is below "
                                    + "vanilla Minecraft length [%4s]. If you are trying to skip this DayPart please use 0 as the value.",
                            daypartValue, rootPath, type.name, type.defaultLength));
                    daypartValue = type.defaultLength;
                }
                this.setDayPart(type, new DayPart(type, daypartValue));
            }

            // parse the wake-at daypart, falling back to DAY on an unknown name
            final String wakeAtPath = rootPath + ".wake-at-daypart";
            final String dayPartCandidate = TimeWarp.instance.storage.getChildNode(wakeAtPath).getString(DayPartType.DAY.name.toUpperCase()).toUpperCase();
            try {
                wakeAtDayPart = DayPartType.valueOf(dayPartCandidate);
            } catch (IllegalArgumentException e) {
                TimeWarp.instance.logger.warn("Unable to parse [" + dayPartCandidate + "] at [" + wakeAtPath + "]. Defaulting to DayPart ["
                        + DayPartType.DAY.name.toUpperCase() + "]");
                wakeAtDayPart = DayPartType.valueOf(DayPartType.DAY.name.toUpperCase());
            }
        }
        return this;
    }

    /**
     * Gets the {@link DayPart} from the {@link DayPartType}.
     * @param type The {@link DayPartType} to get the {@link DayPart} from.
     * @return The {@link Optional<DayPart>}.
     */
    public Optional<DayPart> getDayPart(DayPartType type) {
        if (type == null) {
            return Optional.empty();
        }
        for (DayPart daypart : dayparts) {
            if (daypart.getType().equals(type)) {
                return Optional.of(daypart);
            }
        }
        return Optional.empty();
    }

    /**
     * Gets the next available {@link DayPart} with a length that is not equal to 0.
     * NOTE(review): "next" here means the first non-current daypart in storage
     * order starting from index 0, not necessarily the chronologically next one.
     * @param type The current {@link DayPartType}.
     * @return The next available {@link Optional<DayPart>}, otherwise {@link Optional#empty()}.
     */
    public Optional<DayPart> getNextDayPart(DayPartType type) {
        if (type == null) {
            return Optional.empty();
        }

        // Attempt to find the next daypart excluding the current one
        for (DayPart dayPart : dayparts) {
            if (dayPart.getType() != type && dayPart.getLength() != 0) {
                return Optional.of(dayPart);
            }
        }

        // Attempt to return the current daypart
        final Optional<DayPart> optDayPartCandidate = this.getDayPart(type);
        if (optDayPartCandidate.isPresent() && optDayPartCandidate.get().getLength() != 0) {
            return optDayPartCandidate;
        }

        return Optional.empty();
    }

    /**
     * Sets the daypart stored to the {@link DayPart} passed in.
     * @param type The {@link DayPartType} to set.
     * @param daypart The {@link DayPart} to set to.
     */
    public void setDayPart(DayPartType type, DayPart daypart) {
        switch (type) {
            case DAY:
                dayparts[1] = daypart;
                break;
            case EVENING:
                dayparts[2] = daypart;
                break;
            case DUSK:
                dayparts[3] = daypart;
                break;
            case NIGHT:
                dayparts[4] = daypart;
                break;
            default:
                // any remaining type (MORNING) occupies slot 0
                dayparts[0] = daypart;
        }
    }

    /**
     * Gets the total day length using the length of all dayparts added together.
     * @return The total day length.
     */
    public long getDayLength() {
        long length = 0;
        for (DayPart daypart : dayparts) {
            length += daypart.getLength();
        }
        return length;
    }

    /**
     * Gets the start time of the {@link DayPart}.
     * @param type The {@link DayPartType} to get the start time from.
     * @return The start time.
     */
    public long getStartTime(DayPartType type) {
        long start = 0;
        for (DayPart daypart : dayparts) {
            if (type.equals(daypart.getType())) {
                return start;
            }
            start += daypart.getLength();
        }
        return start;
    }

    /**
     * Gets the end time of the {@link DayPart}.
     * @param type The {@link DayPartType} to get the end time from.
     * @return The end time.
     */
    public long getEndTime(DayPartType type) {
        long end = getDayLength();
        // walk backwards; index 0 is intentionally not tested — if no later
        // daypart matched, the remaining 'end' equals dayparts[0]'s length,
        // which is exactly its end time
        for (int i = dayparts.length - 1; i > 0; i--) {
            if (type.equals(dayparts[i].getType())) {
                return end;
            }
            end -= dayparts[i].getLength();
        }
        return end;
    }

    /**
     * Gets the current days passed in the world related to this.
     * @return The current days passed.
     */
    public long getDaysPassed() {
        return this.daysPassed;
    }

    /**
     * Sets the current days passed.
     * @param daysPassed The current days passed.
     */
    public void setDaysPassed(long daysPassed) {
        this.daysPassed = daysPassed;
    }

    /**
     * Gets the {@link DayPartType} to wake up at.
     * @return The {@link DayPartType} to wake up at.
     */
    public DayPartType getWakeAtDayPart() {
        return this.wakeAtDayPart;
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver14;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

// Generated, immutable OXM entry for the VLAN_VID match field in the
// OpenFlow 1.4 wire format. Exact-match only: this class carries no mask
// (see the masked sibling class generated by LoxiGen for masked matches).
class OFOxmVlanVidVer14 implements OFOxmVlanVid {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmVlanVidVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    // Total on-wire length: 4-byte OXM header + 2-byte VLAN VID value.
    final static int LENGTH = 6;

    private final static OFVlanVidMatch DEFAULT_VALUE = OFVlanVidMatch.NONE;

    // OF message fields
    private final OFVlanVidMatch value;
    //
    // Immutable default instance
    final static OFOxmVlanVidVer14 DEFAULT = new OFOxmVlanVidVer14(
        DEFAULT_VALUE
    );

    // package private constructor - used by readers, builders, and factory
    OFOxmVlanVidVer14(OFVlanVidMatch value) {
        if(value == null) {
            throw new NullPointerException("OFOxmVlanVidVer14: property value cannot be null");
        }
        this.value = value;
    }

    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        // Fixed OXM header: class/field/hasMask/length packed into one word.
        return 0x80000c02L;
    }

    @Override
    public OFVlanVidMatch getValue() {
        return value;
    }

    @Override
    public MatchField<OFVlanVidMatch> getMatchField() {
        return MatchField.VLAN_VID;
    }

    @Override
    public boolean isMasked() {
        return false;
    }

    public OFOxm<OFVlanVidMatch> getCanonical() {
        // exact match OXM is always canonical
        return this;
    }

    @Override
    public OFVlanVidMatch getMask()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property mask not supported in version 1.4");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    public OFOxmVlanVid.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder seeded from an existing message; unset properties fall back to
    // the parent's values in build().
    static class BuilderWithParent implements OFOxmVlanVid.Builder {
        final OFOxmVlanVidVer14 parentMessage;

        // OF message fields
        private boolean valueSet;
        private OFVlanVidMatch value;

        BuilderWithParent(OFOxmVlanVidVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public long getTypeLen() {
            return 0x80000c02L;
        }

        @Override
        public OFVlanVidMatch getValue() {
            return value;
        }

        @Override
        public OFOxmVlanVid.Builder setValue(OFVlanVidMatch value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public MatchField<OFVlanVidMatch> getMatchField() {
            return MatchField.VLAN_VID;
        }

        @Override
        public boolean isMasked() {
            return false;
        }

        @Override
        public OFOxm<OFVlanVidMatch> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
        }

        @Override
        public OFVlanVidMatch getMask()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property mask not supported in version 1.4");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFOxmVlanVid build() {
            // Use the explicitly-set value if any, otherwise inherit the parent's.
            OFVlanVidMatch value = this.valueSet ? this.value : parentMessage.value;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            //
            return new OFOxmVlanVidVer14(
                value
            );
        }
    }

    // Stand-alone builder; unset properties fall back to DEFAULT_VALUE.
    static class Builder implements OFOxmVlanVid.Builder {
        // OF message fields
        private boolean valueSet;
        private OFVlanVidMatch value;

        @Override
        public long getTypeLen() {
            return 0x80000c02L;
        }

        @Override
        public OFVlanVidMatch getValue() {
            return value;
        }

        @Override
        public OFOxmVlanVid.Builder setValue(OFVlanVidMatch value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public MatchField<OFVlanVidMatch> getMatchField() {
            return MatchField.VLAN_VID;
        }

        @Override
        public boolean isMasked() {
            return false;
        }

        @Override
        public OFOxm<OFVlanVidMatch> getCanonical()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
        }

        @Override
        public OFVlanVidMatch getMask()throws UnsupportedOperationException {
            throw new UnsupportedOperationException("Property mask not supported in version 1.4");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        //
        @Override
        public OFOxmVlanVid build() {
            OFVlanVidMatch value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            return new OFOxmVlanVidVer14(
                value
            );
        }
    }

    final static Reader READER = new Reader();

    // Wire-format reader: validates the fixed OXM header, then reads the
    // 2-byte VLAN VID value.
    static class Reader implements OFMessageReader<OFOxmVlanVid> {
        @Override
        public OFOxmVlanVid readFrom(ChannelBuffer bb) throws OFParseError {
            // fixed value property typeLen == 0x80000c02L
            int typeLen = bb.readInt();
            if(typeLen != (int) 0x80000c02)
                throw new OFParseError("Wrong typeLen: Expected=0x80000c02L(0x80000c02L), got="+typeLen);
            OFVlanVidMatch value = OFVlanVidMatch.read2Bytes(bb);
            OFOxmVlanVidVer14 oxmVlanVidVer14 = new OFOxmVlanVidVer14(
                value
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmVlanVidVer14);
            return oxmVlanVidVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmVlanVidVer14Funnel FUNNEL = new OFOxmVlanVidVer14Funnel();

    // Guava Funnel: feeds the same bytes as the wire encoding into a hasher.
    static class OFOxmVlanVidVer14Funnel implements Funnel<OFOxmVlanVidVer14> {
        private static final long serialVersionUID = 1L;

        @Override
        public void funnel(OFOxmVlanVidVer14 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x80000c02L
            sink.putInt((int) 0x80000c02);
            message.value.putTo(sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Wire-format writer: fixed 4-byte header followed by the 2-byte value.
    static class Writer implements OFMessageWriter<OFOxmVlanVidVer14> {
        @Override
        public void write(ChannelBuffer bb, OFOxmVlanVidVer14 message) {
            // fixed value property typeLen = 0x80000c02L
            bb.writeInt((int) 0x80000c02);
            message.value.write2Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmVlanVidVer14(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmVlanVidVer14 other = (OFOxmVlanVidVer14) obj;
        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        return result;
    }
}
package name.etapic.codejam.milkshakes; import java.io.BufferedReader; import java.util.ArrayList; import java.util.BitSet; import java.util.HashSet; import java.util.List; import java.util.Set; import name.etapic.codejam.ProblemSolver; import name.etapic.codejam.Solution; /** code.google.com/codejam/contest/32016/dashboard#s=p1 * Contest Analysis (Solution) - https://code.google.com/codejam/contest/dashboard?c=32016#s=a&a=1 * * I did not notice this little nugget in the problem statement: * 'At most one of the types a customer likes will be a "malted" flavor.' * * Which turns out to be the key to solving this problem within the 8-minute time window for the large data set. * * From the Contest Analysis - * * 'On the surface, this problem appears to require solving the classic problem "Satisfiability," the canonical * example of an NP-complete problem. The customers represent clauses, the milkshake flavors represent variables, * and malted and unmalted flavors represent whether the variable is negated. * * We are not evil enough to have chosen a problem that hard! The restriction that makes this problem easier is that * the customers can only like at most one malted flavor' * * From http://en.wikipedia.org/wiki/NP-complete - * * 'the time required to solve even moderately sized versions of many of these problems can easily reach into the * billions or trillions of years, using any amount of computing power available today' * * I spent a good amount of time making this code as efficient as possible. I think I got the runtime for the large * data set down to a few billion years. I'll save it and wait for faster hardware. 
*
 */
public class MilkshakesNPComplete implements ProblemSolver {

    /**
     * Parses one problem instance from the reader (flavor count, customer
     * count, then one preference line per customer) and renders either the
     * minimal-malted batch assignment or "IMPOSSIBLE".
     */
    @Override
    public Solution solve(BufferedReader reader) throws Exception {
        int size = Integer.parseInt(reader.readLine());
        int customerCount = Integer.parseInt(reader.readLine());
        List<List<Flavor>> customers = new ArrayList<>(customerCount);
        for (int i = 0; i < customerCount; i++) {
            customers.add(new ArrayList<Flavor>());
            String[] flavorSpecs = reader.readLine().split(" ");
            // Spec format: count, then (index, malted) pairs; indices are 1-based
            // in the input and converted to 0-based here.
            for (int j = 1; j < flavorSpecs.length; j += 2) {
                int index = Integer.parseInt(flavorSpecs[j]) - 1;
                boolean malted = flavorSpecs[j + 1].equals("1");
                customers.get(i).add(new Flavor(index, malted));
            }
        }
        BitSet b = findLeastMaltedCustomerSatisfyingSolution(size, customers);
        return new Solution(render(b, size), customerCount);
    }

    // One (flavor index, malted?) preference of a customer.
    private static class Flavor {
        private final int index;
        private final boolean malted;

        private Flavor(final int index, final boolean malted) {
            this.index = index;
            this.malted = malted;
        }

        @Override
        public String toString() {
            return String.format("index=%s, malted=%s", index, malted);
        }
    }

    /**
     * Returns null if no solution can be found.
     *
     * Searches candidate assignments in increasing number of malted batches
     * (cardinality 0 upward); past the pivot it flips to "reverse logic",
     * enumerating the complement so high-cardinality assignments are checked
     * via their low-cardinality complements. This is the deliberately
     * exponential brute force discussed in the class javadoc.
     */
    private static BitSet findLeastMaltedCustomerSatisfyingSolution(int size, List<List<Flavor>> customers) {
        CustomerSatisficer satisficer = new CustomerSatisficer(size, customers);
        int cardinalityPivot = size / 2 + 1;
        if (size % 2 == 1) {
            cardinalityPivot++;
        }
        int cardinality;
        for (cardinality = 0; cardinality < cardinalityPivot; cardinality++) {
            findBitSets(size, cardinality, satisficer);
            if (satisficer.solution != null) {
                return satisficer.solution;
            }
        }
        satisficer.reverseLogic = true;
        // Descending loop over complements: a complement of cardinality c
        // represents an assignment with size - c malted batches.
        for (cardinality = cardinalityPivot; cardinality >= 0; cardinality--) {
            findBitSets(size, cardinality, satisficer);
            if (satisficer.solution != null) {
                return satisficer.solution;
            }
        }
        return null;
    }

    // Renders the assignment as space-separated 0/1 flags, or "IMPOSSIBLE".
    private static String render(BitSet b, int size) {
        if (b == null) {
            return "IMPOSSIBLE";
        }
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < size; i++) {
            builder.append(b.get(i) ? "1" : "0");
            if (i < size - 1) {
                builder.append(" ");
            }
        }
        return builder.toString();
    }

    // Callback invoked for each enumerated BitSet.
    private static interface BitSetStrategy {
        /**
         * Return true if processing should stop.
         */
        boolean execute(BitSet b);
    }

    // Strategy that simply collects (clones of) every BitSet it sees; used by
    // findBitSets to materialize sub-results before extending them.
    private static class BitSetCacher implements BitSetStrategy {
        final Set<BitSet> cache = new HashSet<>();

        @Override
        public boolean execute(BitSet b) {
            // Clone: the caller reuses and mutates 'b' between calls.
            cache.add((BitSet) b.clone());
            return false;
        }

        @Override
        public String toString() {
            return String.format("cache=%s", cache);
        }
    }

    // Strategy that tests whether a candidate assignment satisfies every
    // customer; in reverseLogic mode the candidate is treated as the
    // complement of the real assignment and flipped before being stored.
    private static class CustomerSatisficer implements BitSetStrategy {
        private final int size;
        private final List<List<Flavor>> customers;
        private boolean reverseLogic = false;
        private BitSet solution = null;

        private CustomerSatisficer(final int size, final List<List<Flavor>> customers) {
            this.size = size;
            this.customers = customers;
        }

        @Override
        public boolean execute(BitSet b) {
            boolean foundFlavor = false;
            for (List<Flavor> customer : customers) {
                foundFlavor = false;
                for (Flavor flavor : customer) {
                    // Bit set == flavor is malted in this assignment.
                    foundFlavor = b.get(flavor.index) == flavor.malted;
                    if (reverseLogic) {
                        foundFlavor = !foundFlavor;
                    }
                    if (foundFlavor) {
                        // this customer is satisfied
                        break;
                    }
                }
                if (!foundFlavor) {
                    // fail fast on first dissatisfied customer
                    return false;
                }
            }
            if (reverseLogic) {
                // Convert the complement back into the actual assignment.
                b.flip(0, size);
            }
            solution = b;
            // If it is possible to satisfy all your customers, there will be only one answer which minimizes the
            // number of malted batches.
            return true;
        }

        @Override
        public String toString() {
            return String.format("customers=%s, reverseLogic=%s, solution=%s", customers, reverseLogic, solution);
        }
    }

    /**
     * Find all BitSets of specified size and cardinality (number of bits set).
     *
     * Recursive enumeration: C(size, k) = C(size-1, k) plus C(size-1, k-1)
     * with bit size-1 set. Sub-results are materialized through a
     * BitSetCacher, which is what makes this exponential in memory and time
     * (intentionally so — see the class javadoc).
     */
    private static void findBitSets(int size, int cardinality, BitSetStrategy strategy) {
        // Cooperative cancellation hook for the contest time limit.
        if (Thread.currentThread().isInterrupted()) {
            throw new RuntimeException("Timed out.");
        }
        if (size < cardinality) {
            return;
        }
        BitSet b = new BitSet(size);
        if (cardinality == 0) {
            strategy.execute(b);
            return;
        }
        if (size == cardinality) {
            b.set(0, size);
            strategy.execute(b);
            return;
        }
        BitSetCacher cacher = new BitSetCacher();
        // Case 1: bit size-1 clear — all k bits among the first size-1 slots.
        findBitSets(size - 1, cardinality, cacher);
        for (BitSet bSub : cacher.cache) {
            b.clear();
            b.or(bSub);
            if (strategy.execute(b)) {
                return;
            }
        }
        cacher.cache.clear();
        // Case 2: bit size-1 set — k-1 bits among the first size-1 slots.
        findBitSets(size - 1, cardinality - 1, cacher);
        for (BitSet bSub : cacher.cache) {
            b.clear();
            b.or(bSub);
            b.set(size - 1);
            if (strategy.execute(b)) {
                return;
            }
        }
    }
}
/*
 * Copyright 2015 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.drools.workbench.screens.guided.rule.client.validator;

import org.drools.workbench.models.datamodel.oracle.DataType;
import org.drools.workbench.models.datamodel.rule.BaseSingleFieldConstraint;
import org.drools.workbench.models.datamodel.rule.CompositeFactPattern;
import org.drools.workbench.models.datamodel.rule.ExpressionFormLine;
import org.drools.workbench.models.datamodel.rule.ExpressionVariable;
import org.drools.workbench.models.datamodel.rule.FactPattern;
import org.drools.workbench.models.datamodel.rule.FromAccumulateCompositeFactPattern;
import org.drools.workbench.models.datamodel.rule.FromCollectCompositeFactPattern;
import org.drools.workbench.models.datamodel.rule.FromCompositeFactPattern;
import org.drools.workbench.models.datamodel.rule.FromEntryPointFactPattern;
import org.drools.workbench.models.datamodel.rule.IPattern;
import org.drools.workbench.models.datamodel.rule.RuleModel;
import org.drools.workbench.models.datamodel.rule.SingleFieldConstraint;
import org.drools.workbench.screens.guided.rule.client.editor.validator.GuidedRuleEditorValidator;
import org.drools.workbench.screens.guided.rule.client.resources.i18n.Constants;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link GuidedRuleEditorValidator}. The i18n {@link Constants}
 * bundle is mocked so each validation message can be matched against a short
 * sentinel string, and Mockito verify() calls pin down exactly which message
 * (and with which arguments) the validator requested.
 */
public class GuidedRuleEditorValidatorTest {

    // Sentinel messages returned by the mocked Constants bundle.
    private static final String MISSING_FACT_PATTERN = "Missing fact pattern";
    private static final String MISSING_VALUE_WHEN_OPERATOR_IS_SET = "Missing value when operator is set";
    private static final String MISSING_RHS_FROM = "Missing RHS from";
    private static final String MISSING_ENTRY_POINT = "Missing Entry Point";

    private RuleModel model;
    private GuidedRuleEditorValidator validator;
    private Constants constants;

    @Before
    public void setUp() throws Exception {
        model = new RuleModel();
        constants = mock( Constants.class );

        when( constants.AreasMarkedWithRedAreMandatoryPleaseSetAValueBeforeSaving() ).thenReturn( MISSING_FACT_PATTERN );
        when( constants.FactType0HasAField1ThatHasAnOperatorSetButNoValuePleaseAddAValueOrRemoveTheOperator( anyString(), anyString() ) ).thenReturn( MISSING_VALUE_WHEN_OPERATOR_IS_SET );
        when( constants.WhenUsingFromTheSourceNeedsToBeSet() ).thenReturn( MISSING_RHS_FROM );
        when( constants.PleaseSetTheEntryPoint() ).thenReturn( MISSING_ENTRY_POINT );

        validator = new GuidedRuleEditorValidator( model, constants );
    }

    // An empty model is valid.
    @Test
    public void testEmpty() throws Exception {
        assertTrue( validator.isValid() );
        assertTrue( validator.getErrors().isEmpty() );
    }

    // A bare fact pattern is valid on its own.
    @Test
    public void testValidateFactPattern() throws Exception {
        model.addLhsItem( new FactPattern() );
        assertTrue( validator.isValid() );
        assertTrue( validator.getErrors().isEmpty() );
    }

    // A composite ("not") pattern with no nested fact pattern is invalid.
    @Test
    public void testValidateCompositeFactPatternFalse() throws Exception {
        CompositeFactPattern not = new CompositeFactPattern();
        not.setType( "not" );
        model.addLhsItem( not );
        assertFalse( validator.isValid() );
        assertTrue( validator.getErrors().get( 0 ).equals( MISSING_FACT_PATTERN ) );
    }

    // A composite ("not") pattern containing a fact pattern is valid.
    @Test
    public void testValidateCompositeFactPatternTrue() throws Exception {
        CompositeFactPattern not = new CompositeFactPattern();
        not.setType( "not" );
        not.addFactPattern( new FactPattern() );
        model.addLhsItem( not );
        assertTrue( validator.isValid() );
        assertTrue( validator.getErrors().isEmpty() );
    }

    // An operator without a value is reported, with fact type and field name.
    @Test
    public void testMissingValueWhenOperatorExists() throws Exception {
        FactPattern pattern = new FactPattern( "Person" );
        SingleFieldConstraint constraint = new SingleFieldConstraint( "age" );
        constraint.setOperator( "==" );
        pattern.addConstraint( constraint );
        model.lhs = new IPattern[]{ pattern };

        assertFalse( validator.isValid() );
        assertEquals( 1, validator.getErrors().size() );
        assertEquals( MISSING_VALUE_WHEN_OPERATOR_IS_SET, validator.getErrors().get( 0 ) );
        verify( constants ).FactType0HasAField1ThatHasAnOperatorSetButNoValuePleaseAddAValueOrRemoveTheOperator( "Person", "age" );
    }

    // The same check applies to patterns nested inside a composite pattern.
    @Test
    public void testMissingValueWhenOperatorExistsInCompositePattern() throws Exception {
        FactPattern pattern = new FactPattern( "Person" );
        SingleFieldConstraint constraint = new SingleFieldConstraint( "age" );
        constraint.setOperator( "==" );
        pattern.addConstraint( constraint );
        CompositeFactPattern not = new CompositeFactPattern();
        not.setType( "not" );
        not.addFactPattern( pattern );
        model.lhs = new IPattern[]{ not };

        assertFalse( validator.isValid() );
        assertEquals( 1, validator.getErrors().size() );
        assertEquals( MISSING_VALUE_WHEN_OPERATOR_IS_SET, validator.getErrors().get( 0 ) );
        verify( constants ).FactType0HasAField1ThatHasAnOperatorSetButNoValuePleaseAddAValueOrRemoveTheOperator( "Person", "age" );
    }

    // "== null" is a complete operator and needs no value.
    @Test
    public void testMissingValueWhenOperatorExistsIsNull() throws Exception {
        FactPattern pattern = new FactPattern( "Person" );
        SingleFieldConstraint constraint = new SingleFieldConstraint( "age" );
        constraint.setOperator( "== null" );
        pattern.addConstraint( constraint );
        CompositeFactPattern not = new CompositeFactPattern();
        not.setType( "not" );
        not.addFactPattern( pattern );
        model.lhs = new IPattern[]{ not };

        assertTrue( validator.isValid() );
        assertTrue( validator.getErrors().isEmpty() );
    }

    // "!= null" likewise needs no value.
    @Test
    public void testMissingValueWhenOperatorExistsIsNotNull() throws Exception {
        FactPattern pattern = new FactPattern( "Person" );
        SingleFieldConstraint constraint = new SingleFieldConstraint( "age" );
        constraint.setOperator( "!= null" );
        pattern.addConstraint( constraint );
        CompositeFactPattern not = new CompositeFactPattern();
        not.setType( "not" );
        not.addFactPattern( pattern );
        model.lhs = new IPattern[]{ not };

        assertTrue( validator.isValid() );
        assertTrue( validator.getErrors().isEmpty() );
    }

    // Operator-without-value is also detected inside a "from" pattern.
    @Test
    public void testMissingValueInFrom() throws Exception {
        FactPattern boundPattern = new FactPattern( "Person" );
        boundPattern.setBoundName( "person" );
        boundPattern.addConstraint( new SingleFieldConstraint( "addresses" ) );
        FactPattern pattern = new FactPattern( "Address" );
        SingleFieldConstraint constraint = new SingleFieldConstraint( "street" );
        constraint.setOperator( "!=" );
        pattern.addConstraint( constraint );
        FromCompositeFactPattern fromCompositeFactPattern = new FromCompositeFactPattern();
        fromCompositeFactPattern.setFactPattern( pattern );
        ExpressionFormLine expression = new ExpressionFormLine();
        expression.setBinding( "person.addresses" );
        fromCompositeFactPattern.setExpression( expression );
        model.lhs = new IPattern[]{ boundPattern, fromCompositeFactPattern };

        assertFalse( validator.isValid() );
        assertEquals( 1, validator.getErrors().size() );
        assertEquals( MISSING_VALUE_WHEN_OPERATOR_IS_SET, validator.getErrors().get( 0 ) );
        verify( constants ).FactType0HasAField1ThatHasAnOperatorSetButNoValuePleaseAddAValueOrRemoveTheOperator( "Address", "street" );
    }

    // A fully-specified "from" pattern is valid.
    @Test
    public void testWorkingFrom() throws Exception {
        FactPattern boundPattern = new FactPattern( "Person" );
        boundPattern.setBoundName( "person" );
        boundPattern.addConstraint( new SingleFieldConstraint( "addresses" ) );
        FactPattern pattern = new FactPattern( "Address" );
        SingleFieldConstraint constraint = new SingleFieldConstraint( "street" );
        pattern.addConstraint( constraint );
        FromCompositeFactPattern fromCompositeFactPattern = new FromCompositeFactPattern();
        fromCompositeFactPattern.setFactPattern( pattern );
        ExpressionFormLine expression = new ExpressionFormLine();
        expression.setBinding( "person.addresses" );
        fromCompositeFactPattern.setExpression( expression );
        model.lhs = new IPattern[]{ boundPattern, fromCompositeFactPattern };

        assertTrue( validator.isValid() );
    }

    // An empty "from" pattern reports both a missing pattern and a missing source.
    @Test
    public void testEmptyFrom() throws Exception {
        model.lhs = new IPattern[]{ new FromCompositeFactPattern() };

        assertFalse( validator.isValid() );
        assertEquals( 2, validator.getErrors().size() );
        assertEquals( MISSING_FACT_PATTERN, validator.getErrors().get( 0 ) );
        assertEquals( MISSING_RHS_FROM, validator.getErrors().get( 1 ) );
        verify( constants ).AreasMarkedWithRedAreMandatoryPleaseSetAValueBeforeSaving();
        verify( constants ).WhenUsingFromTheSourceNeedsToBeSet();
    }

    // A "from" whose source is an expression variable is valid.
    @Test
    public void testValidFromCompositeFactPattern() throws Exception {
        FactPattern factPattern = new FactPattern( "SomeList" );
        factPattern.setBoundName( "list" );
        FromCompositeFactPattern fromCompositeFactPattern = new FromCompositeFactPattern();
        fromCompositeFactPattern.setFactPattern( new FactPattern( "Person" ) );
        ExpressionFormLine expression = new ExpressionFormLine();
        expression.appendPart( new ExpressionVariable( "list", "SomeList", "SomeList" ) );
        fromCompositeFactPattern.setExpression( expression );
        model.lhs = new IPattern[]{ fromCompositeFactPattern };

        assertTrue( validator.isValid() );
    }

    // A "from" with an empty (unbound) expression reports a missing source.
    @Test
    public void testMissingRHSPartInFrom() throws Exception {
        FactPattern pattern = new FactPattern( "Address" );
        SingleFieldConstraint constraint = new SingleFieldConstraint( "street" );
        pattern.addConstraint( constraint );
        FromCompositeFactPattern fromCompositeFactPattern = new FromCompositeFactPattern();
        fromCompositeFactPattern.setFactPattern( pattern );
        ExpressionFormLine expression = new ExpressionFormLine();
        fromCompositeFactPattern.setExpression( expression );
        model.lhs = new IPattern[]{ fromCompositeFactPattern };

        assertFalse( validator.isValid() );
        assertEquals( 1, validator.getErrors().size() );
        assertEquals( MISSING_RHS_FROM, validator.getErrors().get( 0 ) );
        verify( constants ).WhenUsingFromTheSourceNeedsToBeSet();
    }

    // A fully-specified "from accumulate" pattern is valid.
    @Test
    public void testFromAccumulateCompositePattern() throws Exception {
        FactPattern pattern1 = new FactPattern( "Person" );
        SingleFieldConstraint constraint1 = new SingleFieldConstraint( "name" );
        constraint1.setOperator( "==" );
        constraint1.setValue( "Toni" );
        pattern1.addConstraint( constraint1 );
        FactPattern pattern2 = new FactPattern( "Address" );
        SingleFieldConstraint constraint2 = new SingleFieldConstraint( "street" );
        constraint2.setOperator( "!=" );
        constraint2.setValue( "some street" );
        pattern2.addConstraint( constraint2 );
        FromAccumulateCompositeFactPattern fromAccumulateCompositeFactPattern = new FromAccumulateCompositeFactPattern();
        fromAccumulateCompositeFactPattern.setSourcePattern( pattern1 );
        fromAccumulateCompositeFactPattern.setFactPattern( pattern2 );
        ExpressionFormLine expression = new ExpressionFormLine();
        expression.setBinding( "person.addresses" );
        fromAccumulateCompositeFactPattern.setExpression( expression );
        model.lhs = new IPattern[]{ fromAccumulateCompositeFactPattern };

        assertTrue( validator.isValid() );
    }

    // Missing values are reported for both the source and the target pattern,
    // but not a missing "from" source (the function is set).
    @Test
    public void testFromAccumulateCompositePatternMissingValues() throws Exception {
        FactPattern pattern1 = new FactPattern( "Person" );
        SingleFieldConstraint constraint1 = new SingleFieldConstraint( "name" );
        constraint1.setOperator( "==" );
        pattern1.addConstraint( constraint1 );
        FactPattern pattern2 = new FactPattern( "Address" );
        SingleFieldConstraint constraint2 = new SingleFieldConstraint( "street" );
        constraint2.setOperator( "!=" );
        pattern2.addConstraint( constraint2 );
        FromAccumulateCompositeFactPattern fromAccumulateCompositeFactPattern = new FromAccumulateCompositeFactPattern();
        fromAccumulateCompositeFactPattern.setSourcePattern( pattern1 );
        fromAccumulateCompositeFactPattern.setFactPattern( pattern2 );
        fromAccumulateCompositeFactPattern.setFunction( "test()" );
        model.lhs = new IPattern[]{ fromAccumulateCompositeFactPattern };

        assertFalse( validator.isValid() );
        assertEquals( 2, validator.getErrors().size() );
        verify( constants, never() ).WhenUsingFromTheSourceNeedsToBeSet();
        verify( constants ).FactType0HasAField1ThatHasAnOperatorSetButNoValuePleaseAddAValueOrRemoveTheOperator( "Person", "name" );
        verify( constants ).FactType0HasAField1ThatHasAnOperatorSetButNoValuePleaseAddAValueOrRemoveTheOperator( "Address", "street" );
    }

    // With all accumulate code snippets empty, only the missing source is reported.
    @Test
    public void testFromAccumulateCompositePatternMissingValues2() throws Exception {
        FactPattern pattern1 = new FactPattern( "Person" );
        FactPattern pattern2 = new FactPattern( "Address" );
        FromAccumulateCompositeFactPattern fromAccumulateCompositeFactPattern = new FromAccumulateCompositeFactPattern();
        fromAccumulateCompositeFactPattern.setSourcePattern( pattern1 );
        fromAccumulateCompositeFactPattern.setFactPattern( pattern2 );
        fromAccumulateCompositeFactPattern.setFunction( "" );
        fromAccumulateCompositeFactPattern.setReverseCode( "" );
        fromAccumulateCompositeFactPattern.setInitCode( "" );
        fromAccumulateCompositeFactPattern.setActionCode( "" );
        fromAccumulateCompositeFactPattern.setResultCode( "" );
        model.lhs = new IPattern[]{ fromAccumulateCompositeFactPattern };

        assertFalse( validator.isValid() );
        assertEquals( 1, validator.getErrors().size() );
        verify( constants ).WhenUsingFromTheSourceNeedsToBeSet();
    }

    // Custom accumulate code counts as a source, so only missing values are reported.
    @Test
    public void testFromAccumulateCompositePatternMissingValuesWithExistingFrom() throws Exception {
        FactPattern pattern1 = new FactPattern( "Person" );
        SingleFieldConstraint constraint1 = new SingleFieldConstraint( "name" );
        constraint1.setOperator( "==" );
        pattern1.addConstraint( constraint1 );
        FactPattern pattern2 = new FactPattern( "Address" );
        SingleFieldConstraint constraint2 = new SingleFieldConstraint( "street" );
        constraint2.setOperator( "!=" );
        pattern2.addConstraint( constraint2 );
        FromAccumulateCompositeFactPattern fromAccumulateCompositeFactPattern = new FromAccumulateCompositeFactPattern();
        fromAccumulateCompositeFactPattern.setSourcePattern( pattern1 );
        fromAccumulateCompositeFactPattern.setFactPattern( pattern2 );
        fromAccumulateCompositeFactPattern.setInitCode( "int i = 0" );
        fromAccumulateCompositeFactPattern.setActionCode( " i++;" );
        fromAccumulateCompositeFactPattern.setReverseCode( "i--;" );
        fromAccumulateCompositeFactPattern.setResultCode( "return i" );
        model.lhs = new IPattern[]{ fromAccumulateCompositeFactPattern };

        assertFalse( validator.isValid() );
        assertEquals( 2, validator.getErrors().size() );
        verify( constants, never() ).WhenUsingFromTheSourceNeedsToBeSet();
        verify( constants ).FactType0HasAField1ThatHasAnOperatorSetButNoValuePleaseAddAValueOrRemoveTheOperator( "Person", "name" );
        verify( constants ).FactType0HasAField1ThatHasAnOperatorSetButNoValuePleaseAddAValueOrRemoveTheOperator( "Address", "street" );
    }

    // An empty entry-point pattern reports missing pattern and missing entry point.
    @Test
    public void testEmptyFromEntryPointFactPattern() throws Exception {
        model.lhs = new IPattern[]{ new FromEntryPointFactPattern() };

        assertFalse( validator.isValid() );
        assertEquals( 2, validator.getErrors().size() );
        assertEquals( MISSING_FACT_PATTERN, validator.getErrors().get( 0 ) );
        assertEquals( MISSING_ENTRY_POINT, validator.getErrors().get( 1 ) );
        verify( constants ).AreasMarkedWithRedAreMandatoryPleaseSetAValueBeforeSaving();
        verify( constants ).PleaseSetTheEntryPoint();
    }

    // A named entry point with a fact pattern is valid.
    @Test
    public void testValidFromEntryPointFactPattern() throws Exception {
        FromEntryPointFactPattern fromEntryPointFactPattern = new FromEntryPointFactPattern();
        fromEntryPointFactPattern.setFactPattern( new FactPattern( "Person" ) );
        fromEntryPointFactPattern.setEntryPointName( "entryPoint" );
        model.lhs = new IPattern[]{ fromEntryPointFactPattern };

        assertTrue( validator.isValid() );
    }

    // Comparing a field to an expression value counts as a value being set.
    @Test
    public void testComparingFieldToExpression() throws Exception {
        FactPattern f1 = new FactPattern();
        f1.setBoundName( "f1" );
        FactPattern f2 = new FactPattern();
        SingleFieldConstraint constraint = new SingleFieldConstraint();
        constraint.setOperator( "==" );
        constraint.getExpressionValue().appendPart( new ExpressionVariable( "field", "java.lang.Number", "Number" ) );
        f2.addConstraint( constraint );
        model.lhs = new IPattern[]{ f1, f2 };

        assertTrue( validator.isValid() );
    }

    // An empty literal is tolerated for String fields (empty string is a value)...
    @Test
    public void testEmptyLiteralStringFieldConstraints() throws Exception {
        FactPattern f1 = new FactPattern( "Applicant" );
        f1.setBoundName( "$a" );
        SingleFieldConstraint constraint = new SingleFieldConstraint();
        constraint.setConstraintValueType( BaseSingleFieldConstraint.TYPE_LITERAL );
        constraint.setFieldType( DataType.TYPE_STRING );
        constraint.setFactType( "Applicant" );
        constraint.setOperator( "==" );
        constraint.setFieldName( "name" );
        f1.addConstraint( constraint );
        model.lhs = new IPattern[]{ f1 };

        assertTrue( validator.isValid() );
    }

    // ...but not for non-String (e.g. integer) fields.
    @Test
    public void testEmptyLiteralNonStringFieldConstraints() throws Exception {
        FactPattern f1 = new FactPattern( "Applicant" );
        f1.setBoundName( "$a" );
        SingleFieldConstraint constraint = new SingleFieldConstraint();
        constraint.setConstraintValueType( BaseSingleFieldConstraint.TYPE_LITERAL );
        constraint.setFieldType( DataType.TYPE_NUMERIC_INTEGER );
        constraint.setFactType( "Applicant" );
        constraint.setOperator( "==" );
        constraint.setFieldName( "age" );
        f1.addConstraint( constraint );
        model.lhs = new IPattern[]{ f1 };

        assertFalse( validator.isValid() );
    }

    // A "from collect" without a right (collect source) pattern is invalid.
    @Test
    public void testMissingValueInFromCollect() throws Exception {
        FactPattern pattern = new FactPattern( "Person" );
        pattern.setBoundName("person");
        FromCompositeFactPattern fromCompositeFactPattern = new FromCollectCompositeFactPattern();
        fromCompositeFactPattern.setFactPattern( pattern );
        model.lhs = new IPattern[]{ fromCompositeFactPattern };

        assertFalse( validator.isValid() );
        assertEquals( 1, validator.getErrors().size() );
        assertEquals( MISSING_FACT_PATTERN, validator.getErrors().get( 0 ) );
        verify( constants ).AreasMarkedWithRedAreMandatoryPleaseSetAValueBeforeSaving();
    }

    // A "from collect" without a left (target) fact pattern is invalid.
    @Test
    public void testMissingFactTypeInFromCollect() throws Exception {
        FromCollectCompositeFactPattern fromCompositeFactPattern = new FromCollectCompositeFactPattern();
        fromCompositeFactPattern.setRightPattern(new FactPattern("Person"));
        model.lhs = new IPattern[]{ fromCompositeFactPattern };

        assertFalse( validator.isValid() );
        assertEquals( 1, validator.getErrors().size() );
        assertEquals( MISSING_FACT_PATTERN, validator.getErrors().get( 0 ) );
        verify( constants ).AreasMarkedWithRedAreMandatoryPleaseSetAValueBeforeSaving();
    }
}
/* * Copyright 2019 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.server.services.impl; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.apache.commons.io.FileUtils; import org.assertj.core.api.Assertions; import org.drools.compiler.kie.builder.impl.InternalKieModule; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.kie.api.KieServices; import org.kie.api.builder.KieFileSystem; import org.kie.api.builder.KieModule; import org.kie.scanner.KieMavenRepository; import org.kie.server.api.KieServerConstants; import org.kie.server.api.KieServerEnvironment; import org.kie.server.api.commands.CommandScript; import org.kie.server.api.commands.CreateContainerCommand; import org.kie.server.api.commands.DisposeContainerCommand; import org.kie.server.api.commands.UpdateReleaseIdCommand; import org.kie.server.api.commands.UpdateScannerCommand; import org.kie.server.api.marshalling.MarshallingFormat; import org.kie.server.api.model.KieContainerResource; import org.kie.server.api.model.KieContainerResourceFilter; import org.kie.server.api.model.KieContainerResourceList; import org.kie.server.api.model.KieContainerStatus; import org.kie.server.api.model.KieScannerResource; import org.kie.server.api.model.KieScannerStatus; 
import org.kie.server.api.model.KieServerCommand; import org.kie.server.api.model.KieServerConfigItem; import org.kie.server.api.model.KieServerInfo; import org.kie.server.api.model.KieServerMode; import org.kie.server.api.model.KieServerStateInfo; import org.kie.server.api.model.KieServiceResponse; import org.kie.server.api.model.Message; import org.kie.server.api.model.ReleaseId; import org.kie.server.api.model.ServiceResponse; import org.kie.server.api.model.ServiceResponsesList; import org.kie.server.api.model.Severity; import org.kie.server.controller.api.KieServerController; import org.kie.server.controller.api.model.KieServerSetup; import org.kie.server.services.api.KieContainerInstance; import org.kie.server.services.api.KieControllerNotConnectedException; import org.kie.server.services.api.KieServerExtension; import org.kie.server.services.api.KieServerRegistry; import org.kie.server.services.api.SupportedTransports; import org.kie.server.services.impl.controller.DefaultRestControllerImpl; import org.kie.server.services.impl.storage.KieServerState; import org.kie.server.services.impl.storage.KieServerStateRepository; import org.kie.server.services.impl.storage.file.KieServerStateFileRepository; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public abstract class AbstractKieServerImplTest { static final File REPOSITORY_DIR = new File("target/repository-dir"); static final String KIE_SERVER_ID = "kie-server-impl-test"; static final String GROUP_ID = "org.kie.server.test"; static final String 
PRODUCTION_MODE_VERSION = "1.0.0.Final"; static final String DEVELOPMENT_MODE_VERSION = "1.0.0-SNAPSHOT"; protected KieServerMode mode; protected String testVersion; protected KieServerImpl kieServer; protected org.kie.api.builder.ReleaseId releaseId; protected String origServerId = null; protected List<KieServerExtension> extensions; abstract KieServerMode getTestMode(); @Before public void setupKieServerImpl() throws Exception { extensions = new ArrayList<>(); mode = getTestMode(); testVersion = getVersion(mode); origServerId = KieServerEnvironment.getServerId(); System.setProperty(KieServerConstants.KIE_SERVER_MODE, mode.name()); System.setProperty(KieServerConstants.KIE_SERVER_ID, KIE_SERVER_ID); KieServerEnvironment.setServerId(KIE_SERVER_ID); FileUtils.deleteDirectory(REPOSITORY_DIR); FileUtils.forceMkdir(REPOSITORY_DIR); kieServer = new KieServerImpl(new KieServerStateFileRepository(REPOSITORY_DIR)) { @Override public List<KieServerExtension> getServerExtensions() { return extensions; } }; kieServer.init(); } protected String getVersion(KieServerMode mode) { return mode.equals(KieServerMode.DEVELOPMENT) ? 
DEVELOPMENT_MODE_VERSION : PRODUCTION_MODE_VERSION; } @After public void cleanUp() { if (kieServer != null) { kieServer.destroy(); } KieServerEnvironment.setServerId(origServerId); } @Test public void testCheckMode() { assertSame(mode, kieServer.getInfo().getResult().getMode()); } @Test public void testReadinessCheck() { assertTrue(kieServer.isKieServerReady()); } @Test(timeout=10000) public void testReadinessCheckDelayedStart() throws Exception { CountDownLatch latch = new CountDownLatch(1); CountDownLatch startedlatch = new CountDownLatch(1); kieServer.destroy(); kieServer = delayedKieServer(latch, startedlatch); assertFalse(kieServer.isKieServerReady()); latch.countDown(); startedlatch.await(); assertTrue(kieServer.isKieServerReady()); } @Test public void testHealthCheck() { List<Message> healthMessages = kieServer.healthCheck(false); assertEquals(healthMessages.size(), 0); } @Test public void testHealthCheckWithReport() { List<Message> healthMessages = kieServer.healthCheck(true); assertEquals(healthMessages.size(), 2); Message header = healthMessages.get(0); assertEquals(Severity.INFO, header.getSeverity()); assertEquals(2, header.getMessages().size()); Message footer = healthMessages.get(1); assertEquals(Severity.INFO, footer.getSeverity()); assertEquals(1, footer.getMessages().size()); } @Test(timeout=10000) public void testHealthCheckDelayedStart() throws Exception { CountDownLatch latch = new CountDownLatch(1); CountDownLatch startedlatch = new CountDownLatch(1); kieServer.destroy(); kieServer = delayedKieServer(latch, startedlatch); assertFalse(kieServer.isKieServerReady()); List<Message> healthMessages = kieServer.healthCheck(false); assertEquals(healthMessages.size(), 1); Message notReady = healthMessages.get(0); assertEquals(Severity.ERROR, notReady.getSeverity()); assertEquals(1, notReady.getMessages().size()); latch.countDown(); startedlatch.await(); assertTrue(kieServer.isKieServerReady()); healthMessages = kieServer.healthCheck(false); 
assertEquals(healthMessages.size(), 0); } @Test public void testHealthCheckFailedContainer() { kieServer.destroy(); kieServer = new KieServerImpl(new KieServerStateFileRepository(REPOSITORY_DIR)) { @Override protected List<KieContainerInstanceImpl> getContainers() { List<KieContainerInstanceImpl> containers = new ArrayList<>(); KieContainerInstanceImpl container = new KieContainerInstanceImpl("test", KieContainerStatus.FAILED); containers.add(container); return containers; } }; kieServer.init(); List<Message> healthMessages = kieServer.healthCheck(false); assertEquals(healthMessages.size(), 1); Message failedContainer = healthMessages.get(0); assertEquals(Severity.WARN, failedContainer.getSeverity()); assertEquals(1, failedContainer.getMessages().size()); assertEquals("KIE Container 'test' is in FAILED state", failedContainer.getMessages().iterator().next()); } @Test public void testHealthCheckFailedContainerManagementDisabled() { System.setProperty(KieServerConstants.KIE_SERVER_MGMT_API_DISABLED, "true"); kieServer.destroy(); kieServer = new KieServerImpl(new KieServerStateFileRepository(REPOSITORY_DIR)) { @Override protected List<KieContainerInstanceImpl> getContainers() { List<KieContainerInstanceImpl> containers = new ArrayList<>(); KieContainerInstanceImpl container = new KieContainerInstanceImpl("test", KieContainerStatus.FAILED); containers.add(container); return containers; } }; kieServer.init(); List<Message> healthMessages = kieServer.healthCheck(false); assertEquals(1, healthMessages.size()); Message failedContainer = healthMessages.get(0); assertEquals(Severity.ERROR, failedContainer.getSeverity()); assertEquals(1, failedContainer.getMessages().size()); assertEquals("KIE Container 'test' is in FAILED state", failedContainer.getMessages().iterator().next()); System.clearProperty(KieServerConstants.KIE_SERVER_MGMT_API_DISABLED); } @Test public void testHealthCheckFailedExtension() { extensions.add(new KieServerExtension() { @Override public List<Message> 
healthCheck(boolean report) { List<Message> messages = KieServerExtension.super.healthCheck(report); messages.add(new Message(Severity.ERROR, "TEST extension is unhealthy")); return messages; } @Override public void updateContainer(String id, KieContainerInstance kieContainerInstance, Map<String, Object> parameters) { } @Override public boolean isUpdateContainerAllowed(String id, KieContainerInstance kieContainerInstance, Map<String, Object> parameters) { return false; } @Override public boolean isInitialized() { return true; } @Override public boolean isActive() { return true; } @Override public void init(KieServerImpl kieServer, KieServerRegistry registry) { } @Override public Integer getStartOrder() { return 10; } @Override public List<Object> getServices() { return null; } @Override public String getImplementedCapability() { return "TEST"; } @Override public String getExtensionName() { return "TEST"; } @Override public <T> T getAppComponents(Class<T> serviceType) { return null; } @Override public List<Object> getAppComponents(SupportedTransports type) { return null; } @Override public void disposeContainer(String id, KieContainerInstance kieContainerInstance, Map<String, Object> parameters) { } @Override public void destroy(KieServerImpl kieServer, KieServerRegistry registry) { } @Override public void createContainer(String id, KieContainerInstance kieContainerInstance, Map<String, Object> parameters) { } }); kieServer.init(); List<Message> healthMessages = kieServer.healthCheck(false); assertEquals(healthMessages.size(), 1); Message failedContainer = healthMessages.get(0); assertEquals(Severity.ERROR, failedContainer.getSeverity()); assertEquals(1, failedContainer.getMessages().size()); assertEquals("TEST extension is unhealthy", failedContainer.getMessages().iterator().next()); } @Test public void testManagementDisabledDefault() { assertNull(kieServer.checkAccessability()); } @Test public void testManagementDisabledConfigured() { 
System.setProperty(KieServerConstants.KIE_SERVER_MGMT_API_DISABLED, "true"); try { kieServer.destroy(); kieServer = new KieServerImpl(new KieServerStateFileRepository(REPOSITORY_DIR)); kieServer.init(); ServiceResponse<?> forbidden = kieServer.checkAccessability(); assertForbiddenResponse(forbidden); } finally { System.clearProperty(KieServerConstants.KIE_SERVER_MGMT_API_DISABLED); } } @Test public void testManagementDisabledConfiguredViaCommandService() { System.setProperty(KieServerConstants.KIE_SERVER_MGMT_API_DISABLED, "true"); try { kieServer.destroy(); kieServer = new KieServerImpl(new KieServerStateFileRepository(REPOSITORY_DIR)); kieServer.init(); KieContainerCommandServiceImpl commandService = new KieContainerCommandServiceImpl(kieServer, kieServer.getServerRegistry()); List<KieServerCommand> commands = new ArrayList<>(); commands.add(new CreateContainerCommand()); commands.add(new DisposeContainerCommand()); commands.add(new UpdateScannerCommand()); commands.add(new UpdateReleaseIdCommand()); CommandScript commandScript = new CommandScript(commands); ServiceResponsesList responseList = commandService.executeScript(commandScript, MarshallingFormat.JAXB, null); assertNotNull(responseList); List<ServiceResponse<?>> responses = responseList.getResponses(); assertEquals(4, responses.size()); for (ServiceResponse<?> forbidden : responses) { assertForbiddenResponse(forbidden); } } finally { System.clearProperty(KieServerConstants.KIE_SERVER_MGMT_API_DISABLED); } } @Test // https://issues.jboss.org/browse/RHBPMS-4087 public void testPersistScannerState() { String containerId = "persist-scanner-state"; createEmptyKjar(containerId); // create the container and update the scanner KieContainerResource kieContainerResource = new KieContainerResource(containerId, new ReleaseId(releaseId)); kieServer.createContainer(containerId, kieContainerResource); KieScannerResource kieScannerResource = new KieScannerResource(KieScannerStatus.STARTED, 20000L); 
kieServer.updateScanner(containerId, kieScannerResource); KieServerStateRepository stateRepository = new KieServerStateFileRepository(REPOSITORY_DIR); KieServerState state = stateRepository.load(KIE_SERVER_ID); Set<KieContainerResource> containers = state.getContainers(); Assertions.assertThat(containers).hasSize(1); KieContainerResource container = containers.iterator().next(); Assertions.assertThat(container.getScanner()).isEqualTo(kieScannerResource); KieScannerResource updatedKieScannerResource = new KieScannerResource(KieScannerStatus.DISPOSED); kieServer.updateScanner(containerId, updatedKieScannerResource); // create new state repository instance to avoid caching via 'knownStates' // this simulates the server restart (since the status is loaded from filesystem after restart) stateRepository = new KieServerStateFileRepository(REPOSITORY_DIR); state = stateRepository.load(KIE_SERVER_ID); containers = state.getContainers(); Assertions.assertThat(containers).hasSize(1); container = containers.iterator().next(); Assertions.assertThat(container.getScanner()).isEqualTo(updatedKieScannerResource); kieServer.disposeContainer(containerId); } @Test // https://issues.jboss.org/browse/JBPM-5288 public void testCreateScannerWhenCreatingContainer() { String containerId = "scanner-state-when-creating-container"; createEmptyKjar(containerId); // create the container (provide scanner info as well) KieContainerResource kieContainerResource = new KieContainerResource(containerId, new ReleaseId(releaseId)); KieScannerResource kieScannerResource = new KieScannerResource(KieScannerStatus.STARTED, 20000L); kieContainerResource.setScanner(kieScannerResource); ServiceResponse<KieContainerResource> createResponse = kieServer.createContainer(containerId, kieContainerResource); Assertions.assertThat(createResponse.getType()).isEqualTo(ServiceResponse.ResponseType.SUCCESS); Assertions.assertThat(createResponse.getResult().getScanner()).isEqualTo(kieScannerResource); 
ServiceResponse<KieContainerResource> getResponse = kieServer.getContainerInfo(containerId); Assertions.assertThat(getResponse.getType()).isEqualTo(ServiceResponse.ResponseType.SUCCESS); Assertions.assertThat(getResponse.getResult().getScanner()).isEqualTo(kieScannerResource); kieServer.disposeContainer(containerId); } @Test public void testCreateContainerValidationNullContainer() { String containerId = "container-to-create"; createEmptyKjar(containerId); ServiceResponse<KieContainerResource> createResponse = kieServer.createContainer(containerId, null); Assertions.assertThat(createResponse.getType()).isEqualTo(ServiceResponse.ResponseType.FAILURE); } @Test public void testCreateContainerValidationNullRelease() { String containerId = "container-to-create"; createEmptyKjar(containerId); // create the container (provide scanner info as well) KieContainerResource kieContainerResource = new KieContainerResource(containerId, null); KieScannerResource kieScannerResource = new KieScannerResource(KieScannerStatus.STARTED, 20000L); kieContainerResource.setScanner(kieScannerResource); ServiceResponse<KieContainerResource> createResponse = kieServer.createContainer(containerId, kieContainerResource); Assertions.assertThat(createResponse.getType()).isEqualTo(ServiceResponse.ResponseType.FAILURE); } @Test public void testUpdateContainer() { KieServerExtension extension = mock(KieServerExtension.class); when(extension.isUpdateContainerAllowed(any(), any(), any())).thenReturn(true); extensions.add(extension); String containerId = "container-to-update"; startContainerToUpdate(containerId, getVersion(mode)); ServiceResponse<ReleaseId> updateResponse = kieServer.updateContainerReleaseId(containerId, new ReleaseId(releaseId), true); Assertions.assertThat(updateResponse.getType()).isEqualTo(ServiceResponse.ResponseType.SUCCESS); verify(extension).isUpdateContainerAllowed(anyString(), any(), any()); verify(extension).updateContainer(any(), any(), any()); 
kieServer.disposeContainer(containerId); } @Test public void testUpdateContainerWithExtensionNotAllowing() { KieServerExtension extension = mock(KieServerExtension.class); when(extension.isUpdateContainerAllowed(any(), any(), any())).thenReturn(false); extensions.add(extension); String containerId = "container-to-update"; startContainerToUpdate(containerId, getVersion(mode)); ServiceResponse<ReleaseId> updateResponse = kieServer.updateContainerReleaseId(containerId, new ReleaseId(this.releaseId), true); Assertions.assertThat(updateResponse.getType()).isEqualTo(ServiceResponse.ResponseType.FAILURE); verify(extension).isUpdateContainerAllowed(anyString(), any(), any()); verify(extension, never()).updateContainer(any(), any(), any()); kieServer.disposeContainer(containerId); } @Test public void testUpdateContainerWithNullReleaseID() { KieServerExtension extension = mock(KieServerExtension.class); extensions.add(extension); String containerId = "container-to-update"; startContainerToUpdate(containerId, getVersion(mode)); ServiceResponse<ReleaseId> updateResponse = kieServer.updateContainerReleaseId(containerId, null, true); Assertions.assertThat(updateResponse.getType()).isEqualTo(ServiceResponse.ResponseType.FAILURE); verify(extension, never()).isUpdateContainerAllowed(anyString(), any(), any()); verify(extension, never()).updateContainer(any(), any(), any()); kieServer.disposeContainer(containerId); } protected void startContainerToUpdate(String containerId, String version) { createEmptyKjar(containerId, version); ReleaseId releaseId = new ReleaseId(this.releaseId); // create the container (provide scanner info as well) KieContainerResource kieContainerResource = new KieContainerResource(containerId, releaseId); KieScannerResource kieScannerResource = new KieScannerResource(KieScannerStatus.STARTED, 20000L); kieContainerResource.setScanner(kieScannerResource); ServiceResponse<KieContainerResource> createResponse = kieServer.createContainer(containerId, 
kieContainerResource); Assertions.assertThat(createResponse.getType()).isEqualTo(ServiceResponse.ResponseType.SUCCESS); Assertions.assertThat(createResponse.getResult().getScanner()).isEqualTo(kieScannerResource); ServiceResponse<KieContainerResource> getResponse = kieServer.getContainerInfo(containerId); Assertions.assertThat(getResponse.getType()).isEqualTo(ServiceResponse.ResponseType.SUCCESS); Assertions.assertThat(getResponse.getResult().getScanner()).isEqualTo(kieScannerResource); } @Test //https://issues.redhat.com/browse/JBPM-9435 public void kieServerStateResponseTest() { System.setProperty(KieServerConstants.CFG_KIE_USER, "kieserver"); System.setProperty(KieServerConstants.CFG_KIE_PASSWORD, "kieserver1!"); System.setProperty(KieServerConstants.CFG_KIE_TOKEN, "abcd-abcd-abcd-abcd"); System.setProperty(KieServerConstants.CFG_KIE_CONTROLLER_USER, "ControllerUser"); System.setProperty(KieServerConstants.CFG_KIE_CONTROLLER_PASSWORD, "ControllerUser1234"); System.setProperty(KieServerConstants.CFG_KIE_CONTROLLER_TOKEN, "abcd-abcd-abcd-abcd"); try { kieServer.destroy(); kieServer = new KieServerImpl(new KieServerStateFileRepository(REPOSITORY_DIR)); kieServer.init(); ServiceResponse<KieServerStateInfo> kieServerStateResponse = kieServer.getServerState(); assertNotNull(kieServerStateResponse); Assertions.assertThat(kieServerStateResponse.getType()).isEqualTo(ServiceResponse.ResponseType.SUCCESS); List<KieServerConfigItem> configItems = kieServerStateResponse.getResult().getConfiguration() .getConfigItems(); assertTrue(configItems.contains(new KieServerConfigItem(KieServerConstants.CFG_KIE_USER, "kieserver", "java.lang.String"))); assertFalse(configItems.contains(new KieServerConfigItem(KieServerConstants.CFG_KIE_PASSWORD, "kieserver1!", "java.lang.String"))); assertFalse(configItems.contains(new KieServerConfigItem(KieServerConstants.CFG_KIE_TOKEN, "abcd-abcd-abcd-abcd", "java.lang.String"))); assertTrue(configItems.contains(new 
KieServerConfigItem(KieServerConstants.CFG_KIE_CONTROLLER_USER, "ControllerUser", "java.lang.String"))); assertFalse(configItems.contains(new KieServerConfigItem(KieServerConstants.CFG_KIE_CONTROLLER_PASSWORD, "ControllerUser1234", "java.lang.String"))); assertFalse(configItems.contains(new KieServerConfigItem(KieServerConstants.CFG_KIE_CONTROLLER_TOKEN, "abcd-abcd-abcd-abcd", "java.lang.String"))); } finally { System.clearProperty(KieServerConstants.CFG_KIE_USER); System.clearProperty(KieServerConstants.CFG_KIE_PASSWORD); System.clearProperty(KieServerConstants.CFG_KIE_TOKEN); System.clearProperty(KieServerConstants.CFG_KIE_CONTROLLER_USER); System.clearProperty(KieServerConstants.CFG_KIE_CONTROLLER_PASSWORD); System.clearProperty(KieServerConstants.CFG_KIE_CONTROLLER_TOKEN); } } @Test public void testExecutorPropertiesInStateRepository() { KieServerStateFileRepository stateRepository = new KieServerStateFileRepository(REPOSITORY_DIR); KieServerState state = stateRepository.load(KIE_SERVER_ID); String executorInterval = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_INTERVAL); String executorRetries = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_RETRIES); String executorPool = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_POOL); String executorTimeUnit = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_TIME_UNIT); String executorJMSQueue = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_JMS_QUEUE); String executorDisabled = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_DISABLED); assertNull(executorInterval); assertNull(executorRetries); assertNull(executorPool); assertNull(executorTimeUnit); assertNull(executorJMSQueue); assertNull(executorDisabled); try { System.setProperty(KieServerConstants.CFG_EXECUTOR_INTERVAL, "4"); System.setProperty(KieServerConstants.CFG_EXECUTOR_RETRIES, "7"); 
System.setProperty(KieServerConstants.CFG_EXECUTOR_POOL, "11"); System.setProperty(KieServerConstants.CFG_EXECUTOR_TIME_UNIT, "HOURS"); System.setProperty(KieServerConstants.CFG_EXECUTOR_JMS_QUEUE, "queue/MY.OWN.QUEUE"); System.setProperty(KieServerConstants.CFG_EXECUTOR_DISABLED, "true"); stateRepository.clearCache(); state = stateRepository.load(KIE_SERVER_ID); executorInterval = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_INTERVAL); executorRetries = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_RETRIES); executorPool = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_POOL); executorTimeUnit = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_TIME_UNIT); executorJMSQueue = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_JMS_QUEUE); executorDisabled = state.getConfiguration().getConfigItemValue(KieServerConstants.CFG_EXECUTOR_DISABLED); assertNotNull(executorInterval); assertNotNull(executorRetries); assertNotNull(executorPool); assertNotNull(executorTimeUnit); assertNotNull(executorJMSQueue); assertNotNull(executorDisabled); assertEquals("4", executorInterval); assertEquals("7", executorRetries); assertEquals("11", executorPool); assertEquals("HOURS", executorTimeUnit); assertEquals("queue/MY.OWN.QUEUE", executorJMSQueue); assertEquals("true", executorDisabled); } finally { System.clearProperty(KieServerConstants.CFG_EXECUTOR_INTERVAL); System.clearProperty(KieServerConstants.CFG_EXECUTOR_RETRIES); System.clearProperty(KieServerConstants.CFG_EXECUTOR_POOL); System.clearProperty(KieServerConstants.CFG_EXECUTOR_TIME_UNIT); System.clearProperty(KieServerConstants.CFG_EXECUTOR_JMS_QUEUE); System.clearProperty(KieServerConstants.CFG_EXECUTOR_DISABLED); } } protected KieServerImpl delayedKieServer(CountDownLatch latch, CountDownLatch startedlatch) { KieServerImpl server = new KieServerImpl(new 
KieServerStateFileRepository(REPOSITORY_DIR)) { @Override public void markAsReady() { super.markAsReady(); startedlatch.countDown(); } @Override public KieServerController getController() { return new DefaultRestControllerImpl(getServerRegistry()) { @Override public KieServerSetup connect(KieServerInfo serverInfo) { try { if (latch.await(10, TimeUnit.MILLISECONDS)) { return new KieServerSetup(); } throw new KieControllerNotConnectedException("Unable to connect to any controller"); } catch (InterruptedException e) { throw new KieControllerNotConnectedException("Unable to connect to any controller"); } } }; } }; server.init(); return server; } protected void assertForbiddenResponse(ServiceResponse<?> forbidden) { assertNotNull(forbidden); assertEquals(KieServiceResponse.ResponseType.FAILURE, forbidden.getType()); assertEquals("KIE Server management api is disabled", forbidden.getMsg()); } protected void assertReleaseIds(String containerId, ReleaseId configuredReleaseId, ReleaseId resolvedReleaseId, long timeoutMillis) throws InterruptedException { long timeSpentWaiting = 0; while (timeSpentWaiting < timeoutMillis) { ServiceResponse<KieContainerResourceList> listResponse = kieServer.listContainers(KieContainerResourceFilter.ACCEPT_ALL); Assertions.assertThat(listResponse.getType()).isEqualTo(ServiceResponse.ResponseType.SUCCESS); List<KieContainerResource> containers = listResponse.getResult().getContainers(); for (KieContainerResource container : containers) { if (configuredReleaseId.equals(container.getReleaseId()) && resolvedReleaseId.equals(container.getResolvedReleaseId())) { return; } } Thread.sleep(200); timeSpentWaiting += 200L; } Assertions.fail("Waiting too long for container " + containerId + " to have expected releaseIds updated! 
" + "expected: releaseId=" + configuredReleaseId + ", resolvedReleaseId=" + resolvedReleaseId); } protected void createEmptyKjar(String artifactId) { createEmptyKjar(artifactId, testVersion); } protected void createEmptyKjar(String artifactId, String version) { // create empty kjar; content does not matter KieServices kieServices = KieServices.Factory.get(); KieFileSystem kfs = kieServices.newKieFileSystem(); releaseId = kieServices.newReleaseId(GROUP_ID, artifactId, version); KieModule kieModule = kieServices.newKieBuilder(kfs ).buildAll().getKieModule(); KieMavenRepository.getKieMavenRepository().installArtifact(releaseId, (InternalKieModule)kieModule, createPomFile(artifactId, version ) ); kieServices.getRepository().addKieModule(kieModule); } protected File createPomFile(String artifactId, String version) { String pomContent = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" + " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + "\n" + " <groupId>org.kie.server.test</groupId>\n" + " <artifactId>" + artifactId + "</artifactId>\n" + " <version>" + version + "</version>\n" + " <packaging>pom</packaging>\n" + "</project>"; try { File file = new File("target/" + artifactId + "-1.0.0.Final.pom"); FileUtils.write(file, pomContent); return file; } catch (IOException e) { throw new RuntimeException(e); } } }
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.workdocs.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the DescribeResourcePermissions operation: identifies the resource
 * whose permissions are listed, with optional pagination controls.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/workdocs-2016-05-01/DescribeResourcePermissions"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeResourcePermissionsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The ID of the resource. */
    private String resourceId;
    /** The maximum number of items to return with this call. */
    private Integer limit;
    /** The marker for the next set of results, as returned by a previous call. */
    private String marker;

    /**
     * Sets the ID of the resource.
     *
     * @param resourceId
     *        The ID of the resource.
     */
    public void setResourceId(String resourceId) {
        this.resourceId = resourceId;
    }

    /**
     * Returns the ID of the resource.
     *
     * @return The ID of the resource.
     */
    public String getResourceId() {
        return this.resourceId;
    }

    /**
     * Sets the ID of the resource and returns this request for call chaining.
     *
     * @param resourceId
     *        The ID of the resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeResourcePermissionsRequest withResourceId(String resourceId) {
        setResourceId(resourceId);
        return this;
    }

    /**
     * Sets the maximum number of items to return with this call.
     *
     * @param limit
     *        The maximum number of items to return with this call.
     */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }

    /**
     * Returns the maximum number of items to return with this call.
     *
     * @return The maximum number of items to return with this call.
     */
    public Integer getLimit() {
        return this.limit;
    }

    /**
     * Sets the maximum number of items to return and returns this request for call chaining.
     *
     * @param limit
     *        The maximum number of items to return with this call.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeResourcePermissionsRequest withLimit(Integer limit) {
        setLimit(limit);
        return this;
    }

    /**
     * Sets the marker for the next set of results (received from a previous call).
     *
     * @param marker
     *        The marker for the next set of results.
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the marker for the next set of results (received from a previous call).
     *
     * @return The marker for the next set of results.
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * Sets the pagination marker and returns this request for call chaining.
     *
     * @param marker
     *        The marker for the next set of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeResourcePermissionsRequest withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null fields are rendered; the trailing comma after each of the
        // first two fields matches the SDK's generated format exactly.
        final StringBuilder text = new StringBuilder("{");
        if (getResourceId() != null) {
            text.append("ResourceId: ").append(getResourceId()).append(",");
        }
        if (getLimit() != null) {
            text.append("Limit: ").append(getLimit()).append(",");
        }
        if (getMarker() != null) {
            text.append("Marker: ").append(getMarker());
        }
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rejects null, so no separate null check is required.
        if (!(obj instanceof DescribeResourcePermissionsRequest)) {
            return false;
        }
        DescribeResourcePermissionsRequest that = (DescribeResourcePermissionsRequest) obj;
        return fieldsEqual(getResourceId(), that.getResourceId())
                && fieldsEqual(getLimit(), that.getLimit())
                && fieldsEqual(getMarker(), that.getMarker());
    }

    /** Null-safe equality: true when both are null or a.equals(b). */
    private static boolean fieldsEqual(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getResourceId() == null ? 0 : getResourceId().hashCode());
        result = prime * result + (getLimit() == null ? 0 : getLimit().hashCode());
        result = prime * result + (getMarker() == null ? 0 : getMarker().hashCode());
        return result;
    }

    @Override
    public DescribeResourcePermissionsRequest clone() {
        return (DescribeResourcePermissionsRequest) super.clone();
    }

}
package mj.ocraptor.extraction.image_processing;

import java.awt.image.BufferedImage;
import java.io.File;
import mj.ocraptor.MainController;
import mj.ocraptor.MainController.Status;
import mj.ocraptor.configuration.Config;
import mj.ocraptor.configuration.properties.ConfigBool;
import mj.ocraptor.configuration.properties.ConfigInteger;
import mj.ocraptor.console.Platform;
import mj.ocraptor.console.Platform.Os;
import mj.ocraptor.events.EventManager;
import mj.ocraptor.file_handler.executer.CommandExecutor;
import mj.ocraptor.file_handler.executer.handler_impl.SimpleOutput;
import mj.ocraptor.tools.St;
import net.sourceforge.tess4j.Tesseract1;
import net.sourceforge.tess4j.TesseractException;
import org.apache.commons.io.FileUtils;

/**
 * Image OCR extractor backed by Tess4J (Tesseract). Text can be extracted
 * either in-process ({@link #ocr(File, String, String)}) or by spawning an
 * external JVM running a Tess4J wrapper ({@link #ocrExternal}), which allows
 * timeout/pause control and memory isolation.
 *
 * NOTE(review): fields {@code cfg}, {@code preprocessor},
 * {@code lastGivenBufferedImage}, {@code lastGivenImageFile} and
 * {@code lastProcessedBufferedImage} are presumably inherited from
 * {@link ImageTextExtractor} — not visible in this file chunk.
 */
public class ImageTextExtractorTess4j extends ImageTextExtractor {
  private final org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(getClass());

  // Images smaller than these thresholds are upscaled during preprocessing
  // (see preProcessImage); larger ones are preprocessed without resizing.
  private static final int WIDTH_THRESHOLD_TO_RESIZE = 1500;
  private static final int HEIGHT_THRESHOLD_TO_RESIZE = 1500;
  private static final int ONE_SECOND_IN_MS = 1000;
  // Poll interval (ms) for the external-process watchdog loop.
  private static final int CHECK_TIMEOUT_INTERVAL = 10;
  // Directory prefix of the bundled JRE's java binary; empty = use PATH.
  private String javaPath = "";

  /**
   * Creates the extractor and resolves the bundled-JRE java path (if enabled).
   */
  public ImageTextExtractorTess4j() {
    super();
    init();
  }

  /**
   * Resolves {@link #javaPath} to the platform-specific portable JRE binary
   * directory when the configuration requests the built-in JRE.
   */
  private void init() {
    // try {
    // ImageTextExtractorTess4j.copyNativeFiles();
    // } catch (Exception e) {
    // String message = "Could not copy tesseracts native files. Shutting down...";
    // LOG.error(message, e);
    // JOptionPane.showMessageDialog(null, message + "\n\n"
    // + StringTools.trimToLengthIndicatorRight(ExceptionUtils.getStackTrace(e), 500));
    // // TODO: logging
    // System.exit(1);
    // }
    if (this.cfg.useBuildInJRE()) {
      final String basePath = Config.getBinsFolder() + File.separator + "portable-java" + File.separator;
      Os os = Platform.getSystem();
      if (os == Os.LINUX) {
        javaPath = basePath + "lin-x86-64/bin/";
      } else if (os == Os.OSX) {
        javaPath = basePath + "osx-x86-64/bin/";
      } else if (os == Os.WINDOWS) {
        javaPath = basePath + "win-x86-64\\bin\\";
      }
    }
  }

  // Tesseract page segmentation mode 1 = automatic page segmentation with OSD.
  private static final int PSM_MODE = 1;
  private static final String TESSERACT_FOLDER = "res/tess";

  /**
   * Runs Tesseract in-process on the given image file.
   *
   * @param file         image file to OCR
   * @param originalPath path of the originating document (unused here; kept for
   *                     signature symmetry with {@link #ocrExternal})
   * @param language     tesseract language string, e.g. "eng" or "eng+deu"
   * @return recognized text, or null if OCR failed
   */
  private String ocr(File file, String originalPath, String language) {
    // TODO:
    Tesseract1 instance = new Tesseract1();
    instance.setPageSegMode(PSM_MODE);
    instance.setDatapath(TESSERACT_FOLDER);
    instance.setLanguage(language);
    try {
      String result = instance.doOCR(file);
      return result;
    } catch (TesseractException e) {
      // NOTE(review): failure is swallowed and null returned; caller treats
      // null as "no text". Consider logging via LOG instead.
      e.printStackTrace();
    }
    return null;
  }

  private static final String TESS4J_WRAPPER_MAIN_CLASS = "CommandLineInterpreter";

  /**
   * Runs OCR in an external JVM (Tess4J wrapper) and supervises the process:
   * kills it on timeout or application stop, and on pause kills it and re-runs
   * OCR in-process once the application resumes.
   *
   * @param file         image file to OCR
   * @param originalPath original document path, used for failure reporting
   * @param language     tesseract language string
   * @return stdout of the wrapper process (the recognized text), or null
   */
  private String ocrExternal(File file, String originalPath, String language) {
    // Classpath separator is ';' on Windows, ':' elsewhere.
    String command = javaPath + "java -Dfile.encoding=UTF-8 -Xmx256m -cp \"" + Config.getTess4jWrapperBinPath()
        + (Platform.getSystem() == Os.WINDOWS ? ";" : ":") + Config.getLibraryFolderPath() + "/*\" "
        + TESS4J_WRAPPER_MAIN_CLASS + " \"" + file.getAbsolutePath() + "\" \"" + language + "\"";
    // LOG.info(command);
    String errOutput = null, stdOutput = null;
    SimpleOutput eventHandler = new SimpleOutput();
    CommandExecutor bashExecuter = new CommandExecutor(Platform.getSystem(), eventHandler);
    bashExecuter.setCommand(command);
    Thread executorThread = new Thread(bashExecuter);
    MainController controller = MainController.inst();
    // NOTE(review): command is built by concatenation above and can never be
    // null here; the null check is dead.
    if (command != null && !command.trim().isEmpty()) {
      executorThread.start();
      final long startTime = System.currentTimeMillis();
      boolean pause = false;
      // Watchdog loop: polls until the worker thread dies, the timeout
      // elapses, or the application is stopped/paused.
      while (!Thread.currentThread().isInterrupted() && executorThread.isAlive()) {
        long duration = System.currentTimeMillis() - startTime;
        if (!pause) {
          pause = (controller.getStatus() == Status.PAUSED);
        }
        // monitoring, if timeout occurs --> kill process
        try {
          Thread.sleep(CHECK_TIMEOUT_INTERVAL);
          final int timeout = this.cfg.getProp(ConfigInteger.PROCESSING_TIMEOUT_IN_SECONDS) * ONE_SECOND_IN_MS;
          if (duration > timeout || (controller.getStatus() == Status.STOPPED) || pause) {
            bashExecuter.killProcess();
            if (duration > timeout) {
              EventManager.instance().failedToProcessFile(
                  "Timeout processing image with OCR-Engine.", originalPath);
            }
            break;
          }
        } catch (InterruptedException e) {
          // NOTE(review): interrupt status is not restored here.
          e.printStackTrace();
        }
      }
      // if application is paused, kill all current processes
      // and restart them on resume
      if (pause) {
        while (!Thread.currentThread().isInterrupted() && (controller.getStatus() != Status.STOPPED)) {
          try {
            Thread.sleep(100);
          } catch (InterruptedException e) {
            e.printStackTrace();
          }
          if (controller.getStatus() != Status.PAUSED) {
            // Resume: redo the work in-process rather than re-spawning.
            return this.ocr(file, originalPath, language);
          }
        }
      }
    }
    errOutput = eventHandler.getErrOut();
    stdOutput = eventHandler.getStdOut();
    if (bashExecuter.isCommandStillRunning()) {
      executorThread.interrupt();
    }
    // @formatter:off
    // Known-harmless tesseract stderr messages that do not invalidate stdout.
    String[] acceptedMessages = new String[] {
        "using default language params",
        "weak margin",
        "test blob"
    };
    // @formatter:on
    if (errOutput != null && !errOutput.trim().isEmpty()) {
      for (String acceptedMessage : acceptedMessages) {
        if (errOutput.toLowerCase().contains(acceptedMessage)) {
          return stdOutput;
        }
      }
      LOG.error(errOutput);
    }
    // System.out.println(stdOutput);
    // System.out.println(errOutput);
    return stdOutput;
  }

  /**
   * In-memory OCR variant; not implemented — always returns null.
   * Callers fall through to the file-based path instead.
   */
  private String ocr(BufferedImage image, String originalPath, String language) {
    // TODO:
    return null;
  }

  /**
   * Joins the requested languages into a single tesseract language string
   * ("eng+deu+...") and dispatches to the image- or file-based OCR path.
   * If both image and file are given, the file result wins (it is assigned
   * last).
   *
   * @return recognized text, or null
   */
  private String extractText(BufferedImage image, File file, String originalPath, String... languages) {
    // ------------------------------------------------ //
    // --
    // ------------------------------------------------ //
    String parsedText = null;
    try {
      String languageStrings = "";
      for (String lang : languages) {
        languageStrings += lang + "+";
      }
      // Drop the trailing '+'.
      languageStrings = St.removeLastCharacters(languageStrings, 1);
      if (image != null) {
        parsedText = ocr(image, originalPath, languageStrings);
      }
      if (file != null) {
        parsedText = ocr(file, originalPath, languageStrings);
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
    return parsedText;
  }

  /**
   * OCRs a BufferedImage. The preprocessed form of the most recent image is
   * cached (identity comparison) so repeated calls on the same image skip
   * preprocessing. The image is written to a temp file and OCR'd from disk.
   *
   * @return recognized text, or null when OCR is disabled or fails
   */
  @Override
  public String extractText(BufferedImage image, String originalPath, String... languages) {
    if (image != null && cfg.getProp(ConfigBool.ENABLE_IMAGE_OCR)) {
      BufferedImage processedImage = null;
      if (this.lastGivenBufferedImage != null && this.lastProcessedBufferedImage != null
          && this.lastGivenBufferedImage == image) {
        processedImage = this.lastProcessedBufferedImage;
      } else {
        processedImage = preProcessImage(image);
        this.lastGivenBufferedImage = image;
        this.lastProcessedBufferedImage = processedImage;
      }
      File tempImageFile = this.preprocessor.bufferedImageToFile(processedImage);
      if (tempImageFile != null) {
        return extractText(null, tempImageFile, originalPath, languages);
      }
    }
    return null;
  }

  /**
   * OCRs an image file, with the same one-entry preprocessing cache as the
   * BufferedImage overload (keyed by file identity).
   *
   * @return recognized text, or null when OCR is disabled or fails
   */
  @Override
  public String extractText(File file, String originalPath, String... languages) {
    if (file != null && cfg.getProp(ConfigBool.ENABLE_IMAGE_OCR)) {
      BufferedImage processedImage = null;
      if (this.lastGivenImageFile != null && this.lastProcessedBufferedImage != null
          && this.lastGivenImageFile == file) {
        processedImage = this.lastProcessedBufferedImage;
      } else {
        BufferedImage image = getPreprocessor().fileToBufferedImage(file);
        if (image != null) {
          processedImage = preProcessImage(image);
          this.lastGivenImageFile = file;
          this.lastProcessedBufferedImage = processedImage;
        }
      }
      File tempImageFile = this.preprocessor.bufferedImageToFile(processedImage);
      return extractText(null, tempImageFile, originalPath, languages);
    }
    return null;
  }

  /**
   * Preprocesses an image for OCR; small images (below either threshold) are
   * additionally resized by the preprocessor (boolean flag true).
   *
   * @param image source image
   * @return preprocessed image
   */
  private BufferedImage preProcessImage(BufferedImage image) {
    BufferedImage processedImage = null;
    if (image.getWidth() < WIDTH_THRESHOLD_TO_RESIZE || image.getHeight() < HEIGHT_THRESHOLD_TO_RESIZE) {
      processedImage = getPreprocessor().preprocessForOCR(image, true);
    } else {
      processedImage = getPreprocessor().preprocessForOCR(image, false);
    }
    return processedImage;
  }

  // ------------------------------------------------ //
  // --
  // ------------------------------------------------ //

  /**
   * Copies Tesseract's native libraries next to the application jar (non-dev
   * mode only) and registers them for deletion on exit.
   *
   * NOTE(review): basePath is assigned from Config.getJarFileFolder() and then
   * immediately overwritten with new File("") — the copy therefore targets the
   * parent of the empty path (current working directory's parent). Looks like
   * leftover debugging; confirm intent.
   */
  public static void copyNativeFiles() throws Exception {
    if (!Config.devMode()) {
      // try {
      final String nativeLibPath = Config.getTess4jNativeLibrariesFolderPath();
      if (nativeLibPath != null) {
        final File path = new File(nativeLibPath);
        if (path.exists() && path.isDirectory()) {
          final File[] libs = path.listFiles();
          File basePath = Config.getJarFileFolder();
          basePath = new File("");
          if (basePath != null) {
            for (File lib : libs) {
              final File newLibFile = new File(basePath.getParent(), lib.getName());
              if (!newLibFile.exists()) {
                FileUtils.copyFile(lib, newLibFile);
                newLibFile.deleteOnExit();
                // Belt-and-braces cleanup in addition to deleteOnExit.
                Runtime.getRuntime().addShutdownHook(new Thread() {
                  @Override
                  public void run() {
                    if (newLibFile.exists() && newLibFile.canWrite()) {
                      newLibFile.delete();
                    }
                  }
                });
              }
            }
          }
        }
      }
      // } catch (Exception e) {
      // String message =
      // "Could not copy tesseracts native files. Shutting down...";
      // LOG.error(message, e);
      // JOptionPane.showMessageDialog(null, message + "\n\n"
      // +
      // StringTools.trimToLengthIndicatorRight(ExceptionUtils.getStackTrace(e),
      // 500));
      // // TODO: logging
      // System.exit(1);
      // }
    }
  }
}
package io.indexr.io;

import com.google.common.base.Preconditions;

import org.apache.hadoop.fs.FileStatus;
import org.apache.spark.unsafe.Platform;

import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

import io.indexr.util.IOUtil;

/**
 * A positional (random-access) byte source. Implementations read bytes at an
 * absolute position; an optional {@code readBase} offset lets a reader expose
 * a sub-range of an underlying source as if it started at position 0.
 */
public interface ByteBufferReader extends Closeable {
    /**
     * Read exactly {@code dst.remaining()} bytes from this ByteBufferReader from <i>position</i> into <i>dst</i>.
     *
     * Whether this function may or may not change the position of the datasource is implementation specific.
     */
    void read(long position, ByteBuffer dst) throws IOException;

    /**
     * Read exactly {@code length} bytes at {@code position} into
     * {@code buffer[offset .. offset + length)}.
     */
    default void read(long position, byte[] buffer, int offset, int length) throws IOException {
        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer);
        byteBuffer.position(offset);
        byteBuffer.limit(offset + length);
        read(position, byteBuffer);
    }

    // These methods should only be used in test/assert scenarios.

    /** Reads a 4-byte int at {@code position} (platform byte order via Platform.getInt). */
    default int readInt(long position) throws IOException {
        byte[] valBuffer = new byte[4];
        read(position, valBuffer, 0, 4);
        return Platform.getInt(valBuffer, Platform.BYTE_ARRAY_OFFSET);
    }

    /** Reads an 8-byte long at {@code position} (platform byte order via Platform.getLong). */
    default long readLong(long position) throws IOException {
        byte[] valBuffer = new byte[8];
        read(position, valBuffer, 0, 8);
        return Platform.getLong(valBuffer, Platform.BYTE_ARRAY_OFFSET);
    }

    /** @return whether {@code position} lies inside the readable range; default is always true. */
    default boolean exists(long position) throws IOException {
        return true;
    }

    @Override
    default void close() throws IOException {}

    /** Optional debug name used in error messages; default is a no-op. */
    default void setName(String name) {}

    /** Factory that opens a reader whose position 0 maps to {@code readBase} in the source. */
    @FunctionalInterface
    interface Opener {
        ByteBufferReader open(long readBase) throws IOException;

        /**
         * Only open when read methods are called.
         */
        default ByteBufferReader openOnRead(long readBase) throws IOException {
            return new OpenOnReadBBR(this, readBase);
        }

        /**
         * Wrap a ByteBufferReader into an {@link Opener}.
         * Note that the close will <b>not</b> be passed to the opener.
         */
        static Opener create(ByteBufferReader reader) {
            // Usually the user can open many times,
            // and most of the time we don't want the reader really be closed,
            // so let's set it null.
            return b -> ByteBufferReader.of(reader, b, null);
        }

        //static Opener create(FSDataInputStream input, long size) {
        //    return b -> ByteBufferReader.of(input, size, b, null);
        //}

        static Opener create(FileChannel file) {
            return b -> ByteBufferReader.of(file, b, null);
        }

        static Opener create(ByteBuffer buffer) {
            return b -> ByteBufferReader.of(buffer, (int) b, null);
        }

        /** Opens the file at {@code path}; closing the reader closes the channel. */
        static Opener create(Path path) {
            return b -> {
                FileChannel fileChannel = FileChannel.open(path, StandardOpenOption.READ);
                ByteBufferReader bbr = ByteBufferReader.of(fileChannel, b, fileChannel::close);
                bbr.setName(path.toString());
                return bbr;
            };
        }

        /** HDFS variant; file size and block count are resolved eagerly, once. */
        static Opener create(org.apache.hadoop.fs.FileSystem fileSystem,
                             org.apache.hadoop.fs.Path path) throws IOException {
            FileStatus status = fileSystem.getFileStatus(path);
            Preconditions.checkState(status != null, "File on %s not exists", path.toString());
            Preconditions.checkState(status.isFile(), "%s should be a file", path.toString());
            long size = status.getLen();
            int blockCount = fileSystem.getFileBlockLocations(status, 0, size).length;
            return b -> {
                return DFSByteBufferReader.open(fileSystem, path, size, blockCount, b);
            };
        }

        /** HDFS variant with pre-computed size/blockCount (avoids a namenode round trip). */
        static Opener create(org.apache.hadoop.fs.FileSystem fileSystem,
                             org.apache.hadoop.fs.Path path,
                             long size,
                             int blockCount) throws IOException {
            return b -> {
                return DFSByteBufferReader.open(fileSystem, path, size, blockCount, b);
            };
        }
    }

    /**
     * Wraps {@code reader} so that position 0 maps to {@code readBase}.
     * {@code close} (nullable) is invoked on close; the wrapped reader itself is not closed.
     */
    static ByteBufferReader of(ByteBufferReader reader, long readBase, Closeable close) throws IOException {
        return new ByteBufferReader() {
            @Override
            public void read(long position, ByteBuffer dst) throws IOException {
                reader.read(readBase + position, dst);
            }

            @Override
            public void read(long position, byte[] buffer, int offset, int length) throws IOException {
                reader.read(readBase + position, buffer, offset, length);
            }

            @Override
            public boolean exists(long position) throws IOException {
                return reader.exists(readBase + position);
            }

            @Override
            public void close() throws IOException {
                if (close != null) {
                    close.close();
                }
            }
        };
    }

    /**
     * Reader over a FileChannel; reads are absolute (pread-style) and the
     * channel position is untouched. {@code close} (nullable) runs on close.
     */
    static ByteBufferReader of(FileChannel file, long readBase, Closeable close) throws IOException {
        return new ByteBufferReader() {
            String name;

            @Override
            public void read(long position, ByteBuffer dst) throws IOException {
                try {
                    IOUtil.readFully(file, readBase + position, dst);
                } catch (Exception e) {
                    // Attach the debug name so failures identify the file.
                    throw new IOException(String.format("name: %s", name), e);
                }
            }

            @Override
            public boolean exists(long position) throws IOException {
                return position >= 0 && readBase + position < file.size();
            }

            @Override
            public void close() throws IOException {
                if (close != null) {
                    close.close();
                }
            }

            @Override
            public void setName(String name) {
                this.name = name;
            }
        };
    }

    /**
     * Reader over an in-memory ByteBuffer. {@code close} (nullable) runs on close.
     */
    static ByteBufferReader of(ByteBuffer byteBuffer, int readBase, Closeable close) throws IOException {
        return new ByteBufferReader() {
            @Override
            public void read(long position, ByteBuffer dst) throws IOException {
                IOUtil.readFully(byteBuffer, (int) (readBase + position), dst, dst.remaining());
            }

            @Override
            public void read(long position, byte[] buffer, int offset, int length) {
                // FIX: previously positioned at `position`, ignoring readBase —
                // inconsistent with read(position, dst) and exists() above,
                // which both offset by readBase.
                byteBuffer.position((int) (readBase + position));
                byteBuffer.get(buffer, offset, length);
            }

            @Override
            public boolean exists(long position) throws IOException {
                return position >= 0 && readBase + position < byteBuffer.limit();
            }

            @Override
            public void close() throws IOException {
                if (close != null) {
                    close.close();
                }
            }
        };
    }
}
/*
 * Copyright 2003-2017 Bas Leijdekkers
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.siyeh.ig.fixes;

import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.SuperMethodsSearch;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiUtil;
import com.siyeh.ig.psiutils.ClassUtils;
import com.siyeh.ig.psiutils.TypeUtils;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.reflect.Modifier;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;

/**
 * Computes the default serialVersionUID for a PsiClass from source, mirroring
 * the algorithm of {@code java.io.ObjectStreamClass.computeDefaultSUID(Class)}:
 * member signatures (including compiler-synthesized access$ methods and the
 * $assertionsDisabled field, which this visitor reconstructs) are written to a
 * SHA digest and folded into a long.
 *
 * WARNING: the byte stream written in computeDefaultSUID must match what javac
 * would produce bit-for-bit; any change in ordering or content changes the UID.
 */
public class SerialVersionUIDBuilder extends JavaRecursiveElementVisitor {

  // Prefix of compiler-generated synthetic accessor methods (e.g. access$000).
  @NonNls private static final String ACCESS_METHOD_NAME_PREFIX = "access$";
  private final PsiClass clazz;
  // -1 = visitor has not run yet; init() sets it to 0 and walks the class.
  private int index = -1;
  private final Set<MemberSignature> nonPrivateConstructors;
  private final Set<MemberSignature> nonPrivateMethods;
  private final Set<MemberSignature> nonPrivateFields;
  private final List<MemberSignature> staticInitializers;
  // True once a synthetic $assertionsDisabled field has been recorded.
  private boolean assertStatement = false;
  // Maps a member to its synthetic access$ index (stable across queries).
  private final Map<PsiElement, String> memberMap = new HashMap<>();

  // Orders interfaces by qualified name, nulls last, as the SUID stream requires
  // a deterministic interface order.
  private static final Comparator<PsiClass> INTERFACE_COMPARATOR =
    (object1, object2) -> {
      if (object1 == null && object2 == null) {
        return 0;
      }
      if (object1 == null) {
        return 1;
      }
      if (object2 == null) {
        return -1;
      }
      final String name1 = object1.getQualifiedName();
      final String name2 = object2.getQualifiedName();
      if (name1 == null && name2 == null) {
        return 0;
      }
      if (name1 == null) {
        return 1;
      }
      if (name2 == null) {
        return -1;
      }
      return name1.compareTo(name2);
    };

  /**
   * Collects the statically-known member signatures (methods incl. inherited
   * super signatures, serializable fields, static initializer marker, and
   * constructors). Synthetic members are added later by the visitor (init()).
   */
  private SerialVersionUIDBuilder(@NotNull PsiClass clazz) {
    this.clazz = clazz;
    nonPrivateMethods = new HashSet<>();
    final PsiMethod[] methods = clazz.getMethods();
    for (final PsiMethod method : methods) {
      if (!method.isConstructor() && !method.hasModifierProperty(PsiModifier.PRIVATE)) {
        final MemberSignature methodSignature = new MemberSignature(method);
        nonPrivateMethods.add(methodSignature);
        // Also record the signatures of overridden super methods, keeping the
        // local name/modifiers but the super method's descriptor.
        SuperMethodsSearch.search(method, null, true, false).forEach(method1 -> {
          final MemberSignature superSignature =
            new MemberSignature(methodSignature.getName(), methodSignature.getModifiers(),
                                MemberSignature.createMethodSignature(method1.getMethod()));
          nonPrivateMethods.add(superSignature);
          return true;
        });
      }
    }
    nonPrivateFields = new HashSet<>();
    final PsiField[] fields = clazz.getFields();
    for (final PsiField field : fields) {
      // Fields are excluded only when private AND (static or transient) —
      // matching which fields affect the default SUID.
      if (!field.hasModifierProperty(PsiModifier.PRIVATE) ||
          !(field.hasModifierProperty(PsiModifier.STATIC) ||
            field.hasModifierProperty(PsiModifier.TRANSIENT))) {
        final MemberSignature fieldSignature = new MemberSignature(field);
        nonPrivateFields.add(fieldSignature);
      }
    }
    staticInitializers = new ArrayList<>(1);
    final PsiClassInitializer[] initializers = clazz.getInitializers();
    if (initializers.length > 0) {
      for (final PsiClassInitializer initializer : initializers) {
        final PsiModifierList modifierList = initializer.getModifierList();
        if (modifierList != null && modifierList.hasModifierProperty(PsiModifier.STATIC)) {
          final MemberSignature initializerSignature =
            MemberSignature.getStaticInitializerMemberSignature();
          staticInitializers.add(initializerSignature);
          break;
        }
      }
    }
    if (staticInitializers.isEmpty()) {
      // No explicit static initializer block: a static field with a
      // non-constant initializer still produces a <clinit> in bytecode.
      final PsiField[] psiFields = clazz.getFields();
      for (final PsiField field : psiFields) {
        if (hasStaticInitializer(field)) {
          final MemberSignature initializerSignature =
            MemberSignature.getStaticInitializerMemberSignature();
          staticInitializers.add(initializerSignature);
          break;
        }
      }
    }
    final PsiMethod[] constructors = clazz.getConstructors();
    nonPrivateConstructors = new HashSet<>(constructors.length);
    if (constructors.length == 0 && !clazz.isInterface()) {
      // generated empty constructor if no constructor is defined in the source
      final MemberSignature constructorSignature;
      if (clazz.hasModifierProperty(PsiModifier.PUBLIC)) {
        constructorSignature = MemberSignature.getPublicConstructor();
      }
      else {
        constructorSignature = MemberSignature.getPackagePrivateConstructor();
      }
      nonPrivateConstructors.add(constructorSignature);
    }
    for (final PsiMethod constructor : constructors) {
      if (!constructor.hasModifierProperty(PsiModifier.PRIVATE)) {
        final MemberSignature constructorSignature = new MemberSignature(constructor);
        nonPrivateConstructors.add(constructorSignature);
      }
    }
  }

  /**
   * Computes the default serialVersionUID for {@code psiClass}.
   *
   * @return the UID; 0L when the class is not Serializable; -1L when
   *         java.io.Serializable cannot be resolved (no JDK configured).
   * @see java.io.ObjectStreamClass#computeDefaultSUID(Class)
   */
  public static long computeDefaultSUID(@NotNull PsiClass psiClass) {
    final Project project = psiClass.getProject();
    final GlobalSearchScope scope = GlobalSearchScope.allScope(project);
    final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
    final PsiClass serializable = psiFacade.findClass(CommonClassNames.JAVA_IO_SERIALIZABLE, scope);
    if (serializable == null) {
      // no jdk defined for project.
      return -1L;
    }
    final boolean isSerializable = psiClass.isInheritor(serializable, true);
    if (!isSerializable) {
      return 0L;
    }
    final SerialVersionUIDBuilder serialVersionUIDBuilder = new SerialVersionUIDBuilder(psiClass);
    try {
      final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
      final DataOutputStream dataOutputStream = new DataOutputStream(byteArrayOutputStream);
      // Stream layout (must match ObjectStreamClass): class name, modifiers,
      // sorted interfaces, then sorted fields / static init / ctors / methods.
      final String className = PsiFormatUtil.getExternalName(psiClass);
      dataOutputStream.writeUTF(className);
      final PsiModifierList classModifierList = psiClass.getModifierList();
      int classModifiers = classModifierList != null
                           ? MemberSignature.calculateModifierBitmap(classModifierList)
                           : 0;
      final MemberSignature[] methodSignatures =
        serialVersionUIDBuilder.getNonPrivateMethodSignatures();
      if (psiClass.isInterface()) {
        classModifiers |= Modifier.INTERFACE;
        if (methodSignatures.length == 0) {
          // interfaces were not marked abstract when they didn't have methods in java 1.0
          // For serialization compatibility the abstract modifier is ignored.
          classModifiers &= ~Modifier.ABSTRACT;
        }
      }
      dataOutputStream.writeInt(classModifiers);
      final PsiClass[] interfaces = psiClass.getInterfaces();
      Arrays.sort(interfaces, INTERFACE_COMPARATOR);
      for (PsiClass aInterfaces : interfaces) {
        final String name = aInterfaces.getQualifiedName();
        dataOutputStream.writeUTF(name);
      }
      final MemberSignature[] fields = serialVersionUIDBuilder.getNonPrivateFields();
      writeSignatures(fields, dataOutputStream);
      final MemberSignature[] staticInitializers = serialVersionUIDBuilder.getStaticInitializers();
      writeSignatures(staticInitializers, dataOutputStream);
      final MemberSignature[] constructors = serialVersionUIDBuilder.getNonPrivateConstructors();
      writeSignatures(constructors, dataOutputStream);
      writeSignatures(methodSignatures, dataOutputStream);
      dataOutputStream.flush();
      @NonNls final String algorithm = "SHA";
      final MessageDigest digest = MessageDigest.getInstance(algorithm);
      final byte[] digestBytes = digest.digest(byteArrayOutputStream.toByteArray());
      // Fold the first 8 digest bytes into a long, little-endian, exactly as
      // ObjectStreamClass does.
      long serialVersionUID = 0L;
      for (int i = Math.min(digestBytes.length, 8) - 1; i >= 0; i--) {
        serialVersionUID = serialVersionUID << 8 | digestBytes[i] & 0xFF;
      }
      return serialVersionUID;
    }
    catch (IOException exception) {
      throw new InternalError(exception.getMessage(), exception);
    }
    catch (NoSuchAlgorithmException exception) {
      throw new SecurityException(exception.getMessage(), exception);
    }
  }

  /** Writes name/modifiers/descriptor for each signature, in sorted order. */
  private static void writeSignatures(MemberSignature[] signatures, DataOutputStream dataOutputStream)
    throws IOException {
    Arrays.sort(signatures);
    for (final MemberSignature field : signatures) {
      dataOutputStream.writeUTF(field.getName());
      dataOutputStream.writeInt(field.getModifiers());
      dataOutputStream.writeUTF(field.getSignature());
    }
  }

  /** Returns (allocating on first use) the access$ index for {@code element}. */
  private String getAccessMethodIndex(PsiElement element) {
    String cache = memberMap.get(element);
    if (cache == null) {
      cache = String.valueOf(index);
      index++;
      memberMap.put(element, cache);
    }
    return cache;
  }

  @NotNull
  public MemberSignature[] getNonPrivateConstructors() {
    init();
    return nonPrivateConstructors.toArray(new MemberSignature[nonPrivateConstructors.size()]);
  }

  @NotNull
  public MemberSignature[] getNonPrivateFields() {
    init();
    return nonPrivateFields.toArray(new MemberSignature[nonPrivateFields.size()]);
  }

  @NotNull
  public MemberSignature[] getNonPrivateMethodSignatures() {
    init();
    return nonPrivateMethods.toArray(new MemberSignature[nonPrivateMethods.size()]);
  }

  @NotNull
  public MemberSignature[] getStaticInitializers() {
    init();
    return staticInitializers.toArray(new MemberSignature[staticInitializers.size()]);
  }

  /**
   * True when a static field's initializer would be compiled into a
   * {@code <clinit>} block (i.e. it is not a compile-time constant folded
   * directly into the constant pool).
   */
  private static boolean hasStaticInitializer(PsiField field) {
    if (field.hasModifierProperty(PsiModifier.STATIC)) {
      final PsiExpression initializer = field.getInitializer();
      if (initializer == null) {
        return false;
      }
      final PsiType fieldType = field.getType();
      final PsiType stringType = TypeUtils.getStringType(field);
      if (field.hasModifierProperty(PsiModifier.FINAL) &&
          (fieldType instanceof PsiPrimitiveType || fieldType.equals(stringType))) {
        // static final primitive/String constants are inlined; only
        // non-constant expressions need a static initializer.
        return !PsiUtil.isConstantExpression(initializer);
      }
      else {
        return true;
      }
    }
    return false;
  }

  /** Runs the visitor over the class once (lazy; idempotent via index guard). */
  private void init() {
    if (index < 0) {
      index = 0;
      clazz.acceptChildren(this);
    }
  }

  /**
   * An assert statement implies the synthetic static final
   * $assertionsDisabled field plus a static initializer to set it.
   */
  @Override
  public void visitAssertStatement(PsiAssertStatement statement) {
    super.visitAssertStatement(statement);
    if (assertStatement) {
      return;
    }
    final MemberSignature memberSignature =
      MemberSignature.getAssertionsDisabledFieldMemberSignature();
    nonPrivateFields.add(memberSignature);
    if (staticInitializers.isEmpty()) {
      final MemberSignature initializerSignature =
        MemberSignature.getStaticInitializerMemberSignature();
      staticInitializers.add(initializerSignature);
    }
    assertStatement = true;
  }

  @Override
  public void visitMethodCallExpression(@NotNull PsiMethodCallExpression methodCallExpression) {
    // for navigating the psi tree in the order javac navigates its AST:
    // arguments first, then the method expression.
    final PsiExpressionList argumentList = methodCallExpression.getArgumentList();
    final PsiExpression[] expressions = argumentList.getExpressions();
    for (final PsiExpression expression : expressions) {
      expression.accept(this);
    }
    final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression();
    methodExpression.accept(this);
  }

  /**
   * A reference to a private inner class with no declared constructor causes
   * the compiler to synthesize an access$ index for its default constructor.
   */
  @Override
  public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
    super.visitReferenceElement(reference);
    final PsiElement parentClass = ClassUtils.getContainingClass(reference);
    if (reference.getParent() instanceof PsiTypeElement) {
      return;
    }
    final PsiElement element = reference.resolve();
    if (!(element instanceof PsiClass)) {
      return;
    }
    final PsiElement elementParentClass = ClassUtils.getContainingClass(element);
    if (elementParentClass == null ||
        !elementParentClass.equals(clazz) ||
        element.equals(parentClass)) {
      return;
    }
    final PsiClass innerClass = (PsiClass)element;
    if (!innerClass.hasModifierProperty(PsiModifier.PRIVATE)) {
      return;
    }
    final PsiMethod[] constructors = innerClass.getConstructors();
    if (constructors.length == 0) {
      getAccessMethodIndex(innerClass);
    }
  }

  /**
   * Reconstructs the synthetic access$NNNXX accessor methods the compiler
   * generates when an outer/inner class touches a private field or method of
   * this class. The two-digit suffix encodes the access kind (00 read/call,
   * 02 write, 04/06 pre-inc/dec, 08/10 post-inc/dec).
   */
  @Override
  public void visitReferenceExpression(@NotNull PsiReferenceExpression expression) {
    super.visitReferenceExpression(expression);
    final PsiElement element = expression.resolve();
    final PsiElement elementParentClass = ClassUtils.getContainingClass(element);
    final PsiElement expressionParentClass = ClassUtils.getContainingClass(expression);
    if (expressionParentClass == null || expressionParentClass.equals(elementParentClass)) {
      return;
    }
    // Walk up the nesting chain, allocating access indices for each enclosing
    // class crossed by this reference.
    PsiElement parentOfParentClass = ClassUtils.getContainingClass(expressionParentClass);
    while (parentOfParentClass != null && !parentOfParentClass.equals(clazz)) {
      if (!(expressionParentClass instanceof PsiAnonymousClass)) {
        getAccessMethodIndex(expressionParentClass);
      }
      getAccessMethodIndex(parentOfParentClass);
      parentOfParentClass = ClassUtils.getContainingClass(parentOfParentClass);
    }
    if (element instanceof PsiField) {
      final PsiField field = (PsiField)element;
      if (field.hasModifierProperty(PsiModifier.PRIVATE)) {
        boolean isStatic = false;
        final PsiType type = field.getType();
        if (field.hasModifierProperty(PsiModifier.STATIC)) {
          if (field.hasModifierProperty(PsiModifier.FINAL) && type instanceof PsiPrimitiveType) {
            final PsiExpression initializer = field.getInitializer();
            if (PsiUtil.isConstantExpression(initializer)) {
              // constant is inlined — no accessor generated.
              return;
            }
          }
          isStatic = true;
        }
        final String returnTypeSignature =
          MemberSignature.createTypeSignature(type).replace('/', '.');
        final String className = clazz.getQualifiedName();
        @NonNls final StringBuilder signatureBuffer = new StringBuilder("(");
        if (!isStatic) {
          // instance accessors take the outer instance as first parameter.
          signatureBuffer.append('L').append(className).append(';');
        }
        final String accessMethodIndex = getAccessMethodIndex(field);
        if (!clazz.equals(field.getContainingClass())) {
          return;
        }
        @NonNls String name = null;
        final PsiElement parent = expression.getParent();
        if (parent instanceof PsiAssignmentExpression) {
          final PsiAssignmentExpression assignment = (PsiAssignmentExpression)parent;
          if (assignment.getLExpression().equals(expression)) {
            // write accessor: takes the new value as parameter.
            name = ACCESS_METHOD_NAME_PREFIX + accessMethodIndex + "02";
            signatureBuffer.append(returnTypeSignature);
          }
        }
        else if (parent instanceof PsiPostfixExpression) {
          final PsiPostfixExpression postfixExpression = (PsiPostfixExpression)parent;
          final IElementType tokenType = postfixExpression.getOperationTokenType();
          if (tokenType.equals(JavaTokenType.PLUSPLUS)) {
            name = ACCESS_METHOD_NAME_PREFIX + accessMethodIndex + "08";
          }
          else if (tokenType.equals(JavaTokenType.MINUSMINUS)) {
            name = ACCESS_METHOD_NAME_PREFIX + accessMethodIndex + "10";
          }
        }
        else if (parent instanceof PsiPrefixExpression) {
          final PsiPrefixExpression prefixExpression = (PsiPrefixExpression)parent;
          final IElementType tokenType = prefixExpression.getOperationTokenType();
          if (tokenType.equals(JavaTokenType.PLUSPLUS)) {
            name = ACCESS_METHOD_NAME_PREFIX + accessMethodIndex + "04";
          }
          else if (tokenType.equals(JavaTokenType.MINUSMINUS)) {
            name = ACCESS_METHOD_NAME_PREFIX + accessMethodIndex + "06";
          }
        }
        if (name == null) {
          // plain read accessor.
          name = ACCESS_METHOD_NAME_PREFIX + accessMethodIndex + "00";
        }
        signatureBuffer.append(')').append(returnTypeSignature);
        final String signature = signatureBuffer.toString();
        final MemberSignature methodSignature =
          new MemberSignature(name, Modifier.STATIC, signature);
        nonPrivateMethods.add(methodSignature);
      }
    }
    else if (element instanceof PsiMethod) {
      final PsiMethod method = (PsiMethod)element;
      if (method.hasModifierProperty(PsiModifier.PRIVATE) &&
          clazz.equals(method.getContainingClass())) {
        final String signature;
        if (method.hasModifierProperty(PsiModifier.STATIC)) {
          signature = MemberSignature.createMethodSignature(method).replace('/', '.');
        }
        else {
          // instance call accessor: prepend the outer instance parameter.
          final String returnTypeSignature =
            MemberSignature.createTypeSignature(method.getReturnType()).replace('/', '.');
          @NonNls final StringBuilder signatureBuffer = new StringBuilder();
          signatureBuffer.append("(L");
          signatureBuffer.append(clazz.getQualifiedName()).append(';');
          final PsiParameter[] parameters = method.getParameterList().getParameters();
          for (final PsiParameter parameter : parameters) {
            final PsiType type = parameter.getType();
            final String typeSignature =
              MemberSignature.createTypeSignature(type).replace('/', '.');
            signatureBuffer.append(typeSignature);
          }
          signatureBuffer.append(')');
          signatureBuffer.append(returnTypeSignature);
          signature = signatureBuffer.toString();
        }
        final String accessMethodIndex = getAccessMethodIndex(method);
        final MemberSignature methodSignature =
          new MemberSignature(ACCESS_METHOD_NAME_PREFIX + accessMethodIndex + "00",
                              Modifier.STATIC, signature);
        nonPrivateMethods.add(methodSignature);
      }
    }
  }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/genomics/v1/variants.proto
// NOTE(review): generated code — to change this class, edit
// google/genomics/v1/variants.proto and rerun protoc; do not hand-edit.

package com.google.genomics.v1;

/**
 * Protobuf type {@code google.genomics.v1.GetVariantRequest}
 *
 * Proto3-style request message with a single string field, variant_id.
 */
public final class GetVariantRequest extends
    com.google.protobuf.GeneratedMessage implements
    // @@protoc_insertion_point(message_implements:google.genomics.v1.GetVariantRequest)
    GetVariantRequestOrBuilder {
  // Use GetVariantRequest.newBuilder() to construct.
  private GetVariantRequest(com.google.protobuf.GeneratedMessage.Builder builder) {
    super(builder);
  }
  // Default instance: variant_id is the empty string (proto3 default).
  private GetVariantRequest() {
    variantId_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    // This message discards unknown fields instead of retaining them.
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  // Wire-format parsing constructor: reads tags until EOF (tag 0),
  // skipping unknown fields; tag 10 == field 1, wire type LENGTH_DELIMITED.
  private GetVariantRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // variant_id stored lazily as ByteString; decoded on first access.
            com.google.protobuf.ByteString bs = input.readBytes();
            variantId_ = bs;
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_GetVariantRequest_descriptor;
  }
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_GetVariantRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.genomics.v1.GetVariantRequest.class, com.google.genomics.v1.GetVariantRequest.Builder.class);
  }
  public static final com.google.protobuf.Parser<GetVariantRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetVariantRequest>() {
    public GetVariantRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new GetVariantRequest(input, extensionRegistry);
    }
  };
  @java.lang.Override
  public com.google.protobuf.Parser<GetVariantRequest> getParserForType() {
    return PARSER;
  }
  public static final int VARIANT_ID_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString (lazy UTF-8 decode).
  private java.lang.Object variantId_;
  /**
   * <code>optional string variant_id = 1;</code>
   *
   * <pre>
   * The ID of the variant.
   * </pre>
   */
  public java.lang.String getVariantId() {
    java.lang.Object ref = variantId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String only when the bytes were valid UTF-8.
      if (bs.isValidUtf8()) {
        variantId_ = s;
      }
      return s;
    }
  }
  /**
   * <code>optional string variant_id = 1;</code>
   *
   * <pre>
   * The ID of the variant.
   * </pre>
   */
  public com.google.protobuf.ByteString getVariantIdBytes() {
    java.lang.Object ref = variantId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the encoded form for subsequent calls.
      variantId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    // No required fields, so always initialized; result memoized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    getSerializedSize();
    // proto3: default (empty) values are not written to the wire.
    if (!getVariantIdBytes().isEmpty()) {
      output.writeBytes(1, getVariantIdBytes());
    }
  }
  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;
    size = 0;
    if (!getVariantIdBytes().isEmpty()) {
      size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getVariantIdBytes());
    }
    memoizedSerializedSize = size;
    return size;
  }
  private static final long serialVersionUID = 0L;
  public static com.google.genomics.v1.GetVariantRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.genomics.v1.GetVariantRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.genomics.v1.GetVariantRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.genomics.v1.GetVariantRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.genomics.v1.GetVariantRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static com.google.genomics.v1.GetVariantRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static com.google.genomics.v1.GetVariantRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input);
  }
  public static com.google.genomics.v1.GetVariantRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseDelimitedFrom(input, extensionRegistry);
  }
  public static com.google.genomics.v1.GetVariantRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return PARSER.parseFrom(input);
  }
  public static com.google.genomics.v1.GetVariantRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return PARSER.parseFrom(input, extensionRegistry);
  }
  public static Builder newBuilder() { return new Builder(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(com.google.genomics.v1.GetVariantRequest prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Protobuf type {@code google.genomics.v1.GetVariantRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.genomics.v1.GetVariantRequest)
      com.google.genomics.v1.GetVariantRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_GetVariantRequest_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_GetVariantRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.genomics.v1.GetVariantRequest.class, com.google.genomics.v1.GetVariantRequest.Builder.class);
    }
    // Construct using com.google.genomics.v1.GetVariantRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No message/repeated fields, so nothing to force-initialize.
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      variantId_ = "";
      return this;
    }
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.genomics.v1.VariantsProto.internal_static_google_genomics_v1_GetVariantRequest_descriptor;
    }
    public com.google.genomics.v1.GetVariantRequest getDefaultInstanceForType() {
      return com.google.genomics.v1.GetVariantRequest.getDefaultInstance();
    }
    public com.google.genomics.v1.GetVariantRequest build() {
      com.google.genomics.v1.GetVariantRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    public com.google.genomics.v1.GetVariantRequest buildPartial() {
      com.google.genomics.v1.GetVariantRequest result = new com.google.genomics.v1.GetVariantRequest(this);
      result.variantId_ = variantId_;
      onBuilt();
      return result;
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.genomics.v1.GetVariantRequest) {
        return mergeFrom((com.google.genomics.v1.GetVariantRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // NOTE(review): onChanged() fires twice when variantId_ is copied —
    // harmless but redundant; stock protoc output invokes it once.
    public Builder mergeFrom(com.google.genomics.v1.GetVariantRequest other) {
      if (other == com.google.genomics.v1.GetVariantRequest.getDefaultInstance()) return this;
      if (!other.getVariantId().isEmpty()) {
        variantId_ = other.variantId_;
        onChanged();
      }
      onChanged();
      return this;
    }
    public final boolean isInitialized() {
      return true;
    }
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.genomics.v1.GetVariantRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.genomics.v1.GetVariantRequest) e.getUnfinishedMessage();
        throw e;
      } finally {
        // Merge whatever was parsed, even when an exception was thrown.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object variantId_ = "";
    /**
     * <code>optional string variant_id = 1;</code>
     *
     * <pre>
     * The ID of the variant.
     * </pre>
     */
    public java.lang.String getVariantId() {
      java.lang.Object ref = variantId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          variantId_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <code>optional string variant_id = 1;</code>
     *
     * <pre>
     * The ID of the variant.
     * </pre>
     */
    public com.google.protobuf.ByteString getVariantIdBytes() {
      java.lang.Object ref = variantId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        variantId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <code>optional string variant_id = 1;</code>
     *
     * <pre>
     * The ID of the variant.
     * </pre>
     */
    public Builder setVariantId(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      variantId_ = value;
      onChanged();
      return this;
    }
    /**
     * <code>optional string variant_id = 1;</code>
     *
     * <pre>
     * The ID of the variant.
     * </pre>
     */
    public Builder clearVariantId() {
      variantId_ = getDefaultInstance().getVariantId();
      onChanged();
      return this;
    }
    /**
     * <code>optional string variant_id = 1;</code>
     *
     * <pre>
     * The ID of the variant.
     * </pre>
     */
    public Builder setVariantIdBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      variantId_ = value;
      onChanged();
      return this;
    }
    // Unknown fields are intentionally dropped (proto3-style no-ops).
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }

    // @@protoc_insertion_point(builder_scope:google.genomics.v1.GetVariantRequest)
  }

  // @@protoc_insertion_point(class_scope:google.genomics.v1.GetVariantRequest)
  // Shared immutable default instance, created eagerly at class load.
  private static final com.google.genomics.v1.GetVariantRequest defaultInstance;
  static {
    defaultInstance = new com.google.genomics.v1.GetVariantRequest();
  }

  public static com.google.genomics.v1.GetVariantRequest getDefaultInstance() {
    return defaultInstance;
  }

  public com.google.genomics.v1.GetVariantRequest getDefaultInstanceForType() {
    return defaultInstance;
  }
}
/** * Copyright 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.sha1coin.core; import java.io.UnsupportedEncodingException; import java.math.BigInteger; import java.util.Arrays; /** * <p>Base58 is a way to encode Bitcoin addresses as numbers and letters. Note that this is not the same base58 as used by * Flickr, which you may see reference to around the internet.</p> * * <p>You may instead wish to work with {@link VersionedChecksummedBytes}, which adds support for testing the prefix * and suffix bytes commonly found in addresses.</p> * * <p>Satoshi says: why base-58 instead of standard base-64 encoding?<p> * * <ul> * <li>Don't want 0OIl characters that look the same in some fonts and * could be used to create visually identical looking account numbers.</li> * <li>A string with non-alphanumeric characters is not as easily accepted as an account number.</li> * <li>E-mail usually won't line-break if there's no punctuation to break at.</li> * <li>Doubleclicking selects the whole number as one word if it's all alphanumeric.</li> * </ul> */ public class Base58 { public static final char[] ALPHABET = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz".toCharArray(); private static final int[] INDEXES = new int[128]; static { for (int i = 0; i < INDEXES.length; i++) { INDEXES[i] = -1; } for (int i = 0; i < ALPHABET.length; i++) { INDEXES[ALPHABET[i]] = i; } } /** Encodes the given bytes in base58. No checksum is appended. 
*/ public static String encode(byte[] input) { if (input.length == 0) { return ""; } input = copyOfRange(input, 0, input.length); // Count leading zeroes. int zeroCount = 0; while (zeroCount < input.length && input[zeroCount] == 0) { ++zeroCount; } // The actual encoding. byte[] temp = new byte[input.length * 2]; int j = temp.length; int startAt = zeroCount; while (startAt < input.length) { byte mod = divmod58(input, startAt); if (input[startAt] == 0) { ++startAt; } temp[--j] = (byte) ALPHABET[mod]; } // Strip extra '1' if there are some after decoding. while (j < temp.length && temp[j] == ALPHABET[0]) { ++j; } // Add as many leading '1' as there were leading zeros. while (--zeroCount >= 0) { temp[--j] = (byte) ALPHABET[0]; } byte[] output = copyOfRange(temp, j, temp.length); try { return new String(output, "US-ASCII"); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); // Cannot happen. } } public static byte[] decode(String input) throws AddressFormatException { if (input.length() == 0) { return new byte[0]; } byte[] input58 = new byte[input.length()]; // Transform the String to a base58 byte sequence for (int i = 0; i < input.length(); ++i) { char c = input.charAt(i); int digit58 = -1; if (c >= 0 && c < 128) { digit58 = INDEXES[c]; } if (digit58 < 0) { throw new AddressFormatException("Illegal character " + c + " at " + i); } input58[i] = (byte) digit58; } // Count leading zeroes int zeroCount = 0; while (zeroCount < input58.length && input58[zeroCount] == 0) { ++zeroCount; } // The encoding byte[] temp = new byte[input.length()]; int j = temp.length; int startAt = zeroCount; while (startAt < input58.length) { byte mod = divmod256(input58, startAt); if (input58[startAt] == 0) { ++startAt; } temp[--j] = mod; } // Do no add extra leading zeroes, move j to first non null byte. 
while (j < temp.length && temp[j] == 0) { ++j; } return copyOfRange(temp, j - zeroCount, temp.length); } public static BigInteger decodeToBigInteger(String input) throws AddressFormatException { return new BigInteger(1, decode(input)); } /** * Uses the checksum in the last 4 bytes of the decoded data to verify the rest are correct. The checksum is * removed from the returned data. * * @throws AddressFormatException if the input is not base 58 or the checksum does not validate. */ public static byte[] decodeChecked(String input) throws AddressFormatException { byte tmp [] = decode(input); if (tmp.length < 4) throw new AddressFormatException("Input too short"); byte[] bytes = copyOfRange(tmp, 0, tmp.length - 4); byte[] checksum = copyOfRange(tmp, tmp.length - 4, tmp.length); tmp = Utils.doubleDigest(bytes); byte[] hash = copyOfRange(tmp, 0, 4); if (!Arrays.equals(checksum, hash)) throw new AddressFormatException("Checksum does not validate"); return bytes; } // // number -> number / 58, returns number % 58 // private static byte divmod58(byte[] number, int startAt) { int remainder = 0; for (int i = startAt; i < number.length; i++) { int digit256 = (int) number[i] & 0xFF; int temp = remainder * 256 + digit256; number[i] = (byte) (temp / 58); remainder = temp % 58; } return (byte) remainder; } // // number -> number / 256, returns number % 256 // private static byte divmod256(byte[] number58, int startAt) { int remainder = 0; for (int i = startAt; i < number58.length; i++) { int digit58 = (int) number58[i] & 0xFF; int temp = remainder * 58 + digit58; number58[i] = (byte) (temp / 256); remainder = temp % 256; } return (byte) remainder; } private static byte[] copyOfRange(byte[] source, int from, int to) { byte[] range = new byte[to - from]; System.arraycopy(source, from, range, 0, range.length); return range; } }
/* See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * Esri Inc. licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.esri.gpt.server.csw.client; import java.io.IOException; import java.io.InputStream; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParserFactory; import org.apache.commons.httpclient.HttpClient; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * Maintains information of CSW Catalog * * The catalogs contain all the information like url, profile information * credentials and capabilities. */ public class CswCatalog implements Comparable { /** The base url. */ private String baseUrl; /** The capabilities. */ private CswCatalogCapabilities capabilities = null; /** The credentials. */ private UsernamePasswordCredentials credentials; /** The id. */ private String id; /** The ID counter. */ private static int IDCounter = 1; /** The is connected. */ private boolean isConnected = false; /** The locking. */ private boolean locking = false; /** The name. */ private String name; /** The profile. */ private CswProfile profile; /** The url. */ private String url; /** The request timeout ms. */ private int responseTimeoutMs; /** The connection timeout ms. */ private int connectionTimeoutMs; private HttpClient batchHttpClient; /** * Instantiates a new csw catalog. */ public CswCatalog() { } /** * Creates instance of the catalog. 
* * @param url the url * @param name the name * @param profile the profile */ public CswCatalog(String url, String name, CswProfile profile) { this.url = url; this.id = this.url; this.name = name; this.profile = profile; } /** * Gets the underlying Apache HttpClient to be used for batch requests to the * same server. * * @return the batch client */ public HttpClient getBatchHttpClient() { return this.batchHttpClient; } /** * Sets the underlying Apache HttpClient to be used for batch requests to the * same server. * * @param batchHttpClient the batch client */ public void setBatchHttpClient(HttpClient batchHttpClient) { this.batchHttpClient = batchHttpClient; } /** * Gets the base url. * * @return the base url */ public String getBaseUrl() { return baseUrl; } /** * Sets the base url. * * @param baseUrl the new base url */ public void setBaseUrl(String baseUrl) { this.baseUrl = baseUrl; } /** * Gets the connection timeout ms. * * @return the connection timeout ms */ public int getConnectionTimeoutMs() { return connectionTimeoutMs; } /** * Sets the connection timeout ms. * * @param connectionTimeoutMs the new connection timeout ms */ public void setConnectionTimeoutMs(int connectionTimeoutMs) { this.connectionTimeoutMs = connectionTimeoutMs; } /** * Gets the request timeout ms. * * @return the request timeout ms */ public int getResponseTimeoutMs() { return responseTimeoutMs; } /** * Sets the request timeout ms. * * @param requestTimeoutMs the new request timeout ms */ public void setResponseTimeoutMs(int requestTimeoutMs) { this.responseTimeoutMs = requestTimeoutMs; } /** * Execute GetCapabilities using SAX objects. Send GetCapabilities request, * receive the response from a service, and parse the response to get URLs for * "GetRecords" and "GetRecordsById". * * @return the csw catalog capabilities * @throws SAXException the sAX exception * @throws IOException Signals that an I/O exception has occurred. 
* @throws ParserConfigurationException the parser configuration exception * @return Csw Capabilities object */ private CswCatalogCapabilities executeGetCapabilitiesWithSAX() throws SAXException, IOException, ParserConfigurationException { CswCatalogCapabilities capabilities = new CswCatalogCapabilities(); CswClient client = new CswClient(); client.setConnectTimeout(this.getConnectionTimeoutMs()); client.setReadTimeout(this.getResponseTimeoutMs()); client.setBatchHttpClient(getBatchHttpClient()); // Execute submission and parsing into response element InputStream responseStream = client.submitHttpRequest("GET", url, ""); SAXParserFactory factory = SAXParserFactory.newInstance(); factory.setNamespaceAware(true); CapabilitiesParse cParse = new CapabilitiesParse(capabilities); factory.newSAXParser().parse(new InputSource(responseStream), cParse); this.capabilities = capabilities; Utils.close(responseStream); return capabilities; } /** * Gets the capabilities. * * @return the capabilities */ public CswCatalogCapabilities getCapabilities() { return this.capabilities; } /** * Sets the capabilities. * * @param capabilities the new capabilities */ public void setCapabilities(CswCatalogCapabilities capabilities) { this.capabilities = capabilities; } /** * Gets the credentials. * * @return the credentials */ public UsernamePasswordCredentials getCredentials() { return credentials; } /** * Sets the credentials. * * @param credentials the new credentials */ public void setCredentials(UsernamePasswordCredentials credentials) { this.credentials = credentials; } /** * Gets the id. * * @return the id */ public String getId() { return id; } /** * Sets the id. * * @param id the new id */ public void setId(String id) { this.id = id; } /** * Gets the name. * * @return the name */ public String getName() { return name; } /** * Sets the name. 
* * @param name the new name */ public void setName(String name) { // if there is no input for name, use url as a name if (name == null || name.length() == 0) { name = url; } this.name = name; } /** * Gets the profile. * * @return the profile */ public CswProfile getProfile() { return profile; } /** * Sets the profile. * * @param profile the new profile */ public void setProfile(CswProfile profile) { this.profile = profile; } /** * Gets the url. * * @return the url */ public String getUrl() { return url; } /** * Sets the url. * * @param url the new url */ public void setUrl(String url) { this.url = url; } /** * To connect to a catalog service. The capabilties details are populated based * on the service. * * @return true if connection can be made to the csw service * @throws ParserConfigurationException the parser configuration exception * @throws IOException Signals that an I/O exception has occurred. * @throws SAXException the sAX exception */ public boolean connect() throws SAXException, IOException, ParserConfigurationException { // Execute getCapabilites and setup URLs for "GetRecords" and "GetRecordById" CswCatalogCapabilities capabilities = executeGetCapabilitiesWithSAX(); this.isConnected = capabilities.isReady(); return this.isConnected; } /** * To test if already connected to a catalog service. * * @return true if connection has already been made to the csw service else * false */ public boolean IsConnected() { return this.isConnected; } /** * Locking. * * @return true, if successful */ public boolean Locking() { return false; } public int compareTo(Object arg0) { // TODO: IMPLEMENT THIS LATER return 0; } }
/* See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * Esri Inc. licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.esri.geoportal.lib.elastic.request; import com.esri.geoportal.base.util.JsonUtil; import com.esri.geoportal.base.util.Val; import com.esri.geoportal.base.util.exception.InvalidParameterException; import com.esri.geoportal.context.AppResponse; import com.esri.geoportal.context.GeoportalContext; import com.esri.geoportal.lib.elastic.ElasticContext; import com.esri.geoportal.lib.elastic.response.ItemWriter; import com.esri.geoportal.lib.elastic.response.ItemWriterFactory; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import javax.json.Json; import javax.json.JsonArrayBuilder; import javax.json.JsonObjectBuilder; import javax.json.JsonStructure; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryStringQueryBuilder; /* ES 2to5 */ //import org.elasticsearch.index.query.QueryStringQueryBuilder.Operator; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.WrapperQueryBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.SuggestBuilder; import 
org.elasticsearch.search.suggest.SuggestBuilders; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Search for items. */ public class SearchRequest extends _SearchRequestBase { /** Logger. */ private static final Logger LOGGER = LoggerFactory.getLogger(SearchRequest.class); /** Instance variables . */ protected int from = 0; protected int size = 10; protected String [] urlTypes; protected List<JsonStructure> musts = new ArrayList<JsonStructure>(); protected List<JsonStructure> filters = new ArrayList<JsonStructure>(); /** Constructor */ public SearchRequest() { super(); } /** The starting index number for the search. */ public int getFrom() { return from; } /** The number of search results per page. */ public int getSize() { return size; } public String[] getUrlTypes() { return urlTypes; } public void setUrlTypes(String...urlTypes) { this.urlTypes = urlTypes; } /** Methods =============================================================== */ @Override public AppResponse execute() throws Exception { AppResponse response = new AppResponse(); ElasticContext ec = GeoportalContext.getInstance().getElasticContext(); boolean b = Val.chkBool(getParameter("pretty"),false); if (b) this.setPretty(b); SearchRequestBuilder search = ec.getTransportClient().prepareSearch(ec.getItemIndexName()); parse(search); search.setTypes(ec.getItemIndexType()); JsonObjectBuilder jsoQuery = Json.createObjectBuilder(); JsonObjectBuilder jsoBool = Json.createObjectBuilder(); if (musts.size() == 0) { jsoBool.add("must",Json.createObjectBuilder().add("match_all",JsonUtil.newObject())); } else { JsonArrayBuilder a = Json.createArrayBuilder(); for (JsonStructure j: musts) a.add(j); jsoBool.add("must",a); } if (filters.size() > 0) { JsonArrayBuilder a = Json.createArrayBuilder(); for (JsonStructure j: filters) a.add(j); jsoBool.add("filter",a); } /* ES 2to5 */ 
//jsoQuery.add("query",Json.createObjectBuilder().add("bool",jsoBool)); jsoQuery.add("bool",jsoBool); String wq = JsonUtil.toJson(jsoQuery.build()); search.setQuery(QueryBuilders.wrapperQuery(wq)); // TODO some logging here //LOGGER.info("SearchRequest: "+wq); LOGGER.trace("SearchRequest: "+wq); SearchResponse searchResponse = search.get(); writeResponse(response,searchResponse); return response; } /** * Create the response item writer. * @return the writer */ protected ItemWriter newWriter() { ItemWriterFactory factory = GeoportalContext.getInstance().getBean( "item.ItemWriterFactory",ItemWriterFactory.class); return factory.newWriter(this.getF()); } /** * Parse the request. * @param search the search */ protected void parse(SearchRequestBuilder search) { this.parseXtnF(search); this.parseElastic(search); this.parseUrlTypes(search); this.parseXtnBBox(search); this.parseXtnTime(search); this.parseXtnFilter(search); this.parseXtnId(search); } /** * Parse the Elasticsearch components of the request. 
* @param search the search */ protected void parseElastic(SearchRequestBuilder search) { // based on org.elasticsearch.rest.action.search.RestSearchAction 2016-01-27 // https://github.com/elastic/elasticsearch/blob/master/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java /* String[] indices = Strings.splitStringByCommaToArray(request.param("index")); SearchRequest searchRequest = new SearchRequest(indices); // get the content, and put it in the body // add content/source as template if template flag is set boolean isTemplateRequest = request.path().endsWith("/template"); String searchType = request.param("search_type"); if (SearchType.fromString(searchType, parseFieldMatcher).equals(SearchType.QUERY_AND_FETCH) || SearchType.fromString(searchType, parseFieldMatcher).equals(SearchType.DFS_QUERY_AND_FETCH)) { throw new IllegalArgumentException("Unsupported search type [" + searchType + "]"); } else { searchRequest.searchType(searchType); } String scroll = request.param("scroll"); if (scroll != null) { searchRequest.scroll(new Scroll(parseTimeValue(scroll, null, "scroll"))); } searchRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); searchRequest.routing(request.param("routing")); searchRequest.preference(request.param("preference")); searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions())); */ String dsl = Val.trim(getBody()); if (dsl != null && dsl.length() > 0) { boolean isJson = false; try { JsonUtil.toJsonStructure(dsl); isJson = true; } catch (Exception e) {} if (isJson) { //AggregationBuilders. 
// TODO Queries aggregations suggesters highlighters others WrapperQueryBuilder qwb = QueryBuilders.wrapperQuery(dsl); String json = qwb.toString(); musts.add(JsonUtil.toJsonStructure(json)); //search.setQuery(qwb); } } String q = Val.trim(this.getParameter("q")); if (q != null && q.length() > 0) { QueryStringQueryBuilder qsqb = QueryBuilders.queryStringQuery(q); if (!q.equals("*:*")) { //qsqb.escape(true); // TODO should this be escaped } //qsqb.allowLeadingWildcard(true); // TODO should this be allowed qsqb.defaultField(Val.chkStr(this.getParameter("df"),null)); qsqb.analyzer(Val.chkStr(this.getParameter("analyzer"),null)); qsqb.analyzeWildcard(Val.chkBool(this.getParameter("analyze_wildcard"),true)); // TODO false? // qsqb.lowercaseExpandedTerms(Val.chkBool(this.getParameter("lowercase_expanded_terms"),true)); // TODO ES 2to5 String lenient = Val.trim(this.getParameter("lenient")); if (lenient != null && lenient.length() > 0) { qsqb.lenient(Val.chkBool(lenient,false)); } String defaultOperator = Val.trim(this.getParameter("default_operator")); if (defaultOperator != null && defaultOperator.length() > 0) { //qsqb.defaultOperator(Operator.fromString(defaultOperator)); qsqb.defaultOperator(Operator.valueOf(defaultOperator)); } String json = qsqb.toString(); musts.add(JsonUtil.toJsonStructure(json)); } int nFrom = Val.chkInt(this.getParameter("from"),-1); if (nFrom<0) { // backward compatibility with old version of Eros feed nFrom = Val.chkInt(this.getParameter("start"),-1); } if (nFrom >= 0) { this.from = nFrom; if (this.getInputIndexOffset() == 0) { search.setFrom(nFrom); } else { search.setFrom(nFrom - this.getInputIndexOffset()); } /* search.setFrom(nFrom); if (this.getInputIndexOffset() > 0) { this.from = nFrom - this.getInputIndexOffset(); } */ } else { this.from = this.getInputIndexOffset(); } int nSize = Val.chkInt(this.getParameter("size"),-1); if (nSize<0) { // backward compatibility with old version of Eros feed nSize = 
Val.chkInt(this.getParameter("max"),-1); } if (nSize >= 0) { this.size = nSize; search.setSize(size); } String explain = Val.trim(this.getParameter("explain")); if (explain != null && explain.length() > 0) { search.setExplain(Val.chkBool(explain,false)); } String version = Val.trim(this.getParameter("version")); if (version != null && version.length() > 0) { search.setVersion(Val.chkBool(version,false)); } String timeout = Val.trim(this.getParameter("timeout")); if (timeout != null && timeout.length() > 0) { TimeValue tv = TimeValue.parseTimeValue(timeout,null,"timeout"); if (tv != null) search.setTimeout(tv); } int terminateAfter = Val.chkInt(this.getParameter("terminate_after"),-1); if (terminateAfter > 0) search.setTerminateAfter(terminateAfter); /* if (request.param("fields") != null) { throw new IllegalArgumentException("The parameter [" + SearchSourceBuilder.FIELDS_FIELD + "] is no longer supported, please use [" + SearchSourceBuilder.STORED_FIELDS_FIELD + "] to retrieve stored fields or _source filtering " + "if the field is not stored"); } */ // https://github.com/elastic/elasticsearch/blob/44ac5d057a8ceb6940c26275d9963bccb9f5065a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java /* ES 2to5 */ /* String fieldsParam = Val.trim(this.getParameter("fields")); if (fieldsParam != null) { if (fieldsParam.length() == 0) { search.setNoFields(); } else { String[] fields = Val.tokenize(fieldsParam,",",false); for (String field: fields) search.addField(field); } } */ String fieldsParam = this.getParameter("fields"); if (fieldsParam == null) fieldsParam = this.getParameter("stored_fields"); fieldsParam = Val.trim(fieldsParam); if (fieldsParam != null) { if (fieldsParam.length() == 0) { //search.setNoFields(); /* ES 2to5 TODO */ } else { String[] fields = Val.tokenize(fieldsParam,",",false); for (String field: fields) search.addStoredField(field); } } /* ES 2to5 */ /* String[] fdFields = 
Val.tokenize(this.getParameter("fielddata_fields"),",",false); for (String field: fdFields) search.addFieldDataField(field); */ String[] dvFields = Val.tokenize(this.getParameter("docvalue_fields"),",",false); for (String field: dvFields) search.addDocValueField(field); String[] fdFields = Val.tokenize(this.getParameter("fielddata_fields"),",",false); for (String field: fdFields) search.addDocValueField(field); // "stored_fields" boolean checkFetchSource = true; if (checkFetchSource) { String source = Val.trim(this.getParameter("_source")); Boolean fetchSource = null; String[] source_includes = null; String[] source_excludes = null; boolean isExplicitTrue = Val.chkBool(source,false); boolean isExplicitFalse = !Val.chkBool(source,true); if (isExplicitTrue) { fetchSource = true; } else if (isExplicitFalse) { fetchSource = false; } else { String[] a = Val.tokenize(this.getParameter(source),",",false); if (a.length > 0) source_includes = a; } String[] includes = Val.tokenize(this.getParameter("_source_includes"),",",false); String[] excludes = Val.tokenize(this.getParameter("_source_excludes"),",",false); if (includes.length > 0) source_includes = includes; if (excludes.length > 0) source_excludes = excludes; if (fetchSource != null || source_includes != null || source_excludes != null) { if (fetchSource == null) fetchSource = true; if (source_includes != null || source_excludes != null) { search.setFetchSource(source_includes,source_includes); } else { search.setFetchSource(fetchSource); } } } String trackScores = Val.trim(this.getParameter("track_scores")); if (trackScores != null && trackScores.length() > 0) { search.setTrackScores(Val.chkBool(trackScores,false)); } String[] sorts = Val.tokenize(this.getParameter("sort"),",",false); for (String sort: sorts) { int idx = sort.lastIndexOf(":"); if (idx != -1) { String sortField = sort.substring(0,idx); String sortOrder = sort.substring(idx+1); if (sortOrder.equals("desc")) { search.addSort(sortField,SortOrder.DESC); } 
else { search.addSort(sortField,SortOrder.ASC); } } else { search.addSort(sort,SortOrder.ASC); } } String[] stats = Val.tokenize(this.getParameter("stats"),",",false); if (stats.length > 0) search.setStats(stats); String suggestField = Val.trim(this.getParameter("suggest_field")); if (suggestField != null && suggestField.length() > 0) { String suggestText = Val.chkStr(this.getParameter("suggest_text"),q); int suggestSize = Val.chkInt(this.getParameter("suggest_size"),5); String suggestMode = Val.trim(this.getParameter("suggest_mode")); /* ES 2to5 */ /* search.addSuggestion(SuggestBuilders.termSuggestion(suggestField) .field(suggestField).text(suggestText).size(suggestSize).suggestMode(suggestMode)); */ search.suggest(new SuggestBuilder().addSuggestion(suggestField, SuggestBuilders.termSuggestion(suggestField) .text(suggestText).size(suggestSize).suggestMode(SuggestMode.resolve(suggestMode)))); } } /** * Parse the bbox parameter. * <br>bbox={geo:box?} * @param search the search */ protected void parseXtnBBox(SearchRequestBuilder search) { // bbox={geo:box?} /* { "geo_shape": { "envelope_geo": { "shape": { "type": "envelope", "coordinates" : [[13.0, 53.0], [14.0, 52.0]] } } } } */ // TODO configure field names String field = "envelope_geo"; String bbox = Val.trim(this.getParameter("bbox")); if (bbox != null && bbox.length() > 0) { String[] a = bbox.split(","); if (a.length >= 4) { try { double xmin = Double.parseDouble(a[0]); double ymin = Double.parseDouble(a[1]); double xmax = Double.parseDouble(a[2]); double ymax = Double.parseDouble(a[3]); if (xmin > 10000) { String msg = "The bbox coordinates should be WGS84."; throw new InvalidParameterException("bbox",msg); } if ((xmin < -180.0) && (xmax >= -180.0)) xmin = -180.0; if ((xmax > 180.0) && (xmin <= 180.0)) xmax = 180.0; if ((ymin < -90.0) && (ymax >= -90.0)) ymin = -90.0; if ((ymax > 90.0) && (ymin <= 90.0)) ymax = 90.0; String txt = 
"{\"type\":\"envelope\",\"coordinates\":[["+xmin+","+ymax+"],["+xmax+","+ymin+"]]}"; JsonStructure env = JsonUtil.toJsonStructure(txt); JsonObjectBuilder jso = Json.createObjectBuilder(); jso.add("geo_shape",Json.createObjectBuilder() .add(field,Json.createObjectBuilder().add("shape",env))); filters.add(jso.build()); } catch (NumberFormatException nfe) { throw new InvalidParameterException("bbox"); } } } } /** * Parse the response format parameter. * <br>f={json|pjson|atom|csw} * @param search the search */ protected void parseXtnF(SearchRequestBuilder search) { String f = Val.trim(getParameter("f")); String outputSchema = Val.trim(getParameter("outputSchema")); if (outputSchema != null && outputSchema.length() > 0) { f = outputSchema; } if (f != null && f.equals("pjson")) this.setPretty(true); if (f != null && f.length() > 0) this.setF(f); } /** * Parse the id parameter. * <br>id={id,id,id} * @param search the search */ protected void parseXtnId(SearchRequestBuilder search) { String[] ids = getParameterValues("id"); //if (ids == null) ids = getParameterValues("ids"); //if (ids == null) ids = getParameterValues("recordId"); if (ids == null) ids = getParameterValues("recordIds"); if (ids != null && ids.length == 1) { ids = Val.tokenize(ids[0],",",false); } if (ids != null && ids.length > 0) { String json = QueryBuilders.idsQuery().addIds(ids).toString(); filters.add(JsonUtil.toJsonStructure(json)); } } /** * Parse the filter parameter. * <br>filter={filter} * @param search the search */ protected void parseXtnFilter(SearchRequestBuilder search) { String v = Val.trim(this.getParameter("filter")); if (v != null && v.length() > 0) { // TODO check for wrapper query?? 
String json = QueryBuilders.queryStringQuery(v).toString(); filters.add(JsonUtil.toJsonStructure(json)); } } /** * Parse the time parameter (temporal extent) * <br>time={time:start?/{time:end?} * @param search the search */ protected void parseXtnTime(SearchRequestBuilder search) { // time={time:start?/{time:end?} /* { "range" : { "date" : { "gte" : "now-1d/d", "lt" : "now/d" } } } */ // TODO configure field names // TODO end: lt or lte ??? List<JsonStructure> tFilters = new ArrayList<JsonStructure>(); String startField = "timeperiod_nst.begin_dt"; String endField = "timeperiod_nst.end_dt"; String time = Val.trim(this.getParameter("time")); if (time != null && time.length() > 0) { String start = null, end = null; String[] parts = time.split("/"); start = Val.trim(parts[0]); if (parts.length > 1) end = Val.trim(parts[1]); if (start != null && start.length() > 0) { JsonObjectBuilder jso = Json.createObjectBuilder(); jso.add("range",Json.createObjectBuilder().add( startField,Json.createObjectBuilder().add("gte",start))); tFilters.add(jso.build()); } if (end != null && end.length() > 0) { JsonObjectBuilder jso = Json.createObjectBuilder(); jso.add("range",Json.createObjectBuilder().add( endField,Json.createObjectBuilder().add("lt",end))); tFilters.add(jso.build()); } } if (tFilters.size() > 0) { JsonArrayBuilder a = Json.createArrayBuilder(); for (JsonStructure j: tFilters) a.add(j); JsonObjectBuilder jq = Json.createObjectBuilder(); /* jq.add("query",Json.createObjectBuilder() .add("nested",Json.createObjectBuilder() .add("path", "timeperiod_nst") .add("query",Json.createObjectBuilder() .add("bool",Json.createObjectBuilder() .add("must",a) ) ) ) ); */ jq.add("nested",Json.createObjectBuilder() .add("path", "timeperiod_nst") .add("query",Json.createObjectBuilder() .add("bool",Json.createObjectBuilder() .add("must",a) ) ) ); //System.err.println("temporalq="+jq.build()); filters.add(jq.build()); } } protected void parseUrlTypes(SearchRequestBuilder search) { String 
sUrlTypes = Val.trim(this.getParameter("urlTypes")); if (sUrlTypes!=null) { urlTypes = sUrlTypes.split(","); } if (urlTypes!=null && urlTypes.length>0) { JsonObjectBuilder jsoQuery = Json.createObjectBuilder(); JsonObjectBuilder jsoBool = Json.createObjectBuilder(); JsonObjectBuilder jsoMust = Json.createObjectBuilder(); JsonObjectBuilder jsoNested = Json.createObjectBuilder(); JsonObjectBuilder jsoNestedQuery = Json.createObjectBuilder(); JsonObjectBuilder jsoTerms = Json.createObjectBuilder(); JsonArrayBuilder jsoTermsArray = Json.createArrayBuilder(); Arrays.asList(urlTypes).forEach(jsoTermsArray::add); jsoTerms.add("resources_nst.url_type_s", jsoTermsArray); jsoNestedQuery.add("terms", jsoTerms); jsoNested.add("path", "resources_nst"); jsoNested.add("query", jsoNestedQuery); jsoMust.add("nested", jsoNested); jsoBool.add("must", jsoMust); jsoQuery.add("bool", jsoBool); JsonObjectBuilder jq = Json.createObjectBuilder(); jq.add("query", jsoQuery); filters.add(jq.build()); } } /** * Write the response. * @param response the app response * @param searchResponse the search response */ public void writeResponse(AppResponse response, SearchResponse searchResponse) throws Exception { ItemWriter writer = newWriter(); writer.write(this,response,searchResponse); } }
/** * Copyright 2005-2015 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krms.impl.repository; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.kuali.rice.core.api.criteria.QueryByCriteria; import org.kuali.rice.core.api.criteria.QueryResults; import org.kuali.rice.core.api.exception.RiceIllegalArgumentException; import org.kuali.rice.core.api.mo.ModelObjectUtils; import org.kuali.rice.krad.data.DataObjectService; import org.kuali.rice.krad.data.PersistenceOption; import org.kuali.rice.krms.api.repository.function.FunctionDefinition; import org.kuali.rice.krms.api.repository.function.FunctionRepositoryService; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Default implementation of the {@link FunctionRepositoryService}. 
* * @author Kuali Rice Team (rice.collab@kuali.org) */ public class FunctionBoServiceImpl implements FunctionRepositoryService, FunctionBoService { private DataObjectService dataObjectService; // used for converting lists of BOs to model objects private static final ModelObjectUtils.Transformer<FunctionBo, FunctionDefinition> toFunctionDefinition = new ModelObjectUtils.Transformer<FunctionBo, FunctionDefinition>() { public FunctionDefinition transform(FunctionBo input) { return FunctionBo.to(input); }; }; @Override public FunctionDefinition getFunction(String functionId) { return getFunctionById(functionId); } @Override public List<FunctionDefinition> getFunctions(List<String> functionIds) { if (functionIds == null) { throw new RiceIllegalArgumentException(); } List<FunctionDefinition> functionDefinitions = new ArrayList<FunctionDefinition>(); for (String functionId : functionIds) { if (!StringUtils.isBlank(functionId)) { FunctionDefinition functionDefinition = getFunctionById(functionId); if (functionDefinition != null) { functionDefinitions.add(functionDefinition); } } } return Collections.unmodifiableList(functionDefinitions); } /** * This method will create a {@link FunctionDefinition} as described * by the function passed in. 
* * @see org.kuali.rice.krms.impl.repository.FunctionBoService#createFunction(org.kuali.rice.krms.api.repository.function.FunctionDefinition) */ @Override public FunctionDefinition createFunction(FunctionDefinition function) { if (function == null) { throw new IllegalArgumentException("function is null"); } final String nameKey = function.getName(); final String namespaceKey = function.getNamespace(); final FunctionDefinition existing = getFunctionByNameAndNamespace(nameKey, namespaceKey); if (existing != null && existing.getName().equals(nameKey) && existing.getNamespace().equals(namespaceKey)) { throw new IllegalStateException("the function to create already exists: " + function); } FunctionBo functionBo = FunctionBo.from(function); for (FunctionParameterBo param : functionBo.getParameters()) { param.setFunction(functionBo); } functionBo = dataObjectService.save(functionBo, PersistenceOption.FLUSH); return FunctionBo.to(functionBo); } /** * This overridden method updates an existing Function in the repository * * @see org.kuali.rice.krms.impl.repository.FunctionBoService#updateFunction(org.kuali.rice.krms.api.repository.function.FunctionDefinition) */ @Override public FunctionDefinition updateFunction(FunctionDefinition function) { if (function == null) { throw new IllegalArgumentException("function is null"); } final String functionIdKey = function.getId(); final FunctionDefinition existing = getFunctionById(functionIdKey); if (existing == null) { throw new IllegalStateException("the function does not exist: " + function); } final FunctionDefinition toUpdate; if (!existing.getId().equals(function.getId())) { final FunctionDefinition.Builder builder = FunctionDefinition.Builder.create(function); builder.setId(existing.getId()); toUpdate = builder.build(); } else { toUpdate = function; } return FunctionBo.to(dataObjectService.save(FunctionBo.from(toUpdate), PersistenceOption.FLUSH)); // TODO: Do we need to return the updated FunctionDefinition? 
} /** * This overridden method retrieves a function by the given function id. * * @see org.kuali.rice.krms.impl.repository.FunctionBoService#getFunctionById(java.lang.String) */ @Override public FunctionDefinition getFunctionById(String functionId) { if (StringUtils.isBlank(functionId)) { throw new RiceIllegalArgumentException("functionId is null or blank"); } FunctionBo functionBo = dataObjectService.find(FunctionBo.class, functionId); return FunctionBo.to(functionBo); } /** * This overridden method retrieves a function by the given name and namespace. * * @see org.kuali.rice.krms.impl.repository.FunctionBoService#getFunctionByNameAndNamespace(java.lang.String, * java.lang.String) */ public FunctionDefinition getFunctionByNameAndNamespace(String name, String namespace) { if (StringUtils.isBlank(name)) { throw new IllegalArgumentException("name is null or blank"); } if (StringUtils.isBlank(namespace)) { throw new IllegalArgumentException("namespace is null or blank"); } final Map<String, Object> map = new HashMap<String, Object>(); map.put("name", name); map.put("namespace", namespace); QueryByCriteria query = QueryByCriteria.Builder.andAttributes(map).build(); QueryResults<FunctionBo> results = dataObjectService.findMatching(FunctionBo.class, query); if (results == null || results.getResults().size() == 0) { // fall through and return null } else if (results.getResults().size() == 1) { return FunctionBo.to(results.getResults().get(0)); } else if (results.getResults().size() > 1) { throw new IllegalStateException("there can be only one FunctionDefinition for a given name and namespace"); } return null; } /** * Gets all of the {@link FunctionDefinition}s within the given namespace * * @param namespace the namespace in which to get the functions * @return the list of function definitions, or if none are found, an empty list */ public List<FunctionDefinition> getFunctionsByNamespace(String namespace) { if (StringUtils.isBlank(namespace)) { throw new 
IllegalArgumentException("namespace is null or blank"); } QueryByCriteria criteria = QueryByCriteria.Builder.forAttribute("namespace", namespace).build(); QueryResults<FunctionBo> queryResults = dataObjectService.findMatching(FunctionBo.class, criteria); List<FunctionBo> functionBos = queryResults.getResults(); return convertFunctionBosToImmutables(functionBos); } /** * Converts a Collection of FunctionBos to an Unmodifiable List of Agendas * * @param functionBos a mutable List of FunctionBos to made completely immutable. * @return An unmodifiable List of FunctionDefinitions */ private List<FunctionDefinition> convertFunctionBosToImmutables(final Collection<FunctionBo> functionBos) { if (CollectionUtils.isEmpty(functionBos)) { return Collections.emptyList(); } return Collections.unmodifiableList(ModelObjectUtils.transform(functionBos, toFunctionDefinition)); } public void setDataObjectService(DataObjectService dataObjectService) { this.dataObjectService = dataObjectService; } }
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.java.codeInsight; import com.intellij.JavaTestUtil; import com.intellij.ide.DataManager; import com.intellij.lang.java.JavaLanguage; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.editor.actionSystem.EditorActionHandler; import com.intellij.openapi.editor.actionSystem.EditorActionManager; import com.intellij.psi.codeStyle.CommonCodeStyleSettings; import com.intellij.testFramework.LightJavaCodeInsightTestCase; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; public class JoinLinesTest extends LightJavaCodeInsightTestCase { @NotNull @Override protected String getTestDataPath() { return JavaTestUtil.getJavaTestDataPath(); } public void testNormal() { doTest(); } public void testStringLiteral() { doTest(); } public void testStringLiteralTrim() { doTest(); } public void testLiteralSCR4989() { doTest(); } public void testCallChain() { doTest(); } public void testCallChain2() { doTest(); } public void testCallChainLineBreak() { CommonCodeStyleSettings settings = getJavaSettings(); settings.METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS; doTest(); } public void testCallChainWrong() { doTest(); } public void testCallChainWrong2() { doTest(); } public void testDeclarationAndCall() { doTest(); } public void testDeclarationAndCallSelfRef() { doTest(); } public void testAssignmentAndCall() { doTest(); 
} public void testDeclarationAndReassignmentWithCall() { doTest(); } public void testAssignmentAndReassignmentWithCall() { doTest(); } public void testIfChain() { doTest(); } public void testIfChainIncomplete() { doTest(); } public void testIfChainCorrectIndent() { doTest(); } public void testIfChainPolyadic() { doTest(); } public void testIfChainNoBraces() { doTest(); } public void testIfChainElse() { doTest(); } public void testIfChainSelection() { doTest(); } public void testAtEOF() { doTest(); } public void testSCR3493() { CommonCodeStyleSettings settings = getJavaSettings(); boolean use_tab_character = settings.getIndentOptions().USE_TAB_CHARACTER; boolean smart_tabs = settings.getIndentOptions().SMART_TABS; try { settings.getIndentOptions().USE_TAB_CHARACTER = true; settings.getIndentOptions().SMART_TABS = true; doTest(); } finally { settings.getIndentOptions().USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions().SMART_TABS = smart_tabs; } } public void testSCR3493a() { CommonCodeStyleSettings settings = getJavaSettings(); boolean use_tab_character = settings.getIndentOptions().USE_TAB_CHARACTER; boolean smart_tabs = settings.getIndentOptions().SMART_TABS; try { settings.getIndentOptions().USE_TAB_CHARACTER = true; settings.getIndentOptions().SMART_TABS = true; doTest(); } finally { settings.getIndentOptions().USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions().SMART_TABS = smart_tabs; } } public void testSCR3493b() { CommonCodeStyleSettings settings = getJavaSettings(); boolean use_tab_character = settings.getIndentOptions().USE_TAB_CHARACTER; boolean smart_tabs = settings.getIndentOptions().SMART_TABS; try { settings.getIndentOptions().USE_TAB_CHARACTER = true; settings.getIndentOptions().SMART_TABS = true; doTest(); } finally { settings.getIndentOptions().USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions().SMART_TABS = smart_tabs; } } public void testSCR3493c() { CommonCodeStyleSettings settings = 
getJavaSettings(); boolean use_tab_character = settings.getIndentOptions().USE_TAB_CHARACTER; boolean smart_tabs = settings.getIndentOptions().SMART_TABS; try { settings.getIndentOptions().USE_TAB_CHARACTER = true; settings.getIndentOptions().SMART_TABS = true; doTest(); } finally { settings.getIndentOptions().USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions().SMART_TABS = smart_tabs; } } public void testSCR3493d() { CommonCodeStyleSettings settings = getJavaSettings(); boolean use_tab_character = settings.getIndentOptions().USE_TAB_CHARACTER; boolean smart_tabs = settings.getIndentOptions().SMART_TABS; try { settings.getIndentOptions().USE_TAB_CHARACTER = true; settings.getIndentOptions().SMART_TABS = true; doTest(); } finally { settings.getIndentOptions().USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions().SMART_TABS = smart_tabs; } } public void testSCR3493e() { CommonCodeStyleSettings settings = getJavaSettings(); boolean use_tab_character = settings.getIndentOptions().USE_TAB_CHARACTER; boolean smart_tabs = settings.getIndentOptions().SMART_TABS; try { settings.getIndentOptions().USE_TAB_CHARACTER = true; settings.getIndentOptions().SMART_TABS = true; doTest(); } finally { settings.getIndentOptions().USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions().SMART_TABS = smart_tabs; } } public void testSCR5959() { doTest(); } public void testSCR6299() { doTest(); } public void testLocalVar() { doTest(); } public void testLocalVarAnnotated() { doTest(); } public void testLocalVarImplicit() { doTest(); } public void testSlashComment() { doTest(); } public void testSlashCommentFollowedByBlockComment() { doTest(); } public void testSlashCommentFollowedByBlockCommentAndCode() { doTest(); } public void testDocComment() { doTest(); } public void testOnEmptyLine() { doTest(); } public void testCollapseClass() { doTest(); } public void testSCR10386() { doTest(); } public void testDeclarationWithInitializer() {doTest(); } public 
void testUnwrapCodeBlock1() { CommonCodeStyleSettings settings = getJavaSettings(); boolean use_tab_character = settings.getIndentOptions().USE_TAB_CHARACTER; boolean smart_tabs = settings.getIndentOptions().SMART_TABS; int old = settings.IF_BRACE_FORCE; try { settings.getIndentOptions().USE_TAB_CHARACTER = true; settings.getIndentOptions().SMART_TABS = true; settings.IF_BRACE_FORCE = CommonCodeStyleSettings.FORCE_BRACES_IF_MULTILINE; doTest(); } finally { settings.getIndentOptions().USE_TAB_CHARACTER = use_tab_character; settings.getIndentOptions().SMART_TABS = smart_tabs; settings.IF_BRACE_FORCE = old; } } public void testUnwrapCodeBlock2() { CommonCodeStyleSettings settings = getJavaSettings(); settings.getIndentOptions().USE_TAB_CHARACTER = true; settings.getIndentOptions().SMART_TABS = true; settings.SPACE_WITHIN_BRACES = true; settings.IF_BRACE_FORCE = CommonCodeStyleSettings.FORCE_BRACES_ALWAYS; doTest(); } public void testUnwrapCodeBlockIfElse() { getJavaSettings().SPACE_WITHIN_BRACES = true; doTest(); } public void testUnwrapCodeBlockIfElse2() { getJavaSettings().SPACE_WITHIN_BRACES = true; doTest(); } public void testIfBlockWithComment() { doTest(); } public void testAssignmentExpression() { doTest(); } public void testAssignmentExpression2() { doTest(); } public void testAssignmentExpressionPrecedence() { doTest(); } public void testAssignmentExpressionPrecedence2() { doTest(); } public void testReformatInsertsNewlines() { CommonCodeStyleSettings settings = getJavaSettings(); settings.getIndentOptions().USE_TAB_CHARACTER = true; settings.getIndentOptions().SMART_TABS = true; settings.IF_BRACE_FORCE = CommonCodeStyleSettings.FORCE_BRACES_ALWAYS; settings.METHOD_BRACE_STYLE = CommonCodeStyleSettings.NEXT_LINE; settings.SPACE_WITHIN_BRACES = true; doTest(); } public void testForceBrace() { CommonCodeStyleSettings settings = getJavaSettings(); int old = settings.IF_BRACE_FORCE; try { settings.IF_BRACE_FORCE = CommonCodeStyleSettings.FORCE_BRACES_ALWAYS; 
doTest(); } finally { settings.IF_BRACE_FORCE = old; } } public void testWrongWrapping() { CommonCodeStyleSettings settings = getJavaSettings(); settings.RIGHT_MARGIN = 80; settings.CALL_PARAMETERS_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED; settings.ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true; doTest(); } public void testSubsequentJoiningAndUnexpectedTextRemoval() { // Inspired by IDEA-65342 CommonCodeStyleSettings settings = getJavaSettings(); settings.RIGHT_MARGIN = 50; settings.CALL_PARAMETERS_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED; settings.SPACE_WITHIN_BRACES = true; doTest(2); } public void testLeaveTrailingComment() { doTest(); } public void testConvertComment() { doTest();} public void testConvertComment2() { doTest();} public void testConvertFinalLineComment() { doTest();} public void testConvertFinalLineComment2() { doTest();} public void testConvertFinalLineComment3() { doTest();} public void testConvertLongLine() { CommonCodeStyleSettings settings = getJavaSettings(); settings.RIGHT_MARGIN = 79; doTest(); } public void testConvertMultiLongLine() { CommonCodeStyleSettings settings = getJavaSettings(); settings.RIGHT_MARGIN = 50; doTest(); } public void testConvertManyEndOfLineComments() { doTest();} public void testConvertMixed() { doTest();} public void testJoiningMethodCallWhenItDoesntFit() { CommonCodeStyleSettings settings = getJavaSettings(); settings.METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED; settings.RIGHT_MARGIN = 20; doTest(); } public void testMultipleBlockComments() { doTest(); } public void testPreserveSpaceIfOnNewLineOptionEnabled() { CommonCodeStyleSettings settings = getJavaSettings(); settings.CATCH_ON_NEW_LINE = true; doTest(); } private void doTest() { doTest(".java"); } private void doTest(int times) { doTest(".java", times); } private void doTest(@NonNls final String ext) { doTest(ext, 1); } private void doTest(@NonNls final String ext, int times) { @NonNls String path = "/codeInsight/joinLines/"; 
configureByFile(path + getTestName(false) + ext); while (times-- > 0) { performAction(); } checkResultByFile(path + getTestName(false) + "_after" + ext); } private void performAction() { EditorActionManager actionManager = EditorActionManager.getInstance(); EditorActionHandler actionHandler = actionManager.getActionHandler(IdeActions.ACTION_EDITOR_JOIN_LINES); actionHandler.execute(getEditor(), null, DataManager.getInstance().getDataContext()); } @NotNull private CommonCodeStyleSettings getJavaSettings() { return getCurrentCodeStyleSettings().getCommonSettings(JavaLanguage.INSTANCE); } }
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.undertow.client.http;

import io.undertow.UndertowLogger;
import io.undertow.client.ClientCallback;
import io.undertow.client.ClientConnection;
import io.undertow.client.ClientExchange;
import io.undertow.client.ClientRequest;
import io.undertow.client.ClientResponse;
import io.undertow.client.UndertowClientMessages;
import io.undertow.conduits.ChunkedStreamSinkConduit;
import io.undertow.conduits.ChunkedStreamSourceConduit;
import io.undertow.conduits.ConduitListener;
import io.undertow.conduits.FixedLengthStreamSourceConduit;
import io.undertow.server.protocol.http.HttpContinue;
import io.undertow.util.AbstractAttachable;
import io.undertow.util.Headers;
import io.undertow.util.HttpString;
import io.undertow.util.Methods;
import io.undertow.util.Protocols;
import io.undertow.util.StatusCodes;
import org.xnio.ChannelExceptionHandler;
import org.xnio.ChannelListener;
import org.xnio.ChannelListeners;
import org.xnio.Option;
import org.xnio.OptionMap;
import org.xnio.Pool;
import org.xnio.Pooled;
import org.xnio.StreamConnection;
import org.xnio.XnioIoThread;
import org.xnio.XnioWorker;
import org.xnio.channels.StreamSourceChannel;
import org.xnio.conduits.ConduitStreamSinkChannel;
import org.xnio.conduits.ConduitStreamSourceChannel;
import org.xnio.conduits.PushBackStreamSourceConduit;
import org.xnio.conduits.StreamSinkConduit;
import org.xnio.conduits.StreamSourceConduit;

import java.io.Closeable;
import java.io.IOException;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Locale;

import static io.undertow.client.UndertowClientMessages.MESSAGES;
import static io.undertow.util.Headers.CLOSE;
import static io.undertow.util.Headers.CONNECTION;
import static io.undertow.util.Headers.CONTENT_LENGTH;
import static io.undertow.util.Headers.TRANSFER_ENCODING;
import static io.undertow.util.Headers.UPGRADE;
import static org.xnio.Bits.allAreClear;
import static org.xnio.Bits.allAreSet;
import static org.xnio.Bits.anyAreSet;
import static org.xnio.IoUtils.safeClose;

/**
 * Client-side HTTP/1.x connection. Pipelines at most one in-flight request at a time:
 * additional requests submitted via {@link #sendRequest} while one is active are parked
 * in {@code pendingQueue} and started from {@link #requestDone()}. Connection lifecycle
 * (close-after-response, upgrade, hard close) is tracked as bit flags in {@code state}.
 *
 * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
 */
class HttpClientConnection extends AbstractAttachable implements Closeable, ClientConnection {

    // Fires when the request body conduit is fully written; marks the request side done.
    public final ConduitListener<StreamSinkConduit> requestFinishListener = new ConduitListener<StreamSinkConduit>() {
        @Override
        public void handleEvent(StreamSinkConduit channel) {
            currentRequest.terminateRequest();
        }
    };

    // Fires when the response body conduit is fully read; marks the response side done.
    public final ConduitListener<StreamSourceConduit> responseFinishedListener = new ConduitListener<StreamSourceConduit>() {
        @Override
        public void handleEvent(StreamSourceConduit channel) {
            currentRequest.terminateResponse();
        }
    };

    // Requests waiting for the current one to finish (single in-flight request model).
    private final Deque<HttpClientExchange> pendingQueue = new ArrayDeque<>();
    private HttpClientExchange currentRequest;
    private HttpResponseBuilder pendingResponse;

    private final OptionMap options;
    private final StreamConnection connection;
    // Wraps the raw source conduit so leftover parsed bytes can be pushed back for the body reader.
    private final PushBackStreamSourceConduit pushBackStreamSourceConduit;

    private final ClientReadListener clientReadListener = new ClientReadListener();

    private final Pool<ByteBuffer> bufferPool;
    // Buffer handed off to the push-back conduit between read events; freed on close.
    private Pooled<ByteBuffer> pooledBuffer;
    // Unwrapped sink conduit, restored after each request / on upgrade.
    private final StreamSinkConduit originalSinkConduit;

    // Bit flags stored in `state`.
    private static final int UPGRADED = 1 << 28;
    private static final int UPGRADE_REQUESTED = 1 << 29;
    private static final int CLOSE_REQ = 1 << 30;
    private static final int CLOSED = 1 << 31; // sign bit; only ever tested via Bits helpers

    // NOTE(review): incremented per sendRequest() but never read in this class — confirm
    // whether it is dead or used via reflection/debugging before removing.
    private int count = 0;

    private int state;

    private final ChannelListener.SimpleSetter<HttpClientConnection> closeSetter = new ChannelListener.SimpleSetter<>();

    HttpClientConnection(final StreamConnection connection, final OptionMap options, final Pool<ByteBuffer> bufferPool) {
        this.options = options;
        this.connection = connection;
        this.pushBackStreamSourceConduit = new PushBackStreamSourceConduit(connection.getSourceChannel().getConduit());
        this.connection.getSourceChannel().setConduit(pushBackStreamSourceConduit);
        this.bufferPool = bufferPool;
        this.originalSinkConduit = connection.getSinkChannel().getConduit();

        // Propagate transport close: flag CLOSED, notify listeners, release any retained buffer.
        connection.getCloseSetter().set(new ChannelListener<StreamConnection>() {
            public void handleEvent(StreamConnection channel) {
                HttpClientConnection.this.state |= CLOSED;
                ChannelListeners.invokeChannelListener(HttpClientConnection.this, closeSetter.get());
                try {
                    if (pooledBuffer != null) {
                        pooledBuffer.free();
                    }
                } catch (Throwable ignored) {
                    // best-effort cleanup during close; failures are irrelevant here
                }
            }
        });
    }

    @Override
    public Pool<ByteBuffer> getBufferPool() {
        return bufferPool;
    }

    @Override
    public SocketAddress getPeerAddress() {
        return connection.getPeerAddress();
    }

    /** @return the underlying transport connection (package-private accessor). */
    StreamConnection getConnection() {
        return connection;
    }

    @Override
    public <A extends SocketAddress> A getPeerAddress(Class<A> type) {
        return connection.getPeerAddress(type);
    }

    @Override
    public ChannelListener.Setter<? extends HttpClientConnection> getCloseSetter() {
        return closeSetter;
    }

    @Override
    public SocketAddress getLocalAddress() {
        return connection.getLocalAddress();
    }

    @Override
    public <A extends SocketAddress> A getLocalAddress(Class<A> type) {
        return connection.getLocalAddress(type);
    }

    @Override
    public XnioWorker getWorker() {
        return connection.getWorker();
    }

    @Override
    public XnioIoThread getIoThread() {
        return connection.getIoThread();
    }

    @Override
    public boolean isOpen() {
        // Open only if the transport is open and neither close-requested nor closed.
        return connection.isOpen() && allAreClear(state, CLOSE_REQ | CLOSED);
    }

    @Override
    public boolean supportsOption(Option<?> option) {
        return connection.supportsOption(option);
    }

    @Override
    public <T> T getOption(Option<T> option) throws IOException {
        return connection.getOption(option);
    }

    @Override
    public <T> T setOption(Option<T> option, T value) throws IllegalArgumentException, IOException {
        return connection.setOption(option, value);
    }

    @Override
    public boolean isUpgraded() {
        // Reports true as soon as an upgrade has been *requested*, not only after it completed.
        return anyAreSet(state, UPGRADE_REQUESTED | UPGRADED);
    }

    @Override
    public boolean isPushSupported() {
        return false;
    }

    @Override
    public boolean isMultiplexingSupported() {
        return false;
    }

    /**
     * Submits a request. If no request is active it starts immediately; otherwise it is
     * queued and started when the current exchange completes. Fails fast if the connection
     * is upgraded, upgrade-pending, or (about to be) closed.
     */
    @Override
    public void sendRequest(final ClientRequest request, final ClientCallback<ClientExchange> clientCallback) {
        count++;
        if (anyAreSet(state, UPGRADE_REQUESTED | UPGRADED | CLOSE_REQ | CLOSED)) {
            clientCallback.failed(UndertowClientMessages.MESSAGES.invalidConnectionState());
            return;
        }
        final HttpClientExchange httpClientExchange = new HttpClientExchange(clientCallback, request, this);
        if (currentRequest == null) {
            initiateRequest(httpClientExchange);
        } else {
            pendingQueue.add(httpClientExchange);
        }
    }

    /**
     * Starts an exchange: derives connection-lifecycle flags from the request headers,
     * wires the read listener, and stacks the request-writing conduits (HTTP framing +
     * fixed-length or chunked body encoding) onto the sink channel.
     */
    private void initiateRequest(HttpClientExchange httpClientExchange) {
        currentRequest = httpClientExchange;
        pendingResponse = new HttpResponseBuilder();
        ClientRequest request = httpClientExchange.getRequest();

        // Connection header: "close" forces close-after-response; "upgrade" flags an upgrade.
        // Absent header on a pre-1.1 protocol implies close-after-response.
        String connectionString = request.getRequestHeaders().getFirst(CONNECTION);
        if (connectionString != null) {
            HttpString connectionHttpString = new HttpString(connectionString);
            if (connectionHttpString.equals(CLOSE)) {
                state |= CLOSE_REQ;
            } else if (connectionHttpString.equals(UPGRADE)) {
                state |= UPGRADE_REQUESTED;
            }
        } else if (request.getProtocol() != Protocols.HTTP_1_1) {
            state |= CLOSE_REQ;
        }
        if (request.getRequestHeaders().contains(UPGRADE)) {
            state |= UPGRADE_REQUESTED;
        }
        if (request.getMethod().equals(Methods.CONNECT)) {
            //we treat CONNECT like upgrade requests
            state |= UPGRADE_REQUESTED;
        }

        //setup the client request conduits
        final ConduitStreamSourceChannel sourceChannel = connection.getSourceChannel();
        sourceChannel.setReadListener(clientReadListener);
        sourceChannel.resumeReads();

        ConduitStreamSinkChannel sinkChannel = connection.getSinkChannel();
        StreamSinkConduit conduit = originalSinkConduit;
        conduit = new HttpRequestConduit(conduit, bufferPool, request);

        // Body framing: Content-Length wins, then chunked Transfer-Encoding, else zero-length.
        String fixedLengthString = request.getRequestHeaders().getFirst(CONTENT_LENGTH);
        String transferEncodingString = request.getRequestHeaders().getLast(TRANSFER_ENCODING);

        boolean hasContent = true;
        if (fixedLengthString != null) {
            try {
                long length = Long.parseLong(fixedLengthString);
                conduit = new ClientFixedLengthStreamSinkConduit(conduit, length, false, false, currentRequest);
                hasContent = length != 0;
            } catch (NumberFormatException e) {
                handleError(new IOException(e));
                return;
            }
        } else if (transferEncodingString != null) {
            if (!transferEncodingString.toLowerCase(Locale.ENGLISH).contains(Headers.CHUNKED.toString())) {
                handleError(UndertowClientMessages.MESSAGES.unknownTransferEncoding(transferEncodingString));
                return;
            }
            conduit = new ChunkedStreamSinkConduit(conduit, httpClientExchange.getConnection().getBufferPool(), false, false, httpClientExchange.getRequest().getRequestHeaders(), requestFinishListener, httpClientExchange);
        } else {
            conduit = new ClientFixedLengthStreamSinkConduit(conduit, 0, false, false, currentRequest);
            hasContent = false;
        }
        sinkChannel.setConduit(conduit);

        httpClientExchange.invokeReadReadyCallback();
        if (!hasContent) {
            //if there is no content we flush the response channel.
            //otherwise it is up to the user
            try {
                sinkChannel.shutdownWrites();
                if (!sinkChannel.flush()) {
                    sinkChannel.setWriteListener(ChannelListeners.flushingChannelListener(null, new ChannelExceptionHandler<ConduitStreamSinkChannel>() {
                        @Override
                        public void handleException(ConduitStreamSinkChannel channel, IOException exception) {
                            handleError(exception);
                        }
                    }));
                }
            } catch (IOException e) {
                handleError(e);
            }
        }
    }

    /** Fails the current exchange, logs, and tears down the transport connection. */
    private void handleError(IOException exception) {
        currentRequest.setFailed(exception);
        UndertowLogger.REQUEST_IO_LOGGER.ioException(exception);
        safeClose(connection);
    }

    /**
     * Hands the raw transport to the caller after a successful protocol upgrade,
     * restoring the original conduits and blocking further HTTP requests.
     */
    public StreamConnection performUpgrade() throws IOException {
        // Upgrade the connection
        // Set the upgraded flag already to prevent new requests after this one
        // NOTE(review): allAreSet only trips when *all three* flags are set simultaneously;
        // anyAreSet(state, UPGRADED | CLOSE_REQ | CLOSED) looks intended — confirm upstream.
        if (allAreSet(state, UPGRADED | CLOSE_REQ | CLOSED)) {
            throw new IOException(UndertowClientMessages.MESSAGES.connectionClosed());
        }
        state |= UPGRADED;
        connection.getSinkChannel().setConduit(originalSinkConduit);
        connection.getSourceChannel().setConduit(pushBackStreamSourceConduit);
        return connection;
    }

    public void close() throws IOException {
        if (anyAreSet(state, CLOSED)) {
            return;
        }
        state |= CLOSED | CLOSE_REQ;
        connection.close();
    }

    /**
     * Notification that the current request is finished: restores the base conduits,
     * then either closes (CLOSE_REQ), parks for upgrade (UPGRADE_REQUESTED), starts the
     * next queued exchange, or idles with reads resumed to detect peer close.
     */
    public void requestDone() {
        connection.getSinkChannel().setConduit(originalSinkConduit);
        connection.getSourceChannel().setConduit(pushBackStreamSourceConduit);
        connection.getSinkChannel().suspendWrites();
        connection.getSinkChannel().setWriteListener(null);

        if (anyAreSet(state, CLOSE_REQ)) {
            currentRequest = null;
            this.state |= CLOSED;
            safeClose(connection);
        } else if (anyAreSet(state, UPGRADE_REQUESTED)) {
            connection.getSourceChannel().suspendReads();
            currentRequest = null;
            return;
        }
        currentRequest = null;

        HttpClientExchange next = pendingQueue.poll();
        if (next == null) {
            //we resume reads, so if the target goes away we get notified
            connection.getSourceChannel().setReadListener(clientReadListener);
            connection.getSourceChannel().resumeReads();
        } else {
            initiateRequest(next);
        }
    }

    /**
     * Read listener that parses response head bytes. With no pending request, any inbound
     * data (or EOF) closes the connection. Otherwise it feeds the parser until the head is
     * complete, pushing unconsumed body bytes back for the body conduits, then dispatches
     * the response (including 100-continue and failed-upgrade handling).
     */
    class ClientReadListener implements ChannelListener<StreamSourceChannel> {

        public void handleEvent(StreamSourceChannel channel) {

            HttpResponseBuilder builder = pendingResponse;
            final Pooled<ByteBuffer> pooled = bufferPool.allocate();
            final ByteBuffer buffer = pooled.getResource();
            // `free` tracks buffer ownership: false once bytes are pushed back to the conduit.
            boolean free = true;

            try {
                if (builder == null) {
                    //read ready when no request pending
                    buffer.clear();
                    try {
                        int res = channel.read(buffer);
                        if (res == -1) {
                            UndertowLogger.CLIENT_LOGGER.debugf("Connection to %s was closed by the target server", connection.getPeerAddress());
                            safeClose(HttpClientConnection.this);
                        } else if (res != 0) {
                            UndertowLogger.CLIENT_LOGGER.debugf("Target server %s sent unexpected data when no request pending, closing connection", connection.getPeerAddress());
                            safeClose(HttpClientConnection.this);
                        }
                        //otherwise it is a spurious notification
                    } catch (IOException e) {
                        if (UndertowLogger.CLIENT_LOGGER.isDebugEnabled()) {
                            UndertowLogger.CLIENT_LOGGER.debugf(e, "Connection closed with IOException");
                        }
                        safeClose(connection);
                    }
                    return;
                }

                final ResponseParseState state = builder.getParseState();
                int res;
                // Read-and-parse loop: runs until the response head is fully parsed.
                do {
                    buffer.clear();
                    try {
                        res = channel.read(buffer);
                    } catch (IOException e) {
                        if (UndertowLogger.CLIENT_LOGGER.isDebugEnabled()) {
                            UndertowLogger.CLIENT_LOGGER.debugf(e, "Connection closed with IOException");
                        }
                        safeClose(channel);
                        currentRequest.setFailed(new IOException(MESSAGES.connectionClosed()));
                        return;
                    }

                    if (res == 0) {
                        // No bytes available yet; make sure we get called again.
                        if (!channel.isReadResumed()) {
                            channel.getReadSetter().set(this);
                            channel.resumeReads();
                        }
                        return;
                    } else if (res == -1) {
                        channel.suspendReads();
                        safeClose(HttpClientConnection.this);
                        // Cancel the current active request
                        currentRequest.setFailed(new IOException(MESSAGES.connectionClosed()));
                        return;
                    }

                    buffer.flip();
                    HttpResponseParser.INSTANCE.handle(buffer, state, builder);
                    if (buffer.hasRemaining()) {
                        // Leftover bytes belong to the body; hand the pooled buffer to the conduit.
                        free = false;
                        pushBackStreamSourceConduit.pushBack(pooled);
                    }
                } while (!state.isComplete());

                final ClientResponse response = builder.build();
                String connectionString = response.getResponseHeaders().getFirst(CONNECTION);

                //check if an upgrade worked
                if (anyAreSet(HttpClientConnection.this.state, UPGRADE_REQUESTED)) {
                    if ((connectionString == null || !UPGRADE.equalToString(connectionString)) && !response.getResponseHeaders().contains(UPGRADE)) {
                        if (!currentRequest.getRequest().getMethod().equals(Methods.CONNECT) || response.getResponseCode() != 200) { //make sure it was not actually a connect request
                            //just unset the upgrade requested flag
                            HttpClientConnection.this.state &= ~UPGRADE_REQUESTED;
                        }
                    }
                }
                if (connectionString != null) {
                    if (HttpString.tryFromString(connectionString).equals(Headers.CLOSE)) {
                        HttpClientConnection.this.state |= CLOSE_REQ;
                    }
                }

                if (builder.getStatusCode() == StatusCodes.CONTINUE) {
                    // 100 Continue: deliver it, then keep parsing for the real response.
                    pendingResponse = new HttpResponseBuilder();
                    currentRequest.setContinueResponse(response);
                } else {
                    prepareResponseChannel(response, currentRequest);
                    channel.getReadSetter().set(null);
                    channel.suspendReads();
                    pendingResponse = null;
                    currentRequest.setResponse(response);
                    if (response.getResponseCode() == StatusCodes.EXPECTATION_FAILED) {
                        // 417 to an Expect: 100-continue request — terminate our side and
                        // force close-after-response, since the body was never sent.
                        if (HttpContinue.requiresContinueResponse(currentRequest.getRequest().getRequestHeaders())) {
                            HttpClientConnection.this.state |= CLOSE_REQ;
                            ConduitStreamSinkChannel sinkChannel = HttpClientConnection.this.connection.getSinkChannel();
                            sinkChannel.shutdownWrites();
                            if (!sinkChannel.flush()) {
                                sinkChannel.setWriteListener(ChannelListeners.flushingChannelListener(null, null));
                                sinkChannel.resumeWrites();
                            }
                            currentRequest.terminateRequest();
                        }
                    }
                }
            } catch (Exception e) {
                UndertowLogger.CLIENT_LOGGER.exceptionProcessingRequest(e);
                safeClose(connection);
                currentRequest.setFailed(new IOException(e));
            } finally {
                if (free) {
                    pooled.free();
                    pooledBuffer = null;
                } else {
                    // Buffer ownership transferred to the push-back conduit; remember it for close().
                    pooledBuffer = pooled;
                }
            }
        }
    }

    /**
     * Installs the response-body source conduit based on the response head:
     * HEAD → zero-length; chunked → chunked conduit; Content-Length → fixed-length;
     * bare HTTP/1.1 → zero-length; otherwise read-to-close (flag CLOSE_REQ).
     */
    private void prepareResponseChannel(ClientResponse response, ClientExchange exchange) {
        String encoding = response.getResponseHeaders().getLast(TRANSFER_ENCODING);
        boolean chunked = encoding != null && Headers.CHUNKED.equals(new HttpString(encoding));
        String length = response.getResponseHeaders().getFirst(CONTENT_LENGTH);
        if (exchange.getRequest().getMethod().equals(Methods.HEAD)) {
            connection.getSourceChannel().setConduit(new FixedLengthStreamSourceConduit(connection.getSourceChannel().getConduit(), 0, responseFinishedListener));
        } else if (chunked) {
            connection.getSourceChannel().setConduit(new ChunkedStreamSourceConduit(connection.getSourceChannel().getConduit(), pushBackStreamSourceConduit, bufferPool, responseFinishedListener, exchange));
        } else if (length != null) {
            try {
                long contentLength = Long.parseLong(length);
                connection.getSourceChannel().setConduit(new FixedLengthStreamSourceConduit(connection.getSourceChannel().getConduit(), contentLength, responseFinishedListener));
            } catch (NumberFormatException e) {
                // Report and tear down, then rethrow so the caller's catch also sees it.
                handleError(new IOException(e));
                throw e;
            }
        } else if (response.getProtocol().equals(Protocols.HTTP_1_1)) {
            connection.getSourceChannel().setConduit(new FixedLengthStreamSourceConduit(connection.getSourceChannel().getConduit(), 0, responseFinishedListener));
        } else {
            state |= CLOSE_REQ;
        }
    }
}
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package com.google.protobuf.micro;

import java.io.UnsupportedEncodingException;

/**
 * An immutable sequence of bytes. Instances always own a private copy of
 * their contents, so the data they expose can never change after creation.
 *
 * @author crazybob@google.com Bob Lee
 * @author kenton@google.com Kenton Varda
 */
public final class ByteStringMicro {

  /** Empty ByteStringMicro. */
  public static final ByteStringMicro EMPTY = new ByteStringMicro(new byte[0]);

  // Backing storage; never handed out directly, which is what keeps this class immutable.
  private final byte[] bytes;

  // Cached hash code; 0 means "not computed yet" (the computation never stores 0).
  private volatile int hash = 0;

  private ByteStringMicro(final byte[] bytes) {
    this.bytes = bytes;
  }

  // =================================================================
  // byte[] -> ByteStringMicro

  /**
   * Copies the given bytes into a {@code ByteStringMicro}.
   */
  public static ByteStringMicro copyFrom(final byte[] bytes, final int offset,
                                         final int size) {
    final byte[] snapshot = new byte[size];
    System.arraycopy(bytes, offset, snapshot, 0, size);
    return new ByteStringMicro(snapshot);
  }

  /**
   * Copies the given bytes into a {@code ByteStringMicro}.
   */
  public static ByteStringMicro copyFrom(final byte[] bytes) {
    return copyFrom(bytes, 0, bytes.length);
  }

  /**
   * Encodes {@code text} into a sequence of bytes using the named charset
   * and returns the result as a {@code ByteStringMicro}.
   */
  public static ByteStringMicro copyFrom(final String text, final String charsetName)
      throws UnsupportedEncodingException {
    return new ByteStringMicro(text.getBytes(charsetName));
  }

  /**
   * Encodes {@code text} into a sequence of UTF-8 bytes and returns the
   * result as a {@code ByteStringMicro}.
   */
  public static ByteStringMicro copyFromUtf8(final String text) {
    try {
      return new ByteStringMicro(text.getBytes("UTF-8"));
    } catch (UnsupportedEncodingException unreachable) {
      // Every JVM is required to support UTF-8, so this cannot happen.
      throw new RuntimeException("UTF-8 not supported?");
    }
  }

  /**
   * Gets the byte at the given index.
   *
   * @throws ArrayIndexOutOfBoundsException {@code index} is < 0 or >= size
   */
  public byte byteAt(final int index) {
    return bytes[index];
  }

  /**
   * Gets the number of bytes.
   */
  public int size() {
    return bytes.length;
  }

  /**
   * Returns {@code true} if the size is {@code 0}, {@code false} otherwise.
   */
  public boolean isEmpty() {
    return bytes.length == 0;
  }

  // =================================================================
  // ByteStringMicro -> byte[]

  /**
   * Copies bytes into a buffer at the given offset.
   *
   * @param target buffer to copy into
   * @param offset in the target buffer
   */
  public void copyTo(final byte[] target, final int offset) {
    System.arraycopy(bytes, 0, target, offset, bytes.length);
  }

  /**
   * Copies bytes into a buffer.
   *
   * @param target       buffer to copy into
   * @param sourceOffset offset within these bytes
   * @param targetOffset offset within the target buffer
   * @param size         number of bytes to copy
   */
  public void copyTo(final byte[] target, final int sourceOffset,
                     final int targetOffset, final int size) {
    System.arraycopy(bytes, sourceOffset, target, targetOffset, size);
  }

  /**
   * Copies bytes to a {@code byte[]}.
   */
  public byte[] toByteArray() {
    final byte[] result = new byte[bytes.length];
    System.arraycopy(bytes, 0, result, 0, result.length);
    return result;
  }

  /**
   * Constructs a new {@code String} by decoding the bytes using the
   * specified charset.
   */
  public String toString(final String charsetName)
      throws UnsupportedEncodingException {
    return new String(bytes, charsetName);
  }

  /**
   * Constructs a new {@code String} by decoding the bytes as UTF-8.
   */
  public String toStringUtf8() {
    try {
      return new String(bytes, "UTF-8");
    } catch (UnsupportedEncodingException unreachable) {
      // Every JVM is required to support UTF-8, so this cannot happen.
      throw new RuntimeException("UTF-8 not supported?");
    }
  }

  // =================================================================
  // equals() and hashCode()

  //@Override for compatibility with Java 1.3 code we can't use annotations
  public boolean equals(final Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof ByteStringMicro)) {
      return false;
    }
    final byte[] mine = bytes;
    final byte[] theirs = ((ByteStringMicro) o).bytes;
    if (mine.length != theirs.length) {
      return false;
    }
    // Walk backwards; direction is irrelevant for equality.
    int i = mine.length;
    while (--i >= 0) {
      if (mine[i] != theirs[i]) {
        return false;
      }
    }
    return true;
  }

  //@Override for compatibility with Java 1.3 code we can't use annotations
  public int hashCode() {
    int h = hash;
    if (h == 0) {
      // Seed with the length, then fold each (signed) byte in base 31 —
      // the same recurrence java.lang.String uses.
      final byte[] data = bytes;
      h = data.length;
      for (int i = 0; i < data.length; i++) {
        h = h * 31 + data[i];
      }
      if (h == 0) {
        h = 1; // never cache 0, which is the "not computed" sentinel
      }
      hash = h;
    }
    return h;
  }
}
/* * Copyright 2012-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.actuate.metrics.web.reactive.server; import io.micrometer.core.instrument.Tag; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.http.HttpStatus; import org.springframework.http.server.reactive.ServerHttpRequest; import org.springframework.http.server.reactive.ServerHttpResponse; import org.springframework.mock.http.server.reactive.MockServerHttpRequest; import org.springframework.mock.web.server.MockServerWebExchange; import org.springframework.web.reactive.HandlerMapping; import org.springframework.web.server.ServerWebExchange; import org.springframework.web.util.pattern.PathPatternParser; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.mock; /** * Tests for {@link WebFluxTags}. 
* * @author Brian Clozel * @author Michael McFadyen * @author Madhura Bhave * @author Stephane Nicoll */ class WebFluxTagsTests { private MockServerWebExchange exchange; private final PathPatternParser parser = new PathPatternParser(); @BeforeEach void setup() { this.exchange = MockServerWebExchange.from(MockServerHttpRequest.get("")); } @Test void uriTagValueIsBestMatchingPatternWhenAvailable() { this.exchange.getAttributes().put(HandlerMapping.BEST_MATCHING_PATTERN_ATTRIBUTE, this.parser.parse("/spring/")); this.exchange.getResponse().setStatusCode(HttpStatus.MOVED_PERMANENTLY); Tag tag = WebFluxTags.uri(this.exchange); assertThat(tag.getValue()).isEqualTo("/spring/"); } @Test void uriTagValueIsRootWhenBestMatchingPatternIsEmpty() { this.exchange.getAttributes().put(HandlerMapping.BEST_MATCHING_PATTERN_ATTRIBUTE, this.parser.parse("")); this.exchange.getResponse().setStatusCode(HttpStatus.MOVED_PERMANENTLY); Tag tag = WebFluxTags.uri(this.exchange); assertThat(tag.getValue()).isEqualTo("root"); } @Test void uriTagValueWithBestMatchingPatternAndIgnoreTrailingSlashRemoveTrailingSlash() { this.exchange.getAttributes().put(HandlerMapping.BEST_MATCHING_PATTERN_ATTRIBUTE, this.parser.parse("/spring/")); Tag tag = WebFluxTags.uri(this.exchange, true); assertThat(tag.getValue()).isEqualTo("/spring"); } @Test void uriTagValueWithBestMatchingPatternAndIgnoreTrailingSlashKeepSingleSlash() { this.exchange.getAttributes().put(HandlerMapping.BEST_MATCHING_PATTERN_ATTRIBUTE, this.parser.parse("/")); Tag tag = WebFluxTags.uri(this.exchange, true); assertThat(tag.getValue()).isEqualTo("/"); } @Test void uriTagValueIsRedirectionWhenResponseStatusIs3xx() { this.exchange.getResponse().setStatusCode(HttpStatus.MOVED_PERMANENTLY); Tag tag = WebFluxTags.uri(this.exchange); assertThat(tag.getValue()).isEqualTo("REDIRECTION"); } @Test void uriTagValueIsNotFoundWhenResponseStatusIs404() { this.exchange.getResponse().setStatusCode(HttpStatus.NOT_FOUND); Tag tag = 
WebFluxTags.uri(this.exchange); assertThat(tag.getValue()).isEqualTo("NOT_FOUND"); } @Test void uriTagToleratesCustomResponseStatus() { this.exchange.getResponse().setRawStatusCode(601); Tag tag = WebFluxTags.uri(this.exchange); assertThat(tag.getValue()).isEqualTo("root"); } @Test void uriTagValueIsRootWhenRequestHasNoPatternOrPathInfo() { Tag tag = WebFluxTags.uri(this.exchange); assertThat(tag.getValue()).isEqualTo("root"); } @Test void uriTagValueIsRootWhenRequestHasNoPatternAndSlashPathInfo() { MockServerHttpRequest request = MockServerHttpRequest.get("/").build(); ServerWebExchange exchange = MockServerWebExchange.from(request); Tag tag = WebFluxTags.uri(exchange); assertThat(tag.getValue()).isEqualTo("root"); } @Test void uriTagValueIsUnknownWhenRequestHasNoPatternAndNonRootPathInfo() { MockServerHttpRequest request = MockServerHttpRequest.get("/example").build(); ServerWebExchange exchange = MockServerWebExchange.from(request); Tag tag = WebFluxTags.uri(exchange); assertThat(tag.getValue()).isEqualTo("UNKNOWN"); } @Test void methodTagToleratesNonStandardHttpMethods() { ServerWebExchange exchange = mock(ServerWebExchange.class); ServerHttpRequest request = mock(ServerHttpRequest.class); given(exchange.getRequest()).willReturn(request); given(request.getMethodValue()).willReturn("CUSTOM"); Tag tag = WebFluxTags.method(exchange); assertThat(tag.getValue()).isEqualTo("CUSTOM"); } @Test void outcomeTagIsSuccessWhenResponseStatusIsNull() { this.exchange.getResponse().setStatusCode(null); Tag tag = WebFluxTags.outcome(this.exchange); assertThat(tag.getValue()).isEqualTo("SUCCESS"); } @Test void outcomeTagIsSuccessWhenResponseStatusIsAvailableFromUnderlyingServer() { ServerWebExchange exchange = mock(ServerWebExchange.class); ServerHttpRequest request = mock(ServerHttpRequest.class); ServerHttpResponse response = mock(ServerHttpResponse.class); given(response.getStatusCode()).willReturn(HttpStatus.OK); given(response.getRawStatusCode()).willReturn(null); 
given(exchange.getRequest()).willReturn(request); given(exchange.getResponse()).willReturn(response); Tag tag = WebFluxTags.outcome(exchange); assertThat(tag.getValue()).isEqualTo("SUCCESS"); } @Test void outcomeTagIsInformationalWhenResponseIs1xx() { this.exchange.getResponse().setStatusCode(HttpStatus.CONTINUE); Tag tag = WebFluxTags.outcome(this.exchange); assertThat(tag.getValue()).isEqualTo("INFORMATIONAL"); } @Test void outcomeTagIsSuccessWhenResponseIs2xx() { this.exchange.getResponse().setStatusCode(HttpStatus.OK); Tag tag = WebFluxTags.outcome(this.exchange); assertThat(tag.getValue()).isEqualTo("SUCCESS"); } @Test void outcomeTagIsRedirectionWhenResponseIs3xx() { this.exchange.getResponse().setStatusCode(HttpStatus.MOVED_PERMANENTLY); Tag tag = WebFluxTags.outcome(this.exchange); assertThat(tag.getValue()).isEqualTo("REDIRECTION"); } @Test void outcomeTagIsClientErrorWhenResponseIs4xx() { this.exchange.getResponse().setStatusCode(HttpStatus.BAD_REQUEST); Tag tag = WebFluxTags.outcome(this.exchange); assertThat(tag.getValue()).isEqualTo("CLIENT_ERROR"); } @Test void outcomeTagIsServerErrorWhenResponseIs5xx() { this.exchange.getResponse().setStatusCode(HttpStatus.BAD_GATEWAY); Tag tag = WebFluxTags.outcome(this.exchange); assertThat(tag.getValue()).isEqualTo("SERVER_ERROR"); } @Test void outcomeTagIsClientErrorWhenResponseIsNonStandardInClientSeries() { this.exchange.getResponse().setRawStatusCode(490); Tag tag = WebFluxTags.outcome(this.exchange); assertThat(tag.getValue()).isEqualTo("CLIENT_ERROR"); } @Test void outcomeTagIsUnknownWhenResponseStatusIsInUnknownSeries() { this.exchange.getResponse().setRawStatusCode(701); Tag tag = WebFluxTags.outcome(this.exchange); assertThat(tag.getValue()).isEqualTo("UNKNOWN"); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.discovery;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Binder;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.name.Named;
import com.google.inject.name.Names;
import com.google.inject.servlet.GuiceFilter;
import org.apache.druid.curator.discovery.ServerDiscoverySelector;
import org.apache.druid.guice.GuiceInjectors;
import org.apache.druid.guice.Jerseys;
import org.apache.druid.guice.JsonConfigProvider;
import org.apache.druid.guice.LazySingleton;
import org.apache.druid.guice.LifecycleModule;
import org.apache.druid.guice.annotations.Self;
import org.apache.druid.initialization.Initialization;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.Request;
import org.apache.druid.server.DruidNode;
import org.apache.druid.server.initialization.BaseJettyTest;
import org.apache.druid.server.initialization.jetty.JettyServerInitializer;
import org.easymock.EasyMock;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.HandlerList;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * Tests for {@code DruidLeaderClient} against an embedded Jetty server (via
 * {@link BaseJettyTest}) and EasyMock-driven node discovery.
 */
public class DruidLeaderClientTest extends BaseJettyTest
{
  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  // Discovery entry that points at the embedded test server started by BaseJettyTest.
  private DiscoveryDruidNode discoveryDruidNode;

  // HTTP client obtained from the test injector built in setupInjector().
  private HttpClient httpClient;

  /**
   * Builds the Guice injector for the embedded server: binds the self node, the test
   * Jetty initializer and the {@link SimpleResource} endpoint, and captures the client.
   */
  @Override
  protected Injector setupInjector()
  {
    final DruidNode node = new DruidNode("test", "localhost", false, null, null, true, false);
    discoveryDruidNode = new DiscoveryDruidNode(node, NodeRole.PEON, ImmutableMap.of());
    Injector injector = Initialization.makeInjectorWithModules(
        GuiceInjectors.makeStartupInjector(),
        ImmutableList.<Module>of(
            new Module()
            {
              @Override
              public void configure(Binder binder)
              {
                JsonConfigProvider.bindInstance(
                    binder,
                    Key.get(DruidNode.class, Self.class),
                    node
                );
                binder.bind(Integer.class).annotatedWith(Names.named("port")).toInstance(node.getPlaintextPort());
                binder.bind(JettyServerInitializer.class).to(TestJettyServerInitializer.class).in(LazySingleton.class);
                Jerseys.addResource(binder, SimpleResource.class);
                LifecycleModule.register(binder, Server.class);
              }
            }
        )
    );
    httpClient = injector.getInstance(ClientHolder.class).getClient();
    return injector;
  }

  /** Happy path: the discovered leader answers a POST directly. */
  @Test
  public void testSimple() throws Exception
  {
    DruidNodeDiscovery druidNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    EasyMock.expect(druidNodeDiscovery.getAllNodes()).andReturn(
        ImmutableList.of(discoveryDruidNode)
    );

    DruidNodeDiscoveryProvider druidNodeDiscoveryProvider = EasyMock.createMock(DruidNodeDiscoveryProvider.class);
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.PEON)).andReturn(druidNodeDiscovery);
    EasyMock.replay(druidNodeDiscovery, druidNodeDiscoveryProvider);

    DruidLeaderClient druidLeaderClient = new DruidLeaderClient(
        httpClient,
        druidNodeDiscoveryProvider,
        NodeRole.PEON,
        "/simple/leader",
        EasyMock.createNiceMock(ServerDiscoverySelector.class)
    );
    druidLeaderClient.start();

    Request request = druidLeaderClient.makeRequest(HttpMethod.POST, "/simple/direct");
    request.setContent("hello".getBytes(StandardCharsets.UTF_8));
    Assert.assertEquals("hello", druidLeaderClient.go(request).getContent());
  }

  /** With no discovered nodes, building a request must fail with "No known server". */
  @Test
  public void testNoLeaderFound() throws Exception
  {
    DruidNodeDiscovery druidNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    EasyMock.expect(druidNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of());

    DruidNodeDiscoveryProvider druidNodeDiscoveryProvider = EasyMock.createMock(DruidNodeDiscoveryProvider.class);
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.PEON)).andReturn(druidNodeDiscovery);
    EasyMock.replay(druidNodeDiscovery, druidNodeDiscoveryProvider);

    DruidLeaderClient druidLeaderClient = new DruidLeaderClient(
        httpClient,
        druidNodeDiscoveryProvider,
        NodeRole.PEON,
        "/simple/leader",
        EasyMock.createNiceMock(ServerDiscoverySelector.class)
    );
    druidLeaderClient.start();

    expectedException.expect(IOException.class);
    expectedException.expectMessage("No known server");
    druidLeaderClient.makeRequest(HttpMethod.POST, "/simple/direct");
  }

  /** The client must transparently follow a redirect issued by the server. */
  @Test
  public void testRedirection() throws Exception
  {
    DruidNodeDiscovery druidNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    EasyMock.expect(druidNodeDiscovery.getAllNodes()).andReturn(
        ImmutableList.of(discoveryDruidNode)
    );

    DruidNodeDiscoveryProvider druidNodeDiscoveryProvider = EasyMock.createMock(DruidNodeDiscoveryProvider.class);
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.PEON)).andReturn(druidNodeDiscovery);
    EasyMock.replay(druidNodeDiscovery, druidNodeDiscoveryProvider);

    DruidLeaderClient druidLeaderClient = new DruidLeaderClient(
        httpClient,
        druidNodeDiscoveryProvider,
        NodeRole.PEON,
        "/simple/leader",
        EasyMock.createNiceMock(ServerDiscoverySelector.class)
    );
    druidLeaderClient.start();

    Request request = druidLeaderClient.makeRequest(HttpMethod.POST, "/simple/redirect");
    request.setContent("hello".getBytes(StandardCharsets.UTF_8));
    Assert.assertEquals("hello", druidLeaderClient.go(request).getContent());
  }

  /**
   * First discovery round returns an unreachable dummy node; the client must retry
   * discovery, find the live node, and then still follow the redirect successfully.
   */
  @Test
  public void testServerFailureAndRedirect() throws Exception
  {
    ServerDiscoverySelector serverDiscoverySelector = EasyMock.createMock(ServerDiscoverySelector.class);
    EasyMock.expect(serverDiscoverySelector.pick()).andReturn(null).anyTimes();
    DruidNodeDiscovery druidNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    DiscoveryDruidNode dummyNode = new DiscoveryDruidNode(
        new DruidNode("test", "dummyhost", false, 64231, null, true, false),
        NodeRole.PEON,
        ImmutableMap.of()
    );
    EasyMock.expect(druidNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(dummyNode));
    EasyMock.expect(druidNodeDiscovery.getAllNodes()).andReturn(ImmutableList.of(discoveryDruidNode));

    DruidNodeDiscoveryProvider druidNodeDiscoveryProvider = EasyMock.createMock(DruidNodeDiscoveryProvider.class);
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.PEON)).andReturn(druidNodeDiscovery).anyTimes();
    EasyMock.replay(serverDiscoverySelector, druidNodeDiscovery, druidNodeDiscoveryProvider);

    DruidLeaderClient druidLeaderClient = new DruidLeaderClient(
        httpClient,
        druidNodeDiscoveryProvider,
        NodeRole.PEON,
        "/simple/leader",
        serverDiscoverySelector
    );
    druidLeaderClient.start();

    Request request = druidLeaderClient.makeRequest(HttpMethod.POST, "/simple/redirect");
    request.setContent("hello".getBytes(StandardCharsets.UTF_8));
    Assert.assertEquals("hello", druidLeaderClient.go(request).getContent());
  }

  /** findCurrentLeader() must return the URL advertised by the /simple/leader endpoint. */
  @Test
  public void testFindCurrentLeader()
  {
    DruidNodeDiscovery druidNodeDiscovery = EasyMock.createMock(DruidNodeDiscovery.class);
    EasyMock.expect(druidNodeDiscovery.getAllNodes()).andReturn(
        ImmutableList.of(discoveryDruidNode)
    );

    DruidNodeDiscoveryProvider druidNodeDiscoveryProvider = EasyMock.createMock(DruidNodeDiscoveryProvider.class);
    EasyMock.expect(druidNodeDiscoveryProvider.getForNodeRole(NodeRole.PEON)).andReturn(druidNodeDiscovery);
    EasyMock.replay(druidNodeDiscovery, druidNodeDiscoveryProvider);

    DruidLeaderClient druidLeaderClient = new DruidLeaderClient(
        httpClient,
        druidNodeDiscoveryProvider,
        NodeRole.PEON,
        "/simple/leader",
        EasyMock.createNiceMock(ServerDiscoverySelector.class)
    );
    druidLeaderClient.start();
    Assert.assertEquals("http://localhost:1234/", druidLeaderClient.findCurrentLeader());
  }

  /** Minimal Jetty setup: default servlet plus the Guice filter that routes to Jersey. */
  private static class TestJettyServerInitializer implements JettyServerInitializer
  {
    @Override
    public void initialize(Server server, Injector injector)
    {
      final ServletContextHandler root = new ServletContextHandler(ServletContextHandler.SESSIONS);
      root.addServlet(new ServletHolder(new DefaultServlet()), "/*");
      root.addFilter(GuiceFilter.class, "/*", null);

      final HandlerList handlerList = new HandlerList();
      handlerList.setHandlers(new Handler[]{root});
      server.setHandler(handlerList);
    }
  }

  /** Test endpoint: echoes "hello" on /simple/direct; /simple/redirect bounces to it. */
  @Path("/simple")
  public static class SimpleResource
  {
    private final int port;

    @Inject
    public SimpleResource(@Named("port") int port)
    {
      this.port = port;
    }

    @POST
    @Path("/direct")
    @Produces(MediaType.APPLICATION_JSON)
    public Response direct(String input)
    {
      if ("hello".equals(input)) {
        return Response.ok("hello").build();
      } else {
        return Response.serverError().build();
      }
    }

    @POST
    @Path("/redirect")
    @Produces(MediaType.APPLICATION_JSON)
    public Response redirecting() throws Exception
    {
      return Response.temporaryRedirect(new
URI(StringUtils.format("http://localhost:%s/simple/direct", port))).build(); } @GET @Path("/leader") @Produces(MediaType.APPLICATION_JSON) public Response leader() { return Response.ok("http://localhost:1234/").build(); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.lucene.analysis.synonym;

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.MockGraphTokenFilter;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.tokenattributes.*;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util._TestUtil;

// Tests for SynonymFilter / SynonymMap.Builder: targeted cases (keepOrig on/off,
// overlapping and recursive rules, offsets/position lengths) plus randomized tests
// cross-checked against the brute-force reference matcher slowSynMatcher().
public class TestSynonymMapFilter extends BaseTokenStreamTestCase {

  private SynonymMap.Builder b;
  private Tokenizer tokensIn;
  private SynonymFilter tokensOut;
  private CharTermAttribute termAtt;
  private PositionIncrementAttribute posIncrAtt;
  private PositionLengthAttribute posLenAtt;
  private OffsetAttribute offsetAtt;

  // Registers one synonym rule on builder b; input/output are space-separated multi-word strings.
  private void add(String input, String output, boolean keepOrig) {
    if (VERBOSE) {
      System.out.println(" add input=" + input + " output=" + output + " keepOrig=" + keepOrig);
    }
    CharsRef inputCharsRef = new CharsRef();
    SynonymMap.Builder.join(input.split(" +"), inputCharsRef);

    CharsRef outputCharsRef = new CharsRef();
    SynonymMap.Builder.join(output.split(" +"), outputCharsRef);

    b.add(inputCharsRef, outputCharsRef, keepOrig);
  }

  // Char-by-char comparison of a term attribute against an expected string.
  private void assertEquals(CharTermAttribute term, String expected) {
    assertEquals(expected.length(), term.length());
    final char[] buffer = term.buffer();
    for(int chIDX=0;chIDX<expected.length();chIDX++) {
      assertEquals(expected.charAt(chIDX), buffer[chIDX]);
    }
  }

  // For the output string: separate positions with a space,
  // and separate multiple tokens at each position with a
  // /. If a token should have end offset != the input
  // token's end offset then add :X to it:

  // TODO: we should probably refactor this guy to use/take analyzer,
  // the tests are a little messy
  // Drains tokensOut for the given input and checks term text, posIncr, offsets and
  // posLen against the compact expected-output notation described above
  // (":endOffset" and "_posLen" suffixes override the defaults).
  private void verify(String input, String output) throws Exception {
    if (VERBOSE) {
      System.out.println("TEST: verify input=" + input + " expectedOutput=" + output);
    }

    tokensIn.setReader(new StringReader(input));

    tokensOut.reset();
    final String[] expected = output.split(" ");
    int expectedUpto = 0;
    while(tokensOut.incrementToken()) {

      if (VERBOSE) {
        System.out.println(" incr token=" + termAtt.toString() + " posIncr=" + posIncrAtt.getPositionIncrement() + " startOff=" + offsetAtt.startOffset() + " endOff=" + offsetAtt.endOffset());
      }

      assertTrue(expectedUpto < expected.length);
      final int startOffset = offsetAtt.startOffset();
      final int endOffset = offsetAtt.endOffset();

      final String[] expectedAtPos = expected[expectedUpto++].split("/");
      for(int atPos=0;atPos<expectedAtPos.length;atPos++) {
        if (atPos > 0) {
          assertTrue(tokensOut.incrementToken());
          if (VERBOSE) {
            System.out.println(" incr token=" + termAtt.toString() + " posIncr=" + posIncrAtt.getPositionIncrement() + " startOff=" + offsetAtt.startOffset() + " endOff=" + offsetAtt.endOffset());
          }
        }
        final int colonIndex = expectedAtPos[atPos].indexOf(':');
        final int underbarIndex = expectedAtPos[atPos].indexOf('_');
        final String expectedToken;
        final int expectedEndOffset;
        final int expectedPosLen;
        if (colonIndex != -1) {
          expectedToken = expectedAtPos[atPos].substring(0, colonIndex);
          if (underbarIndex != -1) {
            expectedEndOffset = Integer.parseInt(expectedAtPos[atPos].substring(1+colonIndex, underbarIndex));
            expectedPosLen = Integer.parseInt(expectedAtPos[atPos].substring(1+underbarIndex));
          } else {
            expectedEndOffset = Integer.parseInt(expectedAtPos[atPos].substring(1+colonIndex));
            expectedPosLen = 1;
          }
        } else {
          expectedToken = expectedAtPos[atPos];
          expectedEndOffset = endOffset;
          expectedPosLen = 1;
        }
        assertEquals(expectedToken, termAtt.toString());
        assertEquals(atPos == 0 ? 1 : 0, posIncrAtt.getPositionIncrement());
        // start/end offset of all tokens at same pos should
        // be the same:
        assertEquals(startOffset, offsetAtt.startOffset());
        assertEquals(expectedEndOffset, offsetAtt.endOffset());
        assertEquals(expectedPosLen, posLenAtt.getPositionLength());
      }
    }
    tokensOut.end();
    tokensOut.close();
    if (VERBOSE) {
      System.out.println(" incr: END");
    }
    assertEquals(expectedUpto, expected.length);
  }

  // keepOrig=false: "a b" is fully replaced by "foo".
  public void testDontKeepOrig() throws Exception {
    b = new SynonymMap.Builder(true);
    add("a b", "foo", false);

    final SynonymMap map = b.build();

    final Analyzer analyzer = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, false));
      }
    };

    assertAnalyzesTo(analyzer, "a b c",
        new String[] {"foo", "c"},
        new int[] {0, 4},
        new int[] {3, 5},
        null,
        new int[] {1, 1},
        new int[] {1, 1},
        true);
    checkAnalysisConsistency(random(), analyzer, false, "a b c");
  }

  // keepOrig=true: original "a b" tokens survive alongside "foo" (note posLen=2 on foo).
  public void testDoKeepOrig() throws Exception {
    b = new SynonymMap.Builder(true);
    add("a b", "foo", true);

    final SynonymMap map = b.build();

    final Analyzer analyzer = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, false));
      }
    };

    assertAnalyzesTo(analyzer, "a b c",
        new String[] {"a", "foo", "b", "c"},
        new int[] {0, 0, 2, 4},
        new int[] {1, 3, 3, 5},
        null,
        new int[] {1, 0, 1, 1},
        new int[] {1, 2, 1, 1},
        true);
    checkAnalysisConsistency(random(), analyzer, false, "a b c");
  }

  // Broad hand-written coverage: overlapping rules, multi-word outputs, capture-state counting.
  public void testBasic() throws Exception {
    b = new SynonymMap.Builder(true);
    add("a", "foo", true);
    add("a b", "bar fee", true);
    add("b c", "dog collar", true);
    add("c d", "dog harness holder extras", true);
    add("m c e", "dog barks loudly", false);
    add("i j k", "feep", true);

    add("e f", "foo bar", false);
    add("e f", "baz bee", false);

    add("z", "boo", false);
    add("y", "bee", true);

    tokensIn = new MockTokenizer(new StringReader("a"),
                                 MockTokenizer.WHITESPACE,
                                 true);
    tokensIn.reset();
    assertTrue(tokensIn.incrementToken());
    assertFalse(tokensIn.incrementToken());
    tokensIn.end();
    tokensIn.close();

    tokensOut = new SynonymFilter(tokensIn,
                                  b.build(),
                                  true);
    termAtt = tokensOut.addAttribute(CharTermAttribute.class);
    posIncrAtt = tokensOut.addAttribute(PositionIncrementAttribute.class);
    posLenAtt = tokensOut.addAttribute(PositionLengthAttribute.class);
    offsetAtt = tokensOut.addAttribute(OffsetAttribute.class);

    verify("a b c", "a/bar b/fee c");

    // syn output extends beyond input tokens
    verify("x a b c d", "x a/bar b/fee c/dog d/harness holder extras");

    verify("a b a", "a/bar b/fee a/foo");

    // outputs that add to one another:
    verify("c d c d", "c/dog d/harness c/holder/dog d/extras/harness holder extras");

    // two outputs for same input
    verify("e f", "foo/baz bar/bee");

    // verify multi-word / single-output offsets:
    verify("g i j k g", "g i/feep:7_3 j k g");

    // mixed keepOrig true/false:
    verify("a m c e x", "a/foo dog barks loudly x");
    verify("c d m c e x", "c/dog d/harness holder/dog extras/barks loudly x");
    assertTrue(tokensOut.getCaptureCount() > 0);

    // no captureStates when no syns matched
    verify("p q r s t", "p q r s t");
    assertEquals(0, tokensOut.getCaptureCount());

    // no captureStates when only single-input syns, w/ no
    // lookahead needed, matched
    verify("p q z y t", "p q boo y/bee t");
    assertEquals(0, tokensOut.getCaptureCount());
  }

  // Builds "x y z " style docs: `length` random single-char tokens, each followed by a space.
  private String getRandomString(char start, int alphabetSize, int length) {
    assert alphabetSize <= 26;
    char[] s = new char[2*length];
    for(int charIDX=0;charIDX<length;charIDX++) {
      s[2*charIDX] = (char) (start + random().nextInt(alphabetSize));
      s[2*charIDX+1] = ' ';
    }
    return new String(s);
  }

  // One synonym rule for the brute-force matcher: input phrase, its outputs, keepOrig flag.
  private static class OneSyn {
    String in;
    List<String> out;
    boolean keepOrig;
  }

  // Brute-force reference implementation: applies syns to doc (single-char tokens separated
  // by spaces) and produces the expected-output notation consumed by verify(). Mirrors
  // SynonymFilter's greedy longest-match conflict resolution.
  public String slowSynMatcher(String doc, List<OneSyn> syns, int maxOutputLength) {
    assertTrue(doc.length() % 2 == 0);
    final int numInputs = doc.length()/2;
    boolean[] keepOrigs = new boolean[numInputs];
    boolean[] hasMatch = new boolean[numInputs];
    Arrays.fill(keepOrigs, false);
    String[] outputs = new String[numInputs + maxOutputLength];
    OneSyn[] matches = new OneSyn[numInputs];
    for(OneSyn syn : syns) {
      int idx = -1;
      while(true) {
        idx = doc.indexOf(syn.in, 1+idx);
        if (idx == -1) {
          break;
        }
        assertTrue(idx % 2 == 0);
        final int matchIDX = idx/2;
        assertTrue(syn.in.length() % 2 == 1);
        if (matches[matchIDX] == null) {
          matches[matchIDX] = syn;
        } else if (syn.in.length() > matches[matchIDX].in.length()) {
          // Greedy conflict resolution: longer match wins:
          matches[matchIDX] = syn;
        } else {
          assertTrue(syn.in.length() < matches[matchIDX].in.length());
        }
      }
    }

    // Greedy conflict resolution: if syn matches a range of inputs,
    // it prevents other syns from matching that range
    for(int inputIDX=0;inputIDX<numInputs;inputIDX++) {
      final OneSyn match = matches[inputIDX];
      if (match != null) {
        final int synInLength = (1+match.in.length())/2;
        for(int nextInputIDX=inputIDX+1;nextInputIDX<numInputs && nextInputIDX<(inputIDX+synInLength);nextInputIDX++) {
          matches[nextInputIDX] = null;
        }
      }
    }

    // Fill overlapping outputs:
    for(int inputIDX=0;inputIDX<numInputs;inputIDX++) {
      final OneSyn syn = matches[inputIDX];
      if (syn == null) {
        continue;
      }
      for(int idx=0;idx<(1+syn.in.length())/2;idx++) {
        hasMatch[inputIDX+idx] = true;
        keepOrigs[inputIDX+idx] |= syn.keepOrig;
      }
      for(String synOut : syn.out) {
        final String[] synOutputs = synOut.split(" ");
        assertEquals(synOutputs.length, (1+synOut.length())/2);
        final int matchEnd = inputIDX + synOutputs.length;
        int synUpto = 0;
        for(int matchIDX=inputIDX;matchIDX<matchEnd;matchIDX++) {
          if (outputs[matchIDX] == null) {
            outputs[matchIDX] = synOutputs[synUpto++];
          } else {
            outputs[matchIDX] = outputs[matchIDX] + "/" + synOutputs[synUpto++];
          }
          final int endOffset;
          if (matchIDX < numInputs) {
            final int posLen;
            if (synOutputs.length == 1) {
              // Add full endOffset
              endOffset = (inputIDX*2) + syn.in.length();
              posLen = syn.keepOrig ? (1+syn.in.length())/2 : 1;
            } else {
              // Add endOffset matching input token's
              endOffset = (matchIDX*2) + 1;
              posLen = 1;
            }
            outputs[matchIDX] = outputs[matchIDX] + ":" + endOffset + "_" + posLen;
          }
        }
      }
    }

    StringBuilder sb = new StringBuilder();
    String[] inputTokens = doc.split(" ");
    final int limit = inputTokens.length + maxOutputLength;
    for(int inputIDX=0;inputIDX<limit;inputIDX++) {
      boolean posHasOutput = false;
      if (inputIDX >= numInputs && outputs[inputIDX] == null) {
        break;
      }
      if (inputIDX < numInputs && (!hasMatch[inputIDX] || keepOrigs[inputIDX])) {
        assertTrue(inputTokens[inputIDX].length() != 0);
        sb.append(inputTokens[inputIDX]);
        posHasOutput = true;
      }

      if (outputs[inputIDX] != null) {
        if (posHasOutput) {
          sb.append('/');
        }
        sb.append(outputs[inputIDX]);
      } else if (!posHasOutput) {
        continue;
      }
      if (inputIDX < limit-1) {
        sb.append(' ');
      }
    }

    return sb.toString();
  }

  // Randomized cross-check: SynonymFilter output must match slowSynMatcher's prediction.
  public void testRandom() throws Exception {

    final int alphabetSize = _TestUtil.nextInt(random(), 2, 7);

    final int docLen = atLeast(3000);
    //final int docLen = 50;

    final String document = getRandomString('a', alphabetSize, docLen);

    if (VERBOSE) {
      System.out.println("TEST: doc=" + document);
    }

    final int numSyn = atLeast(5);
    //final int numSyn = 2;

    final Map<String,OneSyn> synMap = new HashMap<String,OneSyn>();
    final List<OneSyn> syns = new ArrayList<OneSyn>();
    final boolean dedup = random().nextBoolean();
    if (VERBOSE) {
      System.out.println(" dedup=" + dedup);
    }
    b = new SynonymMap.Builder(dedup);
    for(int synIDX=0;synIDX<numSyn;synIDX++) {
      final String synIn = getRandomString('a', alphabetSize, _TestUtil.nextInt(random(), 1, 5)).trim();
      OneSyn s = synMap.get(synIn);
      if (s == null) {
        s = new OneSyn();
        s.in = synIn;
        syns.add(s);
        s.out = new ArrayList<String>();
        synMap.put(synIn, s);
        s.keepOrig = random().nextBoolean();
      }
      final String synOut = getRandomString('0', 10, _TestUtil.nextInt(random(), 1, 5)).trim();
      s.out.add(synOut);
      add(synIn, synOut, s.keepOrig);
      if (VERBOSE) {
        System.out.println(" syns[" + synIDX + "] = " + s.in + " -> " + s.out + " keepOrig=" + s.keepOrig);
      }
    }

    tokensIn = new MockTokenizer(new StringReader("a"),
                                 MockTokenizer.WHITESPACE,
                                 true);
    tokensIn.reset();
    assertTrue(tokensIn.incrementToken());
    assertFalse(tokensIn.incrementToken());
    tokensIn.end();
    tokensIn.close();

    tokensOut = new SynonymFilter(tokensIn,
                                  b.build(),
                                  true);
    termAtt = tokensOut.addAttribute(CharTermAttribute.class);
    posIncrAtt = tokensOut.addAttribute(PositionIncrementAttribute.class);
    posLenAtt = tokensOut.addAttribute(PositionLengthAttribute.class);
    offsetAtt = tokensOut.addAttribute(OffsetAttribute.class);

    if (dedup) {
      pruneDups(syns);
    }

    final String expected = slowSynMatcher(document, syns, 5);

    if (VERBOSE) {
      System.out.println("TEST: expected=" + expected);
    }

    verify(document, expected);
  }

  // Removes duplicate outputs per rule, matching the builder's dedup=true behavior.
  private void pruneDups(List<OneSyn> syns) {
    Set<String> seen = new HashSet<String>();
    for(OneSyn syn : syns) {
      int idx = 0;
      while(idx < syn.out.size()) {
        String out = syn.out.get(idx);
        if (!seen.contains(out)) {
          seen.add(out);
          idx++;
        } else {
          syn.out.remove(idx);
        }
      }
      seen.clear();
    }
  }

  // Random unicode string that is non-empty and contains no NUL (builder separator char).
  private String randomNonEmptyString() {
    while(true) {
      final String s = _TestUtil.randomUnicodeString(random()).trim();
      if (s.length() != 0 && s.indexOf('\u0000') == -1) {
        return s;
      }
    }
  }

  /** simple random test, doesn't verify correctness.
   * does verify it doesnt throw exceptions, or that the stream doesn't misbehave
   */
  public void testRandom2() throws Exception {
    final int numIters = atLeast(3);
    for (int i = 0; i < numIters; i++) {
      b = new SynonymMap.Builder(random().nextBoolean());
      final int numEntries = atLeast(10);
      for (int j = 0; j < numEntries; j++) {
        add(randomNonEmptyString(), randomNonEmptyString(), random().nextBoolean());
      }
      final SynonymMap map = b.build();
      final boolean ignoreCase = random().nextBoolean();

      final Analyzer analyzer = new Analyzer() {
        @Override
        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
          Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
          return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, ignoreCase));
        }
      };

      checkRandomData(random(), analyzer, 100);
    }
  }

  // NOTE: this is an invalid test... SynFilter today can't
  // properly consume a graph... we can re-enable this once
  // we fix that...
  /*
  // Adds MockGraphTokenFilter before SynFilter:
  public void testRandom2GraphBefore() throws Exception {
    final int numIters = atLeast(10);
    Random random = random();
    for (int i = 0; i < numIters; i++) {
      b = new SynonymMap.Builder(random.nextBoolean());
      final int numEntries = atLeast(10);
      for (int j = 0; j < numEntries; j++) {
        add(randomNonEmptyString(), randomNonEmptyString(), random.nextBoolean());
      }
      final SynonymMap map = b.build();
      final boolean ignoreCase = random.nextBoolean();

      final Analyzer analyzer = new Analyzer() {
        @Override
        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
          Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
          TokenStream graph = new MockGraphTokenFilter(random(), tokenizer);
          return new TokenStreamComponents(tokenizer, new SynonymFilter(graph, map, ignoreCase));
        }
      };

      checkRandomData(random, analyzer, 1000*RANDOM_MULTIPLIER);
    }
  }
  */

  // Adds MockGraphTokenFilter after SynFilter:
  public void testRandom2GraphAfter() throws Exception {
    final int numIters = atLeast(3);
    Random random = random();
    for (int i = 0; i < numIters; i++) {
      b = new SynonymMap.Builder(random.nextBoolean());
      final int numEntries = atLeast(10);
      for (int j = 0; j < numEntries; j++) {
        add(randomNonEmptyString(), randomNonEmptyString(), random.nextBoolean());
      }
      final SynonymMap map = b.build();
      final boolean ignoreCase = random.nextBoolean();

      final Analyzer analyzer = new Analyzer() {
        @Override
        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
          Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
          TokenStream syns = new SynonymFilter(tokenizer, map, ignoreCase);
          TokenStream graph = new MockGraphTokenFilter(random(), syns);
          return new TokenStreamComponents(tokenizer, graph);
        }
      };

      checkRandomData(random, analyzer, 100);
    }
  }

  // Empty-string input through KeywordTokenizer must not break the filter.
  public void testEmptyTerm() throws IOException {
    Random random = random();
    final int numIters = atLeast(10);
    for (int i = 0; i < numIters; i++) {
      b = new SynonymMap.Builder(random.nextBoolean());
      final int numEntries = atLeast(10);
      for (int j = 0; j < numEntries; j++) {
        add(randomNonEmptyString(), randomNonEmptyString(), random.nextBoolean());
      }
      final SynonymMap map = b.build();
      final boolean ignoreCase = random.nextBoolean();

      final Analyzer analyzer = new Analyzer() {
        @Override
        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
          Tokenizer tokenizer = new KeywordTokenizer(reader);
          return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, ignoreCase));
        }
      };

      checkAnalysisConsistency(random, analyzer, random.nextBoolean(), "");
    }
  }

  /** simple random test like testRandom2, but for larger docs */
  public void testRandomHuge() throws Exception {
    Random random = random();
    final int numIters = atLeast(3);
    for (int i = 0; i < numIters; i++) {
      b = new SynonymMap.Builder(random.nextBoolean());
      final int numEntries = atLeast(10);
      if (VERBOSE) {
        System.out.println("TEST: iter=" + i + " numEntries=" + numEntries);
      }
      for (int j = 0; j < numEntries; j++) {
        add(randomNonEmptyString(), randomNonEmptyString(), random.nextBoolean());
      }
      final SynonymMap map = b.build();
      final boolean ignoreCase = random.nextBoolean();

      final Analyzer analyzer = new Analyzer() {
        @Override
        protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
          Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.SIMPLE, true);
          return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, ignoreCase));
        }
      };

      checkRandomData(random, analyzer, 100, 1024);
    }
  }

  // LUCENE-3375
  public void testVanishingTerms() throws Exception {
    String testFile =
      "aaa => aaaa1 aaaa2 aaaa3\n" +
      "bbb => bbbb1 bbbb2\n";
    SolrSynonymParser parser = new SolrSynonymParser(true, true, new MockAnalyzer(random()));
    parser.add(new StringReader(testFile));
    final SynonymMap map = parser.build();

    Analyzer analyzer = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    // where did my pot go?!
    assertAnalyzesTo(analyzer, "xyzzy bbb pot of gold",
        new String[] { "xyzzy", "bbbb1", "pot", "bbbb2", "of", "gold" });

    // this one nukes 'pot' and 'of'
    // xyzzy aaa pot of gold -> xyzzy aaaa1 aaaa2 aaaa3 gold
    assertAnalyzesTo(analyzer, "xyzzy aaa pot of gold",
        new String[] { "xyzzy", "aaaa1", "pot", "aaaa2", "of", "aaaa3", "gold" });
  }

  // Multi-word outputs overlaid on following input tokens (keepOrig=false branch exercised).
  public void testBasic2() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = false;
    add("aaa", "aaaa1 aaaa2 aaaa3", keepOrig);
    add("bbb", "bbbb1 bbbb2", keepOrig);

    tokensIn = new MockTokenizer(new StringReader("a"),
                                 MockTokenizer.WHITESPACE,
                                 true);
    tokensIn.reset();
    assertTrue(tokensIn.incrementToken());
    assertFalse(tokensIn.incrementToken());
    tokensIn.end();
    tokensIn.close();

    tokensOut = new SynonymFilter(tokensIn,
                                  b.build(),
                                  true);
    termAtt = tokensOut.addAttribute(CharTermAttribute.class);
    posIncrAtt = tokensOut.addAttribute(PositionIncrementAttribute.class);
    posLenAtt = tokensOut.addAttribute(PositionLengthAttribute.class);
    offsetAtt = tokensOut.addAttribute(OffsetAttribute.class);

    if (keepOrig) {
      verify("xyzzy bbb pot of gold", "xyzzy bbb/bbbb1 pot/bbbb2 of gold");
      verify("xyzzy aaa pot of gold", "xyzzy aaa/aaaa1 pot/aaaa2 of/aaaa3 gold");
    } else {
      verify("xyzzy bbb pot of gold", "xyzzy bbbb1 pot/bbbb2 of gold");
      verify("xyzzy aaa pot of gold", "xyzzy aaaa1 pot/aaaa2 of/aaaa3 gold");
    }
  }

  // Longest-match selection among competing single- and multi-word rules.
  public void testMatching() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = false;
    add("a b", "ab", keepOrig);
    add("a c", "ac", keepOrig);
    add("a", "aa", keepOrig);
    add("b", "bb", keepOrig);
    add("z x c v", "zxcv", keepOrig);
    add("x c", "xc", keepOrig);
    final SynonymMap map = b.build();
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    checkOneTerm(a, "$", "$");
    checkOneTerm(a, "a", "aa");
    checkOneTerm(a, "b", "bb");

    assertAnalyzesTo(a, "a $",
        new String[] { "aa", "$" },
        new int[] { 1, 1 });

    assertAnalyzesTo(a, "$ a",
        new String[] { "$", "aa" },
        new int[] { 1, 1 });

    assertAnalyzesTo(a, "a a",
        new String[] { "aa", "aa" },
        new int[] { 1, 1 });

    assertAnalyzesTo(a, "z x c v",
        new String[] { "zxcv" },
        new int[] { 1 });

    assertAnalyzesTo(a, "z x c $",
        new String[] { "z", "xc", "$" },
        new int[] { 1, 1, 1 });
  }

  // dedup=true: the triplicated rule collapses to a single output.
  public void testRepeatsOff() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = false;
    add("a b", "ab", keepOrig);
    add("a b", "ab", keepOrig);
    add("a b", "ab", keepOrig);
    final SynonymMap map = b.build();
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    assertAnalyzesTo(a, "a b",
        new String[] { "ab" },
        new int[] { 1 });
  }

  // dedup=false: each of the three identical rules produces its own stacked output.
  public void testRepeatsOn() throws Exception {
    b = new SynonymMap.Builder(false);
    final boolean keepOrig = false;
    add("a b", "ab", keepOrig);
    add("a b", "ab", keepOrig);
    add("a b", "ab", keepOrig);
    final SynonymMap map = b.build();
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    assertAnalyzesTo(a, "a b",
        new String[] { "ab", "ab", "ab" },
        new int[] { 1, 0, 0 });
  }

  // Self-mapping rule (zoo -> zoo) must not recurse/duplicate.
  public void testRecursion() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = false;
    add("zoo", "zoo", keepOrig);
    final SynonymMap map = b.build();
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    assertAnalyzesTo(a, "zoo zoo $ zoo",
        new String[] { "zoo", "zoo", "$", "zoo" },
        new int[] { 1, 1, 1, 1 });
  }

  // Self-mapping plus expanding rule: outputs stack but do not re-trigger matching.
  public void testRecursion2() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = false;
    add("zoo", "zoo", keepOrig);
    add("zoo", "zoo zoo", keepOrig);
    final SynonymMap map = b.build();
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    // verify("zoo zoo $ zoo", "zoo/zoo zoo/zoo/zoo $/zoo zoo/zoo zoo");
    assertAnalyzesTo(a, "zoo zoo $ zoo",
        new String[] { "zoo", "zoo", "zoo", "zoo", "zoo", "$", "zoo", "zoo", "zoo", "zoo" },
        new int[] { 1, 0, 1, 0, 0, 1, 0, 1, 0, 1 });
  }

  public void testOutputHangsOffEnd() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = false;
    // b hangs off the end (no input token under it):
    add("a", "a b", keepOrig);
    tokensIn = new MockTokenizer(new StringReader("a"),
                                 MockTokenizer.WHITESPACE,
                                 true);
    tokensIn.reset();
    assertTrue(tokensIn.incrementToken());
    assertFalse(tokensIn.incrementToken());
    tokensIn.end();
    tokensIn.close();

    tokensOut = new SynonymFilter(tokensIn,
                                  b.build(),
                                  true);
    termAtt = tokensOut.addAttribute(CharTermAttribute.class);
    posIncrAtt = tokensOut.addAttribute(PositionIncrementAttribute.class);
    offsetAtt = tokensOut.addAttribute(OffsetAttribute.class);
    posLenAtt = tokensOut.addAttribute(PositionLengthAttribute.class);

    // Make sure endOffset inherits from previous input token:
    verify("a", "a b:1");
  }

  // keepOrig=true variant of testMatching: originals and synonyms are both emitted.
  public void testIncludeOrig() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = true;
    add("a b", "ab", keepOrig);
    add("a c", "ac", keepOrig);
    add("a", "aa", keepOrig);
    add("b", "bb", keepOrig);
    add("z x c v", "zxcv", keepOrig);
    add("x c", "xc", keepOrig);
    final SynonymMap map = b.build();
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    assertAnalyzesTo(a, "$",
        new String[] { "$" },
        new int[] { 1 });
    assertAnalyzesTo(a, "a",
        new String[] { "a", "aa" },
        new int[] { 1, 0 });
    assertAnalyzesTo(a, "a",
        new String[] { "a", "aa" },
        new int[] { 1, 0 });
    assertAnalyzesTo(a, "$ a",
        new String[] { "$", "a", "aa" },
        new int[] { 1, 1, 0 });
    assertAnalyzesTo(a, "a $",
        new String[] { "a", "aa", "$" },
        new int[] { 1, 0, 1 });
    assertAnalyzesTo(a, "$ a !",
        new String[] { "$", "a", "aa", "!" },
        new int[] { 1, 1, 0, 1 });
    assertAnalyzesTo(a, "a a",
        new String[] { "a", "aa", "a", "aa" },
        new int[] { 1, 0, 1, 0 });
    assertAnalyzesTo(a, "b",
        new String[] { "b", "bb" },
        new int[] { 1, 0 });
    assertAnalyzesTo(a, "z x c v",
        new String[] { "z", "zxcv", "x", "c", "v" },
        new int[] { 1, 0, 1, 1, 1 });
    assertAnalyzesTo(a, "z x c $",
        new String[] { "z", "x", "xc", "c", "$" },
        new int[] { 1, 1, 0, 1, 1 });
  }

  // Multi-word input contracting to a single token, with keepOrig.
  public void testRecursion3() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = true;
    add("zoo zoo", "zoo", keepOrig);
    final SynonymMap map = b.build();
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    assertAnalyzesTo(a, "zoo zoo $ zoo",
        new String[] { "zoo", "zoo", "zoo", "$", "zoo" },
        new int[] { 1, 0, 1, 1, 1 });
  }

  // Contracting and expanding rules together, with keepOrig.
  public void testRecursion4() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = true;
    add("zoo zoo", "zoo", keepOrig);
    add("zoo", "zoo zoo", keepOrig);
    final SynonymMap map = b.build();
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    assertAnalyzesTo(a, "zoo zoo $ zoo",
        new String[] { "zoo", "zoo", "zoo", "$", "zoo", "zoo", "zoo" },
        new int[] { 1, 0, 1, 1, 1, 0, 1 });
  }

  // Single-token synonym for a multi-word match spans the full phrase's offsets (0..22).
  public void testMultiwordOffsets() throws Exception {
    b = new SynonymMap.Builder(true);
    final boolean keepOrig = true;
    add("national hockey league", "nhl", keepOrig);
    final SynonymMap map = b.build();
    Analyzer a = new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
        return new TokenStreamComponents(tokenizer, new SynonymFilter(tokenizer, map, true));
      }
    };

    assertAnalyzesTo(a, "national hockey league",
        new String[] { "national", "nhl", "hockey", "league" },
        new int[] { 0, 0, 9, 16 },
        new int[] { 8, 22, 15, 22 },
        new int[] { 1, 0, 1, 1 });
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.io.orc;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertNull;

import java.io.File;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hive.common.util.HiveTestUtils;
import org.apache.orc.BinaryColumnStatistics;
import org.apache.orc.BooleanColumnStatistics;
import org.apache.orc.ColumnStatistics;
import org.apache.orc.DoubleColumnStatistics;
import org.apache.orc.IntegerColumnStatistics;
import org.apache.orc.StringColumnStatistics;
import org.apache.orc.StripeInformation;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;

import com.google.common.collect.Lists;

/**
 * Tests that the ORC writer and reader report row counts, raw data sizes
 * (total and per-column) and per-column statistics correctly, including for
 * complex/nested types, null-heavy data, and the legacy 0.11 file format.
 *
 * NOTE: the expected numbers below (raw sizes, bytesOnDisk, sums) are
 * hand-verified constants tied to the exact rows written by each test; do not
 * change rows and expectations independently.
 */
public class TestOrcSerDeStats {

  // --- Simple reflection-inspected row shapes used as test payloads ---

  /** Row with a single list-of-strings column. */
  public static class ListStruct {
    List<String> list1;

    public ListStruct(List<String> l1) {
      this.list1 = l1;
    }
  }

  /** Row with a single string-to-double map column. */
  public static class MapStruct {
    Map<String, Double> map1;

    public MapStruct(Map<String, Double> m1) {
      this.map1 = m1;
    }
  }

  /** Row with one binary and one (possibly null) string column. */
  public static class SimpleStruct {
    BytesWritable bytes1;
    Text string1;

    SimpleStruct(BytesWritable b1, String s1) {
      this.bytes1 = b1;
      if (s1 == null) {
        this.string1 = null;
      } else {
        this.string1 = new Text(s1);
      }
    }
  }

  /** Innermost struct: an int plus a string. */
  public static class InnerStruct {
    int int1;
    Text string1 = new Text();

    InnerStruct(int int1, String string1) {
      this.int1 = int1;
      this.string1.set(string1);
    }
  }

  /** Struct wrapping a list of InnerStruct (one level of nesting). */
  public static class MiddleStruct {
    List<InnerStruct> list = new ArrayList<InnerStruct>();

    MiddleStruct(InnerStruct... items) {
      list.clear();
      list.addAll(Arrays.asList(items));
    }
  }

  /**
   * Wide row exercising every primitive type plus list, map, nested struct,
   * timestamp and decimal. Boxed field types allow nulls per column.
   */
  public static class BigRow {
    Boolean boolean1;
    Byte byte1;
    Short short1;
    Integer int1;
    Long long1;
    Float float1;
    Double double1;
    BytesWritable bytes1;
    Text string1;
    List<InnerStruct> list = new ArrayList<InnerStruct>();
    Map<Text, InnerStruct> map = new HashMap<Text, InnerStruct>();
    Timestamp ts;
    HiveDecimal decimal1;
    MiddleStruct middle;

    BigRow(Boolean b1, Byte b2, Short s1, Integer i1, Long l1, Float f1,
        Double d1, BytesWritable b3, String s2, MiddleStruct m1,
        List<InnerStruct> l2, Map<Text, InnerStruct> m2, Timestamp ts1,
        HiveDecimal dec1) {
      this.boolean1 = b1;
      this.byte1 = b2;
      this.short1 = s1;
      this.int1 = i1;
      this.long1 = l1;
      this.float1 = f1;
      this.double1 = d1;
      this.bytes1 = b3;
      if (s2 == null) {
        this.string1 = null;
      } else {
        this.string1 = new Text(s2);
      }
      this.middle = m1;
      this.list = l2;
      this.map = m2;
      this.ts = ts1;
      this.decimal1 = dec1;
    }
  }

  // --- Small factory helpers to keep the test rows readable ---

  private static InnerStruct inner(int i, String s) {
    return new InnerStruct(i, s);
  }

  // Keys the map by each item's own string1 value.
  private static Map<Text, InnerStruct> map(InnerStruct... items) {
    Map<Text, InnerStruct> result = new HashMap<Text, InnerStruct>();
    for (InnerStruct i : items) {
      result.put(new Text(i.string1), i);
    }
    return result;
  }

  private static List<InnerStruct> list(InnerStruct... items) {
    List<InnerStruct> result = new ArrayList<InnerStruct>();
    result.addAll(Arrays.asList(items));
    return result;
  }

  // Builds a BytesWritable whose bytes are the given int values truncated to byte.
  private static BytesWritable bytes(int... items) {
    BytesWritable result = new BytesWritable();
    result.setSize(items.length);
    for (int i = 0; i < items.length; ++i) {
      result.getBytes()[i] = (byte) items[i];
    }
    return result;
  }

  Path workDir = new Path(System.getProperty("test.tmp.dir",
      "target" + File.separator + "test" + File.separator + "tmp"));
  Configuration conf;
  FileSystem fs;
  Path testFilePath;

  @Rule
  public TestName testCaseName = new TestName();

  /** Gives each test its own local ORC file, removing any stale copy first. */
  @Before
  public void openFileSystem() throws Exception {
    conf = new Configuration();
    fs = FileSystem.getLocal(conf);
    testFilePath = new Path(workDir, "TestOrcSerDeStats." +
        testCaseName.getMethodName() + ".orc");
    fs.delete(testFilePath, false);
  }

  /**
   * Writes 4 rows (with one null string and one null binary), then verifies
   * writer/reader raw sizes, per-column sizes, binary and string column
   * statistics, and reads every row back through the object inspectors.
   */
  @Test
  public void testStringAndBinaryStatistics() throws Exception {

    ObjectInspector inspector;
    // getReflectionObjectInspector caches globally; guard it as other ORC tests do.
    synchronized (TestOrcSerDeStats.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (SimpleStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    Writer writer = OrcFile.createWriter(testFilePath,
        OrcFile.writerOptions(conf)
            .inspector(inspector)
            .stripeSize(100000)
            .bufferSize(10000));
    writer.addRow(new SimpleStruct(bytes(0, 1, 2, 3, 4), "foo"));
    writer.addRow(new SimpleStruct(bytes(0, 1, 2, 3), "bar"));
    writer.addRow(new SimpleStruct(bytes(0, 1, 2, 3, 4, 5), null));
    writer.addRow(new SimpleStruct(null, "hi"));
    writer.close();
    assertEquals(4, writer.getNumberOfRows());
    assertEquals(273, writer.getRawDataSize());
    Reader reader = OrcFile.createReader(testFilePath,
        OrcFile.readerOptions(conf).filesystem(fs));
    assertEquals(4, reader.getNumberOfRows());
    // Reader-side raw size differs from writer-side (289 vs 273) by design:
    // they are computed from different metadata.
    assertEquals(289, reader.getRawDataSize());
    assertEquals(15, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
    assertEquals(258, reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
    assertEquals(273, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1", "string1")));

    // check the stats
    ColumnStatistics[] stats = reader.getStatistics();
    assertEquals(4, stats[0].getNumberOfValues());
    assertEquals("count: 4 hasNull: false", stats[0].toString());

    assertEquals(3, stats[1].getNumberOfValues());
    assertEquals(15, ((BinaryColumnStatistics) stats[1]).getSum());
    assertEquals("count: 3 hasNull: true bytesOnDisk: 28 sum: 15", stats[1].toString());

    assertEquals(3, stats[2].getNumberOfValues());
    assertEquals("bar", ((StringColumnStatistics) stats[2]).getMinimum());
    assertEquals("hi", ((StringColumnStatistics) stats[2]).getMaximum());
    assertEquals(8, ((StringColumnStatistics) stats[2]).getSum());
    assertEquals("count: 3 hasNull: true bytesOnDisk: 22 min: bar max: hi sum: 8",
        stats[2].toString());

    // check the inspectors
    StructObjectInspector readerInspector =
        (StructObjectInspector) reader.getObjectInspector();
    assertEquals(ObjectInspector.Category.STRUCT, readerInspector.getCategory());
    assertEquals("struct<bytes1:binary,string1:string>", readerInspector.getTypeName());

    List<? extends StructField> fields = readerInspector.getAllStructFieldRefs();
    BinaryObjectInspector bi = (BinaryObjectInspector) readerInspector.
        getStructFieldRef("bytes1").getFieldObjectInspector();
    StringObjectInspector st = (StringObjectInspector) readerInspector.
        getStructFieldRef("string1").getFieldObjectInspector();
    RecordReader rows = reader.rows();
    Object row = rows.next(null);
    assertNotNull(row);
    // check the contents of the first row
    assertEquals(bytes(0, 1, 2, 3, 4), bi.getPrimitiveWritableObject(
        readerInspector.getStructFieldData(row, fields.get(0))));
    assertEquals("foo", st.getPrimitiveJavaObject(readerInspector.
        getStructFieldData(row, fields.get(1))));

    // check the contents of the second row
    assertEquals(true, rows.hasNext());
    row = rows.next(row);
    assertEquals(bytes(0, 1, 2, 3), bi.getPrimitiveWritableObject(
        readerInspector.getStructFieldData(row, fields.get(0))));
    assertEquals("bar", st.getPrimitiveJavaObject(readerInspector.
        getStructFieldData(row, fields.get(1))));

    // check the contents of the third row (null string)
    assertEquals(true, rows.hasNext());
    row = rows.next(row);
    assertEquals(bytes(0, 1, 2, 3, 4, 5), bi.getPrimitiveWritableObject(
        readerInspector.getStructFieldData(row, fields.get(0))));
    assertNull(st.getPrimitiveJavaObject(readerInspector.
        getStructFieldData(row, fields.get(1))));

    // check the contents of the fourth row (null binary)
    assertEquals(true, rows.hasNext());
    row = rows.next(row);
    assertNull(bi.getPrimitiveWritableObject(
        readerInspector.getStructFieldData(row, fields.get(0))));
    assertEquals("hi", st.getPrimitiveJavaObject(readerInspector.
        getStructFieldData(row, fields.get(1))));

    // handle the close up
    assertEquals(false, rows.hasNext());
    rows.close();
  }

  /** Raw-size accounting for a list column: 5000 rows x 1000 short strings. */
  @Test
  public void testOrcSerDeStatsList() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcSerDeStats.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (ListStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    Writer writer = OrcFile.createWriter(testFilePath,
        OrcFile.writerOptions(conf)
            .inspector(inspector)
            .stripeSize(10000)
            .bufferSize(10000));
    for (int row = 0; row < 5000; row++) {
      List<String> test = new ArrayList<String>();
      for (int i = 0; i < 1000; i++) {
        test.add("hi");
      }
      writer.addRow(new ListStruct(test));
    }
    writer.close();
    assertEquals(5000, writer.getNumberOfRows());
    assertEquals(430000000, writer.getRawDataSize());

    Reader reader = OrcFile.createReader(testFilePath,
        OrcFile.readerOptions(conf).filesystem(fs));
    // stats from reader
    assertEquals(5000, reader.getNumberOfRows());
    assertEquals(430040000, reader.getRawDataSize());
    assertEquals(430020000, reader.getRawDataSizeOfColumns(Lists.newArrayList("list1")));
  }

  /** Raw-size accounting for a map column: 1000 rows x 10 entries. */
  @Test
  public void testOrcSerDeStatsMap() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcSerDeStats.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (MapStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    Writer writer = OrcFile.createWriter(testFilePath,
        OrcFile.writerOptions(conf)
            .inspector(inspector)
            .stripeSize(10000)
            .bufferSize(10000));
    for (int row = 0; row < 1000; row++) {
      Map<String, Double> test = new HashMap<String, Double>();
      for (int i = 0; i < 10; i++) {
        test.put("hi" + i, 2.0);
      }
      writer.addRow(new MapStruct(test));
    }
    writer.close();
    // stats from writer
    assertEquals(1000, writer.getNumberOfRows());
    assertEquals(950000, writer.getRawDataSize());

    Reader reader = OrcFile.createReader(testFilePath,
        OrcFile.readerOptions(conf).filesystem(fs));
    // stats from reader
    assertEquals(1000, reader.getNumberOfRows());
    assertEquals(958000, reader.getRawDataSize());
    assertEquals(954000, reader.getRawDataSizeOfColumns(Lists.newArrayList("map1")));
  }

  /** Raw-size accounting when every other row is entirely null. */
  @Test
  public void testOrcSerDeStatsSimpleWithNulls() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcSerDeStats.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (SimpleStruct.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    Writer writer = OrcFile.createWriter(testFilePath,
        OrcFile.writerOptions(conf)
            .inspector(inspector)
            .stripeSize(10000)
            .bufferSize(10000));
    for (int row = 0; row < 1000; row++) {
      if (row % 2 == 0) {
        writer.addRow(new SimpleStruct(new BytesWritable(new byte[] {1, 2, 3}), "hi"));
      } else {
        writer.addRow(null);
      }
    }
    writer.close();
    // stats from writer
    assertEquals(1000, writer.getNumberOfRows());
    assertEquals(44500, writer.getRawDataSize());

    Reader reader = OrcFile.createReader(testFilePath,
        OrcFile.readerOptions(conf).filesystem(fs));
    // stats from reader
    assertEquals(1000, reader.getNumberOfRows());
    assertEquals(48500, reader.getRawDataSize());
    assertEquals(1500, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
    assertEquals(43000, reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
    assertEquals(44500, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1", "string1")));
  }

  /**
   * Two BigRow rows through the current format: per-column raw sizes plus
   * boolean/integer/double/string column statistics.
   */
  @Test
  public void testOrcSerDeStatsComplex() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcSerDeStats.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    Writer writer = OrcFile.createWriter(testFilePath,
        OrcFile.writerOptions(conf)
            .inspector(inspector)
            .stripeSize(100000)
            .bufferSize(10000));
    // 1 + 2 + 4 + 8 + 4 + 8 + 5 + 2 + 4 + 3 + 4 + 4 + 4 + 4 + 4 + 3 = 64
    writer.addRow(new BigRow(false, (byte) 1, (short) 1024, 65536,
        Long.MAX_VALUE, (float) 1.0, -15.0, bytes(0, 1, 2, 3, 4), "hi",
        new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
        list(inner(3, "good"), inner(4, "bad")),
        map(), Timestamp.valueOf("2000-03-12 15:00:00"), HiveDecimal.create(
            "12345678.6547456")));
    // 1 + 2 + 4 + 8 + 4 + 8 + 3 + 4 + 3 + 4 + 4 + 4 + 3 + 4 + 2 + 4 + 3 + 5 + 4 + 5 + 7 + 4 + 7 =
    // 97
    writer.addRow(new BigRow(true, (byte) 100, (short) 2048, 65536,
        Long.MAX_VALUE, (float) 2.0, -5.0, bytes(), "bye",
        new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
        list(inner(100000000, "cat"), inner(-100000, "in"), inner(1234, "hat")),
        map(inner(5, "chani"), inner(1, "mauddib")),
        Timestamp.valueOf("2000-03-11 15:00:00"),
        HiveDecimal.create("12345678.6547452")));
    writer.close();
    long rowCount = writer.getNumberOfRows();
    long rawDataSize = writer.getRawDataSize();
    assertEquals(2, rowCount);
    assertEquals(1668, rawDataSize);
    Reader reader = OrcFile.createReader(testFilePath,
        OrcFile.readerOptions(conf).filesystem(fs));
    assertEquals(2, reader.getNumberOfRows());
    assertEquals(1760, reader.getRawDataSize());
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("int1")));
    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("long1")));
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("float1")));
    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("double1")));
    assertEquals(5, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
    assertEquals(172, reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
    assertEquals(483, reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
    assertEquals(384, reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
    assertEquals(396, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
    assertEquals(224, reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
    assertEquals(24, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
    assertEquals(1271,
        reader.getRawDataSizeOfColumns(Lists.newArrayList("middle", "list", "map", "float1")));
    assertEquals(185,
        reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1", "byte1", "string1")));
    assertEquals(1752, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1",
        "byte1", "short1", "int1", "long1", "float1", "double1", "bytes1", "string1",
        "list", "map", "middle", "ts", "decimal1")));

    // check the stats
    ColumnStatistics[] stats = reader.getStatistics();
    assertEquals(2, stats[1].getNumberOfValues());
    assertEquals(1, ((BooleanColumnStatistics) stats[1]).getFalseCount());
    assertEquals(1, ((BooleanColumnStatistics) stats[1]).getTrueCount());
    assertEquals("count: 2 hasNull: false bytesOnDisk: 5 true: 1", stats[1].toString());

    assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum());
    assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum());
    assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined());
    assertEquals(3072, ((IntegerColumnStatistics) stats[3]).getSum());
    assertEquals("count: 2 hasNull: false bytesOnDisk: 9 min: 1024 max: 2048 sum: 3072",
        stats[3].toString());

    // Sum of two Long.MAX_VALUE values overflows, so isSumDefined is false.
    assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMaximum());
    assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMinimum());
    assertEquals(false, ((IntegerColumnStatistics) stats[5]).isSumDefined());
    assertEquals(
        "count: 2 hasNull: false bytesOnDisk: 12 min: 9223372036854775807 max: 9223372036854775807",
        stats[5].toString());

    assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum());
    assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum());
    assertEquals(-20.0, ((DoubleColumnStatistics) stats[7]).getSum(), 0.00001);
    assertEquals("count: 2 hasNull: false bytesOnDisk: 15 min: -15.0 max: -5.0 sum: -20.0",
        stats[7].toString());

    assertEquals("count: 2 hasNull: false bytesOnDisk: 14 min: bye max: hi sum: 5",
        stats[9].toString());
  }

  /**
   * Same rows as {@link #testOrcSerDeStatsComplex()} but written with the
   * legacy 0.11 file version; on-disk byte counts differ and binary column
   * statistics are checked explicitly.
   */
  @Test
  public void testOrcSerDeStatsComplexOldFormat() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcSerDeStats.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (BigRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    Writer writer = OrcFile.createWriter(testFilePath,
        OrcFile.writerOptions(conf)
            .inspector(inspector)
            .stripeSize(100000)
            .version(OrcFile.Version.V_0_11)
            .bufferSize(10000));
    // 1 + 2 + 4 + 8 + 4 + 8 + 5 + 2 + 4 + 3 + 4 + 4 + 4 + 4 + 4 + 3 = 64
    writer.addRow(new BigRow(false, (byte) 1, (short) 1024, 65536,
        Long.MAX_VALUE, (float) 1.0, -15.0, bytes(0, 1, 2, 3, 4), "hi",
        new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
        list(inner(3, "good"), inner(4, "bad")),
        map(), Timestamp.valueOf("2000-03-12 15:00:00"), HiveDecimal.create(
            "12345678.6547456")));
    // 1 + 2 + 4 + 8 + 4 + 8 + 3 + 4 + 3 + 4 + 4 + 4 + 3 + 4 + 2 + 4 + 3 + 5 + 4 + 5 + 7 + 4 + 7 =
    // 97
    writer.addRow(new BigRow(true, (byte) 100, (short) 2048, 65536,
        Long.MAX_VALUE, (float) 2.0, -5.0, bytes(), "bye",
        new MiddleStruct(inner(1, "bye"), inner(2, "sigh")),
        list(inner(100000000, "cat"), inner(-100000, "in"), inner(1234, "hat")),
        map(inner(5, "chani"), inner(1, "mauddib")),
        Timestamp.valueOf("2000-03-11 15:00:00"),
        HiveDecimal.create("12345678.6547452")));
    writer.close();
    long rowCount = writer.getNumberOfRows();
    long rawDataSize = writer.getRawDataSize();
    assertEquals(2, rowCount);
    assertEquals(1668, rawDataSize);
    Reader reader = OrcFile.createReader(testFilePath,
        OrcFile.readerOptions(conf).filesystem(fs));
    assertEquals(2, reader.getNumberOfRows());
    assertEquals(1760, reader.getRawDataSize());
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1")));
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("byte1")));
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("short1")));
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("int1")));
    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("long1")));
    assertEquals(8, reader.getRawDataSizeOfColumns(Lists.newArrayList("float1")));
    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("double1")));
    assertEquals(5, reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1")));
    assertEquals(172, reader.getRawDataSizeOfColumns(Lists.newArrayList("string1")));
    assertEquals(483, reader.getRawDataSizeOfColumns(Lists.newArrayList("list")));
    assertEquals(384, reader.getRawDataSizeOfColumns(Lists.newArrayList("map")));
    assertEquals(396, reader.getRawDataSizeOfColumns(Lists.newArrayList("middle")));
    assertEquals(16, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts")));
    assertEquals(224, reader.getRawDataSizeOfColumns(Lists.newArrayList("decimal1")));
    assertEquals(24, reader.getRawDataSizeOfColumns(Lists.newArrayList("ts", "int1")));
    assertEquals(1271,
        reader.getRawDataSizeOfColumns(Lists.newArrayList("middle", "list", "map", "float1")));
    assertEquals(185,
        reader.getRawDataSizeOfColumns(Lists.newArrayList("bytes1", "byte1", "string1")));
    assertEquals(1752, reader.getRawDataSizeOfColumns(Lists.newArrayList("boolean1",
        "byte1", "short1", "int1", "long1", "float1", "double1", "bytes1", "string1",
        "list", "map", "middle", "ts", "decimal1")));

    // check the stats
    ColumnStatistics[] stats = reader.getStatistics();
    assertEquals(2, stats[1].getNumberOfValues());
    assertEquals(1, ((BooleanColumnStatistics) stats[1]).getFalseCount());
    assertEquals(1, ((BooleanColumnStatistics) stats[1]).getTrueCount());
    assertEquals("count: 2 hasNull: false bytesOnDisk: 5 true: 1", stats[1].toString());

    assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum());
    assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum());
    assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined());
    assertEquals(3072, ((IntegerColumnStatistics) stats[3]).getSum());
    // bytesOnDisk is 8 here (vs 9 in the current format) due to 0.11 encoding.
    assertEquals("count: 2 hasNull: false bytesOnDisk: 8 min: 1024 max: 2048 sum: 3072",
        stats[3].toString());

    assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMaximum());
    assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMinimum());
    assertEquals(false, ((IntegerColumnStatistics) stats[5]).isSumDefined());
    assertEquals(
        "count: 2 hasNull: false bytesOnDisk: 12 min: 9223372036854775807 max: 9223372036854775807",
        stats[5].toString());

    assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum());
    assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum());
    assertEquals(-20.0, ((DoubleColumnStatistics) stats[7]).getSum(), 0.00001);
    assertEquals("count: 2 hasNull: false bytesOnDisk: 15 min: -15.0 max: -5.0 sum: -20.0",
        stats[7].toString());

    assertEquals(5, ((BinaryColumnStatistics) stats[8]).getSum());
    assertEquals("count: 2 hasNull: false bytesOnDisk: 14 sum: 5", stats[8].toString());

    assertEquals("bye", ((StringColumnStatistics) stats[9]).getMinimum());
    assertEquals("hi", ((StringColumnStatistics) stats[9]).getMaximum());
    assertEquals(5, ((StringColumnStatistics) stats[9]).getSum());
    assertEquals("count: 2 hasNull: false bytesOnDisk: 20 min: bye max: hi sum: 5",
        stats[9].toString());
  }

  /**
   * Reads a pre-built 0.11-format file from the classpath, checks stripe
   * layout contiguity and stats, then deliberately casts the binary column's
   * stats to BinaryColumnStatistics — old files carry no binary statistics,
   * so the expected outcome of this test IS the ClassCastException.
   */
  @Test(expected = ClassCastException.class)
  public void testSerdeStatsOldFormat() throws Exception {
    Path oldFilePath = new Path(HiveTestUtils.getFileFromClasspath("orc-file-11-format.orc"));
    Reader reader = OrcFile.createReader(oldFilePath,
        OrcFile.readerOptions(conf).filesystem(fs));

    int stripeCount = 0;
    int rowCount = 0;
    long currentOffset = -1;
    // Each stripe must start exactly where the previous one ended.
    for (StripeInformation stripe : reader.getStripes()) {
      stripeCount += 1;
      rowCount += stripe.getNumberOfRows();
      if (currentOffset < 0) {
        currentOffset = stripe.getOffset() + stripe.getIndexLength() +
            stripe.getDataLength() + stripe.getFooterLength();
      } else {
        assertEquals(currentOffset, stripe.getOffset());
        currentOffset += stripe.getIndexLength() + stripe.getDataLength() +
            stripe.getFooterLength();
      }
    }
    assertEquals(reader.getNumberOfRows(), rowCount);
    assertEquals(6615000, reader.getRawDataSize());
    assertEquals(2, stripeCount);

    // check the stats
    ColumnStatistics[] stats = reader.getStatistics();
    assertEquals(7500, stats[1].getNumberOfValues());
    assertEquals(3750, ((BooleanColumnStatistics) stats[1]).getFalseCount());
    assertEquals(3750, ((BooleanColumnStatistics) stats[1]).getTrueCount());
    assertEquals("count: 7500 hasNull: true true: 3750", stats[1].toString());

    assertEquals(2048, ((IntegerColumnStatistics) stats[3]).getMaximum());
    assertEquals(1024, ((IntegerColumnStatistics) stats[3]).getMinimum());
    assertEquals(true, ((IntegerColumnStatistics) stats[3]).isSumDefined());
    assertEquals(11520000, ((IntegerColumnStatistics) stats[3]).getSum());
    assertEquals("count: 7500 hasNull: true min: 1024 max: 2048 sum: 11520000",
        stats[3].toString());

    assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMaximum());
    assertEquals(Long.MAX_VALUE, ((IntegerColumnStatistics) stats[5]).getMinimum());
    assertEquals(false, ((IntegerColumnStatistics) stats[5]).isSumDefined());
    assertEquals(
        "count: 7500 hasNull: true min: 9223372036854775807 max: 9223372036854775807",
        stats[5].toString());

    assertEquals(-15.0, ((DoubleColumnStatistics) stats[7]).getMinimum());
    assertEquals(-5.0, ((DoubleColumnStatistics) stats[7]).getMaximum());
    assertEquals(-75000.0, ((DoubleColumnStatistics) stats[7]).getSum(), 0.00001);
    assertEquals("count: 7500 hasNull: true min: -15.0 max: -5.0 sum: -75000.0",
        stats[7].toString());

    assertEquals("bye", ((StringColumnStatistics) stats[9]).getMinimum());
    assertEquals("hi", ((StringColumnStatistics) stats[9]).getMaximum());
    assertEquals(0, ((StringColumnStatistics) stats[9]).getSum());
    assertEquals("count: 7500 hasNull: true min: bye max: hi sum: 0", stats[9].toString());

    // old orc format will not have binary statistics. toString() will show only
    // the general column statistics
    assertEquals("count: 7500 hasNull: true", stats[8].toString());

    // since old orc format doesn't support binary statistics,
    // this should throw ClassCastException
    assertEquals(5, ((BinaryColumnStatistics) stats[8]).getSum());
  }

}
package ch.tkuhn.memetools;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.supercsv.io.CsvListWriter;

import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;

/**
 * Command-line tool that slides a fixed-size window (in steps of a smaller
 * step size) over a chronologically sorted data file, scores every term per
 * window via MemeScorer, and writes a CSV timeline of the top-scoring term
 * per window (its lead over the runner-up score).
 *
 * The window is implemented as a ring of (windowSize / stepSize) MemeScorer
 * bins; each step the oldest bin is cleared and refilled, so a full window's
 * counts are always the sum over all bins.
 */
public class CalculateTopHistory {

	@Parameter(description = "chronologically-sorted-input-file", required = true)
	private List<String> parameters = new ArrayList<String>();

	private File inputFile;

	@Parameter(names = "-w", description = "Window size", required = true)
	private int windowSize;

	@Parameter(names = "-s", description = "Step size", required = true)
	private int stepSize;

	// Derived: windowSize / stepSize; the number of MemeScorer bins in the ring.
	private int stepsPerWindow;

	// NOTE(review): despite the description, this is the smoothing/delta
	// parameter passed to MemeScorer.calculateMemeScoreValues — confirm intent.
	@Parameter(names = "-d", description = "Set n parameter")
	private int delta = 1;

	@Parameter(names = "-o", description = "Output file")
	private File outputFile;

	@Parameter(names = "-v", description = "Write detailed log")
	private boolean verbose = false;

	private File logFile;

	/**
	 * Entry point: parses arguments, validates the window/step relationship
	 * (step > 0, step < window, window a multiple of step), then runs.
	 */
	public static final void main(String[] args) {
		CalculateTopHistory obj = new CalculateTopHistory();
		JCommander jc = new JCommander(obj);
		try {
			jc.parse(args);
		} catch (ParameterException ex) {
			jc.usage();
			System.exit(1);
		}
		if (obj.parameters.size() != 1) {
			System.err.println("ERROR: Exactly one main argument is needed");
			jc.usage();
			System.exit(1);
		}
		if (obj.stepSize <= 0 || obj.windowSize <= obj.stepSize) {
			System.err.println("ERROR: Step size has to be positive and smaller than window size");
			jc.usage();
			System.exit(1);
		}
		if (obj.windowSize % obj.stepSize != 0) {
			System.err.println("ERROR: Window size must be a multiple of step size");
			jc.usage();
			System.exit(1);
		}
		obj.inputFile = new File(obj.parameters.get(0));
		obj.run();
	}

	// Ring of per-step scorers; ms[0] owns the shared term table (see init()).
	private MemeScorer[] ms;
	// Distinct terms that were ever the window's top meme, in first-seen order.
	private List<String> topMemes;
	// Per-window runner-up score (the baseline each CSV row starts from).
	private List<Double> secondScoreTimeline;
	// Per-window best score.
	private List<Double> firstScoreTimeline;
	// Per-window top term, parallel to the two score lists.
	private List<String> topMemeTimeline;

	/**
	 * Full pipeline: init, term-extraction pass, windowed scoring pass,
	 * then CSV output. Any failure is logged and exits with status 1.
	 */
	public void run() {
		init();
		try {
			extractTerms();
			processData();
			ms = null;  // free for garbage collection
			writeOutput();
		} catch (Throwable th) {
			log(th);
			System.exit(1);
		}
		log("Finished");
	}

	/** Sets up log/output files and the ring of MemeScorer bins. */
	private void init() {
		logFile = new File(MemeUtils.getLogDir(), getOutputFileName() + ".log");
		log("==========");

		if (outputFile == null) {
			outputFile = new File(MemeUtils.getOutputDataDir(), getOutputFileName() + ".csv");
		}

		stepsPerWindow = windowSize / stepSize;
		ms = new MemeScorer[stepsPerWindow];
		ms[0] = new MemeScorer(MemeScorer.DECOMPOSED_SCREEN_MODE);
		// The remaining bins are constructed from ms[0] — presumably sharing
		// its term table so all bins count the same vocabulary; TODO confirm
		// against MemeScorer.
		for (int i = 1 ; i < stepsPerWindow ; i++) {
			ms[i] = new MemeScorer(ms[0], MemeScorer.DECOMPOSED_SCREEN_MODE);
		}
		topMemes = new ArrayList<String>();
		secondScoreTimeline = new ArrayList<Double>();
		firstScoreTimeline = new ArrayList<Double>();
		topMemeTimeline = new ArrayList<String>();
	}

	/** First pass: screen every entry so ms[0] learns the full term set. */
	private void extractTerms() throws IOException {
		log("Extract terms from " + inputFile + " ...");
		BufferedReader reader = new BufferedReader(new FileReader(inputFile));
		int entryCount = 0;
		String inputLine;
		while ((inputLine = reader.readLine()) != null) {
			logProgress(entryCount);
			DataEntry d = new DataEntry(inputLine);
			ms[0].screenTerms(d);
			entryCount++;
		}
		log("Number of terms extracted: " + ms[0].getTerms().size());
		reader.close();
	}

	/**
	 * Second pass: record each entry into the current bin; every stepSize
	 * entries the bin boundary advances, and once the first full window has
	 * been seen, an output entry is emitted before the (now oldest) bin is
	 * cleared for reuse. Order of operations here is load-bearing.
	 */
	private void processData() throws IOException {
		log("Processing data from " + inputFile + " ...");
		BufferedReader reader = new BufferedReader(new FileReader(inputFile));
		int inputEntryCount = 0;
		int outputEntryCount = 0;
		int bin = 0;
		String inputLine;
		while ((inputLine = reader.readLine()) != null) {
			logProgress(inputEntryCount);
			DataEntry d = new DataEntry(inputLine);
			ms[bin].recordTerms(d);
			inputEntryCount++;
			// Which bin the NEXT entry belongs to (ring position within the window).
			bin = (inputEntryCount % windowSize) / stepSize;
			if (inputEntryCount % stepSize == 0) {
				// Current bin is full
				if (inputEntryCount >= windowSize) {
					// All bins are full
					makeOutputEntry(outputEntryCount);
					outputEntryCount++;
				}
				logDetail("Start new bin " + bin + " (at entry " + inputEntryCount + ")");
				// Clear the oldest bin so it can collect the next step's entries.
				ms[bin].clear();
			}
		}
		log(outputEntryCount + " output entries created");
		// Entries that don't fill a complete step never produce output.
		log((inputEntryCount % stepSize) + " final input entries ignored");
		reader.close();
	}

	/**
	 * Scores every known term over the whole window (summing counts across
	 * all bins) and records the top term plus the first- and second-best
	 * scores for this window position.
	 */
	private void makeOutputEntry(int count) {
		String firstTerm = null;
		double firstScore = -1.0;
		double secondScore = -1.0;
		for (String term : ms[0].getTerms().keySet()) {
			int mmVal = 0, mVal = 0, xmVal = 0, xVal = 0, fVal = 0;
			// Sum the raw counters over all bins = counts for the full window.
			for (MemeScorer m : ms) {
				mmVal += m.getMM(term);
				mVal += m.getM(term);
				xmVal += m.getXM(term);
				xVal += m.getX(term);
				fVal += m.getF(term);
			}
			// Index [3] selects one of the computed score variants; which one
			// depends on MemeScorer.calculateMemeScoreValues — TODO confirm.
			double score = MemeScorer.calculateMemeScoreValues(mmVal, mVal, xmVal, xVal, fVal, delta)[3];
			if (score > firstScore) {
				secondScore = firstScore;
				firstScore = score;
				firstTerm = term;
			} else if (score > secondScore) {
				secondScore = score;
			}
		}
		if (!topMemes.contains(firstTerm)) {
			topMemes.add(firstTerm);
		}
		topMemeTimeline.add(firstTerm);
		firstScoreTimeline.add(firstScore);
		secondScoreTimeline.add(secondScore);
	}

	/**
	 * Writes the CSV: header is "SECOND" plus every term that was ever on
	 * top; each row carries the runner-up score and, in the winning term's
	 * column, the winner's lead (first minus second score); 0 elsewhere.
	 */
	private void writeOutput() throws IOException {
		log("Writing result to " + outputFile + " ...");
		BufferedWriter w = new BufferedWriter(new FileWriter(outputFile));
		CsvListWriter csvWriter = new CsvListWriter(w, MemeUtils.getCsvPreference());
		List<String> line = new ArrayList<String>();
		line.add("SECOND");
		for (String term : topMemes) line.add(term);
		csvWriter.write(line);
		for (int i = 0 ; i < topMemeTimeline.size() ; i++) {
			line = new ArrayList<String>();
			line.add(secondScoreTimeline.get(i) + "");
			for (String term : topMemes) {
				if (term.equals(topMemeTimeline.get(i))) {
					line.add((firstScoreTimeline.get(i) - secondScoreTimeline.get(i)) + "");
				} else {
					line.add("0");
				}
			}
			csvWriter.write(line);
		}
		csvWriter.close();
	}

	/** Derives the output base name from the input file name and parameters. */
	private String getOutputFileName() {
		String basename = inputFile.getName().replaceAll("\\..*$", "");
		basename = basename.replace("-chronologic", "");
		String filename = "hi-topms-" + basename + "-d" + delta + "-w" + windowSize + "-s" + stepSize;
		return filename;
	}

	// Logs a progress marker every 100k entries.
	private void logProgress(int p) {
		if (p % 100000 == 0) log(p + "...");
	}

	private void log(Object obj) {
		MemeUtils.log(logFile, obj);
	}

	// Detail messages only appear when -v was given.
	private void logDetail(Object obj) {
		if (verbose) log(obj);
	}

}
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2011 Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the @authors tag. All rights reserved.
 * See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.arquillian.config.impl.extension;

import java.util.HashMap;
import java.util.Map;

import org.jboss.arquillian.config.descriptor.api.ArquillianDescriptor;
import org.jboss.arquillian.core.api.Injector;
import org.jboss.arquillian.core.api.Instance;
import org.jboss.arquillian.core.api.annotation.Inject;
import org.jboss.arquillian.core.api.event.ManagerStarted;
import org.jboss.arquillian.core.test.AbstractManagerTestBase;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * ConfigurationRegistrarTestCase
 *
 * Verifies that {@link ConfigurationRegistrar} loads the Arquillian configuration from
 * arquillian.xml / arquillian.properties resources, merges the two sources, applies
 * {@code arq.*} system-property overrides, and performs {@code ${...}} placeholder
 * replacement from system properties / environment variables.
 *
 * @author <a href="mailto:aslak@redhat.com">Aslak Knutsen</a>
 * @version $Revision: $
 */
public class ConfigurationRegistrarTestCase extends AbstractManagerTestBase {
    @Inject
    private Instance<Injector> injectorInst;

    @Inject
    private Instance<ArquillianDescriptor> descInst;

    private ConfigurationRegistrar registrar;

    /**
     * Runs the given callback with the system property set to the given value.
     * The property is cleared in a finally block so tests stay isolated even when
     * the callback throws. Calls nest to set multiple properties at once.
     *
     * @param property system property name to set
     * @param value value to set it to for the duration of the callback
     * @param callback assertions to run while the property is active
     */
    static void validate(String property, String value, AssertCallback callback) {
        try {
            System.setProperty(property, value);
            callback.validate();
        } finally {
            System.clearProperty(property);
        }
    }

    @Before
    public void injectConfigurationRegistrar() {
        registrar = injectorInst.get().inject(new ConfigurationRegistrar());
    }

    @Test
    public void shouldBeAbleToLoadEmptyDefaultConfiguration() throws Exception {
        registrar.loadConfiguration(new ManagerStarted());

        ArquillianDescriptor desc = descInst.get();
        Assert.assertEquals(0, desc.getContainers().size());
        Assert.assertEquals(0, desc.getGroups().size());
        Assert.assertEquals(0, desc.getExtensions().size());
    }

    @Test
    public void shouldBeAbleToLoadConfiguredXMLFileResource() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/named_arquillian.xml",
            new AssertCallback() {
                @Override
                public void validate() {
                    registrar.loadConfiguration(new ManagerStarted());

                    ArquillianDescriptor desc = descInst.get();
                    Assert.assertEquals(1, desc.getContainers().size());
                    Assert.assertEquals("A", desc.getContainers().get(0).getContainerName());
                    // verify mode = class, override test will set it to suite
                    Assert.assertEquals("class", desc.getContainers().get(0).getMode());
                }
            });
    }

    @Test
    public void shouldBeAbleToLoadConfiguredXMLClasspathResource() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/named_arquillian.xml",
            new AssertCallback() {
                @Override
                public void validate() {
                    registrar.loadConfiguration(new ManagerStarted());

                    ArquillianDescriptor desc = descInst.get();
                    Assert.assertEquals(1, desc.getContainers().size());
                    Assert.assertEquals("A", desc.getContainers().get(0).getContainerName());
                    // verify mode = class, override test will set it to suite
                    Assert.assertEquals("class", desc.getContainers().get(0).getMode());
                }
            });
    }

    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionOnMissingConfiguredXMLResource() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/named_arquillian_SHOULD_NOT_BE_FOUND_.xml",
            new AssertCallback() {
                @Override
                public void validate() {
                    registrar.loadConfiguration(new ManagerStarted());
                }
            });
    }

    @Test
    public void shouldBeAbleToLoadConfiguredPropertiesFileResource() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/named_arquillian.properties",
            new AssertCallback() {
                @Override
                public void validate() {
                    registrar.loadConfiguration(new ManagerStarted());

                    ArquillianDescriptor desc = descInst.get();
                    Assert.assertEquals(1, desc.getContainers().size());
                    Assert.assertEquals("B", desc.getContainers().get(0).getContainerName());
                }
            });
    }

    @Test
    public void shouldBeAbleToLoadConfiguredPropertiesClasspathResource() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/named_arquillian.properties",
            new AssertCallback() {
                @Override
                public void validate() {
                    registrar.loadConfiguration(new ManagerStarted());

                    ArquillianDescriptor desc = descInst.get();
                    Assert.assertEquals(1, desc.getContainers().size());
                    Assert.assertEquals("B", desc.getContainers().get(0).getContainerName());
                    Assert.assertEquals("manual", desc.getContainers().get(0).getMode());
                }
            });
    }

    @Test(expected = IllegalArgumentException.class)
    public void shouldThrowExceptionOnMissingConfiguredPropertiesResource() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/named_arquillian_SHOULD_NOT_BE_FOUND_.properties",
            new AssertCallback() {
                @Override
                public void validate() {
                    registrar.loadConfiguration(new ManagerStarted());
                }
            });
    }

    @Test
    public void shouldBeAbleToAddSystemProperties() throws Exception {
        validate(
            "arq.container.C.mode", "manual",
            new AssertCallback() {
                @Override
                public void validate() {
                    registrar.loadConfiguration(new ManagerStarted());

                    ArquillianDescriptor desc = descInst.get();
                    Assert.assertEquals(1, desc.getContainers().size());
                    Assert.assertEquals("C", desc.getContainers().get(0).getContainerName());
                    Assert.assertEquals("manual", desc.getContainers().get(0).getMode());
                }
            });
    }

    @Test
    public void shouldBeAbleToOverrideWithSystemProperties() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/named_arquillian.xml",
            new AssertCallback() {
                @Override
                public void validate() {
                    ConfigurationRegistrarTestCase.validate(
                        "arq.container.A.mode", "suite",
                        new AssertCallback() {
                            @Override
                            public void validate() {
                                registrar.loadConfiguration(new ManagerStarted());

                                ArquillianDescriptor desc = descInst.get();
                                Assert.assertEquals(1, desc.getContainers().size());
                                Assert.assertEquals("A", desc.getContainers().get(0).getContainerName());
                                Assert.assertEquals("suite", desc.getContainers().get(0).getMode());
                            }
                        });
                }
            });
    }

    @Test
    public void shouldBeAbleToAddToXMLWithProperties() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/named_arquillian.xml",
            new AssertCallback() {
                @Override
                public void validate() {
                    ConfigurationRegistrarTestCase.validate(
                        ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/named_arquillian.properties",
                        new AssertCallback() {
                            @Override
                            public void validate() {
                                registrar.loadConfiguration(new ManagerStarted());

                                ArquillianDescriptor desc = descInst.get();
                                Assert.assertEquals(2, desc.getContainers().size());
                                Assert.assertEquals("A", desc.getContainers().get(0).getContainerName());
                                Assert.assertEquals("B", desc.getContainers().get(1).getContainerName());
                            }
                        });
                }
            });
    }

    @Test
    public void shouldBeAbleToOverrideToXMLWithProperties() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/named_arquillian.xml",
            new AssertCallback() {
                @Override
                public void validate() {
                    ConfigurationRegistrarTestCase.validate(
                        ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/override_named_arquillian.properties",
                        new AssertCallback() {
                            @Override
                            public void validate() {
                                registrar.loadConfiguration(new ManagerStarted());

                                ArquillianDescriptor desc = descInst.get();
                                Assert.assertEquals(1, desc.getContainers().size());
                                Assert.assertEquals("A", desc.getContainers().get(0).getContainerName());
                                Assert.assertEquals("suite", desc.getContainers().get(0).getMode());
                            }
                        });
                }
            });
    }

    @Test
    public void shouldToOverrideToPropertiesWithSystemEnvironment() throws Exception {
        validate(
            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/named_arquillian.xml",
            new AssertCallback() {
                @Override
                public void validate() {
                    ConfigurationRegistrarTestCase.validate(
                        ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/override_named_arquillian.properties",
                        new AssertCallback() {
                            @Override
                            public void validate() {
                                // Environment variables are injected directly (not via the real
                                // process environment) so the test is reproducible.
                                Map<String, String> envVars = new HashMap<String, String>();
                                envVars.put("arq.container.A.mode", "none");
                                registrar.setEnvironmentVariables(envVars);

                                registrar.loadConfiguration(new ManagerStarted());

                                ArquillianDescriptor desc = descInst.get();
                                Assert.assertEquals(1, desc.getContainers().size());
                                Assert.assertEquals("A", desc.getContainers().get(0).getContainerName());
                                Assert.assertEquals("none", desc.getContainers().get(0).getMode());
                            }
                        });
                }
            });
    }

    @Test
    public void shouldBeAbleToOverrideDefaultProtocolTXMLWithPlaceholderReplace() throws Exception {
        validate("env.ENV1", "env1", new AssertCallback() {
            @Override
            public void validate() {
                ConfigurationRegistrarTestCase.validate("env.ENV3", "env3", new AssertCallback() {
                    @Override
                    public void validate() {
                        ConfigurationRegistrarTestCase.validate(
                            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/property_arquillian.xml",
                            new AssertCallback() {
                                @Override
                                public void validate() {
                                    ConfigurationRegistrarTestCase.validate(
                                        ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/property_arquillian.properties",
                                        new AssertCallback() {
                                            @Override
                                            public void validate() {
                                                registrar.loadConfiguration(new ManagerStarted());

                                                ArquillianDescriptor desc = descInst.get();
                                                Assert.assertNotNull(desc.getDefaultProtocol());
                                                Assert.assertEquals("X BBB X", desc.getDefaultProtocol().getProperty("bbb"));
                                                Assert.assertEquals("X X", desc.getDefaultProtocol().getProperty("bbb2"));
                                            }
                                        });
                                }
                            });
                    }
                });
            }
        });
    }

    @Test
    public void shouldBeAbleToOverrideContainersToXMLWithPlaceholderReplace() throws Exception {
        validate("env.ENV1", "env1", new AssertCallback() {
            @Override
            public void validate() {
                ConfigurationRegistrarTestCase.validate("env.ENV3", "env3", new AssertCallback() {
                    @Override
                    public void validate() {
                        ConfigurationRegistrarTestCase.validate(
                            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/property_arquillian.xml",
                            new AssertCallback() {
                                @Override
                                public void validate() {
                                    ConfigurationRegistrarTestCase.validate(
                                        ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/property_arquillian.properties",
                                        new AssertCallback() {
                                            @Override
                                            public void validate() {
                                                registrar.loadConfiguration(new ManagerStarted());

                                                ArquillianDescriptor desc = descInst.get();
                                                Assert.assertEquals(1, desc.getContainers().size());
                                                Assert.assertNotNull(desc.getContainers().get(0));
                                                // BUGFIX: these were assertNotNull(message, actual), which used the
                                                // expected value as the failure message and never compared it.
                                                Assert.assertEquals("Y AAA Y", desc.getContainers().get(0).getContainerProperty("aaa"));
                                                Assert.assertEquals("Y Y", desc.getContainers().get(0).getContainerProperty("aaa2"));
                                            }
                                        });
                                }
                            });
                    }
                });
            }
        });
    }

    @Test
    public void shouldBeAbleToOverrideExtensionsToXMLWithPlaceholderReplace() throws Exception {
        validate("env.ENV1", "env1", new AssertCallback() {
            @Override
            public void validate() {
                ConfigurationRegistrarTestCase.validate("env.ENV3", "env3", new AssertCallback() {
                    @Override
                    public void validate() {
                        ConfigurationRegistrarTestCase.validate(
                            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/property_arquillian.xml",
                            new AssertCallback() {
                                @Override
                                public void validate() {
                                    ConfigurationRegistrarTestCase.validate(
                                        ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/property_arquillian.properties",
                                        new AssertCallback() {
                                            @Override
                                            public void validate() {
                                                registrar.loadConfiguration(new ManagerStarted());

                                                ArquillianDescriptor desc = descInst.get();
                                                Assert.assertEquals(1, desc.getExtensions().size());
                                                Assert.assertNotNull(desc.getExtensions().get(0));
                                                // BUGFIX: these were assertNotNull(message, actual), which used the
                                                // expected value as the failure message and never compared it.
                                                Assert.assertEquals("Z DDD Z", desc.getExtensions().get(0).getExtensionProperty("ddd"));
                                                Assert.assertEquals("Z Z", desc.getExtensions().get(0).getExtensionProperty("ddd2"));
                                            }
                                        });
                                }
                            });
                    }
                });
            }
        });
    }

    @Test
    public void shouldBeAbleToOverrideGroupsToXMLWithPlaceholderReplace() throws Exception {
        validate("env.ENV1", "env1", new AssertCallback() {
            @Override
            public void validate() {
                ConfigurationRegistrarTestCase.validate("env.ENV3", "env3", new AssertCallback() {
                    @Override
                    public void validate() {
                        ConfigurationRegistrarTestCase.validate(
                            ConfigurationRegistrar.ARQUILLIAN_XML_PROPERTY, "registrar_tests/property_arquillian.xml",
                            new AssertCallback() {
                                @Override
                                public void validate() {
                                    ConfigurationRegistrarTestCase.validate(
                                        ConfigurationRegistrar.ARQUILLIAN_PROP_PROPERTY, "registrar_tests/property_arquillian.properties",
                                        new AssertCallback() {
                                            @Override
                                            public void validate() {
                                                registrar.loadConfiguration(new ManagerStarted());

                                                ArquillianDescriptor desc = descInst.get();
                                                Assert.assertEquals(2, desc.getGroups().size());
                                                Assert.assertNotNull(desc.getGroups().get(0));
                                                Assert.assertEquals(1, desc.getGroups().get(0).getGroupContainers().size());
                                                Assert.assertEquals("T EEE T", desc.getGroups().get(0).getGroupContainers().get(0).getContainerProperty("eee"));
                                                Assert.assertEquals("T T", desc.getGroups().get(0).getGroupContainers().get(0).getContainerProperty("eee2"));
                                                Assert.assertEquals(1, desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().size());
                                                Assert.assertEquals("R FFF R", desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("fff"));
                                                Assert.assertEquals("R R", desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("fff2"));
                                                Assert.assertEquals("env1", desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("ggg1"));
                                                // env.ENV2 is not set, so its placeholder must remain untouched.
                                                Assert.assertEquals("${env.ENV2}", desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("ggg2"));
                                                Assert.assertEquals("G env1 G", desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("ggg3"));
                                                Assert.assertEquals("env3 HHH", desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("hhh1"));
                                                Assert.assertEquals("${env.ENV4} HHH", desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("hhh2"));
                                                Assert.assertEquals("H env1 HHH H", desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("hhh3"));
                                                Assert.assertEquals("env1 ${env.ENV2}", desc.getGroups().get(0).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("hhh4"));

                                                Assert.assertNotNull(desc.getGroups().get(1));
                                                Assert.assertEquals(1, desc.getGroups().get(1).getGroupContainers().size());
                                                Assert.assertEquals("WITHOUT ", desc.getGroups().get(1).getGroupContainers().get(0).getProtocols().get(0).getProtocolProperty("foo"));
                                            }
                                        });
                                }
                            });
                    }
                });
            }
        });
    }

    /** Callback holding the assertions to run while temporary system properties are active. */
    public interface AssertCallback {
        void validate();
    }
}
/*
 * RDV
 * Real-time Data Viewer
 * http://rdv.googlecode.com/
 *
 * Copyright (c) 2005-2007 University at Buffalo
 * Copyright (c) 2005-2007 NEES Cyberinfrastructure Center
 * Copyright (c) 2008 Palta Software
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * $URL$
 * $Revision$
 * $Date$
 * $Author$
 */

package org.rdv.action;

import java.awt.event.ActionEvent;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import javax.swing.JOptionPane;

import org.rdv.data.Channel;
import org.rdv.data.DataFileReader;
import org.rdv.data.NumericDataSample;
import org.rdv.rbnb.RBNBController;
import org.rdv.rbnb.RBNBException;
import org.rdv.rbnb.RBNBSource;
import org.rdv.ui.UIUtilities;
import org.rdv.ui.ProgressWindow;

/**
 * An action to import data files.
 *
 * @author  Jason P. Hanley
 */
public class DataImportAction extends DataViewerAction {
  /** serialization version identifier */
  private static final long serialVersionUID = 4075250858285498451L;

  /** the window to show the progress of the import */
  private ProgressWindow progressWindow;

  /** the number of samples to collect before sending the data to the server */
  private static final int SAMPLES_PER_FLUSH = 50;

  public DataImportAction() {
    super("Import data file", "Import local data to RBNB server");
  }

  /** constructor with generic parameters */
  public DataImportAction(String text, String desc) {
    super(text, desc);
  }

  /**
   * Prompts the user for the data file and uploads the data to the RBNB server.
   */
  public void actionPerformed(ActionEvent ae) {
    importData();
  }

  /**
   * Prompts the user for the data file and uploads the data to the RBNB server.
   * Does nothing if the user cancels or picks something that is not a file.
   */
  public void importData() {
    File dataFile = UIUtilities.getFile("Import");
    if (dataFile == null) {
      return;
    }
    if (!dataFile.exists()) {
      return;
    }
    if (!dataFile.isFile()) {
      return;
    }

    importData(dataFile);
  }

  /**
   * Uploads the data to the RBNB server.
   *
   * @param dataFile  the data file
   */
  public void importData(File dataFile) {
    importData(dataFile, null);
  }

  /**
   * Uploads the data to the RBNB server and optionally deletes the data file
   * after import.
   *
   * @param dataFile        the data file
   * @param deleteDataFile  if true delete the data file after import
   */
  public void importData(File dataFile, boolean deleteDataFile) {
    importData(dataFile, null, deleteDataFile);
  }

  /**
   * Uploads the data to the RBNB server using the given source name.
   *
   * @param dataFile    the data file
   * @param sourceName  the source name
   */
  public void importData(File dataFile, String sourceName) {
    importData(dataFile, sourceName, false);
  }

  /**
   * Uploads the data to the RBNB server using the given source name and
   * optionally deletes the data file after import.
   *
   * @param dataFile        the data file
   * @param sourceName      the source name
   * @param deleteDataFile  if true delete the data file after import
   */
  public void importData(File dataFile, String sourceName, boolean deleteDataFile) {
    URL dataFileURL;
    try {
      dataFileURL = dataFile.toURI().toURL();
    } catch (MalformedURLException e) {
      // A URL built from a local file URI should never be malformed. If it
      // somehow is, report it and abort rather than passing a null URL
      // downstream (which previously caused an NPE later in the import).
      e.printStackTrace();
      return;
    }

    importData(dataFileURL, sourceName, deleteDataFile);
  }

  /**
   * Uploads the data to the RBNB server.
   *
   * @param dataFile  the data file URL
   */
  public void importData(URL dataFile) {
    importData(dataFile, null);
  }

  /**
   * Uploads the data to the RBNB server using the given source name.
   *
   * @param dataFile    the data file URL
   * @param sourceName  the source name
   */
  public void importData(URL dataFile, String sourceName) {
    importData(dataFile, sourceName, false);
  }

  /**
   * Uploads the data to the RBNB server using the given source name and
   * optionally deletes the data file after import.
   *
   * @param dataFile        the data file URL
   * @param sourceName      the source name; if null or empty, derived from the file name
   * @param deleteDataFile  if true delete the data file after import
   */
  private void importData(URL dataFile, String sourceName, boolean deleteDataFile) {
    if (sourceName == null || sourceName.length() == 0) {
      sourceName = getDefaultSourceName(dataFile);
    }

    importData(Collections.singletonList(dataFile),
               Collections.singletonList(sourceName),
               deleteDataFile);
  }

  /**
   * Uploads the data to the RBNB server.
   *
   * @param dataFiles  the data files
   */
  public void importData(List<URL> dataFiles) {
    List<String> sourceNames = new ArrayList<String>();
    for (URL dataFile : dataFiles) {
      sourceNames.add(getDefaultSourceName(dataFile));
    }

    importData(dataFiles, sourceNames, false);
  }

  /**
   * Uploads the data to the RBNB using the given source names. This will happen
   * in a separate thread. The data files will optionally be deleted after the
   * import is complete (file URL's only).
   *
   * @param dataFiles        the data files
   * @param sourceNames      the source names, parallel to dataFiles
   * @param deleteDataFiles  if true delete the data files after import
   */
  public void importData(final List<URL> dataFiles, final List<String> sourceNames,
                         final boolean deleteDataFiles) {
    progressWindow = new ProgressWindow(UIUtilities.getMainFrame(), "Importing data...");
    progressWindow.setVisible(true);

    new Thread() {
      public void run() {
        boolean error = false;
        try {
          importDataThread(dataFiles, sourceNames);
        } catch (Exception e) {
          error = true;
          e.printStackTrace();
        }

        // optionally remove the imported files; only local file URL's can be deleted
        if (deleteDataFiles) {
          for (URL dataFile : dataFiles) {
            if (dataFile.getProtocol().equals("file")) {
              try {
                File file = new File(dataFile.toURI());
                file.delete();
              } catch (URISyntaxException e) {
                e.printStackTrace();
              }
            }
          }
        }

        progressWindow.dispose();

        // refresh metadata so the newly imported channels show up
        RBNBController.getInstance().updateMetadata();

        if (!error) {
          JOptionPane.showMessageDialog(UIUtilities.getMainFrame(),
              "Import complete.",
              "Import complete",
              JOptionPane.INFORMATION_MESSAGE);
        } else {
          JOptionPane.showMessageDialog(UIUtilities.getMainFrame(),
              "There was an error importing the data file.",
              "Import failed",
              JOptionPane.ERROR_MESSAGE);
        }
      }
    }.start();
  }

  /**
   * The thread that does the importing.
   *
   * @param dataFiles      the list of data files to import
   * @param sourceNames    the names of the sources for the data files
   * @throws IOException   if there is an error reading the data files
   * @throws RBNBException if there is an error importing the data
   */
  private void importDataThread(List<URL> dataFiles, List<String> sourceNames)
      throws IOException, RBNBException {
    for (int i = 0; i < dataFiles.size(); i++) {
      URL dataFile = dataFiles.get(i);
      String sourceName = sourceNames.get(i);

      progressWindow.setStatus("Importing data file " + getFileName(dataFile));

      // each file owns an equal slice [minProgress, maxProgress) of the progress bar
      float minProgress = (float) (i) / dataFiles.size();
      float maxProgress = (float) (i + 1) / dataFiles.size();

      DataFileReader reader = new DataFileReader(dataFile);

      if (reader.getProperty("samples") == null) {
        throw new IOException("Unable to determine the number of data samples.");
      }
      int samples = Integer.parseInt(reader.getProperty("samples"));
      int archiveSize = (int) Math.ceil((double) samples / SAMPLES_PER_FLUSH);

      RBNBController rbnb = RBNBController.getInstance();
      RBNBSource source = new RBNBSource(sourceName, archiveSize,
          rbnb.getRBNBHostName(), rbnb.getRBNBPortNumber());

      List<Channel> channels = reader.getChannels();
      for (Channel channel : channels) {
        source.addChannel(channel.getName(), "application/octet-stream", channel.getUnit());
      }

      int currentSample = 0;
      NumericDataSample sample;
      while ((sample = reader.readSample()) != null) {
        double timestamp = sample.getTimestamp();
        Number[] values = sample.getValues();

        for (int j = 0; j < values.length; j++) {
          if (values[j] == null) {
            continue;
          }
          source.putData(channels.get(j).getName(), timestamp, values[j].doubleValue());
        }

        currentSample++;

        // BUGFIX: flush interval previously used a hard-coded 50 instead of
        // the SAMPLES_PER_FLUSH constant the archive size is computed from.
        if (currentSample % SAMPLES_PER_FLUSH == 0) {
          source.flush();
        }

        if (samples > 0) {
          // BUGFIX: interpolate within this file's [minProgress, maxProgress]
          // slice; the old formula (min + max * fraction) overshot for every
          // file after the first.
          progressWindow.setProgress(
              minProgress + (maxProgress - minProgress) * currentSample / samples);
        }
      }

      source.flush();
      source.close();

      progressWindow.setProgress(maxProgress);
    }
  }

  /**
   * Gets the name of the file from the URL.
   *
   * @param file  the file URL
   * @return      the name of the file
   */
  private static String getFileName(URL file) {
    String fileName = file.getPath();

    int lastPathIndex = fileName.lastIndexOf('/');
    if (fileName.length() > lastPathIndex + 1) {
      fileName = fileName.substring(lastPathIndex + 1);
    }

    // fix for possible non-ascii characters encoded in file name
    try {
      fileName = URLDecoder.decode(fileName, "UTF-8");
    } catch (UnsupportedEncodingException ue) {
      // UTF-8 is guaranteed to be supported by the JVM; keep the raw name
    }

    return fileName;
  }

  /**
   * Gets the default name of the source for the given the file. This will be
   * the name of the file without the extension.
   *
   * @param dataFile  the data file
   * @return          the source name
   */
  private static String getDefaultSourceName(URL dataFile) {
    String sourceName = getFileName(dataFile);

    int dotIndex = sourceName.lastIndexOf('.');
    if (dotIndex != -1) {
      sourceName = sourceName.substring(0, dotIndex);
    }

    return sourceName;
  }
}
/*
 * MIT License
 *
 * Copyright 2017-2018 Sabre GLBL Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.sabre.oss.conf4j.jaxb.converter;

import com.sabre.oss.conf4j.converter.TypeConverter;
import com.sabre.oss.conf4j.jaxb.converter.JaxbConverter.JaxbPool.Handle;
import org.xml.sax.SAXException;

import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchema;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import java.io.ByteArrayInputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.lang.ref.WeakReference;
import java.lang.reflect.AnnotatedElement;
import java.lang.reflect.Type;
import java.net.URL;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

import static java.lang.Thread.currentThread;
import static java.util.Objects.requireNonNull;
import static javax.xml.bind.Marshaller.JAXB_FORMATTED_OUTPUT;
import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.Validate.notNull;

/**
 * Type converter which converts objects to/from xml. The object class must be properly annotated
 * with JAXB annotations. Only classes which have {@link XmlRootElement} applied are supported.
 *
 * @see XmlRootElement
 */
public class JaxbConverter<T> implements TypeConverter<T> {
    // Since there is only one instance of this object created, we want to be sure it is capable of handling all kinds of
    // Jaxb objects used in conf4j classes; thus there will be a separate JaxbPool for each distinct Jaxb class.
    private final ConcurrentMap<Class<T>, JaxbPool> jaxbPoolMap = new ConcurrentHashMap<>();

    /**
     * Applicable only for concrete classes annotated with {@link XmlRootElement}.
     */
    @Override
    public boolean isApplicable(Type type, Map<String, String> attributes) {
        requireNonNull(type, "type cannot be null");

        return type instanceof Class && ((AnnotatedElement) type).getAnnotation(XmlRootElement.class) != null;
    }

    /**
     * Unmarshals {@code value} (an XML document) into an instance of {@code type}.
     * Returns {@code null} for blank input.
     *
     * @throws IllegalArgumentException when the xml cannot be unmarshalled
     */
    @Override
    public T fromString(Type type, String value, Map<String, String> attributes) {
        requireNonNull(type, "type cannot be null");

        if (isBlank(value)) {
            return null;
        }

        @SuppressWarnings("unchecked")
        Class<T> targetClazz = (Class<T>) type;
        JaxbPool jaxbPool = getJaxbPool(targetClazz);
        try (Handle<Unmarshaller> handle = jaxbPool.borrowUnmarshaller()) {
            // BUGFIX: unmarshal from a character stream instead of value.getBytes().
            // getBytes() used the platform default charset, which could corrupt the
            // document when it differs from the XML's declared encoding; the value
            // is already decoded text, so feed it to JAXB as characters.
            return targetClazz.cast(handle.get().unmarshal(new StringReader(value)));
        } catch (JAXBException e) {
            throw new IllegalArgumentException("Unable to convert xml to " + targetClazz + " using jaxb", e);
        }
    }

    /**
     * Marshals {@code value} into an XML string. Returns {@code null} for {@code null} input.
     *
     * @throws IllegalArgumentException when the object cannot be marshalled
     */
    @Override
    public String toString(Type type, T value, Map<String, String> attributes) {
        requireNonNull(type, "type cannot be null");

        if (value == null) {
            return null;
        }

        @SuppressWarnings("unchecked")
        JaxbPool jaxbPool = getJaxbPool((Class<T>) type);
        try (Handle<Marshaller> handle = jaxbPool.borrowMarshaller()) {
            Marshaller marshaller = handle.get();
            StringWriter stringWriter = new StringWriter();
            marshaller.marshal(value, stringWriter);
            return stringWriter.toString();
        } catch (JAXBException e) {
            throw new IllegalArgumentException("Unable to convert " + value.getClass().getName() + " to xml using jaxb", e);
        }
    }

    private JaxbPool getJaxbPool(Class<T> clazz) {
        return jaxbPoolMap.computeIfAbsent(clazz, JaxbPool::new);
    }

    /**
     * This class represents a single JaxbPool, which is responsible for handling a single Jaxb class.
     * It caches (un)marshallers because creating them is relatively expensive.
     */
    static final class JaxbPool {
        interface Handle<T> extends AutoCloseable {
            T get();

            @Override
            void close();
        }

        private final JAXBContext context;
        private final Schema schema;
        private final Pool<Marshaller> marshallers = new Pool<>(new MarshallerSupplier());
        private final Pool<Unmarshaller> unmarshallers = new Pool<>(new UnmarshallerSupplier());

        JaxbPool(Class<?> clazz) {
            try {
                this.context = JAXBContext.newInstance(clazz);
                this.schema = readXsdSchema(clazz);
            } catch (RuntimeException e) {
                throw e;
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        /**
         * Returns the XSD schema referenced by the {@link XmlSchema} annotation on the class's
         * package, or {@code null} when no schema location is declared.
         *
         * @param clazz class to read the schema for
         * @return XSD schema or null
         * @throws SAXException when the schema cannot be parsed
         */
        Schema readXsdSchema(Class<?> clazz) throws SAXException {
            // getPackage() can be null for classes in the default package.
            Package pkg = clazz.getPackage();
            XmlSchema xmlSchema = pkg != null ? pkg.getAnnotation(XmlSchema.class) : null;
            String schemaLocation = xmlSchema != null ? xmlSchema.location() : null;
            if (schemaLocation == null) {
                return null;
            }
            SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
            return schemaFactory.newSchema(getResource(schemaLocation));
        }

        public JAXBContext getContext() {
            return context;
        }

        public Handle<Marshaller> borrowMarshaller() {
            return new HandleImpl<>(marshallers);
        }

        public Handle<Unmarshaller> borrowUnmarshaller() {
            return new HandleImpl<>(unmarshallers);
        }

        /** Borrows an element from the pool on first {@code get()} and returns it on {@code close()}. */
        private static final class HandleImpl<E> implements Handle<E> {
            private final Pool<E> pool;
            private final AtomicReference<E> element = new AtomicReference<>();

            private HandleImpl(Pool<E> pool) {
                this.pool = pool;
            }

            @Override
            public E get() {
                E e = pool.borrow();
                element.set(e);
                return e;
            }

            @Override
            public void close() {
                E e = element.getAndSet(null);
                if (e != null) {
                    pool.release(e);
                }
            }
        }

        /**
         * Simple thread-safe pool holding weak references so cached elements
         * can be garbage collected under memory pressure.
         */
        private static final class Pool<E> {
            private final Supplier<E> supplier;
            private final Deque<WeakReference<E>> elements = new ArrayDeque<>();

            private Pool(Supplier<E> supplier) {
                this.supplier = notNull(supplier, "supplier cannot be null");
            }

            public E borrow() {
                E element = null;
                synchronized (elements) {
                    // skip references whose referents have been collected
                    while (!elements.isEmpty()) {
                        WeakReference<E> reference = elements.removeLast();
                        element = reference.get();
                        if (element != null) {
                            break;
                        }
                    }
                }
                if (element == null) {
                    element = supplier.get();
                }
                return element;
            }

            public void release(E element) {
                notNull(element, "element cannot be null");
                synchronized (elements) {
                    elements.addLast(new WeakReference<>(element));
                }
            }
        }

        /** Creates marshallers configured with formatted output and the pool's schema. */
        private final class MarshallerSupplier implements Supplier<Marshaller> {
            @Override
            public Marshaller get() {
                try {
                    Marshaller marshaller = context.createMarshaller();
                    marshaller.setProperty(JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
                    marshaller.setSchema(schema);
                    return marshaller;
                } catch (RuntimeException e) {
                    throw e;
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        }

        /** Creates unmarshallers configured with the pool's schema. */
        private final class UnmarshallerSupplier implements Supplier<Unmarshaller> {
            @Override
            public Unmarshaller get() {
                try {
                    Unmarshaller unmarshaller = context.createUnmarshaller();
                    unmarshaller.setSchema(schema);
                    return unmarshaller;
                } catch (JAXBException e) {
                    throw new RuntimeException(e);
                }
            }
        }

        private static URL getResource(String resourceName) {
            // prefer the context class loader, fall back to this class's loader
            ClassLoader loader = defaultIfNull(currentThread().getContextClassLoader(), JaxbPool.class.getClassLoader());
            URL url = loader.getResource(resourceName);
            if (url == null) {
                throw new IllegalArgumentException("resource " + resourceName + " not found.");
            }
            return url;
        }
    }
}
package org.jetbrains.plugins.ipnb.editor.panels.code;

import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.VerticalFlowLayout;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.KeyStrokeAdapter;
import com.intellij.ui.OnePixelSplitter;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.ipnb.configuration.IpnbConnectionManager;
import org.jetbrains.plugins.ipnb.editor.IpnbEditorUtil;
import org.jetbrains.plugins.ipnb.editor.IpnbFileEditor;
import org.jetbrains.plugins.ipnb.editor.actions.IpnbHideOutputAction;
import org.jetbrains.plugins.ipnb.editor.panels.IpnbEditablePanel;
import org.jetbrains.plugins.ipnb.editor.panels.IpnbFilePanel;
import org.jetbrains.plugins.ipnb.format.cells.IpnbCodeCell;
import org.jetbrains.plugins.ipnb.format.cells.output.*;

import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.Arrays;
import java.util.Map;

/**
 * Editor panel for a single IPython/Jupyter notebook code cell.
 * <p>
 * Visually it stacks a source-code editor component on top of a collapsible
 * output area ({@link HideableOutputPanel}). The collapsed/expanded state of the
 * output area is persisted in the cell's metadata map under the
 * {@code "collapsed"} key, mirroring the ipynb file format.
 */
public class IpnbCodePanel extends IpnbEditablePanel<JComponent, IpnbCodeCell> {
  private final Project myProject;
  @NotNull private final IpnbFileEditor myParent;
  // Metadata key used by the ipynb format to persist the output fold state.
  private final static String COLLAPSED_METADATA = "collapsed";
  private IpnbCodeSourcePanel myCodeSourcePanel;
  private HideableOutputPanel myHideableOutputPanel;
  // Whether the next cell should be selected once execution finishes (Shift+Enter behavior).
  private boolean mySelectNext;
  // Last component passed to addPromptPanel; used to append stream output to an
  // existing IpnbStreamPanel instead of creating a new panel per chunk.
  private JComponent myLastAddedPanel;

  public IpnbCodePanel(@NotNull final Project project, @NotNull final IpnbFileEditor parent, @NotNull final IpnbCodeCell cell) {
    super(cell, new BorderLayout());
    myProject = project;
    myParent = parent;
    myViewPanel = createViewPanel();
    add(myViewPanel);
    addRightClickMenu();
    // Forward key events to the file panel so cell-level shortcuts keep working
    // while this panel has focus.
    addKeyListener(new KeyStrokeAdapter() {
      @Override
      public void keyPressed(KeyEvent event) {
        myParent.getIpnbFilePanel().processKeyPressed(event);
      }
    });
    setBorder(BorderFactory.createLineBorder(IpnbEditorUtil.getBackground()));
  }

  @NotNull
  public IpnbFileEditor getFileEditor() {
    return myParent;
  }

  /** @return the editor backing the source area of this cell. */
  public Editor getEditor() {
    return myCodeSourcePanel.getEditor();
  }

  /**
   * Adds a prompt ("In [n]"/"Out [n]") plus content component and remembers the
   * content component so follow-up stream output can be merged into it.
   */
  public void addPromptPanel(@NotNull final JComponent parent, Integer promptNumber,
                             @NotNull final IpnbEditorUtil.PromptType promptType,
                             @NotNull final JComponent component) {
    super.addPromptPanel(parent, promptNumber, promptType, component);
    myLastAddedPanel = component;
  }

  @Override
  protected JComponent createViewPanel() {
    final JPanel panel = new JPanel(new VerticalFlowLayout(VerticalFlowLayout.TOP));
    panel.setBackground(IpnbEditorUtil.getBackground());
    panel.add(createCodeComponent());
    myHideableOutputPanel = new HideableOutputPanel();
    panel.add(myHideableOutputPanel);
    return panel;
  }

  @Override
  protected void addRightClickMenu() {
    // Context menu on the output area currently offers only "hide output".
    myHideableOutputPanel.addMouseListener(new MouseAdapter() {
      @Override
      public void mousePressed(MouseEvent e) {
        if (SwingUtilities.isRightMouseButton(e) && e.getClickCount() == 1) {
          final ListPopup menu = createPopupMenu(new DefaultActionGroup(new IpnbHideOutputAction(IpnbCodePanel.this)));
          menu.show(RelativePoint.fromScreen(e.getLocationOnScreen()));
        }
      }
    });
  }

  /** Releases UI sub-components; the panel must not be used afterwards. */
  public void dispose() {
    removeAll();
    myLastAddedPanel = null;
    myCodeSourcePanel.dispose();
    myCodeSourcePanel = null;
    myHideableOutputPanel.removeAll();
    myHideableOutputPanel = null;
    myViewPanel.removeAll();
    myViewPanel = null;
  }

  /**
   * Splitter that shows either the cell output (expanded) or a thin toggle bar
   * (collapsed). The initial state comes from the cell's "collapsed" metadata.
   */
  class HideableOutputPanel extends OnePixelSplitter {
    final JPanel myToggleBar;
    final JPanel myOutputComponent;

    public HideableOutputPanel() {
      super(true);
      myToggleBar = createToggleBar(this);
      myOutputComponent = createOutputPanel();
      final Map<String, Object> metadata = myCell.getMetadata();
      if (metadata.containsKey(COLLAPSED_METADATA)) {
        final boolean isCollapsed = (Boolean)metadata.get(COLLAPSED_METADATA);
        // Only show the collapsed toggle bar when there is actual output to reveal.
        if (isCollapsed && !myCell.getCellOutputs().isEmpty()) {
          setFirstComponent(myToggleBar);
          return;
        }
      }
      setSecondComponent(myOutputComponent);
    }

    /** Collapses the output area and records the state in the cell metadata. */
    public void hideOutputPanel() {
      setOutputStateInCell(true);
      setFirstComponent(myToggleBar);
      setSecondComponent(null);
    }
  }

  @NotNull
  private JPanel createCodeComponent() {
    myCodeSourcePanel = new IpnbCodeSourcePanel(myProject, this, myCell);
    final JPanel panel = new JPanel(new GridBagLayout());
    panel.setBackground(IpnbEditorUtil.getBackground());
    addPromptPanel(panel, myCell.getPromptNumber(), IpnbEditorUtil.PromptType.In, myCodeSourcePanel);
    final JPanel topComponent = new JPanel(new BorderLayout());
    topComponent.add(panel, BorderLayout.PAGE_START);
    return topComponent;
  }

  @NotNull
  private JPanel createOutputPanel() {
    final JPanel outputPanel = new JPanel(new VerticalFlowLayout(VerticalFlowLayout.TOP, true, false));
    outputPanel.setBackground(IpnbEditorUtil.getBackground());
    for (IpnbOutputCell outputCell : myCell.getCellOutputs()) {
      // Only "Out" cells get an output prompt; stream/error/etc. render promptless.
      addOutputPanel(outputPanel, outputCell, outputCell instanceof IpnbOutOutputCell);
    }
    return outputPanel;
  }

  public void hideOutputPanel() {
    myHideableOutputPanel.hideOutputPanel();
  }

  /**
   * Listener for the collapsed toggle bar: left click re-expands the output,
   * hover highlights the bar.
   */
  @NotNull
  private MouseAdapter createShowOutputListener(final OnePixelSplitter splitter, final JPanel secondPanel, JLabel label) {
    return new MouseAdapter() {
      @Override
      public void mouseClicked(MouseEvent e) {
        if (SwingUtilities.isLeftMouseButton(e) && e.getClickCount() == 1) {
          showOutputPanel();
        }
      }

      @Override
      public void mouseEntered(MouseEvent e) {
        updateBackground(UIUtil.getListSelectionBackground());
      }

      @Override
      public void mouseExited(MouseEvent e) {
        updateBackground(IpnbEditorUtil.getBackground());
      }

      private void updateBackground(Color background) {
        secondPanel.setBackground(background);
        label.setBackground(background);
      }

      private void showOutputPanel() {
        setOutputStateInCell(false);
        updateBackground(IpnbEditorUtil.getBackground());
        splitter.setFirstComponent(null);
        // Rebuild the output panel from the cell model rather than reusing the
        // cached component, so outputs added while collapsed are shown.
        final JPanel outputPanel = createOutputPanel();
        splitter.setSecondComponent(outputPanel);
      }
    };
  }

  /** Persists the output fold state into the cell metadata ("collapsed" key). */
  private void setOutputStateInCell(boolean isCollapsed) {
    final Map<String, Object> metadata = myCell.getMetadata();
    // Use the shared constant instead of repeating the literal key.
    metadata.put(COLLAPSED_METADATA, isCollapsed);
  }

  private JPanel createToggleBar(OnePixelSplitter splitter) {
    final JPanel panel = new JPanel(new BorderLayout());
    final JLabel label = new JLabel(AllIcons.Actions.Down);
    panel.setBackground(IpnbEditorUtil.getBackground());
    label.setBackground(IpnbEditorUtil.getBackground());
    panel.add(label, BorderLayout.CENTER);
    panel.addMouseListener(createShowOutputListener(splitter, panel, label));
    return panel;
  }

  /**
   * Renders one output cell into {@code mainPanel}, dispatching on the concrete
   * output type. Consecutive stream outputs are appended to the previous
   * {@link IpnbStreamPanel} instead of creating a new component.
   */
  private void addOutputPanel(@NotNull final JComponent mainPanel,
                              @NotNull final IpnbOutputCell outputCell, boolean addPrompt) {
    final IpnbEditorUtil.PromptType promptType = addPrompt ? IpnbEditorUtil.PromptType.Out : IpnbEditorUtil.PromptType.None;
    final JPanel panel = new JPanel(new GridBagLayout());
    panel.setBackground(IpnbEditorUtil.getBackground());
    if (outputCell instanceof IpnbImageOutputCell) {
      addPromptPanel(panel, myCell.getPromptNumber(), promptType,
                     new IpnbImagePanel((IpnbImageOutputCell)outputCell, this));
    }
    else if (outputCell instanceof IpnbHtmlOutputCell) {
      addPromptPanel(panel, myCell.getPromptNumber(), promptType,
                     new IpnbHtmlPanel((IpnbHtmlOutputCell)outputCell, myParent.getIpnbFilePanel(), this));
    }
    else if (outputCell instanceof IpnbLatexOutputCell) {
      addPromptPanel(panel, myCell.getPromptNumber(), promptType,
                     new IpnbLatexPanel((IpnbLatexOutputCell)outputCell, myParent.getIpnbFilePanel(), this));
    }
    else if (outputCell instanceof IpnbErrorOutputCell) {
      addPromptPanel(panel, myCell.getPromptNumber(), promptType,
                     new IpnbErrorPanel((IpnbErrorOutputCell)outputCell, this));
    }
    else if (outputCell instanceof IpnbStreamOutputCell) {
      if (myLastAddedPanel instanceof IpnbStreamPanel) {
        // Merge into the previous stream panel; nothing new is added to mainPanel.
        ((IpnbStreamPanel)myLastAddedPanel).addOutput(outputCell);
        return;
      }
      else {
        addPromptPanel(panel, myCell.getPromptNumber(), IpnbEditorUtil.PromptType.None,
                       new IpnbStreamPanel((IpnbStreamOutputCell)outputCell, this));
      }
    }
    else if (outputCell.getSourceAsString() != null) {
      addPromptPanel(panel, myCell.getPromptNumber(), promptType,
                     new IpnbCodeOutputPanel<>(outputCell, myParent.getIpnbFilePanel(), this));
    }
    mainPanel.add(panel);
  }

  @Override
  public void switchToEditing() {
    setEditing(true);
    IdeFocusManager.findInstance().requestFocus(myCodeSourcePanel.getEditor().getContentComponent(), true);
  }

  /**
   * Sends the cell to the kernel for execution.
   *
   * @param selectNext whether to move selection to the next cell on completion
   */
  @Override
  public void runCell(boolean selectNext) {
    mySelectNext = selectNext;
    updateCellSource();
    updatePrompt();
    final IpnbConnectionManager connectionManager = IpnbConnectionManager.getInstance(myProject);
    connectionManager.executeCell(this);
    setEditing(false);
  }

  @Override
  public boolean isModified() {
    return true;
  }

  /** Copies the editor document text back into the cell model, split by lines. */
  @Override
  public void updateCellSource() {
    final Document document = myCodeSourcePanel.getEditor().getDocument();
    final String text = document.getText();
    myCell.setSource(Arrays.asList(StringUtil.splitByLinesKeepSeparators(text)));
  }

  public void updatePrompt() {
    final Application application = ApplicationManager.getApplication();
    application.invokeAndWait(() -> {
      // -1 marks the cell as "executing"; the label rendering is up to
      // IpnbEditorUtil.prompt — presumably an "In [*]"-style placeholder (TODO confirm).
      myCell.setPromptNumber(-1);
      final String promptText = IpnbEditorUtil.prompt(myCell.getPromptNumber(), IpnbEditorUtil.PromptType.In);
      myPromptLabel.setText(promptText);
    }, ModalityState.stateForComponent(this));
  }

  /** Called when the kernel reports execution finished; updates prompt and focus. */
  public void finishExecution() {
    final Application application = ApplicationManager.getApplication();
    application.invokeAndWait(() -> {
      final String promptText = IpnbEditorUtil.prompt(myCell.getPromptNumber(), IpnbEditorUtil.PromptType.In);
      myPromptLabel.setText(promptText);
      final IpnbFilePanel filePanel = myParent.getIpnbFilePanel();
      setEditing(false);
      IdeFocusManager.findInstance().requestFocus(filePanel, true);
      if (mySelectNext) {
        filePanel.selectNext(this, true);
      }
      if (myOnFinish != null) {
        myOnFinish.run();
        myOnFinish = null;
      }
    }, ModalityState.stateForComponent(this));
  }

  /**
   * Applies a kernel update to this cell.
   * <p>
   * Both arguments {@code null} means "clear": outputs are removed and the view
   * rebuilt. A non-null {@code replacementContent} replaces the cell source; a
   * non-null {@code outputContent} is appended to the (visible) output area.
   */
  public void updatePanel(@Nullable final String replacementContent, @Nullable final IpnbOutputCell outputContent) {
    final Application application = ApplicationManager.getApplication();
    application.invokeAndWait(() -> {
      if (replacementContent == null && outputContent == null) {
        myCell.removeCellOutputs();
        myViewPanel.removeAll();
        application.runReadAction(() -> {
          final JComponent panel = createViewPanel();
          myViewPanel.add(panel);
          String prompt = IpnbEditorUtil.prompt(-1, IpnbEditorUtil.PromptType.In);
          myPromptLabel.setText(prompt);
        });
      }
      if (replacementContent != null) {
        myCell.setSource(Arrays.asList(StringUtil.splitByLinesKeepSeparators(replacementContent)));
        String prompt = IpnbEditorUtil.prompt(null, IpnbEditorUtil.PromptType.In);
        myCell.setPromptNumber(null);
        myPromptLabel.setText(prompt);
        application.runWriteAction(() -> myCodeSourcePanel.getEditor().getDocument().setText(replacementContent));
      }
      if (outputContent != null) {
        myCell.addCellOutput(outputContent);
        final JComponent component = myHideableOutputPanel.getSecondComponent();
        // If the output area is collapsed (second component null), the model is
        // updated but nothing is rendered until it is expanded again.
        if (component != null) {
          addOutputPanel(component, outputContent, outputContent instanceof IpnbOutOutputCell);
        }
      }
    }, ModalityState.stateForComponent(this));
  }

  @Override
  public void updateCellView() {
    myViewPanel.removeAll();
    final JComponent panel = createViewPanel();
    myViewPanel.add(panel);
  }

  @Override
  public int getCaretPosition() {
    return myCodeSourcePanel.getEditor().getCaretModel().getOffset();
  }

  @Nullable
  @Override
  public String getText(int from, int to) {
    return myCodeSourcePanel.getEditor().getDocument().getText(new TextRange(from, to));
  }

  @Override
  public String getText(int from) {
    return getText(from, myCodeSourcePanel.getEditor().getDocument().getTextLength());
  }

  @SuppressWarnings({"CloneDoesntCallSuperClone", "CloneDoesntDeclareCloneNotSupportedException"})
  @Override
  protected Object clone() {
    return new IpnbCodePanel(myProject, myParent, (IpnbCodeCell)myCell.clone());
  }
}
package org.broadinstitute.hellbender.engine.filters;

import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMReadGroupRecord;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.read.ArtificialReadUtils;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.broadinstitute.hellbender.utils.read.ReadUtils;
import org.broadinstitute.hellbender.utils.test.BaseTest;
import org.testng.Assert;
import org.testng.annotations.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Unit tests for {@link ReadGroupBlackListReadFilter}: the filter's
 * {@code test} method returns {@code false} for reads whose read group (or
 * another read-group attribute such as platform unit) is black-listed.
 */
public final class ReadGroupBlackListReadFilterUnitTest extends BaseTest {

    private static final int CHR_COUNT = 1;
    private static final int CHR_START = 1;
    private static final int CHR_SIZE = 1000;
    private static final int GROUP_COUNT = 5;

    private SAMFileHeader header= ArtificialReadUtils.createArtificialSamHeaderWithGroups(CHR_COUNT, CHR_START, CHR_SIZE, GROUP_COUNT);

    @Test(expectedExceptions=UserException.class)
    public void testBadFilter() throws IOException {
        // Entry without a "TAG:" prefix must be rejected.
        List<String> badFilters = Collections.singletonList("bad");
        new ReadGroupBlackListReadFilter(badFilters, header);
    }

    @Test(expectedExceptions=UserException.class)
    public void testBadFilterTag() throws IOException {
        // Unknown tag name must be rejected.
        List<String> badFilters = Collections.singletonList("bad:filter");
        new ReadGroupBlackListReadFilter(badFilters, header);
    }

    @Test(expectedExceptions=UserException.class)
    public void testBadFilterFile() throws IOException {
        // Non-existent black-list file must be rejected.
        List<String> badFilters = Collections.singletonList("/foo/bar/rgbl.txt");
        new ReadGroupBlackListReadFilter(badFilters, header);
    }

    private String getReadGroupId(final int index) {
        return header.getReadGroups().get(index).getReadGroupId();
    }

    private String getPlatformUnit(final int index) {
        return header.getReadGroups().get(index).getPlatformUnit();
    }

    /**
     * Builds {@code recordsPerGroup} artificial reads for each read group in
     * the header, with strictly increasing alignment starts.
     */
    private List<GATKRead> createRecordsForAllGroups(final int recordsPerGroup) {
        final List<GATKRead> records = new ArrayList<>();
        int alignmentStart = 0;
        for (int x = 0; x < GROUP_COUNT; x++) {
            final SAMReadGroupRecord groupRecord = header.getReadGroup(getReadGroupId(x));
            for (int y = 1; y <= recordsPerGroup; y++) {
                final GATKRead record = ArtificialReadUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20);
                record.setReadGroup(groupRecord.getReadGroupId());
                records.add(record);
            }
        }
        return records;
    }

    @Test
    public void testFilterReadGroup() throws IOException {
        GATKRead filteredRecord = ArtificialReadUtils.createArtificialRead(header, "readUno", 0, 1, 20);
        filteredRecord.setReadGroup(getReadGroupId(1));

        GATKRead unfilteredRecord = ArtificialReadUtils.createArtificialRead(header, "readDos", 0, 2, 20);
        unfilteredRecord.setReadGroup(getReadGroupId(2));

        List<String> filterList = new ArrayList<>();
        filterList.add("RG:" + getReadGroupId(1));

        ReadGroupBlackListReadFilter filter = new ReadGroupBlackListReadFilter(filterList, header);
        // test() == false means the read is filtered out.
        Assert.assertFalse(filter.test(filteredRecord));
        Assert.assertTrue(filter.test(unfilteredRecord));
    }

    @Test
    public void testFilterPlatformUnit() throws IOException {
        GATKRead filteredRecord = ArtificialReadUtils.createArtificialRead(header, "readUno", 0, 1, 20);
        filteredRecord.setReadGroup(getReadGroupId(1));

        GATKRead unfilteredRecord = ArtificialReadUtils.createArtificialRead(header, "readDos", 0, 2, 20);
        unfilteredRecord.setReadGroup(getReadGroupId(2));

        List<String> filterList = new ArrayList<>();
        filterList.add("PU:" + getPlatformUnit(1));

        ReadGroupBlackListReadFilter filter = new ReadGroupBlackListReadFilter(filterList, header);
        Assert.assertFalse(filter.test(filteredRecord));
        Assert.assertTrue(filter.test(unfilteredRecord));
    }

    @Test
    public void testFilterOutByReadGroup() throws IOException {
        int recordsPerGroup = 3;
        List<GATKRead> records = createRecordsForAllGroups(recordsPerGroup);

        List<String> filterList = new ArrayList<>();
        filterList.add("RG:" + getReadGroupId(1));
        filterList.add("RG:" + getReadGroupId(3));

        ReadGroupBlackListReadFilter filter = new ReadGroupBlackListReadFilter(filterList, header);
        int filtered = 0;
        int unfiltered = 0;
        for (GATKRead record : records) {
            String readGroupName = record.getReadGroup();
            if (!filter.test(record)) {
                if (!filterList.contains("RG:" + readGroupName))
                    Assert.fail("Read group " + readGroupName + " was filtered");
                filtered++;
            } else {
                if (filterList.contains("RG:" + readGroupName))
                    Assert.fail("Read group " + readGroupName + " was not filtered");
                unfiltered++;
            }
        }

        int filteredExpected = recordsPerGroup * 2;
        int unfilteredExpected = recordsPerGroup * (GROUP_COUNT - 2);
        Assert.assertEquals(filtered, filteredExpected, "Filtered");
        Assert.assertEquals(unfiltered, unfilteredExpected, "Unfiltered");
    }

    @Test
    public void testFilterOutByAttribute() throws IOException {
        int recordsPerGroup = 3;
        List<GATKRead> records = createRecordsForAllGroups(recordsPerGroup);

        List<String> filterList = new ArrayList<>();
        filterList.add("PU:" + getPlatformUnit(1));

        ReadGroupBlackListReadFilter filter = new ReadGroupBlackListReadFilter(filterList, header);
        int filtered = 0;
        int unfiltered = 0;
        for (GATKRead record : records) {
            String platformUnit = ReadUtils.getPlatformUnit(record, header);
            if (!filter.test(record)) {
                if (!filterList.contains("PU:" + platformUnit))
                    Assert.fail("Platform unit " + platformUnit + " was filtered");
                filtered++;
            } else {
                if (filterList.contains("PU:" + platformUnit))
                    Assert.fail("Platform unit " + platformUnit + " was not filtered");
                unfiltered++;
            }
        }

        // NOTE(review): original hard-coded 6 and 9; expressed here in terms of the
        // test parameters (3 * 2 and 3 * (5 - 2)) for consistency with the sibling tests.
        int filteredExpected = recordsPerGroup * 2;
        int unfilteredExpected = recordsPerGroup * (GROUP_COUNT - 2);
        Assert.assertEquals(filtered, filteredExpected, "Filtered");
        Assert.assertEquals(unfiltered, unfilteredExpected, "Unfiltered");
    }

    @Test
    public void testFilterOutByFile() throws IOException {
        int recordsPerGroup = 3;
        List<GATKRead> records = createRecordsForAllGroups(recordsPerGroup);

        List<String> filterList = new ArrayList<>();
        filterList.add(publicTestDir + "readgroupblacklisttest.txt");

        ReadGroupBlackListReadFilter filter = new ReadGroupBlackListReadFilter(filterList, header);
        int filtered = 0;
        int unfiltered = 0;
        for (GATKRead record : records) {
            String readGroup = record.getReadGroup();
            if (!filter.test(record)) {
                if (!("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup)))
                    Assert.fail("Read group " + readGroup + " was filtered");
                filtered++;
            } else {
                if ("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup))
                    Assert.fail("Read group " + readGroup + " was not filtered");
                unfiltered++;
            }
        }

        int filteredExpected = recordsPerGroup * 2;
        int unfilteredExpected = recordsPerGroup * (GROUP_COUNT - 2);
        Assert.assertEquals(filtered, filteredExpected, "Filtered");
        Assert.assertEquals(unfiltered, unfilteredExpected, "Unfiltered");
    }

    @Test
    public void testFilterOutByListFile() throws IOException {
        int recordsPerGroup = 3;
        List<GATKRead> records = createRecordsForAllGroups(recordsPerGroup);

        List<String> filterList = new ArrayList<>();
        filterList.add(publicTestDir + "readgroupblacklisttestlist.txt");

        ReadGroupBlackListReadFilter filter = new ReadGroupBlackListReadFilter(filterList, header);
        int filtered = 0;
        int unfiltered = 0;
        for (GATKRead record : records) {
            String readGroup = record.getReadGroup();
            if (!filter.test(record)) {
                if (!("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup)))
                    Assert.fail("Read group " + readGroup + " was filtered");
                filtered++;
            } else {
                if ("ReadGroup3".equals(readGroup) || "ReadGroup4".equals(readGroup))
                    Assert.fail("Read group " + readGroup + " was not filtered");
                unfiltered++;
            }
        }

        int filteredExpected = recordsPerGroup * 2;
        int unfilteredExpected = recordsPerGroup * (GROUP_COUNT - 2);
        Assert.assertEquals(filtered, filteredExpected, "Filtered");
        Assert.assertEquals(unfiltered, unfilteredExpected, "Unfiltered");
    }
}
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2015 Lachlan Dowding
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package permafrost.tundra.lang;

import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;

/**
 * Static utility methods for working with {@link Class} objects: name-to-class
 * resolution, array-class construction, and ancestor (superclass + interface)
 * traversal.
 */
public final class ClassHelper {
    /**
     * Disallow instantiation of this class.
     */
    private ClassHelper() {}

    /**
     * Converts the given object to a Class.
     *
     * @param object The object to be converted: either a Class (returned as is)
     *               or a String class name resolved via {@link Class#forName(String)}.
     * @return The converted object, or null when the object is neither a Class nor a String.
     * @throws RuntimeException Wrapping a ClassNotFoundException when a String name cannot be resolved.
     */
    public static Class normalize(Object object) {
        Class value = null;
        if (object instanceof Class) {
            value = (Class)object;
        } else if (object instanceof String) {
            try {
                value = Class.forName((String)object);
            } catch(ClassNotFoundException ex) {
                throw new RuntimeException(ex);
            }
        }
        return value;
    }

    /**
     * Returns an array of Class objects associated with the class or interface with the given names.
     * Null entries in the input are left null in the output.
     *
     * @param classNames A list of class or interface names.
     * @return A list of Class objects associated with the given names, or null when the input is null.
     * @throws ClassNotFoundException If a class with the given name cannot be found.
     */
    public static Class[] forName(String[] classNames) throws ClassNotFoundException {
        if (classNames == null) return null;

        Class[] classes = new Class[classNames.length];
        for (int i = 0; i < classNames.length; i++) {
            if (classNames[i] != null) {
                classes[i] = Class.forName(classNames[i]);
            }
        }
        return classes;
    }

    /**
     * Returns the one dimensional array class associated with the given component class.
     *
     * @param componentClass The component class to use for the array class.
     * @param <T>            The component class of the array.
     * @return The one dimensional array class associated with the given component class.
     */
    public static <T> Class<?> getArrayClass(Class<T> componentClass) {
        return getArrayClass(componentClass, 1);
    }

    /**
     * Returns the array class associated with the given component class and number of dimensions.
     *
     * @param componentClass The component class to use for the array class.
     * @param dimensions     The array dimensions to use.
     * @param <T>            The component class of the array.
     * @return The array class associated with the given component class and number of dimensions.
     * @throws IllegalArgumentException If dimensions is less than 1.
     */
    public static <T> Class<?> getArrayClass(Class<T> componentClass, int dimensions) {
        if (dimensions < 1) throw new IllegalArgumentException("array dimensions must be >= 1");

        // A zero-length instance per dimension is enough to obtain the array class;
        // Java int arrays are zero-initialized, so no explicit fill is needed.
        int[] dimensionArray = new int[dimensions];
        return Array.newInstance(componentClass, dimensionArray).getClass();
    }

    /**
     * Converts the given list of class names to a list of classes.
     * Unlike {@link #forName(String[])}, null entries are not tolerated here.
     *
     * @param classNames A list of class names.
     * @return A list of classes that correspond to the given names, or null when the input is null.
     * @throws ClassNotFoundException If a class name cannot be not found.
     */
    static Class<?>[] toClassArray(String[] classNames) throws ClassNotFoundException {
        if (classNames == null) return null;

        Class<?>[] classes = new Class<?>[classNames.length];
        for (int i = 0; i < classes.length; i++) {
            classes[i] = Class.forName(classNames[i]);
        }
        return classes;
    }

    /**
     * Returns all the ancestor classes from nearest to furthest for the given class.
     * <p>
     * The traversal is breadth-first over superclasses and implemented interfaces,
     * and the returned set includes the class itself as its first element. The
     * LinkedHashSet preserves nearest-to-furthest order, which
     * {@link #getNearestAncestor(Set)} relies on.
     *
     * @param klass A class to fetch the ancestors of.
     * @return All the ancestor classes from nearest to furthest for the given class.
     */
    static Set<Class<?>> getAncestors(Class<?> klass) {
        Set<Class<?>> ancestors = new LinkedHashSet<Class<?>>();
        Set<Class<?>> parents = new LinkedHashSet<Class<?>>();

        parents.add(klass);

        do {
            ancestors.addAll(parents);

            Set<Class<?>> children = new LinkedHashSet<Class<?>>(parents);
            parents.clear();

            for (Class<?> child : children) {
                Class<?> parent = child.getSuperclass();
                if (parent != null) parents.add(parent);

                Collections.addAll(parents, child.getInterfaces());
            }
        } while (!parents.isEmpty());

        return ancestors;
    }

    /**
     * Returns all the common ancestor classes from the given set of class names.
     *
     * @param classNames A list of class names.
     * @return All the common ancestor classes from the given set of class names.
     * @throws ClassNotFoundException If a class name cannot be found.
     */
    public static Class<?>[] getAncestors(String... classNames) throws ClassNotFoundException {
        return getAncestors(toClassArray(classNames));
    }

    /**
     * Returns all the common ancestor classes from the given set of classes.
     *
     * @param classes A list of classes.
     * @return All the common ancestor classes from the given set of class names.
     */
    public static Class<?>[] getAncestors(Class<?>... classes) {
        Set<Class<?>> ancestors = getAncestors(Arrays.asList(classes));
        return ancestors.toArray(new Class<?>[0]);
    }

    /**
     * Returns all the common ancestor classes from the given set of classes,
     * computed as the intersection of each class's ancestor set.
     *
     * @param classes A collection of classes.
     * @return All the common ancestor classes from the given set of class names.
     */
    public static Set<Class<?>> getAncestors(Collection<Class<?>> classes) {
        Set<Class<?>> intersection = new LinkedHashSet<Class<?>>();

        // Seed from the first class only; the previous size()==0 check re-seeded the
        // intersection whenever it became empty mid-iteration (e.g. an interface's
        // ancestor set shares nothing with an unrelated class), which wrongly
        // reported the remaining classes' ancestors as "common".
        boolean first = true;
        for (Class<?> klass : classes) {
            if (first) {
                intersection.addAll(getAncestors(klass));
                first = false;
            } else {
                intersection.retainAll(getAncestors(klass));
            }
        }

        return intersection;
    }

    /**
     * Returns the nearest class which is an ancestor to all the classes in the given set.
     *
     * @param classNames A set of class names for which the nearest ancestor will be returned.
     * @return The nearest ancestor class which is an ancestor to all the classes in the given
     *         set.
     * @throws ClassNotFoundException If any of the class names cannot be found.
     */
    public static Class<?> getNearestAncestor(String... classNames) throws ClassNotFoundException {
        return getNearestAncestor(toClassArray(classNames));
    }

    /**
     * Returns the nearest class which is an ancestor to all the classes in the given set.
     *
     * @param classes A set of classes for which the nearest ancestor will be returned.
     * @return The nearest ancestor class which is an ancestor to all the classes in the given set.
     */
    public static Class<?> getNearestAncestor(Class<?>... classes) {
        // NOTE(review): this delegates to ObjectHelper rather than to the local
        // getAncestors/getNearestAncestor(Set) overload — verify this indirection is
        // intentional and does not mutually recurse with ObjectHelper.
        return ObjectHelper.getNearestAncestor(Arrays.asList(classes));
    }

    /**
     * Returns the nearest class which is an ancestor to all the classes in the given set.
     * Falls back to {@link Object} when no common ancestor exists.
     *
     * @param classes A set of classes for which the nearest ancestor will be returned.
     * @return The nearest ancestor class which is an ancestor to all the classes in the given set.
     */
    public static Class<?> getNearestAncestor(Set<Class<?>> classes) {
        Class<?> nearest = null;

        // getAncestors orders each class's ancestors nearest-first, so the first
        // element of the intersection is the nearest common ancestor.
        Set<Class<?>> ancestors = getAncestors(classes);
        if (ancestors.size() > 0) {
            nearest = ancestors.iterator().next();
        }

        if (nearest == null) nearest = Object.class;

        return nearest;
    }
}
package hudson.plugins.git;

import hudson.EnvVars;
import hudson.Extension;
import hudson.model.AbstractDescribableImpl;
import hudson.model.Descriptor;
import org.apache.commons.lang.StringUtils;
import org.kohsuke.stapler.DataBoundConstructor;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;
import edu.umd.cs.findbugs.annotations.NonNull;

/**
 * A specification of branches to build. Rather like a refspec.
 *
 * eg:
 * <pre>
 * master
 * origin/master
 * origin/&#42;
 * origin/&#42;/thing
 * </pre>
 */
@ExportedBean
public class BranchSpec extends AbstractDescribableImpl<BranchSpec> implements Serializable {
    private static final long serialVersionUID = -6177158367915899356L;

    // The user-supplied branch pattern; may contain "*"/"**" wildcards,
    // environment-variable references, or (when prefixed with ":") a raw regex.
    private String name;

    @Exported
    public String getName() {
        return name;
    }

    /**
     * Sets the branch pattern. Null is rejected; an empty string is normalized
     * to the match-everything pattern "**"; otherwise the value is trimmed.
     */
    public void setName(String name) {
        if(name == null)
            throw new IllegalArgumentException();
        else if(name.length() == 0)
            this.name = "**";
        else
            this.name = name.trim();
    }

    @DataBoundConstructor
    public BranchSpec(String name) {
        setName(name);
    }

    public String toString() {
        return name;
    }

    /**
     * Convenience overload of {@link #matches(String, EnvVars)} using an empty
     * environment (no variable expansion).
     */
    public boolean matches(String item) {
        EnvVars env = new EnvVars();
        return matches(item, env);
    }

    /**
     * Compare a git branch reference to configured pattern.
     * <p>
     * reference uses normalized format `ref/(heads|tags)/xx`
     * pattern do support
     * <ul>
     * <li>ref/heads/branch</li>
     * <li>(remote-name)?/branch</li>
     * <li>ref/remotes/branch</li>
     * <li>tag</li>
     * <li>(commit sha1)</li>
     * </ul>
     * @param ref branch reference to compare
     * @param env environment variables to use in comparison
     * @return true if ref matches configured pattern
     */
    public boolean matches(String ref, EnvVars env) {
        return getPattern(env).matcher(ref).matches();
    }

    /**
     * Compare the configured pattern to a git branch defined by the repository name and branch name.
     * @param repositoryName git repository name
     * @param branchName git branch name
     * @return true if repositoryName/branchName matches this BranchSpec
     */
    public boolean matchesRepositoryBranch(String repositoryName, String branchName) {
        if (branchName == null) {
            return false;
        }
        Pattern pattern = getPattern(new EnvVars(), repositoryName);
        String branchWithoutRefs = cutRefs(branchName);
        // Accept either the bare branch name or the "repository/branch" form.
        return pattern.matcher(branchWithoutRefs).matches() || pattern.matcher(join(repositoryName, branchWithoutRefs)).matches();
    }

    /**
     * @deprecated use {@link #filterMatching(Collection, EnvVars)}
     * @param branches source branch list to be filtered by configured branch specification using a newly constructed EnvVars
     * @return branch names which match
     */
    public List<String> filterMatching(Collection<String> branches) {
        EnvVars env = new EnvVars();
        return filterMatching(branches, env);
    }

    /**
     * Returns the subset of the given branch names that match this spec under
     * the given environment.
     */
    public List<String> filterMatching(Collection<String> branches, EnvVars env) {
        List<String> items = new ArrayList<>();

        for(String b : branches) {
            if(matches(b, env))
                items.add(b);
        }

        return items;
    }

    /**
     * Convenience overload of {@link #filterMatchingBranches(Collection, EnvVars)}
     * using an empty environment.
     */
    public List<Branch> filterMatchingBranches(Collection<Branch> branches) {
        EnvVars env = new EnvVars();
        return filterMatchingBranches(branches, env);
    }

    /** Returns the subset of the given branches whose names match this spec. */
    public List<Branch> filterMatchingBranches(Collection<Branch> branches, EnvVars env) {
        List<Branch> items = new ArrayList<>();

        for(Branch b : branches) {
            if(matches(b.getName(), env))
                items.add(b);
        }

        return items;
    }

    /**
     * Expands environment variables in the configured name; an expansion that
     * yields the empty string is treated as match-everything ("**").
     */
    private String getExpandedName(EnvVars env) {
        String expandedName = env.expand(name);
        if (expandedName.length() == 0) {
            return "**";
        }
        return expandedName;
    }

    private Pattern getPattern(EnvVars env) {
        return getPattern(env, null);
    }

    /**
     * Builds the regex used for matching.
     * <p>
     * Three cases, in order:
     * <ol>
     * <li>name starting with ":" — the remainder is used as a raw regex;</li>
     * <li>{@code repositoryName != null} — the spec is normalized (refs/ and
     *     leading remotes/ stripped) and wildcards converted, for matching
     *     against "branch" or "repository/branch" forms;</li>
     * <li>otherwise — a prefix alternation is prepended so the spec matches the
     *     various legacy ref syntaxes (refs/heads/, refs/remotes/, remotes/,
     *     or a bare remote name for unqualified branch specs).</li>
     * </ol>
     */
    private Pattern getPattern(EnvVars env, String repositoryName) {
        String expandedName = getExpandedName(env);
        // use regex syntax directly if name starts with colon
        if (expandedName.startsWith(":") && expandedName.length() > 1) {
            String regexSubstring = expandedName.substring(1, expandedName.length());
            return Pattern.compile(regexSubstring);
        }

        if (repositoryName != null) {
            // remove the "refs/.../" stuff from the branch-spec if necessary
            String pattern = cutRefs(expandedName)
                    // remove a leading "remotes/" from the branch spec
                    .replaceAll("^remotes/", "");
            pattern = convertWildcardStringToRegex(pattern);
            return Pattern.compile(pattern);
        }

        // build a pattern into this builder
        StringBuilder builder = new StringBuilder();

        // for legacy reasons (sic) we do support various branch spec format to declare remotes / branches
        builder.append("(refs/heads/");

        // if an unqualified branch was given, consider all remotes (with various possible syntaxes)
        // so it will match branches from any remote repositories as the user probably intended
        if (!expandedName.contains("**") && !expandedName.contains("/")) {
            builder.append("|refs/remotes/[^/]+/|remotes/[^/]+/|[^/]+/");
        } else {
            builder.append("|refs/remotes/|remotes/");
        }
        builder.append(")?");
        builder.append(convertWildcardStringToRegex(expandedName));
        return Pattern.compile(builder.toString());
    }

    /**
     * Converts a wildcard branch spec into a regex: "**" matches any characters
     * (including "/"), a single "*" matches any characters except "/", and all
     * literal text is regex-quoted.
     */
    private String convertWildcardStringToRegex(String expandedName) {
        StringBuilder builder = new StringBuilder();

        // was the last token a wildcard?
        boolean foundWildcard = false;

        // split the string at the wildcards
        StringTokenizer tokenizer = new StringTokenizer(expandedName, "*", true);
        while (tokenizer.hasMoreTokens()) {
            String token = tokenizer.nextToken();

            // is this token is a wildcard?
            if (token.equals("*")) {
                // yes, was the previous token a wildcard?
                if (foundWildcard) {
                    // yes, we found "**"
                    // match over any number of characters
                    builder.append(".*");
                    foundWildcard = false;
                } else {
                    // no, set foundWildcard to true and go on
                    foundWildcard = true;
                }
            } else {
                // no, was the previous token a wildcard?
                if (foundWildcard) {
                    // yes, we found "*" followed by a non-wildcard
                    // match any number of characters other than a "/"
                    builder.append("[^/]*");
                    foundWildcard = false;
                }
                // quote the non-wildcard token before adding it to the phrase
                builder.append(Pattern.quote(token));
            }
        }

        // if the string ended with a wildcard add it now
        if (foundWildcard) {
            builder.append("[^/]*");
        }

        return builder.toString();
    }

    /** Strips a leading "refs/…/" prefix, as recognized by GitSCM.GIT_REF. */
    private String cutRefs(@NonNull String name) {
        Matcher matcher = GitSCM.GIT_REF.matcher(name);
        return matcher.matches() ? matcher.group(2) : name;
    }

    /** Joins repository and branch into the "repository/branch" form. */
    private String join(String repositoryName, String branchWithoutRefs) {
        return StringUtils.join(Arrays.asList(repositoryName, branchWithoutRefs), "/");
    }

    @Extension
    public static class DescriptorImpl extends Descriptor<BranchSpec> {
        @Override
        public String getDisplayName() {
            return "Branch Spec";
        }
    }
}
package com.esri.gpt.control.filter;

import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Enumeration;
import java.util.Locale;
import java.util.Vector;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;

/**
 * Servlet filter that lets the effective request {@link Locale} be supplied by
 * a cookie or a session attribute instead of the Accept-Language header.
 * The strategy is chosen via the {@code locale-method} init parameter with the
 * syntax {@code cookie:<cookieName>} or {@code session:<attributeName>}; when
 * the parameter is absent or malformed, requests pass through unchanged.
 *
 * @author Esri
 */
public class LocaleFilter implements Filter {

  // NOTE(review): currently unused; kept for source compatibility.
  private static final boolean debug = true;

  // The filter configuration object we are associated with. If
  // this value is null, this filter instance is not currently
  // configured.
  private FilterConfig filterConfig = null;

  // Strategy resolving the user-selected locale; null means "do not wrap requests".
  private ILocaleMethod localeMethod;

  /** Default constructor. */
  public LocaleFilter() {
  }

  /**
   * Wraps the request (when a locale method is configured) so that
   * getLocale()/getLocales() report the user-selected locale, then continues
   * the filter chain.
   *
   * @param request The servlet request we are processing
   * @param response The servlet response we are creating
   * @param chain The filter chain we are processing
   *
   * @exception IOException if an input/output error occurs
   * @exception ServletException if a servlet error occurs
   */
  @Override
  public void doFilter(ServletRequest request, ServletResponse response,
      FilterChain chain) throws IOException, ServletException {
    Throwable problem = null;
    try {
      ServletRequest wrapedRequest = request;
      if (localeMethod != null) {
        wrapedRequest = new LocaleWrapperHttpServletRequest((HttpServletRequest) request, localeMethod);
      }
      chain.doFilter(wrapedRequest, response);
    } catch (Throwable t) {
      // If an exception is thrown somewhere down the filter chain,
      // we still want to execute our after processing, and then
      // rethrow the problem after that.
      problem = t;
      t.printStackTrace();
    }
    // If there was a problem, we want to rethrow it if it is
    // a known type, otherwise log it.
    if (problem != null) {
      if (problem instanceof ServletException) {
        throw (ServletException) problem;
      }
      if (problem instanceof IOException) {
        throw (IOException) problem;
      }
      sendProcessingError(problem, response);
    }
  }

  /**
   * Request wrapper that overrides "getLocale" method.
   */
  private static class LocaleWrapperHttpServletRequest extends HttpServletRequestWrapper {

    final private HttpServletRequest httpServletRequest;
    final private ILocaleMethod localeMethod;
    // Container-provided fallbacks, captured at wrap time.
    final private Locale defaultLocale;
    final private Enumeration<Locale> defaultLocales;

    /**
     * Constructor.
     *
     * @param request The HTTP servlet request.
     * @param localeMethod strategy used to resolve the user-selected locale
     */
    public LocaleWrapperHttpServletRequest(HttpServletRequest request, ILocaleMethod localeMethod) {
      super(request);
      this.httpServletRequest = request;
      this.localeMethod = localeMethod;
      this.defaultLocale = request.getLocale();
      this.defaultLocales = request.getLocales();
    }

    /** Returns the user-selected locale, or the container default when none resolves. */
    @Override
    public Locale getLocale() {
      Locale locale = localeMethod.getLocale(this);
      if (locale == null) {
        return defaultLocale;
      }
      return locale;
    }

    /** Returns a single-element enumeration of the user-selected locale, or the container default list. */
    @Override
    public Enumeration<Locale> getLocales() {
      Locale locale = localeMethod.getLocale(this);
      if (locale == null) {
        return defaultLocales;
      }
      Vector<Locale> locales = new Vector<Locale>();
      locales.add(locale);
      return locales.elements();
    }
  }

  /**
   * Return the filter configuration object for this filter.
   */
  public FilterConfig getFilterConfig() {
    return (this.filterConfig);
  }

  /**
   * Set the filter configuration object for this filter.
   *
   * @param filterConfig The filter configuration object
   */
  public void setFilterConfig(FilterConfig filterConfig) {
    this.filterConfig = filterConfig;
  }

  /**
   * Destroy method for this filter
   */
  @Override
  public void destroy() {
  }

  /**
   * Init method for this filter; reads the "locale-method" init parameter.
   */
  @Override
  public void init(FilterConfig filterConfig) {
    this.filterConfig = filterConfig;
    if (filterConfig != null) {
      String method = filterConfig.getInitParameter("locale-method");
      this.localeMethod = parseLocaleMethod(method);
    }
  }

  /**
   * Parses a "<type>:<name>" locale-method declaration.
   * @param method the raw init parameter ("cookie:..." or "session:...")
   * @return the strategy, or null when the declaration is absent or malformed
   */
  private ILocaleMethod parseLocaleMethod(String method) {
    ILocaleMethod locMeth = null;
    if (method != null && !method.isEmpty()) {
      String[] parts = method.split(":");
      if (parts.length == 2) {
        if (parts[0].equals("cookie")) {
          locMeth = new CookieLocaleMethod(parts[1]);
        } else if (parts[0].equals("session")) {
          locMeth = new SessionLocaleMethod(parts[1]);
        }
      }
    }
    return locMeth;
  }

  /**
   * Return a String representation of this object.
   */
  @Override
  public String toString() {
    if (filterConfig == null) {
      return ("LocaleFilter()");
    }
    StringBuilder sb = new StringBuilder("LocaleFilter(");
    sb.append(filterConfig);
    sb.append(")");
    return (sb.toString());
  }

  /**
   * Writes a minimal HTML error page (or, failing that, the raw stack trace)
   * to the response. Best-effort: the response may already be committed, so
   * failures here are intentionally ignored.
   */
  private void sendProcessingError(Throwable t, ServletResponse response) {
    String stackTrace = getStackTrace(t);
    if (stackTrace != null && !stackTrace.equals("")) {
      try {
        response.setContentType("text/html");
        PrintStream ps = new PrintStream(response.getOutputStream());
        PrintWriter pw = new PrintWriter(ps);
        pw.print("<html>\n<head>\n<title>Error</title>\n</head>\n<body>\n"); //NOI18N
        // PENDING! Localize this for next official release
        pw.print("<h1>The resource did not process correctly</h1>\n<pre>\n");
        pw.print(stackTrace);
        pw.print("</pre></body>\n</html>"); //NOI18N
        pw.close();
        ps.close();
        response.getOutputStream().close();
      } catch (Exception ignored) {
        // best-effort error page; nothing sensible to do if the response is gone
      }
    } else {
      try {
        PrintStream ps = new PrintStream(response.getOutputStream());
        t.printStackTrace(ps);
        ps.close();
        response.getOutputStream().close();
      } catch (Exception ignored) {
        // best-effort error page; nothing sensible to do if the response is gone
      }
    }
  }

  /** Renders a throwable's stack trace to a String, or null on failure. */
  private static String getStackTrace(Throwable t) {
    String stackTrace = null;
    try {
      StringWriter sw = new StringWriter();
      PrintWriter pw = new PrintWriter(sw);
      t.printStackTrace(pw);
      pw.close();
      sw.close();
      stackTrace = sw.getBuffer().toString();
    } catch (Exception ex) {
      // fall through and return null
    }
    return stackTrace;
  }

  /** Logs a message through the servlet context. */
  public void log(String msg) {
    filterConfig.getServletContext().log(msg);
  }

  /**
   * Parses "lang", "lang_COUNTRY" or "lang_COUNTRY_variant" into a Locale;
   * returns null for null input or more than three segments.
   */
  private Locale createLocaleFromString(String locale) {
    if (locale == null) {
      return null;
    }
    String[] elements = locale.split("_");
    if (elements == null) {
      return null;
    }
    switch (elements.length) {
      case 1:
        return new Locale(elements[0]);
      case 2:
        return new Locale(elements[0], elements[1]);
      case 3:
        return new Locale(elements[0], elements[1], elements[2]);
    }
    return null;
  }

  /** Strategy interface: extract the desired locale from a request. */
  private interface ILocaleMethod {
    Locale getLocale(HttpServletRequest request);
  }

  /** Resolves the locale from a cookie whose name ends with the configured name. */
  private class CookieLocaleMethod implements ILocaleMethod {

    private String name;

    public CookieLocaleMethod(String name) {
      this.name = name;
    }

    @Override
    public Locale getLocale(HttpServletRequest request) {
      return readLocaleFromCookie(request, name);
    }

    private Locale readLocaleFromCookie(HttpServletRequest request, String cookieName) {
      // BUG FIX: getCookies() returns null (not an empty array) when the
      // request carries no cookies; iterating it directly threw a NPE.
      Cookie[] cookies = request.getCookies();
      if (cookies == null) {
        return null;
      }
      for (Cookie cookie : cookies) {
        if (cookie.getName().endsWith(cookieName)) {
          return createLocaleFromString(cookie.getValue());
        }
      }
      return null;
    }
  }

  /** Resolves the locale from a String-valued session attribute. */
  private class SessionLocaleMethod implements ILocaleMethod {

    private String name;

    public SessionLocaleMethod(String name) {
      this.name = name;
    }

    @Override
    public Locale getLocale(HttpServletRequest request) {
      return readLocaleFromSession(request, name);
    }

    private Locale readLocaleFromSession(HttpServletRequest request, String attributeName) {
      Object attribute = request.getSession(true).getAttribute(attributeName);
      if (attribute instanceof String) {
        return createLocaleFromString((String) attribute);
      }
      return null;
    }
  }
}
package api;

import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;

import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;

import qora.account.Account;
import qora.account.PrivateKeyAccount;
import qora.assets.Asset;
import qora.crypto.Crypto;
import qora.naming.Name;
import qora.payment.Payment;
import qora.transaction.Transaction;
import utils.APIUtils;
import utils.GZIP;
import utils.Pair;
import utils.StorageUtils;

import com.google.common.base.Splitter;
import com.google.common.collect.Lists;

import controller.Controller;
import database.DBSet;

/**
 * REST resource exposing per-name key/value storage: listing entries/keys,
 * reading a single key, and creating arbitrary transactions that update the
 * storage (splitting oversized updates into a batch of transactions).
 */
@Path("namestorage")
@Produces(MediaType.APPLICATION_JSON)
public class NameStorageResource {

	public static final String ASSET_JSON_KEY = "asset";
	public static final String AMOUNT_JSON_KEY = "amount";
	public static final String PAYMENTS_JSON_KEY = "payments";

	@Context
	HttpServletRequest request;

	/**
	 * Returns all key/value pairs stored under the given name as a JSON object.
	 * @throws ApiErrorFactory error when the name is not registered
	 */
	@SuppressWarnings("unchecked")
	@GET
	@Path("/{name}/list")
	public String listNameStorage(@PathParam("name") String name) {
		Name nameObj = DBSet.getInstance().getNameMap().get(name);
		if (nameObj == null) {
			throw ApiErrorFactory.getInstance().createError(
					ApiErrorFactory.ERROR_NAME_NOT_REGISTERED);
		}
		Map<String, String> map = DBSet.getInstance().getNameStorageMap()
				.get(name);
		JSONObject json = new JSONObject();
		if (map != null) {
			Set<String> keySet = map.keySet();
			for (String key : keySet) {
				json.put(key, map.get(key));
			}
		}
		return json.toJSONString();
	}

	/**
	 * Returns only the keys stored under the given name as a JSON array.
	 * @throws ApiErrorFactory error when the name is not registered
	 */
	@SuppressWarnings("unchecked")
	@GET
	@Path("/{name}/keys")
	public String listKeysNameStorage(@PathParam("name") String name) {
		Name nameObj = DBSet.getInstance().getNameMap().get(name);
		if (nameObj == null) {
			throw ApiErrorFactory.getInstance().createError(
					ApiErrorFactory.ERROR_NAME_NOT_REGISTERED);
		}
		Map<String, String> map = DBSet.getInstance().getNameStorageMap()
				.get(name);
		JSONArray json = new JSONArray();
		if (map != null) {
			Set<String> keySet = map.keySet();
			for (String key : keySet) {
				json.add(key);
			}
		}
		return json.toJSONString();
	}

	/**
	 * Returns a single key's value as a one-entry JSON object (empty object
	 * when the key is absent).
	 * @throws ApiErrorFactory error when there is no storage for the name
	 */
	@SuppressWarnings("unchecked")
	@GET
	@Path("/{name}/key/{key}")
	public String getNameStorageValue(@PathParam("name") String name,
			@PathParam("key") String key) {
		Map<String, String> map = DBSet.getInstance().getNameStorageMap()
				.get(name);
		if (map == null) {
			throw ApiErrorFactory.getInstance().createError(
					ApiErrorFactory.ERROR_NAME_NOT_REGISTERED);
		}
		JSONObject json = new JSONObject();
		if (map != null && map.containsKey(key)) {
			json.put(key, map.get(key));
		}
		return json.toJSONString();
	}

	/**
	 * Creates one or more arbitrary transactions that update the name storage.
	 * The request body {@code x} is a JSON object; an optional "payments"
	 * entry attaches multipayments (only to the first transaction of a batch).
	 * Updates whose compressed payload exceeds 4000 bytes and that consist of a
	 * single ADD_COMPLETE key are split into chunked transactions with
	 * escalating fees.
	 *
	 * @param x JSON request body
	 * @param name registered name (or wallet address) owning the storage
	 * @return transaction check result(s), one line per created transaction
	 */
	@SuppressWarnings("unchecked")
	@POST
	@Path("/update/{name}")
	public String updateEntry(String x, @PathParam("name") String name) {
		try {
			APIUtils.disallowRemote(request);

			// READ JSON
			JSONObject jsonObject = (JSONObject) JSONValue.parse(x);

			// CHECK IF WALLET EXISTS
			if (!Controller.getInstance().doesWalletExists()) {
				throw ApiErrorFactory.getInstance().createError(
						ApiErrorFactory.ERROR_WALLET_NO_EXISTS);
			}

			// CHECK WALLET IN SYNC
			if (Controller.getInstance().getStatus() != Controller.STATUS_OK) {
				throw ApiErrorFactory.getInstance().createError(
						ApiErrorFactory.ERROR_WALLET_NOT_IN_SYNC);
			}

			Name nameObj = DBSet.getInstance().getNameMap().get(name);

			String creator;
			if (nameObj == null) {
				// check if addressstorage: the "name" may be a plain address
				Account accountByAddress = Controller.getInstance().getAccountByAddress(name);
				if (accountByAddress == null) {
					throw ApiErrorFactory.getInstance().createError(
							ApiErrorFactory.ERROR_NAME_NOT_REGISTERED);
				}
				creator = name;
			} else {
				creator = nameObj.getOwner().getAddress();
			}

			// CHECK ADDRESS
			if (!Crypto.getInstance().isValidAddress(creator)) {
				throw ApiErrorFactory.getInstance().createError(
						ApiErrorFactory.ERROR_INVALID_ADDRESS);
			}

			// CHECK ACCOUNT IN WALLET
			if (Controller.getInstance().getAccountByAddress(creator) == null) {
				throw ApiErrorFactory.getInstance().createError(
						ApiErrorFactory.ERROR_WALLET_ADDRESS_NO_EXISTS);
			}

			jsonObject.put("name", name);

			// Parse optional multipayments: {"address": "{\"amount\":..,\"asset\":..}", ...}
			String paymentsOpt = (String) jsonObject.get(PAYMENTS_JSON_KEY);
			List<Payment> resultPayments = new ArrayList<>();
			if (paymentsOpt != null) {
				JSONObject paymentsJson = (JSONObject) JSONValue.parse(paymentsOpt);
				Set<String> addresses = paymentsJson.keySet();
				for (String address : addresses) {
					if (!Crypto.getInstance().isValidAddress(address)) {
						throw ApiErrorFactory.getInstance().createError(
								ApiErrorFactory.ERROR_INVALID_ADDRESS);
					}
					String amountAssetJson = (String) paymentsJson.get(address);
					JSONObject amountAssetJsonObject = (JSONObject) JSONValue.parse(amountAssetJson);
					String amount = (String) amountAssetJsonObject.get(AMOUNT_JSON_KEY);
					BigDecimal bdAmount;
					try {
						bdAmount = new BigDecimal(amount);
						bdAmount = bdAmount.setScale(8);
					} catch (Exception e) {
						throw ApiErrorFactory.getInstance().createError(
								ApiErrorFactory.ERROR_INVALID_AMOUNT);
					}
					// default to asset 0 (Qora) unless an asset id is supplied
					Asset paymentAsset = Controller.getInstance().getAsset(Long.valueOf(0L));
					if (amountAssetJsonObject.containsKey(ASSET_JSON_KEY)) {
						try {
							paymentAsset = Controller.getInstance().getAsset(
									Long.valueOf(amountAssetJsonObject.get(ASSET_JSON_KEY).toString()));
						} catch (Exception e) {
							throw ApiErrorFactory.getInstance().createError(
									ApiErrorFactory.ERROR_INVALID_ASSET_ID);
						}
					}
					Payment payment = new Payment(new Account(address), paymentAsset.getKey(), bdAmount);
					resultPayments.add(payment);
				}
				// remove payments from json
				jsonObject.remove(PAYMENTS_JSON_KEY);
			}

			List<Payment> paymentsForCalculation = new ArrayList<>(resultPayments);

			// Use the compressed payload only when it is actually smaller.
			String jsonString = jsonObject.toJSONString();
			String compressedJsonString = GZIP.compress(jsonString);
			if (compressedJsonString.length() < jsonString.length()) {
				jsonString = compressedJsonString;
			}
			byte[] bytes = jsonString.getBytes(StandardCharsets.UTF_8);

			List<String> askApicalls = new ArrayList<String>();
			List<String> decompressedValue = new ArrayList<String>();

			JSONObject jsonObjectForCheck = (JSONObject) JSONValue.parse(x);

			// TODO IN CASE OF MULTIPAYMENT 4000 CAN BE A PROBLEM, THE FIRST TX CAN CONTAIN MULTIPAYMENTS WHICH NEED EXTRA SPACE
			// IF VALUE TOO LARGE FOR ONE ARB TX AND WE ONLY HAVE ADDCOMPLETE
			// WITH ONE KEY
			if (bytes.length > 4000
					&& jsonObjectForCheck.containsKey(StorageUtils.ADD_COMPLETE_KEY)
					&& jsonObjectForCheck.keySet().size() == 1) {
				JSONObject innerJsonObject = (JSONObject) JSONValue
						.parse((String) jsonObjectForCheck.get(StorageUtils.ADD_COMPLETE_KEY));
				if (innerJsonObject.keySet().size() == 1) {
					// Starting Multi TX
					String key = (String) innerJsonObject.keySet().iterator().next();
					String value = (String) innerJsonObject.get(key);

					// split the value into <=3500-char chunks, one tx each
					Iterable<String> chunks = Splitter.fixedLength(3500).split(value);
					List<String> arbTxs = Lists.newArrayList(chunks);

					BigDecimal completeFee = BigDecimal.ZERO;
					List<Pair<byte[], BigDecimal>> allTxPairs = new ArrayList<>();
					boolean isFirst = true;
					for (String valueString : arbTxs) {
						Pair<String, String> keypair = new Pair<String, String>(key, valueString);
						JSONObject storageJsonObject;
						if (isFirst) {
							// first chunk replaces the value, later chunks append
							storageJsonObject = StorageUtils.getStorageJsonObject(
									Collections.singletonList(keypair), null, null, null, null, null);
							isFirst = false;
						} else {
							storageJsonObject = StorageUtils.getStorageJsonObject(
									null, null, null, null, Collections.singletonList(keypair), null);
						}
						storageJsonObject.put("name", name);

						String jsonStringForMultipleTx = storageJsonObject.toJSONString();
						String compressedjsonStringForMultipleTx = GZIP.compress(jsonStringForMultipleTx);
						if (compressedjsonStringForMultipleTx.length() < jsonStringForMultipleTx.length()) {
							jsonStringForMultipleTx = compressedjsonStringForMultipleTx;
						}

						byte[] resultbyteArray = jsonStringForMultipleTx.getBytes(StandardCharsets.UTF_8);
						BigDecimal currentFee = Controller.getInstance()
								.calcRecommendedFeeForArbitraryTransaction(
										resultbyteArray, paymentsForCalculation).getA();

						// multipayment only for first tx
						paymentsForCalculation = null;

						completeFee = completeFee.add(currentFee);
						allTxPairs.add(new Pair<>(resultbyteArray, currentFee));

						String decompressed = GZIP.webDecompress(jsonStringForMultipleTx);
						askApicalls.add("POST namestorage/update/" + name
								+ "\n" + decompressed + "\nfee: " + currentFee.toPlainString());
						decompressedValue.add(decompressed);
					}

					if (allTxPairs.size() > ApiErrorFactory.BATCH_TX_AMOUNT) {
						throw ApiErrorFactory.getInstance().createError(
								ApiErrorFactory.ERROR_TX_AMOUNT);
					}

					// recalculating qora amount: escalate each subsequent fee by 15%,
					// rounded up to a whole unit
					BigDecimal newCompleteFee = BigDecimal.ZERO;
					BigDecimal oldAmount = BigDecimal.ZERO;
					List<Pair<byte[], BigDecimal>> newPairs = new ArrayList<Pair<byte[], BigDecimal>>();
					for (Pair<byte[], BigDecimal> pair : allTxPairs) {
						if (oldAmount.equals(BigDecimal.ZERO)) {
							oldAmount = pair.getB();
							newCompleteFee = oldAmount;
							newPairs.add(pair);
							continue;
						}
						// BUG FIX: use the String constructor — new BigDecimal(1.15)
						// encoded the inexact double 1.1499999999999999...
						BigDecimal newAmount = oldAmount.multiply(new BigDecimal("1.15"));
						newAmount = newAmount.setScale(0, BigDecimal.ROUND_UP).setScale(8);
						pair.setB(newAmount);
						newPairs.add(pair);
						oldAmount = newAmount;
						newCompleteFee = newCompleteFee.add(newAmount);
					}

					String apicalls = "";
					for (int i = 0; i < newPairs.size(); i++) {
						apicalls += "POST namestorage/update/" + name + "\n"
								+ decompressedValue.get(i) + "\nfee: "
								+ newPairs.get(i).getB().toPlainString() + "\n";
					}

					// BUG FIX: the multipayment warning was computed and then
					// immediately overwritten (dead store); prepend it instead.
					String basicInfo = getMultiPaymentsWarning(resultPayments);
					basicInfo += "Because of the size of the data this call will create "
							+ allTxPairs.size()
							+ " transactions.\nAll Arbitrary Transactions will cost: "
							+ newCompleteFee.toPlainString() + " Qora.\nDetails:\n\n";
					basicInfo += apicalls;

					APIUtils.askAPICallAllowed(basicInfo, request);

					// CHECK WALLET UNLOCKED
					if (!Controller.getInstance().isWalletUnlocked()) {
						throw ApiErrorFactory.getInstance().createError(
								ApiErrorFactory.ERROR_WALLET_LOCKED);
					}

					// GET ACCOUNT
					PrivateKeyAccount account = Controller.getInstance()
							.getPrivateKeyAccountByAddress(creator);
					if (account == null) {
						throw ApiErrorFactory.getInstance().createError(
								ApiErrorFactory.ERROR_INVALID_ADDRESS);
					}

					// NOTE(review): this checks the pre-escalation total
					// (completeFee), not the escalated newCompleteFee — confirm
					// whether the stricter check is intended.
					if (account.getBalance(1, DBSet.getInstance()).compareTo(completeFee) < 0) {
						throw ApiErrorFactory.getInstance().createError(
								ApiErrorFactory.ERROR_NO_BALANCE);
					}

					Pair<Transaction, Integer> result;
					String results = "";
					for (Pair<byte[], BigDecimal> pair : newPairs) {
						result = Controller.getInstance().createArbitraryTransaction(
								account,
								!resultPayments.isEmpty() ? resultPayments : null,
								10, pair.getA(), pair.getB());
						// add multipayments only to first tx
						resultPayments.clear();
						results += ArbitraryTransactionsResource.checkArbitraryTransaction(result) + "\n";
					}
					return results;
				}
			}

			// Single-transaction path.
			String basicInfo = getMultiPaymentsWarning(resultPayments);
			BigDecimal fee = Controller.getInstance()
					.calcRecommendedFeeForArbitraryTransaction(bytes, resultPayments).getA();

			APIUtils.askAPICallAllowed(basicInfo + "POST namestorage/update/" + name
					+ "\n" + GZIP.webDecompress(jsonString) + "\nfee: "
					+ fee.toPlainString(), request);

			// CHECK WALLET UNLOCKED
			if (!Controller.getInstance().isWalletUnlocked()) {
				throw ApiErrorFactory.getInstance().createError(
						ApiErrorFactory.ERROR_WALLET_LOCKED);
			}

			// GET ACCOUNT
			PrivateKeyAccount account = Controller.getInstance()
					.getPrivateKeyAccountByAddress(creator);
			if (account == null) {
				throw ApiErrorFactory.getInstance().createError(
						ApiErrorFactory.ERROR_INVALID_ADDRESS);
			}

			// SEND PAYMENT
			Pair<Transaction, Integer> result = Controller.getInstance()
					.createArbitraryTransaction(account, resultPayments, 10, bytes, fee);

			return ArbitraryTransactionsResource.checkArbitraryTransaction(result);

		} catch (NullPointerException e) {
			// JSON EXCEPTION
			throw ApiErrorFactory.getInstance().createError(
					ApiErrorFactory.ERROR_JSON);
		} catch (ClassCastException e) {
			// JSON EXCEPTION
			throw ApiErrorFactory.getInstance().createError(
					ApiErrorFactory.ERROR_JSON);
		}
	}

	/**
	 * Builds the human-readable multipayment warning shown in the confirmation
	 * prompt; empty string when there are no payments.
	 */
	public String getMultiPaymentsWarning(List<Payment> resultPayments) {
		if (resultPayments.size() == 0) {
			return "";
		}
		StringBuilder basicInfo = new StringBuilder("WARNING : This call contains multipayments:\n");
		for (Payment payment : resultPayments) {
			basicInfo.append("Recipient ").append(payment.getRecipient().getAddress())
					.append(" Amount: ").append(payment.getAmount().toPlainString())
					.append(" AssetID ").append(payment.getAsset())
					.append(payment.getAsset() == 0L ? "(Qora)\n" : "\n");
		}
		basicInfo.append("\n");
		return basicInfo.toString();
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.metadata; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.ql.metadata.CheckResult.PartitionResult; import 
org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.thrift.TException;

import com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * Verify that the information in the metastore matches what is on the
 * filesystem. Return a CheckResult object containing lists of missing and any
 * unexpected tables and partitions.
 */
public class HiveMetaStoreChecker {

  public static final Logger LOG = LoggerFactory.getLogger(HiveMetaStoreChecker.class);

  private final Hive hive;
  private final HiveConf conf;

  public HiveMetaStoreChecker(Hive hive) {
    super();
    this.hive = hive;
    conf = hive.getConf();
  }

  /**
   * Check the metastore for inconsistencies, data missing in either the
   * metastore or on the dfs.
   *
   * @param dbName
   *          name of the database, if not specified the default will be used.
   * @param tableName
   *          Table we want to run the check for. If null we'll check all the
   *          tables in the database.
   * @param partitions
   *          List of partition name value pairs, if null or empty check all
   *          partitions
   * @param result
   *          Fill this with the results of the check
   * @throws HiveException
   *           Failed to get required information from the metastore.
   * @throws IOException
   *           Most likely filesystem related
   */
  public void checkMetastore(String dbName, String tableName,
      List<? extends Map<String, String>> partitions, CheckResult result)
      throws HiveException, IOException {

    if (dbName == null || "".equalsIgnoreCase(dbName)) {
      dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
    }

    try {
      if (tableName == null || "".equals(tableName)) {
        // no table specified, check all tables and all partitions.
        List<String> tables = hive.getTablesForDb(dbName, ".*");
        for (String currentTableName : tables) {
          checkTable(dbName, currentTableName, null, result);
        }
        findUnknownTables(dbName, tables, result);
      } else if (partitions == null || partitions.isEmpty()) {
        // only one table, let's check all partitions
        checkTable(dbName, tableName, null, result);
      } else {
        // check the specified partitions
        checkTable(dbName, tableName, partitions, result);
      }
      // keep the result lists deterministic for callers/tests
      Collections.sort(result.getPartitionsNotInMs());
      Collections.sort(result.getPartitionsNotOnFs());
      Collections.sort(result.getTablesNotInMs());
      Collections.sort(result.getTablesNotOnFs());
    } catch (MetaException e) {
      throw new HiveException(e);
    } catch (TException e) {
      throw new HiveException(e);
    }
  }

  /**
   * Check for table directories that aren't in the metastore.
   *
   * @param dbName
   *          Name of the database
   * @param tables
   *          List of table names
   * @param result
   *          Add any found tables to this
   * @throws HiveException
   *           Failed to get required information from the metastore.
   * @throws IOException
   *           Most likely filesystem related
   * @throws MetaException
   *           Failed to get required information from the metastore.
   * @throws TException
   *           Thrift communication error.
   */
  void findUnknownTables(String dbName, List<String> tables, CheckResult result)
      throws IOException, MetaException, TException, HiveException {

    Set<Path> dbPaths = new HashSet<Path>();
    Set<String> tableNames = new HashSet<String>(tables);

    for (String tableName : tables) {
      Table table = hive.getTable(dbName, tableName);
      // hack, instead figure out a way to get the db paths
      String isExternal = table.getParameters().get("EXTERNAL");
      if (isExternal == null || !"TRUE".equalsIgnoreCase(isExternal)) {
        dbPaths.add(table.getPath().getParent());
      }
    }

    for (Path dbPath : dbPaths) {
      FileSystem fs = dbPath.getFileSystem(conf);
      FileStatus[] statuses = fs.listStatus(dbPath, FileUtils.HIDDEN_FILES_PATH_FILTER);
      for (FileStatus status : statuses) {
        // any directory in the db path that isn't a known table is "unknown"
        if (status.isDir() && !tableNames.contains(status.getPath().getName())) {
          result.getTablesNotInMs().add(status.getPath().getName());
        }
      }
    }
  }

  /**
   * Check the metastore for inconsistencies, data missing in either the
   * metastore or on the dfs.
   *
   * @param dbName
   *          Name of the database
   * @param tableName
   *          Name of the table
   * @param partitions
   *          Partitions to check, if null or empty get all the partitions.
   * @param result
   *          Result object
   * @throws HiveException
   *           Failed to get required information from the metastore.
   * @throws IOException
   *           Most likely filesystem related
   * @throws MetaException
   *           Failed to get required information from the metastore.
   */
  void checkTable(String dbName, String tableName,
      List<? extends Map<String, String>> partitions, CheckResult result)
      throws MetaException, IOException, HiveException {

    Table table = null;

    try {
      table = hive.getTable(dbName, tableName);
    } catch (HiveException e) {
      // table is in the list but can't be fetched -> missing from metastore
      result.getTablesNotInMs().add(tableName);
      return;
    }

    List<Partition> parts = new ArrayList<Partition>();
    boolean findUnknownPartitions = true;

    if (table.isPartitioned()) {
      if (partitions == null || partitions.isEmpty()) {
        PrunedPartitionList prunedPartList =
            PartitionPruner.prune(table, null, conf, toString(), null);
        // no partitions specified, let's get all
        parts.addAll(prunedPartList.getPartitions());
      } else {
        // we're interested in specific partitions,
        // don't check for any others
        findUnknownPartitions = false;
        for (Map<String, String> map : partitions) {
          Partition part = hive.getPartition(table, map, false);
          if (part == null) {
            PartitionResult pr = new PartitionResult();
            pr.setTableName(tableName);
            pr.setPartitionName(Warehouse.makePartPath(map));
            result.getPartitionsNotInMs().add(pr);
          } else {
            parts.add(part);
          }
        }
      }
    }

    checkTable(table, parts, findUnknownPartitions, result);
  }

  /**
   * Check the metastore for inconsistencies, data missing in either the
   * metastore or on the dfs.
   *
   * @param table
   *          Table to check
   * @param parts
   *          Partitions to check
   * @param result
   *          Result object
   * @param findUnknownPartitions
   *          Should we try to find unknown partitions?
   * @throws IOException
   *           Could not get information from filesystem
   * @throws HiveException
   *           Could not create Partition object
   */
  void checkTable(Table table, List<Partition> parts,
      boolean findUnknownPartitions, CheckResult result) throws IOException,
      HiveException {

    Path tablePath = table.getPath();
    FileSystem fs = tablePath.getFileSystem(conf);
    if (!fs.exists(tablePath)) {
      result.getTablesNotOnFs().add(table.getTableName());
      return;
    }

    Set<Path> partPaths = new HashSet<Path>();

    // check that the partition folders exist on disk
    for (Partition partition : parts) {
      if (partition == null) {
        // most likely the user specified an invalid partition
        continue;
      }
      Path partPath = partition.getDataLocation();
      fs = partPath.getFileSystem(conf);
      if (!fs.exists(partPath)) {
        PartitionResult pr = new PartitionResult();
        pr.setPartitionName(partition.getName());
        pr.setTableName(partition.getTable().getTableName());
        result.getPartitionsNotOnFs().add(pr);
      }

      // remember the partition path and all its ancestors up to the
      // partition-spec depth, so they are not reported as unknown
      for (int i = 0; i < partition.getSpec().size(); i++) {
        partPaths.add(partPath.makeQualified(fs));
        partPath = partPath.getParent();
      }
    }

    if (findUnknownPartitions) {
      findUnknownPartitions(table, partPaths, result);
    }
  }

  /**
   * Find partitions on the fs that are unknown to the metastore.
   *
   * @param table
   *          Table where the partitions would be located
   * @param partPaths
   *          Paths of the partitions the ms knows about
   * @param result
   *          Result object
   * @throws IOException
   *           Thrown if we fail at fetching listings from the fs.
   * @throws HiveException
   */
  void findUnknownPartitions(Table table, Set<Path> partPaths,
      CheckResult result) throws IOException, HiveException {

    Path tablePath = table.getPath();
    // now check the table folder and see if we find anything
    // that isn't in the metastore
    Set<Path> allPartDirs = new HashSet<Path>();
    getAllLeafDirs(tablePath, allPartDirs);
    // don't want the table dir
    allPartDirs.remove(tablePath);
    // remove the partition paths we know about
    allPartDirs.removeAll(partPaths);
    // we should now only have the unexpected folders left
    for (Path partPath : allPartDirs) {
      FileSystem fs = partPath.getFileSystem(conf);
      String partitionName = getPartitionName(fs.makeQualified(tablePath), partPath);
      if (partitionName != null) {
        PartitionResult pr = new PartitionResult();
        pr.setPartitionName(partitionName);
        pr.setTableName(table.getTableName());
        result.getPartitionsNotInMs().add(pr);
      }
    }
  }

  /**
   * Get the partition name from the path.
   *
   * @param tablePath
   *          Path of the table.
   * @param partitionPath
   *          Path of the partition.
   * @return Partition name, for example partitiondate=2008-01-01
   */
  private String getPartitionName(Path tablePath, Path partitionPath) {
    String result = null;
    Path currPath = partitionPath;
    // walk up from the partition dir to the table dir, accumulating
    // path components back-to-front
    while (currPath != null && !tablePath.equals(currPath)) {
      if (result == null) {
        result = currPath.getName();
      } else {
        result = currPath.getName() + Path.SEPARATOR + result;
      }
      currPath = currPath.getParent();
    }
    return result;
  }

  /**
   * Recursive method to get the leaf directories of a base path. Example:
   * base/dir1/dir2 base/dir3
   *
   * This will return dir2 and dir3 but not dir1.
   *
   * @param basePath
   *          Start directory
   * @param allDirs
   *          This set will contain the leaf paths at the end.
   * @throws IOException
   *           Thrown if we can't get lists from the fs.
   * @throws HiveException
   */
  private void getAllLeafDirs(Path basePath, Set<Path> allDirs)
      throws IOException, HiveException {
    ConcurrentLinkedQueue<Path> basePaths = new ConcurrentLinkedQueue<>();
    basePaths.add(basePath);
    // we only use the keySet of ConcurrentHashMap
    // Neither the key nor the value can be null.
    Map<Path, Object> dirSet = new ConcurrentHashMap<>();
    // Here we just reuse the THREAD_COUNT configuration for
    // HIVE_MOVE_FILES_THREAD_COUNT
    final ExecutorService pool = conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25) > 0 ? Executors
        .newFixedThreadPool(conf.getInt(ConfVars.HIVE_MOVE_FILES_THREAD_COUNT.varname, 25),
            new ThreadFactoryBuilder().setDaemon(true).setNameFormat("MSCK-GetPaths-%d").build()) : null;
    if (pool == null) {
      LOG.debug("Not-using threaded version of MSCK-GetPaths");
    } else {
      LOG.debug("Using threaded version of MSCK-GetPaths with number of threads "
          + ((ThreadPoolExecutor) pool).getPoolSize());
    }
    getAllLeafDirs(pool, basePaths, dirSet, basePath.getFileSystem(conf));
    // BUG FIX: pool is null when the configured thread count is <= 0;
    // the previous unconditional pool.shutdown() threw a NullPointerException
    // on that path.
    if (pool != null) {
      pool.shutdown();
    }
    allDirs.addAll(dirSet.keySet());
  }

  // process the basePaths in parallel and then the next level of basePaths
  private void getAllLeafDirs(final ExecutorService pool,
      final ConcurrentLinkedQueue<Path> basePaths, final Map<Path, Object> allDirs,
      final FileSystem fs) throws IOException, HiveException {
    final ConcurrentLinkedQueue<Path> nextLevel = new ConcurrentLinkedQueue<>();
    if (null == pool) {
      for (final Path path : basePaths) {
        FileStatus[] statuses = fs.listStatus(path, FileUtils.HIDDEN_FILES_PATH_FILTER);
        boolean directoryFound = false;
        for (FileStatus status : statuses) {
          if (status.isDir()) {
            directoryFound = true;
            nextLevel.add(status.getPath());
          }
        }
        if (!directoryFound) {
          // true is just a boolean object place holder because neither the key nor the value can be null.
          allDirs.put(path, true);
        }
      }
      // BUG FIX: recurse once per level, after the whole level has been
      // scanned. Recursing inside the for loop (as before) re-submitted the
      // shared nextLevel queue for every remaining base path, re-listing
      // already-visited directories; the threaded branch below already
      // recursed after the loop.
      if (!nextLevel.isEmpty()) {
        getAllLeafDirs(pool, nextLevel, allDirs, fs);
      }
    } else {
      final List<Future<Void>> futures = new LinkedList<>();
      for (final Path path : basePaths) {
        futures.add(pool.submit(new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            FileStatus[] statuses = fs.listStatus(path, FileUtils.HIDDEN_FILES_PATH_FILTER);
            boolean directoryFound = false;
            for (FileStatus status : statuses) {
              if (status.isDir()) {
                directoryFound = true;
                nextLevel.add(status.getPath());
              }
            }
            if (!directoryFound) {
              allDirs.put(path, true);
            }
            return null;
          }
        }));
      }
      for (Future<Void> future : futures) {
        try {
          future.get();
        } catch (Exception e) {
          LOG.error(e.getMessage());
          pool.shutdownNow();
          throw new HiveException(e.getCause());
        }
      }
      if (!nextLevel.isEmpty()) {
        getAllLeafDirs(pool, nextLevel, allDirs, fs);
      }
    }
  }
}
/* Copyright (c) 2015, University of Oslo, Norway * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of the University of Oslo nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package vtk.web.search; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import vtk.repository.IllegalOperationException; import vtk.repository.MultiHostSearcher; import vtk.repository.Namespace; import vtk.repository.Property; import vtk.repository.PropertySet; import vtk.repository.resourcetype.PropertyType; import vtk.repository.resourcetype.PropertyType.Type; import vtk.repository.resourcetype.Value; import vtk.repository.resourcetype.ValueFormatException; import vtk.repository.search.ResultSet; import vtk.web.service.URL; /** * * Utility functions for MultiHost properties */ public class MultiHostUtil { private static final Logger logger = LoggerFactory.getLogger(MultiHostUtil.class); /** * Get MultiHostUrl Property from a PropertySet. * * @param ps PropertySet containing MultiHostUrl Property * @return MultiHostUrl Property or null */ public static Property getMultiHostUrlProp(PropertySet ps) { return ps.getProperty(Namespace.DEFAULT_NAMESPACE, MultiHostSearcher.URL_PROP_NAME); } /** * Checks if MultiHostUrl Property is contained in a PropertySet. * * @param ps PropertySet containing MultiHostUrl Property * @return true if MultiHostUrl Property is contained in a PropertySet */ public static boolean isMultiHostPropertySet(PropertySet ps) { return getMultiHostUrlProp(ps) != null; } /** * Resolve all IMAGE_REF Properties in a ResultSet to absolute URI. The absolute URI will be set per Property as a * new value. * * @param rs ResultSet to be changed * @return PropertySet with new IMAGE_REF Property values */ public static ResultSet resolveResultSetImageRefProperties(ResultSet rs) { for (PropertySet ps : rs.getAllResults()) { if (isMultiHostPropertySet(ps)) { resolveImageRefProperties(ps); } } return rs; } /** * Resolve all IMAGE_REF Properties in a Set of PropertySets to absolute URI. The absolute URI will be set per * Property as a new value. 
* * @param propertySets Set to be changed * @return Set of PropertySets with new IMAGE_REF Property values */ public static Set<PropertySet> resolveSetImageRefProperties(Set<PropertySet> propertySets) { for (PropertySet ps : propertySets) { if (isMultiHostPropertySet(ps)) { resolveImageRefProperties(ps); } } return propertySets; } /** * Resolve all IMAGE_REF Properties in a PropertySet to absolute URI. The absolute URI will be set per Property as a * new value. * * @param ps PropertySet containing IMAGE_REF Properties * @return PropertySet with new IMAGE_REF Property values */ public static PropertySet resolveImageRefProperties(PropertySet ps) { Property multiHostUrl = getMultiHostUrlProp(ps); if (multiHostUrl != null) { for (Property prop : ps) { try { if (prop.getType().equals(Type.IMAGE_REF)) { /* * resolveImageRefProperty with thumbnailForNoneAbsoluteRefs=true to maintain * behavior form SolrResultMapper.java as default */ resolveImageRefProperty(prop, multiHostUrl, true); } } catch (IllegalOperationException | ValueFormatException e) { logger.error("An error occured while mapping property '" + prop.getDefinition().getName() + "' for " + "resource " + ps.getURI() + ": " + e.getClass().getSimpleName() + ": " + e.getMessage()); } } } return ps; } /** * Resolve URI Property and set new value. 
* * @param prop to be resolved * @param multiHostUrl Required to resolve {@code prop} * @return Property {@code prop} resolved */ public static Property resolveImageRefProperty(Property prop, Property multiHostUrl) { return resolveImageRefProperty(prop, multiHostUrl, false); } private static Property resolveImageRefProperty(Property prop, Property multiHostUrl, boolean addThumbnailForNoneAbsoluteRefs) { if (!prop.getType().equals(Type.IMAGE_REF)) { throw new IllegalArgumentException(prop.getDefinition().getName() + " is not of type IMAGE_REF"); } if (!prop.getDefinition().getName().equals(PropertyType.PICTURE_PROP_NAME)) { addThumbnailForNoneAbsoluteRefs = false; } Value[] vals; if (prop.getDefinition().isMultiple()) { vals = prop.getValues(); } else { vals = new Value[]{prop.getValue()}; } Value[] resolvedVals = new Value[vals.length]; for (int i = 0; i < vals.length; i++) { String resolvedRef = resolveImageRef(vals[i].getStringValue(), multiHostUrl, addThumbnailForNoneAbsoluteRefs); resolvedVals[i] = new Value(resolvedRef, Type.IMAGE_REF); } if (prop.getDefinition().isMultiple()) { prop.setValues(resolvedVals); } else { prop.setValue(resolvedVals[0]); } return prop; } /** * Resolve URI Property to {@code String}. 
* * * @param prop to be resolved * @param multiHostUrl Required to resolve {@code prop} * @param addThumbnailForNoneAbsoluteRefs if true adds parameter vrtx=thumbnail to return value if IMAGE_REF is a * relative path * @return resolved String value for IMAGE_REF */ public static String resolveImageRefStringValue(Property prop, Property multiHostUrl, boolean addThumbnailForNoneAbsoluteRefs) { if (!prop.getType().equals(Type.IMAGE_REF)) { throw new IllegalArgumentException(prop.getDefinition().getName() + " is not of type IMAGE_REF"); } if (!prop.getDefinition().getName().equals(PropertyType.PICTURE_PROP_NAME)) { addThumbnailForNoneAbsoluteRefs = false; } // XXX this will fail for multi-value props return resolveImageRef(prop.getStringValue(), multiHostUrl, addThumbnailForNoneAbsoluteRefs); } private static String resolveImageRef(String ref, Property multiHostUrl, boolean addThumbnailForNoneAbsoluteRefs) { if (multiHostUrl == null) { // Nothing to resolve against return ref; } URL remoteResource = URL.parse(multiHostUrl.getStringValue()); URL remoteRelative = remoteResource.relativeURL(ref); if (remoteRelative.getHost() != null && remoteResource.getHost() != null) { if (addThumbnailForNoneAbsoluteRefs && remoteResource.getHost().equals(remoteRelative.getHost())) { remoteRelative.setParameter("vrtx", "thumbnail"); } } return remoteRelative.toString(); } }
/* * Copyright 2021 Apollo Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.ctrip.framework.apollo.core.utils; import java.util.concurrent.atomic.AtomicInteger; import org.slf4j.Logger; import org.slf4j.Marker; /** * Only the following methods implement the delayed log function * <pre class="code"> * public void debug(String s); * public void debug(String s, Object o); * public void debug(String s, Object... objects); * public void debug(String s, Object o, Object o1); * public void debug(String s, Throwable throwable); * public void info(String s); * public void info(String s, Object o); * public void info(String s, Object... objects); * public void info(String s, Object o, Object o1); * public void info(String s, Throwable throwable); * public void warn(String s); * public void warn(String s, Object o); * public void warn(String s, Object... objects); * public void warn(String s, Object o, Object o1); * public void warn(String s, Throwable throwable) * public void error(String s); * public void error(String s, Object o); * public void error(String s, Object o, Object o1); * public void error(String s, Object... 
objects);
 * public void error(String s, Throwable throwable);
 * </pre>
 *
 * @author kl (http://kailing.pub)
 * @since 2021/5/19
 */
public class DeferredLogger implements Logger {

  // Target SLF4J logger: receives records directly while deferral is off,
  // and is the logger DeferredLogCache entries are eventually replayed to.
  private final Logger logger;
  /**
   * Delay the opening state of the log -1: Uninitialized 0: Disable 1: Enabled
   */
  private static final AtomicInteger state = new AtomicInteger(-1);

  // Turns deferral on, but only from the uninitialized state; calling
  // enable() after disable() is intentionally a no-op.
  public static void enable() {
    state.compareAndSet(-1, 1);
  }

  public static void disable() {
    state.set(0);
  }

  public static boolean isEnabled() {
    return state.get() == 1;
  }

  // Stops deferring, then flushes everything buffered so far to the real loggers.
  public static void replayTo() {
    disable();
    DeferredLogCache.replayTo();
  }

  public DeferredLogger(Logger logger) {
    this.logger = logger;
  }

  @Override
  public String getName() {
    return logger.getName();
  }

  // ---- trace: never deferred (not in the supported set listed in the class javadoc) ----

  @Override
  public boolean isTraceEnabled() {
    return logger.isTraceEnabled();
  }

  @Override
  public void trace(String s) {
    logger.trace(s);
  }

  @Override
  public void trace(String s, Object o) {
    logger.trace(s, o);
  }

  @Override
  public void trace(String s, Object o, Object o1) {
    logger.trace(s, o, o1);
  }

  @Override
  public void trace(String s, Object... objects) {
    logger.trace(s, objects);
  }

  @Override
  public void trace(String s, Throwable throwable) {
    logger.trace(s, throwable);
  }

  @Override
  public boolean isTraceEnabled(Marker marker) {
    return logger.isTraceEnabled(marker);
  }

  @Override
  public void trace(Marker marker, String s) {
    logger.trace(marker, s);
  }

  @Override
  public void trace(Marker marker, String s, Object o) {
    logger.trace(marker, s, o);
  }

  @Override
  public void trace(Marker marker, String s, Object o, Object o1) {
    logger.trace(marker, s, o, o1);
  }

  @Override
  public void trace(Marker marker, String s, Object... objects) {
    logger.trace(marker, s, objects);
  }

  @Override
  public void trace(Marker marker, String s, Throwable throwable) {
    logger.trace(marker, s, throwable);
  }

  // ---- debug: non-Marker overloads are buffered while deferral is enabled;
  //      Marker overloads always go straight to the underlying logger ----

  @Override
  public boolean isDebugEnabled() {
    return logger.isDebugEnabled();
  }

  @Override
  public void debug(String s) {
    if (isEnabled()) {
      DeferredLogCache.debug(logger, s);
    } else {
      logger.debug(s);
    }
  }

  @Override
  public void debug(String s, Object o) {
    if (isEnabled()) {
      DeferredLogCache.debug(logger, s, o);
    } else {
      logger.debug(s, o);
    }
  }

  @Override
  public void debug(String s, Object o, Object o1) {
    if (isEnabled()) {
      DeferredLogCache.debug(logger, s, o, o1);
    } else {
      logger.debug(s, o, o1);
    }
  }

  @Override
  public void debug(String s, Object... objects) {
    if (isEnabled()) {
      DeferredLogCache.debug(logger, s, objects);
    } else {
      logger.debug(s, objects);
    }
  }

  @Override
  public void debug(String s, Throwable throwable) {
    if (isEnabled()) {
      DeferredLogCache.debug(logger, s, throwable);
    } else {
      logger.debug(s, throwable);
    }
  }

  @Override
  public boolean isDebugEnabled(Marker marker) {
    return logger.isDebugEnabled(marker);
  }

  @Override
  public void debug(Marker marker, String s) {
    logger.debug(marker, s);
  }

  @Override
  public void debug(Marker marker, String s, Object o) {
    logger.debug(marker, s, o);
  }

  @Override
  public void debug(Marker marker, String s, Object o, Object o1) {
    logger.debug(marker, s, o, o1);
  }

  @Override
  public void debug(Marker marker, String s, Object... objects) {
    logger.debug(marker, s, objects);
  }

  @Override
  public void debug(Marker marker, String s, Throwable throwable) {
    logger.debug(marker, s, throwable);
  }

  // ---- info: same deferral rule as debug ----

  @Override
  public boolean isInfoEnabled() {
    return logger.isInfoEnabled();
  }

  @Override
  public void info(String s) {
    if (isEnabled()) {
      DeferredLogCache.info(logger, s);
    } else {
      logger.info(s);
    }
  }

  @Override
  public void info(String s, Object o) {
    if (isEnabled()) {
      DeferredLogCache.info(logger, s, o);
    } else {
      logger.info(s, o);
    }
  }

  @Override
  public void info(String s, Object o, Object o1) {
    if (isEnabled()) {
      DeferredLogCache.info(logger, s, o, o1);
    } else {
      logger.info(s, o, o1);
    }
  }

  @Override
  public void info(String s, Object... objects) {
    if (isEnabled()) {
      DeferredLogCache.info(logger, s, objects);
    } else {
      logger.info(s, objects);
    }
  }

  @Override
  public void info(String s, Throwable throwable) {
    if (isEnabled()) {
      DeferredLogCache.info(logger, s, throwable);
    } else {
      logger.info(s, throwable);
    }
  }

  @Override
  public boolean isInfoEnabled(Marker marker) {
    return logger.isInfoEnabled(marker);
  }

  @Override
  public void info(Marker marker, String s) {
    logger.info(marker, s);
  }

  @Override
  public void info(Marker marker, String s, Object o) {
    logger.info(marker, s, o);
  }

  @Override
  public void info(Marker marker, String s, Object o, Object o1) {
    logger.info(marker, s, o, o1);
  }

  @Override
  public void info(Marker marker, String s, Object... objects) {
    logger.info(marker, s, objects);
  }

  @Override
  public void info(Marker marker, String s, Throwable throwable) {
    logger.info(marker, s, throwable);
  }

  // ---- warn: same deferral rule as debug ----

  @Override
  public boolean isWarnEnabled() {
    return logger.isWarnEnabled();
  }

  @Override
  public void warn(String s) {
    if (isEnabled()) {
      DeferredLogCache.warn(logger, s);
    } else {
      logger.warn(s);
    }
  }

  @Override
  public void warn(String s, Object o) {
    if (isEnabled()) {
      DeferredLogCache.warn(logger, s, o);
    } else {
      logger.warn(s, o);
    }
  }

  @Override
  public void warn(String s, Object... objects) {
    if (isEnabled()) {
      DeferredLogCache.warn(logger, s, objects);
    } else {
      logger.warn(s, objects);
    }
  }

  @Override
  public void warn(String s, Object o, Object o1) {
    if (isEnabled()) {
      DeferredLogCache.warn(logger, s, o, o1);
    } else {
      logger.warn(s, o, o1);
    }
  }

  @Override
  public void warn(String s, Throwable throwable) {
    if (isEnabled()) {
      DeferredLogCache.warn(logger, s, throwable);
    } else {
      logger.warn(s, throwable);
    }
  }

  @Override
  public boolean isWarnEnabled(Marker marker) {
    return logger.isWarnEnabled(marker);
  }

  @Override
  public void warn(Marker marker, String s) {
    logger.warn(marker, s);
  }

  @Override
  public void warn(Marker marker, String s, Object o) {
    logger.warn(marker, s, o);
  }

  @Override
  public void warn(Marker marker, String s, Object o, Object o1) {
    logger.warn(marker, s, o, o1);
  }

  @Override
  public void warn(Marker marker, String s, Object... objects) {
    logger.warn(marker, s, objects);
  }

  @Override
  public void warn(Marker marker, String s, Throwable throwable) {
    logger.warn(marker, s, throwable);
  }

  // ---- error: same deferral rule as debug ----

  @Override
  public boolean isErrorEnabled() {
    return logger.isErrorEnabled();
  }

  @Override
  public void error(String s) {
    if (isEnabled()) {
      DeferredLogCache.error(logger, s);
    } else {
      logger.error(s);
    }
  }

  @Override
  public void error(String s, Object o) {
    if (isEnabled()) {
      DeferredLogCache.error(logger, s, o);
    } else {
      logger.error(s, o);
    }
  }

  @Override
  public void error(String s, Object o, Object o1) {
    if (isEnabled()) {
      DeferredLogCache.error(logger, s, o, o1);
    } else {
      logger.error(s, o, o1);
    }
  }

  @Override
  public void error(String s, Object... objects) {
    if (isEnabled()) {
      DeferredLogCache.error(logger, s, objects);
    } else {
      logger.error(s, objects);
    }
  }

  @Override
  public void error(String s, Throwable throwable) {
    if (isEnabled()) {
      DeferredLogCache.error(logger, s, throwable);
    } else {
      logger.error(s, throwable);
    }
  }

  @Override
  public boolean isErrorEnabled(Marker marker) {
    return logger.isErrorEnabled(marker);
  }

  @Override
  public void error(Marker marker, String s) {
    logger.error(marker, s);
  }

  @Override
  public void error(Marker marker, String s, Object o) {
    logger.error(marker, s, o);
  }

  @Override
  public void error(Marker marker, String s, Object o, Object o1) {
    logger.error(marker, s, o, o1);
  }

  @Override
  public void error(Marker marker, String s, Object... objects) {
    logger.error(marker, s, objects);
  }

  @Override
  public void error(Marker marker, String s, Throwable throwable) {
    logger.error(marker, s, throwable);
  }
}
/* Copyright (c) 2001-2011, The HSQL Development Group * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the HSQL Development Group nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG, * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.hsqldb; import java.util.Vector; import org.hsqldb.error.Error; import org.hsqldb.error.ErrorCode; import org.hsqldb.lib.FileUtil; import org.hsqldb.lib.HashMap; import org.hsqldb.lib.HashSet; import org.hsqldb.lib.HsqlTimer; import org.hsqldb.lib.IntKeyHashMap; import org.hsqldb.lib.Iterator; import org.hsqldb.persist.HsqlProperties; import org.hsqldb.server.Server; import org.hsqldb.server.ServerConstants; import org.hsqldb.store.ValuePool; /** * Handles initial attempts to connect to HSQLDB databases within the JVM * (or a classloader within the JVM). Opens the database if it is not open * or connects to it if it is already open. This allows the same database to * be used by different instances of Server and by direct connections.<p> * * Maintains a map of Server instances and notifies each server when its * database has shut down.<p> * * Maintains a reference to the timer used for file locks and logging.<p> * * @author Fred Toussi (fredt@users dot sourceforge.net) * @version 2.0.1 * @since 1.7.2 */ public class DatabaseManager { // Database and Server registry /** provides unique ID's for the Databases currently in registry */ private static int dbIDCounter; /** name to Database mapping for mem: databases */ static final HashMap memDatabaseMap = new HashMap(); /** File to Database mapping for file: databases */ static final HashMap fileDatabaseMap = new HashMap(); /** File to Database mapping for res: databases */ static final HashMap resDatabaseMap = new HashMap(); /** id number to Database for Databases currently in registry */ static final IntKeyHashMap databaseIDMap = new IntKeyHashMap(); /** * Returns a vector containing the URI (type + path) for all the databases. 
*/ public static Vector getDatabaseURIs() { Vector v = new Vector(); synchronized (databaseIDMap) { Iterator it = databaseIDMap.values().iterator(); while (it.hasNext()) { Database db = (Database) it.next(); v.addElement(db.getURI()); } } return v; } /** * Closes all the databases using the given mode.<p> * * CLOSEMODE_IMMEDIATELY = 1; * CLOSEMODE_NORMAL = 2; * CLOSEMODE_COMPACT = 3; * CLOSEMODE_SCRIPT = 4; */ public static void closeDatabases(int mode) { synchronized (databaseIDMap) { Iterator it = databaseIDMap.values().iterator(); while (it.hasNext()) { Database db = (Database) it.next(); try { db.close(mode); } catch (HsqlException e) {} } } } /** * Used by server to open a new session */ public static Session newSession(int dbID, String user, String password, String zoneString, int timeZoneSeconds) { Database db = null; synchronized (databaseIDMap) { db = (Database) databaseIDMap.get(dbID); } if (db == null) { return null; } Session session = db.connect(user, password, zoneString, timeZoneSeconds); session.isNetwork = true; return session; } /** * Used by in-process connections and by Servlet */ public static Session newSession(String type, String path, String user, String password, HsqlProperties props, String zoneString, int timeZoneSeconds) { Database db = getDatabase(type, path, props); if (db == null) { return null; } return db.connect(user, password, zoneString, timeZoneSeconds); } /** * Returns an existing session. Used with repeat HTTP connections * belonging to the same JDBC Conenction / HSQL Session pair. */ public static Session getSession(int dbId, long sessionId) { Database db = null; synchronized (databaseIDMap) { db = (Database) databaseIDMap.get(dbId); } return db == null ? 
null : db.sessionManager.getSession(sessionId); } /** * Used by server to open or create a database */ public static int getDatabase(String type, String path, Server server, HsqlProperties props) { Database db = getDatabase(type, path, props); registerServer(server, db); return db.databaseID; } public static Database getDatabase(int id) { return (Database) databaseIDMap.get(id); } public static void shutdownDatabases(Server server, int shutdownMode) { HashSet databases = (HashSet) serverMap.get(server); Database[] dbArray = new Database[databases.size()]; databases.toArray(dbArray); for (int i = 0; i < dbArray.length; i++) { dbArray[i].close(shutdownMode); } } /** * This has to be improved once a threading model is in place. * Current behaviour: * * Attempts to connect to different databases do not block. Two db's can * open simultaneously. * * Attempts to connect to a db while it is opening or closing will block * until the db is open or closed. At this point the db state is either * DATABASE_ONLINE (after db.open() has returned) which allows a new * connection to be made, or the state is DATABASE_SHUTDOWN which means * the db can be reopened for the new connection). * */ public static Database getDatabase(String type, String path, HsqlProperties props) { // If the (type, path) pair does not correspond to a registered // instance, then getDatabaseObject() returns a newly constructed // and registered Database instance. // The database state will be DATABASE_SHUTDOWN, // which means that the switch below will attempt to // open the database instance. 
Database db = getDatabaseObject(type, path, props); synchronized (db) { switch (db.getState()) { case Database.DATABASE_ONLINE : break; case Database.DATABASE_SHUTDOWN : // if the database was shutdown while this attempt // was waiting, add the database back to the registry if (lookupDatabaseObject(type, path) == null) { addDatabaseObject(type, path, db); } db.open(); break; // This state will currently not be reached as Database.Close() is // called while a lock is held on the database. // If we remove the lock from this method and a database is // being shutdown by a thread and in the meantime another thread // attempts to connect to the db. The threads could belong to // different server instances or be in-process. case Database.DATABASE_CLOSING : // this case will not be reached as the state is set and // cleared within the db.open() call above, which is called // from this synchronized block // it is here simply as a placeholder for future development case Database.DATABASE_OPENING : throw Error.error(ErrorCode.LOCK_FILE_ACQUISITION_FAILURE, ErrorCode.M_DatabaseManager_getDatabase); } } return db; } private static synchronized Database getDatabaseObject(String type, String path, HsqlProperties props) { Database db; String key = path; HashMap databaseMap; if (type == DatabaseURL.S_FILE) { databaseMap = fileDatabaseMap; key = filePathToKey(path); db = (Database) databaseMap.get(key); if (db == null) { if (databaseMap.size() > 0) { Iterator it = databaseMap.keySet().iterator(); while (it.hasNext()) { String current = (String) it.next(); if (key.equalsIgnoreCase(current)) { key = current; break; } } } } } else if (type == DatabaseURL.S_RES) { databaseMap = resDatabaseMap; } else if (type == DatabaseURL.S_MEM) { databaseMap = memDatabaseMap; } else { throw Error.runtimeError(ErrorCode.U_S0500, "DatabaseManager"); } db = (Database) databaseMap.get(key); if (db == null) { db = new Database(type, path, key, props); db.databaseID = dbIDCounter; synchronized 
(databaseIDMap) { databaseIDMap.put(dbIDCounter, db); dbIDCounter++; } databaseMap.put(key, db); } return db; } /** * Looks up database of a given type and path in the registry. Returns * null if there is none. */ public static synchronized Database lookupDatabaseObject(String type, String path) { Object key = path; HashMap databaseMap; if (type == DatabaseURL.S_FILE) { databaseMap = fileDatabaseMap; key = filePathToKey(path); } else if (type == DatabaseURL.S_RES) { databaseMap = resDatabaseMap; } else if (type == DatabaseURL.S_MEM) { databaseMap = memDatabaseMap; } else { throw (Error.runtimeError(ErrorCode.U_S0500, "DatabaseManager")); } return (Database) databaseMap.get(key); } /** * Adds a database to the registry. */ private static synchronized void addDatabaseObject(String type, String path, Database db) { Object key = path; HashMap databaseMap; if (type == DatabaseURL.S_FILE) { databaseMap = fileDatabaseMap; key = filePathToKey(path); } else if (type == DatabaseURL.S_RES) { databaseMap = resDatabaseMap; } else if (type == DatabaseURL.S_MEM) { databaseMap = memDatabaseMap; } else { throw Error.runtimeError(ErrorCode.U_S0500, "DatabaseManager"); } synchronized (databaseIDMap) { databaseIDMap.put(db.databaseID, db); } databaseMap.put(key, db); } /** * Removes the database from registry. 
*/ static void removeDatabase(Database database) { int dbID = database.databaseID; String type = database.getType(); String path = database.getPath(); Object key = path; HashMap databaseMap; notifyServers(database); if (type == DatabaseURL.S_FILE) { databaseMap = fileDatabaseMap; key = filePathToKey(path); } else if (type == DatabaseURL.S_RES) { databaseMap = resDatabaseMap; } else if (type == DatabaseURL.S_MEM) { databaseMap = memDatabaseMap; } else { throw (Error.runtimeError(ErrorCode.U_S0500, "DatabaseManager")); } boolean isEmpty = false; synchronized (databaseIDMap) { databaseIDMap.remove(dbID); isEmpty = databaseIDMap.isEmpty(); } synchronized (databaseMap) { databaseMap.remove(key); } if (isEmpty) { ValuePool.resetPool(); } } /** * Maintains a map of servers to sets of databases. * Servers register each of their databases. * When a database is shutdown, all the servers accessing it are notified. * The database is then removed form the sets for all servers and the * servers that have no other database are removed from the map. */ static HashMap serverMap = new HashMap(); /** * Deregisters a server completely. */ public static void deRegisterServer(Server server) { serverMap.remove(server); } /** * Deregisters a server as serving a given database. Not yet used. */ private static void deRegisterServer(Server server, Database db) { Iterator it = serverMap.values().iterator(); for (; it.hasNext(); ) { HashSet databases = (HashSet) it.next(); databases.remove(db); if (databases.isEmpty()) { it.remove(); } } } /** * Registers a server as serving a given database. */ private static void registerServer(Server server, Database db) { if (!serverMap.containsKey(server)) { serverMap.put(server, new HashSet()); } HashSet databases = (HashSet) serverMap.get(server); databases.add(db); } /** * Notifies all servers that serve the database that the database has been * shutdown. 
     */
    private static void notifyServers(Database db) {

        // NOTE(review): serverMap is read here without synchronization, unlike
        // the synchronized registry methods above - confirm callers hold the
        // class lock before invoking this from new code paths.
        Iterator it = serverMap.keySet().iterator();

        for (; it.hasNext(); ) {
            Server  server    = (Server) it.next();
            HashSet databases = (HashSet) serverMap.get(server);

            if (databases.contains(db)) {
                server.notify(ServerConstants.SC_DATABASE_SHUTDOWN,
                              db.databaseID);
            }
        }
    }

    /** Returns true if any registered Server is currently serving db. */
    static boolean isServerDB(Database db) {

        Iterator it = serverMap.keySet().iterator();

        for (; it.hasNext(); ) {
            Server  server    = (Server) it.next();
            HashSet databases = (HashSet) serverMap.get(server);

            if (databases.contains(db)) {
                return true;
            }
        }

        return false;
    }

    // Timer

    /** Shared timer used for file locks and logging (see class javadoc). */
    private static final HsqlTimer timer = new HsqlTimer();

    public static HsqlTimer getTimer() {
        return timer;
    }

    // converts file path to database lookup key, converting any
    // thrown exception to an HsqlException in the process
    // NOTE(review): despite the comment above, failures are swallowed and the
    // raw (non-canonical) path is returned as the key; no HsqlException is
    // thrown here.
    private static String filePathToKey(String path) {

        try {
            return FileUtil.getFileUtil().canonicalPath(path);
        } catch (Exception e) {
            return path;
        }
    }
}
package org.myrobotlab.service;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.myrobotlab.framework.Peers;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.Status;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.service.interfaces.RangeListener;
import org.slf4j.Logger;

/**
 * UltrasonicSensor - wraps an ultrasonic range finder (e.g. an SR04)
 * attached through an Arduino peer. Raw echo durations (microseconds)
 * arrive via {@link #onSensorData(int[])} and are converted to a range in
 * centimeters and republished through {@link #publishRange(Long)}.
 */
public class UltrasonicSensor extends Service implements RangeListener {

    private static final long serialVersionUID = 1L;

    public final static Logger log = LoggerFactory.getLogger(UltrasonicSensor.class);

    // sensor hardware types accepted by setType()
    public final Set<String> types = new HashSet<String>(Arrays.asList("SR04"));

    // number of range measurements published so far
    private int pings;

    // NOTE(review): the following five fields are never read or written by
    // this class - kept only for serialization compatibility.
    private long max;
    private long min;
    private int sampleRate;
    private int sensorMinCM;
    private int sensorMaxCM;

    // Arduino pin assignments; null until attach(...) has been called
    private Integer trigPin = null;
    private Integer echoPin = null;

    private String type = "SR04";

    // last range published, in centimeters; null until the first reading
    private Long lastRange;

    private transient Arduino arduino;

    /**
     * Declares the Arduino peer that {@link #startService()} starts.
     */
    public static Peers getPeers(String name) {
        Peers peers = new Peers(name);
        // put peer definitions in
        peers.put("arduino", "Arduino", "arduino");
        return peers;
    }

    public static void main(String[] args) {
        LoggingFactory.getInstance().configure();
        LoggingFactory.getInstance().setLevel(Level.INFO);

        UltrasonicSensor sr04 = (UltrasonicSensor) Runtime.start("sr04", "UltrasonicSensor");
        sr04.test();
        /*
         * GUIService gui = new GUIService("gui"); gui.startService();
         */
    }

    public UltrasonicSensor(String n) {
        super(n);
    }

    // Uber good - .. although this is "chained" versus star routing
    // Star routing would be routing from the Arduino directly to the Listener
    // The "chained" version takes 2 thread contexts :( .. but it has the
    // benefit of the "publishRange" method being affected by the Sensor
    // service e.g. change units, sample rate, etc
    public void addRangeListener(Service service) {
        addListener("publishRange", service.getName(), "onRange", Long.class);
    }

    // ---- part of interfaces begin -----

    /**
     * Connects the Arduino peer to the given serial port and registers this
     * sensor's trigger/echo pins with it.
     *
     * NOTE(review): assumes startService() has already run so that the
     * arduino peer is non-null - TODO confirm lifecycle.
     *
     * @return true if the Arduino accepted the sensor attach
     * @throws IOException on serial connection failure
     */
    public boolean attach(String port, int trigPin, int echoPin) throws IOException {
        this.trigPin = trigPin;
        this.echoPin = echoPin;
        this.arduino.connect(port);
        return arduino.sensorAttach(this) != -1;
    }

    // FIXME - should be MicroController Interface ..
    public Arduino getArduino() {
        return arduino;
    }

    @Override
    public String[] getCategories() {
        return new String[] { "sensor" };
    }

    @Override
    public String getDescription() {
        // BUGFIX: was the copy-paste placeholder "used as a general template"
        return "ultrasonic range finder sensor (e.g. SR04)";
    }

    /** @return the echo pin; NPE if called before attach(...) sets it */
    public int getEchoPin() {
        return echoPin;
    }

    /** @return last published range in cm; NPE before the first reading */
    public long getLastRange() {
        return lastRange;
    }

    /** @return the trigger pin; NPE if called before attach(...) sets it */
    public int getTriggerPin() {
        return trigPin;
    }

    @Override
    public void onRange(Long range) {
        log.info(String.format("RANGE: %d", range));
    }

    /** Single blocking ping with a default 10 unit timeout. */
    public long ping() {
        return ping(10);
    }

    /** Single blocking ping; returns the raw pulse duration. */
    public long ping(int timeout) {
        return arduino.pulseIn(trigPin, echoPin, timeout);
    }

    /*
     * FIXME !!! IMPORTANT PUT IN INTERFACE & REMOVE SELF FROM ARDUINO !!!
     */
    /**
     * Converts a raw echo duration (microseconds) to a range in centimeters
     * (round-trip speed of sound is ~58 us per cm) and publishes it.
     * TODO - add TimeUnits - cm
     */
    public Long publishRange(Long duration) {
        ++pings;
        lastRange = duration / 58;
        return lastRange;
    }

    /** Blocking single measurement, returned directly in centimeters. */
    public long range() {
        return arduino.pulseIn(trigPin, echoPin) / 58;
    }

    /**
     * Sets the sensor hardware type.
     *
     * @return true if the type is one of the supported {@link #types}
     */
    public boolean setType(String type) {
        if (types.contains(type)) {
            this.type = type;
            return true;
        }
        return false;
    }

    public void startRanging() {
        startRanging(10); // 10000 uS = 10 ms
    }

    public void startRanging(int timeoutMS) {
        arduino.sensorPollingStart(getName(), timeoutMS);
    }

    // ---- part of interfaces end -----

    @Override
    public void startService() {
        super.startService();
        arduino = (Arduino) startPeer("arduino");
    }

    public void stopRanging() {
        arduino.sensorPollingStop(getName());
    }

    /**
     * FIXME - needs to be in a Sensor Interface
     * THIS IS A GOOD METHOD ! - ITS THE RAW DATA ARRAY IN A NON PUBLISHED
     * DIRECT CALL BACK FROM ARDUINO (through an interface soon)
     * The "business" logic to make this meaningful is rightfully in this class
     *
     * @param data raw bytes from the Arduino; first 4 bytes encode the
     *             echo duration in microseconds
     */
    public void onSensorData(int[] data) {
        long duration = Serial.bytesToInt(data, 0, 4);
        invoke("publishRange", duration);
    }

    // TODO - Virtual Serial test - do a record of tx & rx on a real sensor
    // then send the data - IT MUST BE INTERLEAVED
    @Override
    public Status test() {
        Status status = Status.info("starting %s %s test", getName(), getType());
        try {
            // FIXME - there has to be a properties method to configure
            // localized testing
            boolean useGUI = true;
            boolean useVirtualPorts = false;
            int triggerPin = 6;
            int echoPin = 6;
            String port = "COM18";

            UltrasonicSensor sr04 = (UltrasonicSensor) Runtime.start(getName(), "UltrasonicSensor");
            // Servo servo = (Servo) Runtime.start("servo", "Servo");

            // && depending on headless
            if (useGUI) {
                Runtime.start("gui", "GUIService");
            }

            Serial uart = null;
            if (useVirtualPorts) {
                uart = arduino.getSerial();
                uart.record();
            }

            // nice simple interface
            sr04.attach(port, triggerPin, echoPin);

            // TODO - VIRTUAL NULL MODEM WITH TEST DATA !!!!
            // RECORD FROM ACTUAL SENSOR !!!
            // sr04.arduino.setLoadTimingEnabled(true);

            sr04.addRangeListener(this);
            sr04.startRanging();
            log.info("here");
            sr04.stopRanging();

            // BUGFIX: the original returned early when useVirtualPorts was
            // true (leaving the recording open) and then unconditionally
            // called uart.stopRecording() on the non-virtual path, where
            // uart is always null - a guaranteed NullPointerException.
            // Only stop a recording that was actually started.
            if (uart != null) {
                uart.stopRecording();
            }

            sr04.arduino.setLoadTimingEnabled(true);
            sr04.arduino.setLoadTimingEnabled(false);

            /*
             * servo.attach("sr04.arduino", 4); servo.setSpeed(0.99f);
             * servo.setEventsEnabled(true); servo.setEventsEnabled(false);
             * servo.moveTo(30); servo.setEventsEnabled(true);
             * sr04.startRanging(); log.info("here"); servo.moveTo(130);
             * sr04.stopRanging();
             *
             * // sensor.attach(arduino, "COM15", 7, 8);
             *
             * for (int i = 1; i < 200; i += 10) { sr04.startRanging(i);
             * servo.setSpeed(0.8f); servo.moveTo(30); servo.moveTo(175);
             * sr04.stopRanging(); }
             */

            sr04.startRanging(5);
            sr04.startRanging(10);
            sr04.startRanging();
            sr04.stopRanging();

        } catch (Exception e) {
            status.addError(e);
        }
        return status;
    }
}
/*
 * Copyright 2019 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.apiv7.admin.pipelineconfig;

import com.thoughtworks.go.api.ApiController;
import com.thoughtworks.go.api.ApiVersion;
import com.thoughtworks.go.api.CrudController;
import com.thoughtworks.go.api.base.OutputWriter;
import com.thoughtworks.go.api.representers.JsonReader;
import com.thoughtworks.go.api.spring.ApiAuthenticationHelper;
import com.thoughtworks.go.api.util.GsonTransformer;
import com.thoughtworks.go.apiv7.admin.pipelineconfig.representers.PipelineConfigRepresenter;
import com.thoughtworks.go.apiv7.admin.shared.representers.stages.ConfigHelperOptions;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.config.exceptions.EntityType;
import com.thoughtworks.go.config.exceptions.HttpException;
import com.thoughtworks.go.config.materials.PasswordDeserializer;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.newsecurity.utils.SessionUtils;
import com.thoughtworks.go.server.service.EntityHashingService;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.PipelineConfigService;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.spark.Routes;
import com.thoughtworks.go.spark.spring.SparkSpringController;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import spark.Request;
import spark.Response;

import java.io.IOException;
import java.util.function.Consumer;

import static com.thoughtworks.go.api.util.HaltApiResponses.*;
import static spark.Spark.*;

/**
 * Version 7 of the admin API controller exposing CRUD endpoints for a single
 * pipeline's configuration under {@code Routes.PipelineConfig.BASE}.
 *
 * Validation failures and authorization problems are signalled by throwing
 * halt exceptions (the {@code haltBecause*} helpers), which short-circuit the
 * handler; only the happy path reaches the service layer.
 */
@Component
public class PipelineConfigControllerV7 extends ApiController implements SparkSpringController, CrudController<PipelineConfig> {
    private final PipelineConfigService pipelineConfigService;
    private final ApiAuthenticationHelper apiAuthenticationHelper;
    private final EntityHashingService entityHashingService;
    private final PasswordDeserializer passwordDeserializer;
    private GoConfigService goConfigService;

    @Autowired
    public PipelineConfigControllerV7(PipelineConfigService pipelineConfigService,
                                      ApiAuthenticationHelper apiAuthenticationHelper,
                                      EntityHashingService entityHashingService,
                                      PasswordDeserializer passwordDeserializer,
                                      GoConfigService goConfigService) {
        super(ApiVersion.v7);
        this.pipelineConfigService = pipelineConfigService;
        this.apiAuthenticationHelper = apiAuthenticationHelper;
        this.entityHashingService = entityHashingService;
        this.passwordDeserializer = passwordDeserializer;
        this.goConfigService = goConfigService;
    }

    @Override
    public String controllerBasePath() {
        return Routes.PipelineConfig.BASE;
    }

    @Override
    public void setupRoutes() {
        path(controllerPath(), () -> {
            // Filter registration order matters: content-type negotiation
            // runs first, then authorization, before any handler executes.
            before("", mimeType, this::setContentType);
            before("/*", mimeType, this::setContentType);
            before("", mimeType, this::verifyContentType);
            before("/*", mimeType, this::verifyContentType);
            // POST on the collection requires pipeline-creation rights;
            // the named-pipeline routes require group-admin rights.
            before("", mimeType, apiAuthenticationHelper::checkPipelineCreationAuthorizationAnd403);
            before(Routes.PipelineConfig.NAME, mimeType, apiAuthenticationHelper::checkPipelineGroupAdminUserAnd403);

            post("", mimeType, this::create);

            get(Routes.PipelineConfig.NAME, mimeType, this::show);
            put(Routes.PipelineConfig.NAME, mimeType, this::update);
            delete(Routes.PipelineConfig.NAME, mimeType, this::destroy);

            // Map domain HttpExceptions to proper HTTP error responses.
            exception(HttpException.class, this::httpException);
        });
    }

    /**
     * DELETE handler: removes the named pipeline. Halts if the pipeline is
     * defined in a remote config repository (cannot be edited via API).
     */
    public String destroy(Request req, Response res) throws IOException {
        PipelineConfig existingPipelineConfig = fetchEntityFromConfig(req.params("pipeline_name"));
        haltIfPipelineIsDefinedRemotely(existingPipelineConfig);
        HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
        pipelineConfigService.deletePipelineConfig(SessionUtils.currentUsername(), existingPipelineConfig, result);

        return renderHTTPOperationResult(result, req, res);
    }

    /**
     * PUT handler: replaces the named pipeline's configuration.
     * Rejects renames, remotely-defined pipelines, and stale requests
     * (If-Match etag mismatch) before delegating to the service.
     */
    public String update(Request req, Response res) {
        PipelineConfig existingPipelineConfig = fetchEntityFromConfig(req.params("pipeline_name"));
        PipelineConfig pipelineConfigFromRequest = buildEntityFromRequestBody(req);

        if (isRenameAttempt(existingPipelineConfig, pipelineConfigFromRequest)) {
            throw haltBecauseRenameOfEntityIsNotSupported("pipelines");
        }

        haltIfPipelineIsDefinedRemotely(existingPipelineConfig);
        if (isPutRequestStale(req, existingPipelineConfig)) {
            throw haltBecauseEtagDoesNotMatch("pipeline", existingPipelineConfig.getName());
        }

        HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
        // Pass the etag of the EXISTING config so the service can do its own
        // optimistic-lock check against the request.
        pipelineConfigService.updatePipelineConfig(SessionUtils.currentUsername(), pipelineConfigFromRequest, etagFor(existingPipelineConfig), result);

        return handleCreateOrUpdateResponse(req, res, pipelineConfigFromRequest, result);
    }

    /**
     * POST handler: creates a new pipeline in the group named in the request
     * body. Halts if a pipeline with the same name already exists or if the
     * group is missing/blank.
     */
    public String create(Request req, Response res) {
        PipelineConfig pipelineConfigFromRequest = buildEntityFromRequestBody(req);

        haltIfEntityBySameNameInRequestExists(pipelineConfigFromRequest);
        String group = getOrHaltForGroupName(req);

        HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
        Username userName = SessionUtils.currentUsername();
        pipelineConfigService.createPipelineConfig(userName, pipelineConfigFromRequest, result, group);

        return handleCreateOrUpdateResponse(req, res, pipelineConfigFromRequest, result);
    }

    /**
     * GET handler: renders the named pipeline, honouring conditional GETs
     * (returns 304 when the client's etag is still fresh).
     */
    public String show(Request req, Response res) throws IOException {
        PipelineConfig pipelineConfig = fetchEntityFromConfig(req.params("pipeline_name"));

        if (isGetOrHeadRequestFresh(req, pipelineConfig)) {
            return notModified(res);
        } else {
            setEtagHeader(pipelineConfig, res);
            return writerForTopLevelObject(req, res, writer -> PipelineConfigRepresenter.toJSON(writer, pipelineConfig));
        }
    }

    @Override
    public PipelineConfig doFetchEntityFromConfig(String name) {
        return pipelineConfigService.getPipelineConfig(name);
    }

    /**
     * Deserializes a PipelineConfig from the request body. PUT bodies carry
     * the pipeline at the top level; POST bodies nest it under "pipeline".
     */
    @Override
    public PipelineConfig buildEntityFromRequestBody(Request req) {
        ConfigHelperOptions options = new ConfigHelperOptions(goConfigService.getCurrentConfig(), passwordDeserializer);
        JsonReader jsonReader = GsonTransformer.getInstance().jsonReaderFrom(req.body());
        if ("PUT".equalsIgnoreCase(req.requestMethod())) {
            return PipelineConfigRepresenter.fromJSON(jsonReader, options);
        }
        return PipelineConfigRepresenter.fromJSON(jsonReader.readJsonObject("pipeline"), options);
    }

    @Override
    public Consumer<OutputWriter> jsonWriter(PipelineConfig pipelineConfig) {
        return writer -> PipelineConfigRepresenter.toJSON(writer, pipelineConfig);
    }

    /** Etag is an MD5 digest of the entity, provided by EntityHashingService. */
    @Override
    public String etagFor(PipelineConfig pipelineConfig) {
        return entityHashingService.md5ForEntity(pipelineConfig);
    }

    @Override
    public EntityType getEntityType() {
        return EntityType.Pipeline;
    }

    // Pipelines defined in remote config repos are read-only through this API.
    private void haltIfPipelineIsDefinedRemotely(PipelineConfig existingPipelineConfig) {
        if (!existingPipelineConfig.isLocal()) {
            throw haltBecauseOfReason(String.format("Can not operate on pipeline '%s' as it is defined remotely in '%s'.", existingPipelineConfig.name(), existingPipelineConfig.getOrigin().displayName()));
        }
    }

    private boolean isRenameAttempt(PipelineConfig existingPipelineConfig, PipelineConfig pipelineConfigFromRequest) {
        return !existingPipelineConfig.getName().equals(pipelineConfigFromRequest.getName());
    }

    // Extracts the target pipeline group from the POST body; halts with an
    // explanatory message when it is absent or blank.
    private String getOrHaltForGroupName(Request req) {
        JsonReader jsonReader = GsonTransformer.getInstance().jsonReaderFrom(req.body());
        if (!jsonReader.hasJsonObject("group") || StringUtils.isBlank(jsonReader.getString("group"))) {
            throw haltBecauseOfReason("Pipeline group must be specified for creating a pipeline.");
        }
        return jsonReader.getString("group");
    }

    // Attaches a name-conflict error to the entity before halting so the
    // rendered error payload explains which field clashed.
    private void haltIfEntityBySameNameInRequestExists(PipelineConfig pipelineConfig) {
        if (pipelineConfigService.getPipelineConfig(pipelineConfig.name().toString()) == null) {
            return;
        }
        pipelineConfig.addError("name", EntityType.Pipeline.alreadyExists(pipelineConfig.name()));
        throw haltBecauseEntityAlreadyExists(jsonWriter(pipelineConfig), "pipeline", pipelineConfig.getName());
    }
}
/*
 * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.wso2.carbon.identity.oauth.scope.endpoint.impl;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.oauth.scope.endpoint.ScopesApiService;
import org.wso2.carbon.identity.oauth.scope.endpoint.dto.ScopeDTO;
import org.wso2.carbon.identity.oauth.scope.endpoint.dto.ScopeToUpdateDTO;
import org.wso2.carbon.identity.oauth.scope.endpoint.util.ScopeUtils;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2ScopeClientException;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2ScopeException;
import org.wso2.carbon.identity.oauth2.Oauth2ScopeConstants;
import org.wso2.carbon.identity.oauth2.bean.Scope;

import javax.ws.rs.core.Response;
import java.util.Set;

/**
 * ScopesApiServiceImpl is used to handling scope bindings.
 *
 * Error-handling convention used throughout: client errors map to 404/409/400
 * depending on the error code, anything else maps to 500.
 * NOTE(review): control flow assumes ScopeUtils.handleErrorResponse throws a
 * WebApplicationException (it never returns normally); otherwise the trailing
 * success response would be sent after an error - confirm against ScopeUtils.
 */
public class ScopesApiServiceImpl extends ScopesApiService {
    private static final Log LOG = LogFactory.getLog(ScopesApiServiceImpl.class);

    /**
     * Register a scope with the bindings
     *
     * @param scope details of the scope to be registered
     * @return Response with the status of the registration
     */
    @Override
    public Response registerScope(ScopeDTO scope) {
        Scope registeredScope = null;
        try {
            registeredScope = ScopeUtils.getOAuth2ScopeService().registerScope(ScopeUtils.getScope(scope));
        } catch (IdentityOAuth2ScopeClientException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Client Error while registering scope \n" + scope.toString(), e);
            }
            // Duplicate scope name -> 409 Conflict; any other client error -> 400.
            if (Oauth2ScopeConstants.ErrorMessages.ERROR_CODE_CONFLICT_REQUEST_EXISTING_SCOPE.getCode()
                    .equals(e.getErrorCode())) {
                ScopeUtils.handleErrorResponse(Response.Status.CONFLICT,
                        Response.Status.CONFLICT.getReasonPhrase(), e, false, LOG);
            } else {
                ScopeUtils.handleErrorResponse(Response.Status.BAD_REQUEST,
                        Response.Status.BAD_REQUEST.getReasonPhrase(), e, false, LOG);
            }
        } catch (IdentityOAuth2ScopeException e) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), e, true, LOG);
        } catch (Throwable throwable) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), throwable, true, LOG);
        }
        // NOTE(review): returns the raw Scope bean while getScope/updateScope
        // return a ScopeDTO - possibly an inconsistency; verify intended payload.
        return Response.status(Response.Status.CREATED).entity(registeredScope).build();
    }

    /**
     * Retrieve the scope of the given scope name
     *
     * @param name Name of the scope which need to get retrieved
     * @return Response with the retrieved scope/ retrieval status
     */
    @Override
    public Response getScope(String name) {
        Scope scope = null;
        try {
            scope = ScopeUtils.getOAuth2ScopeService().getScope(name);
        } catch (IdentityOAuth2ScopeClientException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Client Error while getting scope " + name, e);
            }
            // Unknown scope name -> 404; any other client error -> 400.
            if (Oauth2ScopeConstants.ErrorMessages.ERROR_CODE_NOT_FOUND_SCOPE.getCode().equals(e.getErrorCode())) {
                ScopeUtils.handleErrorResponse(Response.Status.NOT_FOUND,
                        Response.Status.NOT_FOUND.getReasonPhrase(), e, false, LOG);
            } else {
                ScopeUtils.handleErrorResponse(Response.Status.BAD_REQUEST,
                        Response.Status.BAD_REQUEST.getReasonPhrase(), e, false, LOG);
            }
        } catch (IdentityOAuth2ScopeException e) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), e, true, LOG);
        } catch (Throwable throwable) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), throwable, true, LOG);
        }
        return Response.status(Response.Status.OK).entity(ScopeUtils.getScopeDTO(scope)).build();
    }

    /**
     * Retrieve the available scope list
     *
     * @param startIndex Start Index of the result set to enforce pagination
     * @param count      Number of elements in the result set to enforce pagination
     * @return Response with the retrieved scopes/ retrieval status
     */
    @Override
    public Response getScopes(Integer startIndex, Integer count) {
        Set<Scope> scopes = null;
        try {
            scopes = ScopeUtils.getOAuth2ScopeService().getScopes(startIndex, count);
        } catch (IdentityOAuth2ScopeException e) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), e, true, LOG);
        } catch (Throwable throwable) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), throwable, true, LOG);
        }
        return Response.status(Response.Status.OK).entity(ScopeUtils.getScopeDTOs(scopes)).build();
    }

    /**
     * Check the existence of a scope
     *
     * @param name Name of the scope
     * @return Response with the indication whether the scope exists or not
     */
    @Override
    public Response isScopeExists(String name) {
        boolean isScopeExists = false;
        try {
            isScopeExists = ScopeUtils.getOAuth2ScopeService().isScopeExists(name);
        } catch (IdentityOAuth2ScopeClientException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Client Error while getting scope existence of scope name " + name, e);
            }
            ScopeUtils.handleErrorResponse(Response.Status.BAD_REQUEST,
                    Response.Status.BAD_REQUEST.getReasonPhrase(), e, false, LOG);
        } catch (IdentityOAuth2ScopeException e) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), e, true, LOG);
        } catch (Throwable throwable) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), throwable, true, LOG);
        }
        // Existence is expressed purely via status code: 200 = exists, 404 = not.
        if (isScopeExists) {
            return Response.status(Response.Status.OK).build();
        }
        return Response.status(Response.Status.NOT_FOUND).build();
    }

    /**
     * Update a scope
     *
     * @param scope details of the scope to be updated
     * @param name  name of the scope to be updated
     * @return Response with the updated scope/ update status
     */
    @Override
    public Response updateScope(ScopeToUpdateDTO scope, String name) {
        ScopeDTO updatedScope = null;
        try {
            updatedScope = ScopeUtils.getScopeDTO(ScopeUtils.getOAuth2ScopeService()
                    .updateScope(ScopeUtils.getUpdatedScope(scope, name)));
        } catch (IdentityOAuth2ScopeClientException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Client Error while updating scope \n" + scope.toString(), e);
            }
            // Unknown scope name -> 404; any other client error -> 400.
            if (Oauth2ScopeConstants.ErrorMessages.ERROR_CODE_NOT_FOUND_SCOPE.getCode()
                    .equals(e.getErrorCode())) {
                ScopeUtils.handleErrorResponse(Response.Status.NOT_FOUND,
                        Response.Status.NOT_FOUND.getReasonPhrase(), e, false, LOG);
            } else {
                ScopeUtils.handleErrorResponse(Response.Status.BAD_REQUEST,
                        Response.Status.BAD_REQUEST.getReasonPhrase(), e, false, LOG);
            }
        } catch (IdentityOAuth2ScopeException e) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), e, true, LOG);
        } catch (Throwable throwable) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), throwable, true, LOG);
        }
        return Response.status(Response.Status.OK).entity(updatedScope).build();
    }

    /**
     * Delete the scope for the given scope name
     *
     * @param name Name of the scope which need to get deleted
     * @return Response with the status of scope deletion
     */
    @Override
    public Response deleteScope(String name) {
        try {
            ScopeUtils.getOAuth2ScopeService().deleteScope(name);
        } catch (IdentityOAuth2ScopeClientException e) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Client Error while deleting scope " + name, e);
            }
            // Unknown scope name -> 404; any other client error -> 400.
            if (Oauth2ScopeConstants.ErrorMessages.ERROR_CODE_NOT_FOUND_SCOPE.getCode()
                    .equals(e.getErrorCode())) {
                ScopeUtils.handleErrorResponse(Response.Status.NOT_FOUND,
                        Response.Status.NOT_FOUND.getReasonPhrase(), e, false, LOG);
            } else {
                ScopeUtils.handleErrorResponse(Response.Status.BAD_REQUEST,
                        Response.Status.BAD_REQUEST.getReasonPhrase(), e, false, LOG);
            }
        } catch (IdentityOAuth2ScopeException e) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), e, true, LOG);
        } catch (Throwable throwable) {
            ScopeUtils.handleErrorResponse(Response.Status.INTERNAL_SERVER_ERROR,
                    Response.Status.INTERNAL_SERVER_ERROR.getReasonPhrase(), throwable, true, LOG);
        }
        return Response.status(Response.Status.OK).build();
    }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticache.model;

import java.io.Serializable;

/**
 * <p>
 * Provides all of the details about a particular cache engine version.
 * </p>
 */
public class CacheEngineVersion implements Serializable, Cloneable {

    /** The name of the cache engine. */
    private String engine;

    /** The version number of the cache engine. */
    private String engineVersion;

    /** The cache parameter group family associated with this cache engine. */
    private String cacheParameterGroupFamily;

    /** The description of the cache engine. */
    private String cacheEngineDescription;

    /** The description of the cache engine version. */
    private String cacheEngineVersionDescription;

    /** @return the name of the cache engine. */
    public String getEngine() {
        return engine;
    }

    /** @param engine the name of the cache engine. */
    public void setEngine(String engine) {
        this.engine = engine;
    }

    /**
     * Fluent variant of {@link #setEngine(String)}.
     *
     * @return this object, for call chaining.
     */
    public CacheEngineVersion withEngine(String engine) {
        setEngine(engine);
        return this;
    }

    /** @return the version number of the cache engine. */
    public String getEngineVersion() {
        return engineVersion;
    }

    /** @param engineVersion the version number of the cache engine. */
    public void setEngineVersion(String engineVersion) {
        this.engineVersion = engineVersion;
    }

    /**
     * Fluent variant of {@link #setEngineVersion(String)}.
     *
     * @return this object, for call chaining.
     */
    public CacheEngineVersion withEngineVersion(String engineVersion) {
        setEngineVersion(engineVersion);
        return this;
    }

    /** @return the cache parameter group family for this engine. */
    public String getCacheParameterGroupFamily() {
        return cacheParameterGroupFamily;
    }

    /** @param cacheParameterGroupFamily the parameter group family name. */
    public void setCacheParameterGroupFamily(String cacheParameterGroupFamily) {
        this.cacheParameterGroupFamily = cacheParameterGroupFamily;
    }

    /**
     * Fluent variant of {@link #setCacheParameterGroupFamily(String)}.
     *
     * @return this object, for call chaining.
     */
    public CacheEngineVersion withCacheParameterGroupFamily(String cacheParameterGroupFamily) {
        setCacheParameterGroupFamily(cacheParameterGroupFamily);
        return this;
    }

    /** @return the description of the cache engine. */
    public String getCacheEngineDescription() {
        return cacheEngineDescription;
    }

    /** @param cacheEngineDescription the description of the cache engine. */
    public void setCacheEngineDescription(String cacheEngineDescription) {
        this.cacheEngineDescription = cacheEngineDescription;
    }

    /**
     * Fluent variant of {@link #setCacheEngineDescription(String)}.
     *
     * @return this object, for call chaining.
     */
    public CacheEngineVersion withCacheEngineDescription(String cacheEngineDescription) {
        setCacheEngineDescription(cacheEngineDescription);
        return this;
    }

    /** @return the description of the cache engine version. */
    public String getCacheEngineVersionDescription() {
        return cacheEngineVersionDescription;
    }

    /** @param cacheEngineVersionDescription the description of this version. */
    public void setCacheEngineVersionDescription(String cacheEngineVersionDescription) {
        this.cacheEngineVersionDescription = cacheEngineVersionDescription;
    }

    /**
     * Fluent variant of {@link #setCacheEngineVersionDescription(String)}.
     *
     * @return this object, for call chaining.
     */
    public CacheEngineVersion withCacheEngineVersionDescription(String cacheEngineVersionDescription) {
        setCacheEngineVersionDescription(cacheEngineVersionDescription);
        return this;
    }

    // Null-safe equality helper used by equals().
    private static boolean eq(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    // Null-safe hash helper used by hashCode().
    private static int hash(Object o) {
        return (o == null) ? 0 : o.hashCode();
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null fields are included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getEngine() != null) {
            sb.append("Engine: ").append(getEngine()).append(",");
        }
        if (getEngineVersion() != null) {
            sb.append("EngineVersion: ").append(getEngineVersion()).append(",");
        }
        if (getCacheParameterGroupFamily() != null) {
            sb.append("CacheParameterGroupFamily: ").append(getCacheParameterGroupFamily()).append(",");
        }
        if (getCacheEngineDescription() != null) {
            sb.append("CacheEngineDescription: ").append(getCacheEngineDescription()).append(",");
        }
        if (getCacheEngineVersionDescription() != null) {
            sb.append("CacheEngineVersionDescription: ").append(getCacheEngineVersionDescription());
        }
        return sb.append("}").toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + hash(getEngine());
        hashCode = prime * hashCode + hash(getEngineVersion());
        hashCode = prime * hashCode + hash(getCacheParameterGroupFamily());
        hashCode = prime * hashCode + hash(getCacheEngineDescription());
        hashCode = prime * hashCode + hash(getCacheEngineVersionDescription());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof CacheEngineVersion)) {
            return false;
        }

        CacheEngineVersion other = (CacheEngineVersion) obj;
        return eq(getEngine(), other.getEngine())
                && eq(getEngineVersion(), other.getEngineVersion())
                && eq(getCacheParameterGroupFamily(), other.getCacheParameterGroupFamily())
                && eq(getCacheEngineDescription(), other.getCacheEngineDescription())
                && eq(getCacheEngineVersionDescription(), other.getCacheEngineVersionDescription());
    }

    @Override
    public CacheEngineVersion clone() {
        try {
            return (CacheEngineVersion) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }
    }
}
package org.polyforms.delegation.builder.support;

import java.lang.reflect.Method;

import junit.framework.Assert;

import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
import org.polyforms.delegation.builder.DelegationBuilder;
import org.polyforms.delegation.builder.DelegationRegistry;
import org.polyforms.parameter.ArgumentProvider;

/**
 * Unit tests for {@code DefaultDelegationBuilder}.
 *
 * Each test drives the builder's fluent API against a mocked
 * {@link DelegationRegistry} using EasyMock's record/replay/verify cycle:
 * expected registry interactions are recorded first, {@code replay} switches
 * the mock to verification mode, and {@code verify} asserts that exactly the
 * recorded calls happened.  The nested {@code DomainDelegator} /
 * {@code DomainObject} types at the bottom are the fixture classes the
 * delegations are built between.
 */
public class DefaultDelegationBuilderTest {
    private DelegationRegistry delegationRegistry;
    private DelegationBuilder delegationBuilder;

    @Before
    public void setUp() {
        delegationRegistry = EasyMock.createMock(DelegationRegistry.class);
        delegationBuilder = new DefaultDelegationBuilder(delegationRegistry);
    }

    /**
     * With no explicit method selection, registerDelegations() registers every
     * abstract delegator method not already present in the registry: "get" is
     * reported absent (registered), "set" present (skipped).
     */
    @Test
    public void delegateAllToDomain() throws NoSuchMethodException {
        final Method delegatorMethod = DomainDelegator.class.getMethod("get", new Class<?>[] { DomainObject.class });
        delegationRegistry.contains(DomainDelegator.class, delegatorMethod);
        EasyMock.expectLastCall().andReturn(false);
        delegationRegistry.contains(DomainDelegator.class,
                DomainDelegator.class.getMethod("set", new Class<?>[] { DomainObject.class, String.class }));
        EasyMock.expectLastCall().andReturn(true);
        delegationRegistry.register(new SimpleDelegation(DomainDelegator.class, delegatorMethod));
        EasyMock.replay(delegationRegistry);
        delegationBuilder.delegateFrom(DomainDelegator.class);
        delegationBuilder.registerDelegations();
        EasyMock.verify(delegationRegistry);
    }

    /**
     * An explicitly configured delegation (set -> set with an ArgumentProvider
     * and an exception mapping) overrides the registry's "already contained"
     * answer, so both get and set end up registered.
     */
    @Test
    public void delegateAllToDomainWithOverride() throws NoSuchMethodException {
        final Method getMethod = DomainDelegator.class.getMethod("get", new Class<?>[] { DomainObject.class });
        final Method setMethod = DomainDelegator.class.getMethod("set", new Class<?>[] { DomainObject.class, String.class });
        delegationRegistry.contains(DomainDelegator.class, getMethod);
        EasyMock.expectLastCall().andReturn(false);
        delegationRegistry.contains(DomainDelegator.class, setMethod);
        EasyMock.expectLastCall().andReturn(true);
        final ArgumentProvider argumentProvider = EasyMock.createMock(ArgumentProvider.class);
        argumentProvider.validate(setMethod);
        delegationRegistry.register(new SimpleDelegation(DomainDelegator.class, getMethod));
        delegationRegistry.register(new SimpleDelegation(DomainDelegator.class, setMethod));
        EasyMock.replay(delegationRegistry, argumentProvider);
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        // No delegatee method has been invoked yet, so there is nothing to delegate to.
        Assert.assertNull(delegationBuilder.delegate());
        domainDelegator.set(null, null);
        final DomainObject domainObject = delegationBuilder.delegate();
        delegationBuilder.parameter(argumentProvider);
        domainObject.set(null);
        delegationBuilder.map(RuntimeException.class, Exception.class);
        delegationBuilder.registerDelegations();
        EasyMock.verify(delegationRegistry, argumentProvider);
    }

    /**
     * delegateTo(...) plus withName(...) targets a named bean; invoking the
     * delegator method selects which method the delegation is built for.
     */
    @Test
    public void delegateToBean() throws NoSuchMethodException {
        final Method delegatorMethod = DomainDelegator.class.getMethod("get", new Class<?>[] { DomainObject.class });
        delegationRegistry.register(new SimpleDelegation(DomainDelegator.class, delegatorMethod));
        EasyMock.replay(delegationRegistry);
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        delegationBuilder.delegateTo(DomainObject.class);
        delegationBuilder.withName("domainObject");
        Assert.assertEquals(0, domainDelegator.get(null));
        Assert.assertNotNull(delegationBuilder.delegate());
        delegationBuilder.registerDelegations();
        EasyMock.verify(delegationRegistry);
    }

    /** Delegator and delegatee parameters pair up by type (int/String map onto join's String/int). */
    @Test
    public void delegateWithParameterMatching() throws NoSuchMethodException {
        final Method delegatorMethod = DomainDelegator.class.getMethod("parameterMatch",
                new Class<?>[] { DomainObject.class, int.class, String.class });
        delegationRegistry.register(new SimpleDelegation(DomainDelegator.class, delegatorMethod));
        EasyMock.replay(delegationRegistry);
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        domainDelegator.parameterMatch(null, 0, null);
        final DomainObject domainObject = delegationBuilder.delegate();
        domainObject.join(null, 0);
        delegationBuilder.registerDelegations();
        EasyMock.verify(delegationRegistry);
    }

    /** Two parameters of the same type (String, String) still produce a registrable delegation. */
    @Test
    public void delegateWithDuplicateParameterTypes() throws NoSuchMethodException {
        final Method delegatorMethod = DomainDelegator.class.getMethod("parameterNotMatch",
                new Class<?>[] { DomainObject.class, String.class, String.class });
        delegationRegistry.register(new SimpleDelegation(DomainDelegator.class, delegatorMethod));
        EasyMock.replay(delegationRegistry);
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        domainDelegator.parameterNotMatch(null, null, null);
        final DomainObject domainObject = delegationBuilder.delegate();
        domainObject.join(null, 0);
        delegationBuilder.registerDelegations();
        EasyMock.verify(delegationRegistry);
    }

    /** Non-matching parameter types (long vs. int) also register — matching is not strict. */
    @Test
    public void delegateWithParameterNotMatching() throws NoSuchMethodException {
        final Method delegatorMethod = DomainDelegator.class.getMethod("parameterNotMatch",
                new Class<?>[] { DomainObject.class, long.class, String.class });
        delegationRegistry.register(new SimpleDelegation(DomainDelegator.class, delegatorMethod));
        EasyMock.replay(delegationRegistry);
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        domainDelegator.parameterNotMatch(null, 0, null);
        final DomainObject domainObject = delegationBuilder.delegate();
        domainObject.join(null, 0);
        delegationBuilder.registerDelegations();
        EasyMock.verify(delegationRegistry);
    }

    /**
     * Without a prior delegateFrom(...), registerDelegations() is a no-op:
     * the mock registry expects (and verifies) zero interactions.
     * NOTE: "Withoud" is a typo preserved from the original method name.
     */
    @Test
    public void registerDelegationsWithoudDelegatorType() {
        EasyMock.replay(delegationRegistry);
        delegationBuilder.registerDelegations();
        EasyMock.verify(delegationRegistry);
    }

    /** Selecting a second delegator method before completing the first is rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void invokeDelegatorMethodTwice() {
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        domainDelegator.get(null);
        domainDelegator.get(null);
    }

    /** Selecting a second delegatee method for one delegation is rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void invokeDelegateeMethodTwice() {
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        Assert.assertEquals(0, domainDelegator.get(null));
        final DomainObject domainObject = delegationBuilder.<DomainObject> delegate();
        domainObject.get();
        domainObject.get();
    }

    /** parameter(...) is only legal after delegate() has chosen the delegatee. */
    @Test(expected = IllegalArgumentException.class)
    public void setParameterBeforeDelegate() {
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        domainDelegator.get(null);
        delegationBuilder.parameter(EasyMock.createMock(ArgumentProvider.class));
    }

    /** More ArgumentProviders than delegatee parameters is rejected when the method is chosen. */
    @Test(expected = IllegalArgumentException.class)
    public void setParameterMoreThanRequired() {
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        domainDelegator.get(null);
        final DomainObject domainObject = delegationBuilder.<DomainObject> delegate();
        delegationBuilder.parameter(EasyMock.createMock(ArgumentProvider.class));
        delegationBuilder.parameter(EasyMock.createMock(ArgumentProvider.class));
        domainObject.get();
    }

    /** Registration fails when no delegatee method of a matching name can be resolved. */
    @Test(expected = IllegalArgumentException.class)
    public void cannotFindMethodInDelegateeType() {
        final DomainDelegator domainDelegator = delegationBuilder.delegateFrom(DomainDelegator.class);
        domainDelegator.cannotFindMethodInDelegateeType(null);
        Assert.assertNotNull(delegationBuilder.delegate());
        delegationBuilder.registerDelegations();
    }

    /** map(...) without a delegator type is rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void mapWithoudDelegatorType() {
        delegationBuilder.map(Exception.class, RuntimeException.class);
    }

    /** delegate() without a delegator type is rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void delegateWithoudDelegatorType() {
        delegationBuilder.delegate();
    }

    /**
     * Fixture delegator: a mix of concrete and abstract methods used to probe
     * method selection and parameter matching.  ("concret" typo preserved.)
     */
    public static abstract class DomainDelegator {
        public void concret() {
        }

        public abstract void noParameter();

        public abstract void cannotFindMethodInDelegateeType(DomainObject domainObject);

        public abstract int get(DomainObject domainObject);

        public abstract void set(DomainObject domainObject, String string);

        public abstract void parameterMatch(DomainObject domainObject, int order, String name);

        public abstract void parameterNotMatch(DomainObject domainObject, String order, String name);

        public abstract void parameterNotMatch(DomainObject domainObject, long order, String name);
    }

    /** Fixture delegatee interface the delegations resolve against. */
    public interface DomainObject {
        int get();

        void set(String string);

        void join(String name, int number);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.standard.merge;

import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessSessionFactory;
import org.apache.nifi.processors.standard.MergeContent;
import org.apache.nifi.processors.standard.MergeRecord;
import org.apache.nifi.serialization.RecordReader;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Predicate;

/**
 * Manages the set of {@code RecordBin}s used by the MergeRecord processor.
 *
 * Bins are grouped by a correlation key ({@code groupIdentifier}); the map of
 * groups is guarded by {@code lock}, while the per-group lists are
 * CopyOnWriteArrayLists so they can be iterated (and bins offered to) outside
 * the lock.  {@code binCount} tracks the total number of live bins across all
 * groups and is maintained in {@link #add} / {@link #removeBins}.
 */
public class RecordBinManager {

    private final ProcessContext context;
    private final ProcessSessionFactory sessionFactory;
    private final ComponentLog logger;
    // Upper bound on simultaneously open bins; Integer.MAX_VALUE when unset.
    private final int maxBinCount;

    // Long.MAX_VALUE means "no max age configured"; see setMaxBinAge().
    private final AtomicLong maxBinAgeNanos = new AtomicLong(Long.MAX_VALUE);
    private final Map<String, List<RecordBin>> groupBinMap = new HashMap<>(); // guarded by lock
    private final Lock lock = new ReentrantLock();
    private final AtomicInteger binCount = new AtomicInteger(0);

    public RecordBinManager(final ProcessContext context, final ProcessSessionFactory sessionFactory, final ComponentLog logger) {
        this.context = context;
        this.sessionFactory = sessionFactory;
        this.logger = logger;

        final Integer maxBins = context.getProperty(MergeRecord.MAX_BIN_COUNT).asInteger();
        this.maxBinCount = maxBins == null ? Integer.MAX_VALUE : maxBins.intValue();
    }

    /**
     * Must be called only when there are no active threads modifying the bins.
     * Rolls back every bin's session and resets all tracking state.
     */
    public void purge() {
        lock.lock();
        try {
            for (final List<RecordBin> binList : groupBinMap.values()) {
                for (final RecordBin bin : binList) {
                    bin.rollback();
                }
            }
            groupBinMap.clear();
            binCount.set(0);
        } finally {
            lock.unlock();
        }
    }

    // A null timePeriod disables age-based completion (sentinel Long.MAX_VALUE).
    public void setMaxBinAge(final Long timePeriod, final TimeUnit timeUnit) {
        if (timePeriod == null) {
            maxBinAgeNanos.set(Long.MAX_VALUE);
        } else {
            maxBinAgeNanos.set(timeUnit.toNanos(timePeriod));
        }
    }

    /** @return the current number of live bins across all groups */
    public int getBinCount() {
        return binCount.get();
    }

    /**
     * Adds the given flowFiles to the first available bin in which it fits for the given group or creates a new bin in the specified group if necessary.
     * <p/>
     *
     * @param groupIdentifier the group to which the flow file belongs; can be null
     * @param flowFile flowFile to bin
     * @param reader RecordReader to use for reading FlowFile
     * @param session the ProcessSession to which the FlowFiles belong
     * @param block if another thread is already writing to the desired bin, passing <code>true</code> for this parameter will block until the other thread(s) have finished so
     *            that the records can still be added to the desired bin. Passing <code>false</code> will result in moving on to another bin.
     *
     * @throws IOException if there is an IO problem reading from the stream or writing to the stream
     */
    public void add(final String groupIdentifier, final FlowFile flowFile, final RecordReader reader, final ProcessSession session, final boolean block) throws IOException {

        final List<RecordBin> currentBins;
        lock.lock();
        try {
            // Create a new List<RecordBin> if none exists for this Group ID. We use a CopyOnWriteArrayList here because
            // we need to traverse the list in a couple of places and just below here, we call bin.offer() (which is very expensive)
            // while traversing the List, so we don't want to do this within a synchronized block. If we end up seeing poor performance
            // from this, we could look at instead using a Synchronized List and instead of calling bin.offer() while iterating allow for some
            // sort of bin.tryLock() and have that lock only if the flowfile should be added. Then if it returns true, we can stop iterating
            // and perform the expensive part and then ensure that we always unlock
            currentBins = groupBinMap.computeIfAbsent(groupIdentifier, grpId -> new CopyOnWriteArrayList<>());
        } finally {
            lock.unlock();
        }

        RecordBin acceptedBin = null;
        for (final RecordBin bin : currentBins) {
            final boolean accepted = bin.offer(flowFile, reader, session, block);

            if (accepted) {
                acceptedBin = bin;
                logger.debug("Transferred id={} to {}", new Object[] {flowFile.getId(), bin});
                break;
            }
        }

        // We have to do this outside of our for-loop above in order to avoid a concurrent modification Exception.
        if (acceptedBin != null) {
            if (acceptedBin.isComplete()) {
                removeBins(groupIdentifier, Collections.singletonList(acceptedBin));
            }

            return;
        }

        // if we've reached this point then we couldn't fit it into any existing bins - gotta make a new one
        final RecordBin bin = new RecordBin(context, sessionFactory.createSession(), logger, createThresholds(flowFile));
        final boolean binAccepted = bin.offer(flowFile, reader, session, true);
        if (!binAccepted) {
            session.rollback();
            throw new RuntimeException("Attempted to add " + flowFile + " to a new bin but failed. This is unexpected. Will roll back session and try again.");
        }

        logger.debug("Transferred id={} to {}", new Object[] {flowFile.getId(), bin});

        // A bin that completed immediately is never tracked, so it is neither
        // counted here nor decremented later in removeBins().
        if (!bin.isComplete()) {
            final int updatedBinCount = binCount.incrementAndGet();

            lock.lock();
            try {
                // We have already obtained the list of RecordBins from this Map above. However, we released
                // the lock in order to avoid blocking while writing to a Bin. Because of this, it is possible
                // that another thread may have already come in and removed this List from the Map, if all
                // Bins in the List have been completed. As a result, we must now obtain the write lock again
                // and obtain the List (or a new one), and then update that. This ensures that we never lose
                // track of a Bin. If we don't lose this, we could completely lose a Bin.
                final List<RecordBin> bins = groupBinMap.computeIfAbsent(groupIdentifier, grpId -> new CopyOnWriteArrayList<>());
                bins.add(bin);
            } finally {
                lock.unlock();
            }

            if (updatedBinCount > maxBinCount) {
                completeOldestBin();
            }
        }
    }

    // Snapshots the processor's configured thresholds for a new bin.
    // NOTE(review): the flowfile parameter is unused here — the expression
    // evaluation does not reference it; confirm whether per-FlowFile
    // attribute evaluation was intended.
    private RecordBinThresholds createThresholds(FlowFile flowfile) {
        int minRecords = context.getProperty(MergeRecord.MIN_RECORDS).evaluateAttributeExpressions().asInteger();
        final int maxRecords = context.getProperty(MergeRecord.MAX_RECORDS).evaluateAttributeExpressions().asInteger();
        final long minBytes = context.getProperty(MergeRecord.MIN_SIZE).asDataSize(DataUnit.B).longValue();

        final PropertyValue maxSizeValue = context.getProperty(MergeRecord.MAX_SIZE);
        final long maxBytes = maxSizeValue.isSet() ? maxSizeValue.asDataSize(DataUnit.B).longValue() : Long.MAX_VALUE;

        final PropertyValue maxMillisValue = context.getProperty(MergeRecord.MAX_BIN_AGE);
        final String maxBinAge = maxMillisValue.getValue();
        final long maxBinMillis = maxMillisValue.isSet() ? maxMillisValue.asTimePeriod(TimeUnit.MILLISECONDS) : Long.MAX_VALUE;

        final String fragmentCountAttribute;
        final String mergeStrategy = context.getProperty(MergeRecord.MERGE_STRATEGY).getValue();
        if (MergeRecord.MERGE_STRATEGY_DEFRAGMENT.getValue().equals(mergeStrategy)) {
            fragmentCountAttribute = MergeContent.FRAGMENT_COUNT_ATTRIBUTE;
            // We don't know minRecords in defragment mode.
            minRecords = Integer.MAX_VALUE;
        } else {
            fragmentCountAttribute = null;
        }

        return new RecordBinThresholds(minRecords, maxRecords, minBytes, maxBytes, maxBinMillis, maxBinAge, fragmentCountAttribute);
    }

    /**
     * Finds the single oldest bin across all groups, removes it from tracking,
     * and completes it.  The completion itself happens after the lock is
     * released (removeBins re-enters the ReentrantLock safely while held).
     */
    public void completeOldestBin() throws IOException {
        RecordBin oldestBin = null;

        lock.lock();
        try {
            String oldestBinGroup = null;

            for (final Map.Entry<String, List<RecordBin>> group : groupBinMap.entrySet()) {
                for (final RecordBin bin : group.getValue()) {
                    if (oldestBin == null || bin.isOlderThan(oldestBin)) {
                        oldestBin = bin;
                        oldestBinGroup = group.getKey();
                    }
                }
            }

            if (oldestBin == null) {
                return;
            }

            removeBins(oldestBinGroup, Collections.singletonList(oldestBin));
        } finally {
            lock.unlock();
        }

        logger.debug("Completing Bin " + oldestBin + " because the maximum number of bins has been exceeded");
        oldestBin.complete("Maximum number of bins has been exceeded");
    }

    /** Completes every bin older than the configured max bin age. @return number completed */
    public int completeExpiredBins() throws IOException {
        final long maxNanos = maxBinAgeNanos.get();
        return handleCompletedBins(bin -> bin.isOlderThan(maxNanos, TimeUnit.NANOSECONDS), "Bin has reached Max Bin Age");
    }

    /** Completes every bin that has reached its minimum thresholds. @return number completed */
    public int completeFullEnoughBins() throws IOException {
        return handleCompletedBins(RecordBin::isFullEnough, "Bin is full enough");
    }

    // Two-phase completion: collect matching bins under the lock, then
    // complete them (expensive, may do I/O) after releasing it.
    private int handleCompletedBins(final Predicate<RecordBin> completionTest, final String completionReason) throws IOException {
        final Map<String, List<RecordBin>> completedBinMap = new HashMap<>();

        lock.lock();
        try {
            for (final Map.Entry<String, List<RecordBin>> entry : groupBinMap.entrySet()) {
                final String key = entry.getKey();
                final List<RecordBin> bins = entry.getValue();

                for (final RecordBin bin : bins) {
                    if (completionTest.test(bin)) {
                        final List<RecordBin> expiredBinsForKey = completedBinMap.computeIfAbsent(key, ignore -> new ArrayList<>());
                        expiredBinsForKey.add(bin);
                    }
                }
            }
        } finally {
            lock.unlock();
        }

        int completed = 0;
        for (final Map.Entry<String, List<RecordBin>> entry : completedBinMap.entrySet()) {
            final String key = entry.getKey();
            final List<RecordBin> completeBins = entry.getValue();

            for (final RecordBin bin : completeBins) {
                logger.debug("Completing Bin {} because {}", new Object[]{bin, completionReason});
                bin.complete(completionReason);
                completed++;
            }

            removeBins(key, completeBins);
        }

        return completed;
    }

    // Removes the given bins from a group's list, decrements binCount by the
    // number actually removed, and drops the group entry once empty.
    private void removeBins(final String key, final List<RecordBin> bins) {
        lock.lock();
        try {
            final List<RecordBin> list = groupBinMap.get(key);
            if (list != null) {
                final int initialSize = list.size();
                list.removeAll(bins);

                // Determine how many items were removed from the list and
                // update our binCount to keep track of this.
                final int removedCount = initialSize - list.size();
                binCount.addAndGet(-removedCount);

                if (list.isEmpty()) {
                    groupBinMap.remove(key);
                }
            }
        } finally {
            lock.unlock();
        }
    }
}
/**
 * Copyright (c) 2013, impossibl.com
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *  * Neither the name of impossibl.com nor the names of its contributors may
 *    be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package com.impossibl.postgres.system.procs;

import com.impossibl.postgres.protocol.ResultField.Format;
import com.impossibl.postgres.system.Context;
import com.impossibl.postgres.types.ArrayType;
import com.impossibl.postgres.types.PrimitiveType;
import com.impossibl.postgres.types.Type;
import com.impossibl.postgres.types.Type.Codec;

import java.io.IOException;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static java.lang.reflect.Array.newInstance;

import io.netty.buffer.ByteBuf;

/*
 * Array codec
 *
 * Encodes/decodes PostgreSQL arrays in both the text format ("{a,b,c}" with
 * quoting/escaping) and the binary format (int32 length, then a header of
 * dimension count / has-nulls flag / element OID, per-dimension size and
 * lower bound, then the elements).
 */
public class Arrays extends SimpleProcProvider {

  public Arrays() {
    super(new TxtEncoder(), new TxtDecoder(), new BinEncoder(), new BinDecoder(), "array_", "anyarray_", "oidvector", "intvector");
  }

  /** Decodes the binary array wire format into (possibly nested) Java arrays. */
  static class BinDecoder extends BinaryDecoder {

    @Override
    public PrimitiveType getInputPrimitiveType() {
      return PrimitiveType.Array;
    }

    @Override
    public Class<?> getOutputType() {
      return Object[].class;
    }

    @Override
    public Object decode(Type type, Short typeLength, Integer typeModifier, ByteBuf buffer, Context context) throws IOException {

      // -1 length marks SQL NULL; otherwise `length` bytes of payload follow.
      int length = buffer.readInt();
      int readStart = buffer.readerIndex();

      Object instance = null;

      if (length != -1) {

        ArrayType atype = ((ArrayType)type);

        //
        //Header
        //

        int dimensionCount = buffer.readInt();
        // The has-nulls flag is read to advance the buffer but intentionally ignored.
        /* boolean hasNulls = */ buffer.readInt() /* == 1 ? true : false */;
        Type elementType = context.getRegistry().loadType(buffer.readInt());

        //Each Dimension
        int[] dimensions = new int[dimensionCount];
        int[] lowerBounds = new int[dimensionCount];
        for (int d = 0; d < dimensionCount; ++d) {

          //Dimension
          dimensions[d] = buffer.readInt();

          //Lower bounds (read but not used; Java arrays are always 0-based)
          lowerBounds[d] = buffer.readInt();
        }

        // Server reported a different element type than we have cached:
        // refresh our registry's view of this array type.
        if (atype.getElementType().getId() != elementType.getId()) {
          context.refreshType(atype.getId());
        }

        //
        //Array & Elements
        //

        instance = readArray(buffer, elementType, dimensions, context);

        // Sanity check: we must have consumed exactly `length` bytes.
        if (length != buffer.readerIndex() - readStart) {
          throw new IOException("invalid length");
        }

      }

      return instance;
    }

    // Dispatches on dimensionality: 0-d and 1-d are flat element reads,
    // higher dimensions recurse per sub-array.
    Object readArray(ByteBuf buffer, Type type, int[] dims, Context context) throws IOException {

      if (dims.length == 0) {
        return readElements(buffer, type, 0, context);
      }
      else if (dims.length == 1) {
        return readElements(buffer, type, dims[0], context);
      }
      else {
        return readSubArray(buffer, type, dims, context);
      }

    }

    // Builds a multi-dimensional Java array and fills each slice recursively.
    Object readSubArray(ByteBuf buffer, Type type, int[] dims, Context context) throws IOException {

      Class<?> elementClass = type.unwrap().getJavaType(Format.Binary, Collections.<String, Class<?>> emptyMap());
      Object inst = newInstance(elementClass, dims);

      int[] subDims = java.util.Arrays.copyOfRange(dims, 1, dims.length);

      for (int c = 0; c < dims[0]; ++c) {

        Array.set(inst, c, readArray(buffer, type, subDims, context));

      }

      return inst;
    }

    // Reads `len` elements using the element type's own binary decoder.
    Object readElements(ByteBuf buffer, Type type, int len, Context context) throws IOException {

      Class<?> elementClass = type.unwrap().getJavaType(Format.Binary, Collections.<String, Class<?>> emptyMap());
      Object inst = newInstance(elementClass, len);

      for (int c = 0; c < len; ++c) {

        Array.set(inst, c, type.getBinaryCodec().decoder.decode(type, null, null, buffer, context));

      }

      return inst;
    }

  }

  /** Encodes Java arrays into the binary array wire format. */
  static class BinEncoder extends BinaryEncoder {

    @Override
    public Class<?> getInputType() {
      return Object[].class;
    }

    @Override
    public PrimitiveType getOutputPrimitiveType() {
      return PrimitiveType.Array;
    }

    @Override
    public void encode(Type type, ByteBuf buffer, Object val, Context context) throws IOException {

      // Write a -1 placeholder for the length; it is patched below via
      // setInt(writeStart - 4, ...) once the payload size is known.
      buffer.writeInt(-1);

      if (val != null) {

        int writeStart = buffer.writerIndex();

        ArrayType atype = ((ArrayType)type);
        Type elementType = atype.getElementType();

        //
        //Header
        //

        int dimensionCount = getDimensions(val.getClass(), atype.unwrapAll());
        //Dimension count
        buffer.writeInt(dimensionCount);
        //Has nulls
        buffer.writeInt(hasNulls(val) ? 1 : 0);
        //Element type
        buffer.writeInt(elementType.getId());

        //each dimension; sizes are taken from the first slice of each level
        Object dim = val;
        for (int d = 0; d < dimensionCount; ++d) {

          int dimension = 0;
          if (dim != null)
            dimension = Array.getLength(dim);

          //Dimension
          buffer.writeInt(dimension);

          //Lower bounds (PostgreSQL arrays are 1-based by default)
          buffer.writeInt(1);

          if (dimension == 0)
            dim = null;
          else if (dim != null)
            dim = Array.get(dim, 0);
        }

        //
        //Array & Elements
        writeArray(buffer, elementType, val, context);

        //Set length
        buffer.setInt(writeStart - 4, buffer.writerIndex() - writeStart);
      }

    }

    // Recurses while the Java value is an array of arrays that the element
    // encoder does not itself accept as input.
    void writeArray(ByteBuf buffer, Type type, Object val, Context context) throws IOException {

      if (val.getClass().getComponentType().isArray() && !type.getBinaryCodec().encoder.getInputType().isArray()) {
        writeSubArray(buffer, type, val, context);
      }
      else {
        writeElements(buffer, type, val, context);
      }

    }

    void writeElements(ByteBuf buffer, Type type, Object val, Context context) throws IOException {

      int len = Array.getLength(val);

      for (int c = 0; c < len; ++c) {
        type.getBinaryCodec().encoder.encode(type, buffer, Array.get(val, c), context);
      }

    }

    void writeSubArray(ByteBuf buffer, Type type, Object val, Context context) throws IOException {

      int len = Array.getLength(val);

      for (int c = 0; c < len; ++c) {
        writeArray(buffer, type, Array.get(val, c), context);
      }

    }

    // Only inspects the top level of `value`; matches the wire flag's use above.
    boolean hasNulls(Object value) {

      for (int c = 0, sz = Array.getLength(value); c < sz; ++c) {
        if (Array.get(value, c) == null)
          return true;
      }

      return false;
    }

    // Computes the total encoded size: 4 (length) + 12 (header) +
    // 8 per dimension + the recursive element lengths.
    @Override
    public int length(Type type, Object val, Context context) throws IOException {

      int length = 4;

      if (val != null) {

        ArrayType arrayType = (ArrayType) type;
        Type elementType = arrayType.unwrapAll();

        int dimensionCount = getDimensions(val.getClass(), arrayType.unwrapAll());

        length += 12 + (dimensionCount * 8);

        length += subLength(elementType, dimensionCount, val, context);
      }

      return length;
    }

    private int subLength(Type type, int dimensionCount, Object val, Context context) throws IOException {

      int length = 0;
      if (dimensionCount > 1) {

        for (int d = 0, len = Array.getLength(val); d < len; ++d) {
          length += subLength(type, dimensionCount - 1, Array.get(val, d), context);
        }

      }
      else {

        for (int c = 0, len = Array.getLength(val); c < len; ++c) {
          length += type.getBinaryCodec().encoder.length(type, Array.get(val, c), context);
        }

      }

      return length;
    }

  }

  /** Parses the text array format ("{...}" with quoting and backslash escapes). */
  static class TxtDecoder extends TextDecoder {

    @Override
    public PrimitiveType getInputPrimitiveType() {
      return PrimitiveType.Array;
    }

    @Override
    public Class<?> getOutputType() {
      return Object[].class;
    }

    @Override
    public Object decode(Type type, Short typeLength, Integer typeModifier, CharSequence buffer, Context context) throws IOException {

      int length = buffer.length();

      Object instance = null;

      if (length != 0) {

        ArrayType atype = ((ArrayType)type);

        instance = readArray(buffer, atype.getDelimeter(), type.unwrap(), context);

      }

      return instance;
    }

    // Recursive descent over the braces: a span without surrounding braces is
    // delegated to the element type's text decoder; otherwise elements are
    // split on the delimiter at brace depth 0 outside quoted strings.
    Object readArray(CharSequence data, char delim, Type type, Context context) throws IOException {

      if (data.length() < 2 || (data.charAt(0) != '{' && data.charAt(data.length() - 1) != '}')) {
        return type.getCodec(Format.Text).decoder.decode(type, null, null, data, context);
      }

      data = data.subSequence(1, data.length() - 1);

      List<Object> elements = new ArrayList<>();
      StringBuilder elementTxt = new StringBuilder();

      boolean string = false;   // inside a double-quoted element
      int opened = 0;           // nested sub-array brace depth
      int c;
      for (c = 0; c < data.length(); ++c) {

        char ch = data.charAt(c);
        switch (ch) {
          case '{':
            if (!string)
              opened++;
            else
              elementTxt.append(ch);
            break;
          case '}':
            if (!string)
              opened--;
            else
              elementTxt.append(ch);
            break;
          case '"':
            // "" inside a quoted string is an escaped literal quote.
            if (string && c < data.length() - 1 && data.charAt(c + 1) == '"') {
              elementTxt.append('"');
              c++;
            }
            else {
              string = !string;
            }
            break;
          case '\\':
            // Backslash escapes the next character inside a quoted string.
            if (string) {
              ++c;
              if (c < data.length())
                elementTxt.append(data.charAt(c));
            }
            break;
          default:
            if (ch == delim && opened == 0 && !string) {
              Object element = readArray(elementTxt.toString(), delim, type, context);
              elements.add(element);
              elementTxt = new StringBuilder();
            }
            else {
              elementTxt.append(ch);
            }
        }

      }

      // Trailing element (no delimiter after the last entry).
      Object finalElement = readArray(elementTxt.toString(), delim, type, context);
      elements.add(finalElement);

      return elements.toArray();
    }

  }

  /** Produces the text array format, quoting elements only when required. */
  static class TxtEncoder extends TextEncoder {

    @Override
    public Class<?> getInputType() {
      return Object[].class;
    }

    @Override
    public PrimitiveType getOutputPrimitiveType() {
      return PrimitiveType.Array;
    }

    @Override
    public void encode(Type type, StringBuilder buffer, Object val, Context context) throws IOException {

      ArrayType arrayType = (ArrayType) type;
      Type elementType = arrayType.getElementType();

      writeArray(buffer, elementType.getDelimeter(), elementType, val, context);

    }

    void writeArray(StringBuilder out, char delim, Type type, Object val, Context context) throws IOException {

      Codec.Encoder encoder = type.getCodec(Format.Text).encoder;

      out.append('{');

      int len = Array.getLength(val);
      for (int c = 0; c < len; ++c) {

        Object elemVal = Array.get(val, c);

        StringBuilder elemOut = new StringBuilder();
        encoder.encode(type, elemOut, elemVal, context);

        String elemStr = elemOut.toString();

        if (needsQuotes(elemStr, delim)) {
          // Escape backslashes first, then quotes, so escapes are not doubled.
          elemStr = elemStr.replace("\\", "\\\\");
          elemStr = elemStr.replace("\"", "\\\"");
          out.append('\"').append(elemStr).append('\"');
        }
        else {
          out.append(elemStr);
        }

        if (c < len - 1)
          out.append(delim);
      }

      out.append('}');
    }

    // Quoting rules: empty strings, the literal NULL, and any element
    // containing the delimiter, quote/backslash, braces, or whitespace.
    private boolean needsQuotes(String elemStr, char delim) {

      if (elemStr.isEmpty())
        return true;

      if (elemStr.equalsIgnoreCase("NULL"))
        return true;

      for (int c = 0; c < elemStr.length(); ++c) {

        char ch = elemStr.charAt(c);

        if (ch == delim || ch == '"' || ch == '\\' || ch == '{' || ch == '}' || ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' || ch == '\f')
          return true;
      }

      return false;
    }

  }

  // Counts Java array nesting levels above what the element encoder itself
  // consumes (so byte[]-like element types are not miscounted as a dimension).
  public static int getDimensions(Class<?> type, Type elementType) {
    if (type.isArray() && type != elementType.getBinaryCodec().encoder.getInputType())
      return 1 + getDimensions(type.getComponentType(), elementType);
    return 0;
  }

}
/* This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 * <p/>
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 */
package org.rzo.yajsw.os.ms.win.w32;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections.Bag;
import org.apache.commons.collections.bag.HashBag;
import org.apache.commons.lang.StringUtils;
import org.rzo.yajsw.util.File;

import com.sun.jna.Memory;
import com.sun.jna.Native;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import com.sun.jna.Union;
import com.sun.jna.WString;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.PointerByReference;
import com.sun.jna.win32.StdCallLibrary;

/**
 * Access to Windows performance counters through the Performance Data Helper
 * (PDH) API via JNA.
 * <p>
 * Counters are addressed by path, e.g. {@code \Process(java)\ID Process}.
 * Factory methods accept either English counter names
 * ({@link #getEnglishCounter(String)}) — translated to the local language via
 * the {@code Perflib\009} registry index — or already-localized names
 * ({@link #getLocaleCounter(String)}). Two pseudo counter namespaces are
 * handled in pure Java without PDH: {@code \HddCounter(drive)\...} (disk
 * space) and {@code \ScriptCounter...}.
 * <p>
 * NOTE(review): only usable on Windows — loads {@code pdh.dll} and
 * {@code Advapi32.dll} at class-initialization of the nested interfaces.
 */
public class Pdh
{
	/** JNA binding of the functions used from {@code pdh.dll}. */
	interface Pdhdll extends StdCallLibrary
	{
		Pdhdll	INSTANCE	= (Pdhdll) Native.loadLibrary("pdh", Pdhdll.class);

		/** PDH_STATUS PdhOpenQuery(szDataSource, dwUserData, phQuery) — creates a query handle. */
		int PdhOpenQuery(Pointer szDataSource, Pointer dwUserData, PointerByReference phQuery);

		/** PDH_STATUS PdhValidatePath(szFullCounterPath) — checks that a counter path is well formed. */
		int PdhValidatePathA(String szFullCounterPath);

		/** PDH_STATUS PdhCollectQueryData(hQuery) — collects a sample for all counters of the query. */
		int PdhCollectQueryData(Pointer hQuery);

		/** PDH_STATUS PdhGetFormattedCounterValue(hCounter, dwFormat, lpdwType, pValue) — formats the last sample. */
		int PdhGetFormattedCounterValue(Pointer hCounter, int dwFormat, IntByReference lpdwType, Pointer pValue);

		/** PDH_STATUS PdhAddCounter(hQuery, szFullCounterPath, dwUserData, phCounter) — localized counter path. */
		int PdhAddCounterA(Pointer hQuery, String szFullCounterPath, int dwUserData, PointerByReference phCounter);

		/** PDH_STATUS PdhAddEnglishCounter(...) — English (009) counter path, independent of the OS language. */
		int PdhAdd009CounterA(Pointer hQuery, String szFullCounterPath, int dwUserData, PointerByReference phCounter);

		/** PDH_STATUS PdhRemoveCounter(hCounter) — releases a counter handle. */
		int PdhRemoveCounter(Pointer hCounter);

		/** PDH_STATUS PdhCloseQuery(hQuery) — releases a query handle. */
		int PdhCloseQuery(Pointer hQuery);

		/**
		 * PDH_STATUS PdhEnumObjectItems(...) — enumerates counters and instances of
		 * a performance object. Called twice: first with null buffers to obtain the
		 * required sizes (returns {@link #PDH_MORE_DATA}), then with buffers.
		 */
		int PdhEnumObjectItemsA(String szDataSource, String szMachineName, String szObjectName, Memory mszCounterList,
				IntByReference pcchCounterListLength, Memory mszInstanceList, IntByReference pcchInstanceListLength,
				int dwDetailLevel, int dwFlags);

		/** PDH_STATUS PdhLookupPerfNameByIndex(szMachineName, dwNameIndex, szNameBuffer, pcchNameBufferSize). */
		int PdhLookupPerfNameByIndexA(String szMachineName, int dwNameIndex, Memory szNameBuffer, IntByReference pcchNameBufferSize);

		/** PDH_STATUS PdhParseCounterPath(szFullPathBuffer, pCounterPathElements, pdwBufferSize, dwFlags). */
		int PdhParseCounterPathA(String szFullPathBuffer, Pointer pCounterPathElements, IntByReference pdwBufferSize, int dwFlags);

		/** Success status of the PDH/registry calls. */
		int	ERROR_SUCCESS		= 0;
		/** Format flag: return the counter value as a double. */
		int	PDH_FMT_DOUBLE		= 0x00000200;
		/** Format flag: return the counter value as a long (32 bit). */
		int	PDH_FMT_LONG		= 0x00000100;
		/** Detail level used for enumeration (include wizard-level counters). */
		int	PERF_DETAIL_WIZARD	= 400;
		/** Status returned when the supplied buffer is too small. */
		int	PDH_MORE_DATA		= 0x800007D2;
	} // Pdhdll

	/** JNA binding of the registry functions used from {@code Advapi32.dll}. */
	interface Advapi32 extends StdCallLibrary
	{
		Advapi32 INSTANCE = (Advapi32) Native.loadLibrary("Advapi32", Advapi32.class);

		/** LONG RegOpenKeyEx(hKey, lpSubKey, ulOptions, samDesired, phkResult). */
		int RegOpenKeyExA(int hKey, String lpSubKey, int ulOptions, int samDesired, IntByReference phkResult);

		/** LONG RegQueryValueEx(hKey, lpValueName, lpReserved, lpType, lpData, lpcbData). */
		int RegQueryValueExA(int hKey, String lpValueName, Pointer lpReserved, IntByReference lpType, Pointer lpData,
				IntByReference lpcbData);

		/** LONG RegCloseKey(hKey). */
		public int RegCloseKey(int hKey);

		/** Predefined registry root HKEY_LOCAL_MACHINE. */
		int	HKEY_LOCAL_MACHINE		= 0x80000002;
		/** Predefined registry root HKEY_PERFORMANCE_DATA. */
		int	HKEY_PERFORMANCE_DATA	= 0x80000004;
		/** Registry access mask for read access. */
		int	KEY_READ				= 0x20019;
		/** Returned by RegQueryValueEx when the buffer is too small. */
		int	ERROR_MORE_DATA			= 234;
	}

	/** Native mapping of PDH_FMT_COUNTERVALUE: status + value union. */
	public static class PDH_FMT_COUNTERVALUE extends Structure
	{
		/** Counter status (CStatus) reported by PDH. */
		public int			CStatus;
		/** The formatted value; which union member is valid depends on the requested format. */
		public ValueUnion	Value;

		@Override
		protected List getFieldOrder()
		{
			return Arrays.asList(new String[] { "CStatus", "Value" });
		}
	}

	/** Native mapping of the value union inside PDH_FMT_COUNTERVALUE. */
	public static class ValueUnion extends Union
	{
		public int		longValue;
		public double	doubleValue;
		public long		largeValue;
		public String	AnsiStringValue;
		public WString	WideStringValue;
	}

	/** Java-side mapping of PDH_COUNTER_PATH_ELEMENTS (used with PdhParseCounterPathA). */
	public static class COUNTER_PATH_ELEMENTS
	{
		public String	szMachineName;
		public String	szObjectName;
		public String	szInstanceName;
		public int		dwInstanceIndex;
		public String	szCounterName;
	}

	/** Maximum counter path length. */
	static int	MAX_COUNTER_PATH		= 256;
	/** Maximum counter name length. */
	static int	PDH_MAX_COUNTER_NAME	= 1024;
	/** Maximum counter instance name length. */
	static int	PDH_MAX_INSTANCE_NAME	= 1024;
	/** Maximum full counter path length. */
	static int	PDH_MAX_COUNTER_PATH	= 2048;

	/**
	 * Pseudo counter for hard-disk space, computed with {@link File} instead of
	 * PDH. Path format: {@code \HddCounter(<drive>)\<info>} where info is one of
	 * {@code % free space}, {@code free space}, {@code used space},
	 * {@code total space}.
	 */
	static public class HddCounter implements PdhCounter
	{
		/** Drive extracted from the counter path, e.g. "C:". */
		private String _drive;

		/** Which disk-space figure this counter reports. */
		private enum HDDInfoType
		{
			FreeSpaceInPercent, FreeSpaceInBytes, UsedSpaceInBytes, TotalSpaceinBytes, UnknownInfoType
		}

		private HDDInfoType _infoType = HDDInfoType.UnknownInfoType;

		/**
		 * Parses the counter path; an unrecognized suffix leaves the type
		 * {@code UnknownInfoType}, which makes {@link #isValid()} return false.
		 *
		 * @param counter counter path of the form {@code \HddCounter(<drive>)\<info>}
		 */
		public HddCounter(String counter)
		{
			_drive = counter.substring(counter.indexOf('(') + 1, counter.indexOf(')'));
			if (counter.endsWith("\\% free space"))
				_infoType = HDDInfoType.FreeSpaceInPercent;
			else if (counter.endsWith("\\free space"))
				_infoType = HDDInfoType.FreeSpaceInBytes;
			else if (counter.endsWith("\\used space"))
				_infoType = HDDInfoType.UsedSpaceInBytes;
			else if (counter.endsWith("\\total space"))
				_infoType = HDDInfoType.TotalSpaceinBytes;
		}

		/** Nothing to release: no native handles are held. */
		public void close()
		{
		}

		/**
		 * @return the requested figure (bytes, or percent for
		 *         {@code % free space}); -1 for an unknown info type
		 * @throws RuntimeException if the drive does not exist or the path was unparsable
		 */
		public double getDoubleValue()
		{
			if (!isValid())
				throw new RuntimeException("Cannot find Harddisk drive " + _drive);
			File file = new File(_drive);
			switch (_infoType)
			{
			case FreeSpaceInPercent:
				return ((double) file.getFreeSpace() / (double) file.getTotalSpace()) * 100;
			case FreeSpaceInBytes:
				return file.getFreeSpace();
			case TotalSpaceinBytes:
				return file.getTotalSpace();
			case UsedSpaceInBytes:
				return file.getTotalSpace() - file.getFreeSpace();
			default:
				return -1;
			}
		}

		/** @return {@link #getDoubleValue()} truncated to int */
		public int getIntValue()
		{
			return (int) getDoubleValue();
		}

		/** @return true if the drive exists and the counter suffix was recognized */
		public boolean isValid()
		{
			return new File(_drive).exists() && !_infoType.equals(HDDInfoType.UnknownInfoType);
		}

		/** Manual smoke test: dumps process counters and disk-space figures for all drives. */
		public static void main(String[] args)
		{
			PdhCounter cc = getEnglishCounter("\\Process(MY_JAVA_VM_PROCESS)\\Handle Count");
			System.out.println("Handle Count: " + cc.getDoubleValue());
			cc = getEnglishCounter("\\Process(MY_JAVA_VM_PROCESS)\\Thread Count");
			System.out.println("Thread Count: " + cc.getDoubleValue());
			cc = getEnglishCounter("\\Process(MY_JAVA_VM_PROCESS)\\Private Bytes");
			System.out.println("Private Bytes: " + cc.getDoubleValue());

			java.io.File[] drives = File.listRoots();
			PdhCounter c;
			for (java.io.File file : drives)
			{
				String drive = file.getPath();
				drive = StringUtils.remove(drive, '\\');
				System.out.println("Drive " + drive);
				// best effort: a drive may be unreadable (e.g. empty CD tray) -> skip it
				try
				{
					c = getEnglishCounter("\\HddCounter(" + drive + ")\\used space");
					System.out.println("\t UsedSpace " + ((long) c.getDoubleValue()) / (1024 * 1024) + " MB");
				}
				catch (Exception ignored)
				{
				}
				try
				{
					c = getEnglishCounter("\\HddCounter(" + drive + ")\\free space");
					System.out.println("\t FreeSpace " + ((long) c.getDoubleValue()) / (1024 * 1024) + " MB");
				}
				catch (Exception ignored)
				{
				}
				try
				{
					c = getEnglishCounter("\\HddCounter(" + drive + ")\\total space");
					System.out.println("\t TotalSpace " + ((long) c.getDoubleValue()) / (1024 * 1024) + " MB");
				}
				catch (Exception ignored)
				{
				}
				try
				{
					c = getEnglishCounter("\\HddCounter(" + drive + ")\\% free space");
					System.out.println("\t FreeSpace " + ((long) c.getDoubleValue()) + " %");
				}
				catch (Exception ignored)
				{
				}
			}
		}
	}

	/**
	 * A real PDH counter: owns one query handle and one counter handle.
	 * Call {@link #close()} to release the native handles; this is deliberately
	 * not done in {@code finalize()} because the pointers may already have been
	 * finalized by then.
	 */
	static public class Counter implements PdhCounter
	{
		/** Handle of the PDH query (null after close). */
		PointerByReference	_hQuery		= new PointerByReference();
		/** Handle of the counter within the query (null after close). */
		PointerByReference	_hCounter	= new PointerByReference();
		/** Counter path, kept for error messages. */
		String				_counter;

		/**
		 * Opens a query and adds the counter to it.
		 *
		 * @param counter the full counter path
		 * @param english true to interpret the path as English (009) names
		 * @throws IllegalArgumentException if the counter cannot be added
		 */
		Counter(String counter, boolean english)
		{
			int ret = Pdhdll.INSTANCE.PdhOpenQuery(null, null, _hQuery);
			_counter = counter;
			if (ret != Pdhdll.ERROR_SUCCESS)
				System.out.println("Error in PdhOpenQuery " + counter + ": " + Integer.toHexString(ret));
			if (english)
				ret = Pdhdll.INSTANCE.PdhAdd009CounterA(_hQuery.getValue(), counter, 0, _hCounter);
			else
				ret = Pdhdll.INSTANCE.PdhAddCounterA(_hQuery.getValue(), counter, 0, _hCounter);
			if (ret != Pdhdll.ERROR_SUCCESS)
			{
				// release the already-opened query handle, otherwise it would leak
				Pdhdll.INSTANCE.PdhCloseQuery(_hQuery.getValue());
				throw new IllegalArgumentException("PdhCounter: " + counter + " " + ret);
			}
			// prime the counter: many PDH counters are rates and need a first
			// collection before they deliver a meaningful value
			getIntValue();
		}

		/**
		 * @return true while both native handles are present (i.e. not yet closed
		 *         and successfully created)
		 */
		public boolean isValid()
		{
			// original code used getValue().equals(null), which is always false
			// per the equals() contract and NPEs on a null handle -> use == null
			return _hQuery != null && _hCounter != null && _hQuery.getValue() != null && _hCounter.getValue() != null;
		}

		/**
		 * Collects a sample and returns it formatted as double.
		 *
		 * @return the counter value, or -1 on any error
		 */
		public double getDoubleValue()
		{
			if (!isValid())
				return -1;
			PDH_FMT_COUNTERVALUE pdhCounterValue = new PDH_FMT_COUNTERVALUE();
			pdhCounterValue.size();
			int ret = Pdhdll.INSTANCE.PdhCollectQueryData(_hQuery.getValue());
			if (ret != Pdhdll.ERROR_SUCCESS)
				System.out.println("Error in PdhCollectQueryData " + _counter + ": " + Integer.toHexString(ret));
			ret = Pdhdll.INSTANCE.PdhGetFormattedCounterValue(_hCounter.getValue(), Pdhdll.PDH_FMT_DOUBLE, null, pdhCounterValue.getPointer());
			if (ret != Pdhdll.ERROR_SUCCESS)
				System.out.println("Error in PdhGetFormattedCounterValue " + _counter + ": " + Integer.toHexString(ret));
			else
			{
				pdhCounterValue.read();
				return pdhCounterValue.Value.doubleValue;
			}
			return -1;
		}

		/**
		 * Collects a sample and returns it formatted as long (truncated to int).
		 *
		 * @return the counter value, or -1 on any error
		 */
		public int getIntValue()
		{
			if (!isValid())
				return -1;
			PDH_FMT_COUNTERVALUE pdhCounterValue = new PDH_FMT_COUNTERVALUE();
			pdhCounterValue.size();
			int ret = Pdhdll.INSTANCE.PdhCollectQueryData(_hQuery.getValue());
			if (ret != Pdhdll.ERROR_SUCCESS)
				System.out.println("Error in PdhCollectQueryData " + _counter + ": " + Integer.toHexString(ret));
			ret = Pdhdll.INSTANCE.PdhGetFormattedCounterValue(_hCounter.getValue(), Pdhdll.PDH_FMT_LONG, null, pdhCounterValue.getPointer());
			if (ret != Pdhdll.ERROR_SUCCESS)
				System.out.println("Error in PdhGetFormattedCounterValue " + _counter + ": " + Integer.toHexString(ret));
			else
			{
				pdhCounterValue.read();
				return pdhCounterValue.Value.longValue;
			}
			return -1;
		}

		/**
		 * Releases counter and query handles. Safe to call more than once; after
		 * the first call {@link #isValid()} returns false.
		 */
		public void close()
		{
			if (!isValid())
				return;
			if (_hCounter != null)
				Pdhdll.INSTANCE.PdhRemoveCounter(_hCounter.getValue());
			if (_hQuery != null)
				Pdhdll.INSTANCE.PdhCloseQuery(_hQuery.getValue());
			_hQuery = null;
			_hCounter = null;
		}
	} // Counter

	/**
	 * Enumerates the instances of a performance object (e.g. all instances of
	 * "Process"). Duplicate instance names are disambiguated PDH-style with a
	 * {@code #n} suffix: x, x#1, x#2, ...
	 *
	 * @param objectName the performance object name
	 * @param english true if {@code objectName} is the English name and must be
	 *            translated to the local language first
	 * @return the list of instance names (empty on error)
	 */
	public static List enumObjectItems(String objectName, boolean english)
	{
		if (english)
			objectName = translate(objectName);
		Bag bag = new HashBag();
		int pdhStatus = Pdhdll.ERROR_SUCCESS;
		Memory szCounterListBuffer = null;
		IntByReference dwCounterListSize = new IntByReference();
		Memory szInstanceListBuffer = null;
		IntByReference dwInstanceListSize = new IntByReference();
		String szThisInstance = null;

		// First call with null buffers: PDH reports the required sizes and
		// returns PDH_MORE_DATA.
		pdhStatus = Pdhdll.INSTANCE.PdhEnumObjectItemsA(null, // real time source
				null, // local machine
				objectName, // object to enumerate
				szCounterListBuffer, // pass NULL and 0
				dwCounterListSize, // to get length required
				szInstanceListBuffer, dwInstanceListSize, Pdhdll.PERF_DETAIL_WIZARD, 0);

		if (pdhStatus == Pdhdll.PDH_MORE_DATA)
		{
			// Allocate the buffers (sized * 4 as a safety margin for wide chars)
			// and repeat the call.
			szCounterListBuffer = new Memory(dwCounterListSize.getValue() * 4);
			szInstanceListBuffer = new Memory((dwInstanceListSize.getValue() * 4));
			if ((szCounterListBuffer != null) && (szInstanceListBuffer != null))
			{
				pdhStatus = Pdhdll.INSTANCE.PdhEnumObjectItemsA(null, null, objectName, szCounterListBuffer,
						dwCounterListSize, szInstanceListBuffer, dwInstanceListSize, Pdhdll.PERF_DETAIL_WIZARD, 0);
				if (pdhStatus == Pdhdll.ERROR_SUCCESS)
				{
					// Walk the instance list: one or more null-terminated strings,
					// the last one followed by a second null terminator.
					int i = 0;
					for (szThisInstance = szInstanceListBuffer.getString(0); szThisInstance != null
							&& szThisInstance.length() > 0; i += szThisInstance.length() + 1, szThisInstance = szInstanceListBuffer
							.getString(i))
					{
						bag.add(szThisInstance);
					}
				}
				else
				{
					System.out.println("PdhEnumObjectItems failed with " + Integer.toHexString(pdhStatus));
				}
			}
			else
			{
				System.out.println("Unable to allocate buffers");
			}
		}
		else
		{
			System.out.println("PdhEnumObjectItems failed with " + Integer.toHexString(pdhStatus));
		}

		// Expand duplicate instance names with the #n suffix used by PDH paths.
		List result = new ArrayList();
		for (Iterator it = bag.uniqueSet().iterator(); it.hasNext();)
		{
			String str = (String) it.next();
			result.add(str);
			for (int i = 1; i < bag.getCount(str); i++)
				result.add(str + "#" + i);
		}
		return result;
	}

	/** Lazily built map: English counter/object name -> Perflib name index. */
	static Map _indexes = null;

	/**
	 * Populates {@link #_indexes} from the registry value
	 * {@code HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Perflib\009\Counter},
	 * which holds alternating null-terminated (index, name) string pairs.
	 * Synchronized so concurrent first calls cannot race on the lazy init.
	 */
	static synchronized void readIndexMap()
	{
		if (_indexes != null)
			return;
		_indexes = new HashMap();
		int ret = 0;
		IntByReference hkey = new IntByReference();
		ret = Advapi32.INSTANCE.RegOpenKeyExA(Advapi32.HKEY_LOCAL_MACHINE,
				"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Perflib\\009", 0, Advapi32.KEY_READ, hkey);

		// Grow the buffer until RegQueryValueEx no longer reports ERROR_MORE_DATA.
		int BufferSize = 1;
		int BYTEINCREMENT = 1024;
		Memory PerfData = new Memory(BufferSize);
		PerfData.clear();
		IntByReference PBufferSize = new IntByReference();
		PBufferSize.setValue(BufferSize);
		for (ret = Advapi32.INSTANCE.RegQueryValueExA(hkey.getValue(), "Counter", null, null, PerfData, PBufferSize); ret == Advapi32.ERROR_MORE_DATA; ret = Advapi32.INSTANCE
				.RegQueryValueExA(hkey.getValue(), "Counter", null, null, PerfData, PBufferSize))
		{
			BufferSize += BYTEINCREMENT;
			PBufferSize = new IntByReference();
			PBufferSize.setValue(BufferSize);
			PerfData = new Memory(BufferSize);
			PerfData.clear();
		}

		if (ret != Pdhdll.ERROR_SUCCESS)
			System.out.println("Error reading Registry entry SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Perflib\\009: "
					+ Integer.toHexString(ret));
		else
		{
			// The data is a multi-string: index\0name\0index\0name\0...\0\0
			String key;
			String counter;
			int i = 0;
			while (i < PBufferSize.getValue())
			{
				key = PerfData.getString(i);
				i += key.length() + 1;
				counter = PerfData.getString(i);
				i += counter.length() + 1;
				if (counter.length() > 0 && key.length() > 0)
					try
					{
						_indexes.put(counter, Integer.valueOf(Integer.parseInt(key)));
					}
					catch (Exception ex)
					{
						ex.printStackTrace();
					}
			}
		}
		Advapi32.INSTANCE.RegCloseKey(hkey.getValue());
	}

	/**
	 * Translates an English counter/object name to the OS language using the
	 * Perflib index.
	 *
	 * @param name the English name
	 * @return the localized name, or {@code name} unchanged if no index entry or
	 *         the lookup fails
	 */
	static String translate(String name)
	{
		readIndexMap();
		Integer index = (Integer) _indexes.get(name);
		if (index == null)
			return name;
		Memory buff = new Memory(256);
		IntByReference buffSize = new IntByReference();
		buffSize.setValue(256);
		if (Pdhdll.INSTANCE.PdhLookupPerfNameByIndexA(null, index.intValue(), buff, buffSize) == Pdhdll.ERROR_SUCCESS)
			return buff.getString(0);
		return name;
	}

	/**
	 * Builds a snapshot map pid -> PDH process instance name by querying the
	 * {@code ID Process} counter of every "Process" instance.
	 *
	 * @return map of Integer pid to instance name
	 */
	static Map readProcessMap()
	{
		Map result = new HashMap();
		List processes = enumObjectItems("Process", true);
		for (Iterator it = processes.iterator(); it.hasNext();)
		{
			String process = (String) it.next();
			PdhCounter c = getEnglishCounter("\\Process(" + process + ")\\ID Process");
			int pid = c.getIntValue();
			c.close();
			result.put(Integer.valueOf(pid), process);
		}
		return result;
	}

	/**
	 * Creates a counter addressed by its English path. {@code \HddCounter} and
	 * {@code \ScriptCounter} paths are served by pure-Java pseudo counters.
	 *
	 * @param counter the English counter path
	 * @return the counter
	 * @throws IllegalArgumentException if a PDH counter path is invalid
	 */
	static public PdhCounter getEnglishCounter(String counter)
	{
		if (counter.startsWith("\\HddCounter"))
			return new HddCounter(counter);
		if (counter.startsWith("\\ScriptCounter"))
			return new ScriptCounter(counter);
		return new Counter(counter, true);
	}

	/**
	 * Creates a counter addressed by its localized path.
	 *
	 * @param counter the localized counter path
	 * @return the counter
	 */
	static public PdhCounter getLocaleCounter(String counter)
	{
		return new Counter(counter, false);
	}

	/**
	 * Creates an English-path counter scoped to a process instance.
	 *
	 * @param pid the process id
	 * @param counter the counter name within the Process object
	 * @return the counter, or null if no process instance matches {@code pid}
	 */
	static public PdhCounter getProcessEnglishCounter(int pid, String counter)
	{
		Map m = readProcessMap();
		String process = (String) m.get(Integer.valueOf(pid));
		if (process == null)
			return null;
		return getEnglishCounter("\\Process(" + process + ")\\" + counter);
	}

	/**
	 * Creates a localized-path counter scoped to a process instance.
	 *
	 * @param pid the process id
	 * @param counter the localized counter name within the Process object
	 * @return the counter, or null if no process instance matches {@code pid}
	 */
	static public PdhCounter getProcessLocaleCounter(int pid, String counter)
	{
		Map m = readProcessMap();
		String processObject = translate("Process");
		String process = (String) m.get(Integer.valueOf(pid));
		if (process == null)
			return null;
		return getLocaleCounter("\\" + processObject + "(" + process + ")\\" + counter);
	}
}
package com.seleniumtests.ut.connectors.selenium.fielddetector;

import static org.mockito.Mockito.when;

import java.io.File;
import java.io.IOException;
import java.util.List;

import org.powermock.core.classloader.annotations.PrepareForTest;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import com.seleniumtests.ConnectorsTest;
import com.seleniumtests.connectors.selenium.fielddetector.FieldDetectorConnector;
import com.seleniumtests.customexception.ConfigurationException;
import com.seleniumtests.customexception.ScenarioException;

import kong.unirest.HttpRequest;
import kong.unirest.Unirest;
import kong.unirest.UnirestException;
import kong.unirest.json.JSONObject;

/**
 * Unit tests for {@code FieldDetectorConnector}, using mocked Unirest HTTP
 * exchanges (via {@code createServerMock} inherited from ConnectorsTest).
 * Covers: service liveness probing on /status, field/error detection replies
 * on /detect and /detectError, and the error paths (service error body,
 * missing reply, null or non-existing image file).
 */
@PrepareForTest({ Unirest.class })
public class TestFieldDetectorConnector extends ConnectorsTest {

	/**
	 * Resets the connector singleton before each test so that getInstance()
	 * re-checks /status with the mocks of the current test.
	 */
	@BeforeMethod(alwaysRun = true)
	public void init() {
		FieldDetectorConnector.resetFieldDetectorConnector();
	}

	/** /status returns 200 => getInstance succeeds without exception. */
	@Test(groups = { "ut" })
	public void testIsAlive() {
		createServerMock("GET", "/status", 200, "OK");
		FieldDetectorConnector.getInstance("http://localhost:4321");
	}

	/** The status call itself throws (service unreachable) => ConfigurationException. */
	@Test(groups = { "ut" }, expectedExceptions = ConfigurationException.class)
	public void testIsAbsent() {
		HttpRequest<HttpRequest> req = createServerMock("GET", "/status", 500, "KO", "body");
		when(req.asString()).thenThrow(UnirestException.class);
		FieldDetectorConnector.getInstance("http://localhost:4321");
	}

	/** /status returns 500 => ConfigurationException. */
	@Test(groups = { "ut" }, expectedExceptions = ConfigurationException.class)
	public void testIsNotAlive() {
		createServerMock("GET", "/status", 500, "Error");
		FieldDetectorConnector.getInstance("http://localhost:4321");
	}

	/**
	 * Nominal detection: /detect replies with a JSON document keyed by the image
	 * file name, containing two fields (one with label + related field, one
	 * plain) and two labels; detect() must expose the 2 fields.
	 */
	@Test(groups={"ut"})
	public void testDetectOk() throws IOException {
		File image = createImageFromResource("ti/form_picture.png");
		// reply as produced by the field detector service; %s is the image file name
		String detectionReply = String.format("{"
				+ " \"error\": null,"
				+ " \"%s\": {"
				+ " \"fields\": [{"
				+ " \"bottom\": 220,"
				+ " \"class_id\": 4,"
				+ " \"class_name\": \"field_with_label\","
				+ " \"height\": 36,"
				+ " \"left\": 491,"
				+ " \"related_field\": {"
				+ " \"bottom\": 228,"
				+ " \"class_id\": 0,"
				+ " \"class_name\": \"field\","
				+ " \"height\": 26,"
				+ " \"left\": 495,"
				+ " \"related_field\": null,"
				+ " \"right\": 642,"
				+ " \"text\": null,"
				+ " \"top\": 202,"
				+ " \"width\": 147,"
				+ " \"with_label\": false"
				+ " },"
				+ " \"right\": 705,"
				+ " \"text\": \"Date de naissance\","
				+ " \"top\": 184,"
				+ " \"width\": 214,"
				+ " \"with_label\": true"
				+ " },"
				+ " {"
				+ " \"bottom\": 204,"
				+ " \"class_id\": 0,"
				+ " \"class_name\": \"field\","
				+ " \"height\": 21,"
				+ " \"left\": 611,"
				+ " \"related_field\": null,"
				+ " \"right\": 711,"
				+ " \"text\": null,"
				+ " \"top\": 183,"
				+ " \"width\": 100,"
				+ " \"with_label\": false"
				+ " }],"
				+ " \"labels\": [{"
				+ " \"bottom\": 20,"
				+ " \"height\": 15,"
				+ " \"left\": 159,"
				+ " \"right\": 460,"
				+ " \"text\": \"Dossier Client S3]HOMOLOGATIO\","
				+ " \"top\": 5,"
				+ " \"width\": 301"
				+ " },"
				+ " {"
				+ " \"bottom\": 262,"
				+ " \"height\": 8,"
				+ " \"left\": 939,"
				+ " \"right\": 999,"
				+ " \"text\": \"Rechercher\","
				+ " \"top\": 254,"
				+ " \"width\": 60"
				+ " }]"
				+ " }"
				+ "}", image.getName());
		createServerMock("GET", "/status", 200, "OK");
		createServerMock("POST", "/detect", 200, detectionReply);
		FieldDetectorConnector fieldDetectorConnector = FieldDetectorConnector.getInstance("http://localhost:4321");
		List<JSONObject> fields = fieldDetectorConnector.detect(image).getJSONArray("fields").toList();
		Assert.assertEquals(fields.size(), 2);
	}

	/**
	 * Error-field detection: /detectError replies with one "error_field" entry;
	 * detectError() must expose that single field.
	 */
	@Test(groups={"ut"})
	public void testDetectErrorOk() throws IOException {
		File image = createImageFromResource("ti/form_picture.png");
		// reply keyed by the image file name, with a single error_field
		String detectionReply = String.format("{"
				+ " \"error\": null,"
				+ " \"%s\": {"
				+ " \"fields\": ["
				+ " {"
				+ " \"bottom\": 204,"
				+ " \"class_id\": 0,"
				+ " \"class_name\": \"error_field\","
				+ " \"height\": 21,"
				+ " \"left\": 611,"
				+ " \"related_field\": null,"
				+ " \"right\": 711,"
				+ " \"text\": null,"
				+ " \"top\": 183,"
				+ " \"width\": 100,"
				+ " \"with_label\": false"
				+ " }],"
				+ " \"labels\": [{"
				+ " \"bottom\": 20,"
				+ " \"height\": 15,"
				+ " \"left\": 159,"
				+ " \"right\": 460,"
				+ " \"text\": \"Dossier Client S3]HOMOLOGATIO\","
				+ " \"top\": 5,"
				+ " \"width\": 301"
				+ " },"
				+ " {"
				+ " \"bottom\": 262,"
				+ " \"height\": 8,"
				+ " \"left\": 939,"
				+ " \"right\": 999,"
				+ " \"text\": \"Rechercher\","
				+ " \"top\": 254,"
				+ " \"width\": 60"
				+ " }]"
				+ " }"
				+ "}", image.getName());
		createServerMock("GET", "/status", 200, "OK");
		createServerMock("POST", "/detectError", 200, detectionReply);
		FieldDetectorConnector fieldDetectorConnector = FieldDetectorConnector.getInstance("http://localhost:4321");
		List<JSONObject> fields = fieldDetectorConnector.detectError(image).getJSONArray("fields").toList();
		Assert.assertEquals(fields.size(), 1);
	}

	/** HTTP 500 with an "error" message in the body => ScenarioException carrying that message. */
	@Test(groups={"ut"}, expectedExceptions = ScenarioException.class, expectedExceptionsMessageRegExp = "Field detector returned error: some error occured")
	public void testDetectKoWithMessage() throws IOException {
		File image = createImageFromResource("ti/form_picture.png");
		String detectionReply = "{\"error\": \"some error occured\"}";
		createServerMock("GET", "/status", 200, "OK");
		createServerMock("POST", "/detect", 500, detectionReply);
		FieldDetectorConnector fieldDetectorConnector = FieldDetectorConnector.getInstance("http://localhost:4321");
		fieldDetectorConnector.detect(image);
	}

	/** HTTP 200 but the body only contains an "error" entry (no detection data) => ScenarioException. */
	@Test(groups={"ut"}, expectedExceptions = ScenarioException.class, expectedExceptionsMessageRegExp = "Field detector did not return any information: some error occured")
	public void testDetectKoWithMessage2() throws IOException {
		File image = createImageFromResource("ti/form_picture.png");
		String detectionReply = "{\"error\": \"some error occured\"}";
		createServerMock("GET", "/status", 200, "OK");
		createServerMock("POST", "/detect", 200, detectionReply);
		FieldDetectorConnector fieldDetectorConnector = FieldDetectorConnector.getInstance("http://localhost:4321");
		fieldDetectorConnector.detect(image);
	}

	/** HTTP 500 with a non-JSON body => generic "returned error" ScenarioException. */
	@Test(groups={"ut"}, expectedExceptions = ScenarioException.class, expectedExceptionsMessageRegExp = "Field detector returned error")
	public void testDetectKoWithoutMessage() throws IOException {
		File image = createImageFromResource("ti/form_picture.png");
		String detectionReply = "Internal server error";
		createServerMock("GET", "/status", 200, "OK");
		createServerMock("POST", "/detect", 500, detectionReply);
		FieldDetectorConnector fieldDetectorConnector = FieldDetectorConnector.getInstance("http://localhost:4321");
		fieldDetectorConnector.detect(image);
	}

	/** Passing a null image must be rejected before any HTTP call. */
	@Test(groups={"ut"}, expectedExceptions = ScenarioException.class, expectedExceptionsMessageRegExp = "Image file is null")
	public void testDetectKoImageNull() throws IOException {
		createServerMock("GET", "/status", 200, "OK");
		FieldDetectorConnector fieldDetectorConnector = FieldDetectorConnector.getInstance("http://localhost:4321");
		fieldDetectorConnector.detect(null);
	}

	/** Passing a non-existing image file must be rejected with a "not found" error. */
	@Test(groups={"ut"}, expectedExceptions = ScenarioException.class, expectedExceptionsMessageRegExp = ".*noPath\\.png not found")
	public void testDetectKoImageDoesNotExist() throws IOException {
		createServerMock("GET", "/status", 200, "OK");
		FieldDetectorConnector fieldDetectorConnector = FieldDetectorConnector.getInstance("http://localhost:4321");
		fieldDetectorConnector.detect(new File("noPath.png"));
	}
}
/* (C) Copyright 2013-2016 The RISCOSS Project Consortium Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /** * @author Alberto Siena **/ package eu.riscoss.server; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.QueryParam; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.google.gson.JsonPrimitive; import eu.riscoss.db.RiscossDB; import eu.riscoss.db.RiscossElements; import eu.riscoss.db.SearchParams; import eu.riscoss.fbk.language.Proposition; import eu.riscoss.fbk.language.Relation; import eu.riscoss.reasoner.Chunk; import eu.riscoss.reasoner.DataType; import eu.riscoss.reasoner.Distribution; import eu.riscoss.reasoner.Evidence; import eu.riscoss.reasoner.FBKRiskAnalysisEngine; import eu.riscoss.reasoner.Field; import eu.riscoss.reasoner.FieldType; import eu.riscoss.reasoner.ModelSlice; import eu.riscoss.reasoner.ReasoningLibrary; import eu.riscoss.reasoner.RiskAnalysisEngine; import eu.riscoss.shared.EChunkType; import eu.riscoss.shared.JChunkItem; import eu.riscoss.shared.JChunkList; import eu.riscoss.shared.JChunkValue; import eu.riscoss.shared.JLayerNode; import 
eu.riscoss.shared.JModelNode;
import eu.riscoss.shared.JProposition;
import eu.riscoss.shared.JRelation;
import eu.riscoss.shared.JRiskModel;
import eu.riscoss.shared.RiscossUtil;
import gwtupload.server.exceptions.UploadActionException;

@Path("models")
@Info("Manages the upload, download, modification and inspection of risk models")
public class ModelManager {

	Gson gson = new Gson();

	@GET
	@Path("/{domain}/list")
	@Info("Returns a list of models previously uploaded in the selected domain")
	public String getList(
			@PathParam("domain") @Info("The work domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token ) throws Exception {
		// Result: JSON array of { "name": <model name> } objects.
		JsonArray a = new JsonArray();
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			for( String model : db.getModelList() ) {
				JsonObject o = new JsonObject();
				o.addProperty( "name", model );
				a.add( o );
			}
		}
		finally {
			// Always release the DB handle, even when opening or listing fails.
			DBConnector.closeDB( db );
		}
		return a.toString();
	}

	@GET
	@Path("/{domain}/search")
	@Info("Returns a list of models that match the specified parameters")
	public String search(
			@PathParam("domain") @Info("The selected domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@QueryParam("query") String query ) throws Exception {
		// Legacy entry point: same as searchNew with no pagination.
		return searchNew( domain, token, query, "0", "0" );
	}

	@GET
	@Path("/{domain}/search-models")
	@Info("Returns a list of models that match the specified parameters")
	public String searchNew(
			@PathParam("domain") @Info("The selected domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@DefaultValue("") @QueryParam("query") @Info("The actual query (on the model name)") String query,
			@DefaultValue("0") @QueryParam("from") @Info("Index of the first model (for pagination)") String strFrom,
			@DefaultValue("0") @QueryParam("max") @Info("Amount of models to search") String strMax ) throws Exception {
		List<JModelNode> result = new ArrayList<JModelNode>();
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			SearchParams params = new SearchParams();
			params.setMax( strMax );
			params.setFrom( strFrom );
			Collection<String> list = db.findModels( query, params );
			for( String name : list ) {
				JModelNode jd = new JModelNode();
				jd.name = name;
				result.add( jd );
			}
		}
		finally {
			DBConnector.closeDB( db );
		}
		return new Gson().toJson( result );
	}

	/**
	 * @param domain the work domain
	 * @param token the authentication token
	 * @param models a list of models (JsonArray) in the body
	 * @return JSON list of input/output chunks, including chunk type
	 * @throws Exception on DB or parsing failure
	 */
	@POST
	@Path("/{domain}/chunklist")
	@Info("Returns a list of input and output chunks for the specified models; " +
			"it also includes the type of the chunk (goal, risk, indicator)")
	//returns also the type of each object (goal/risk/...) and is used in AHP
	public String getModelChunkList(
			@PathParam("domain") @Info("The work domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@Info("A JSONized array of model names") String models ) throws Exception {
		JsonArray json = (JsonArray)new JsonParser().parse( models );
		JChunkList list = new JChunkList();
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			for( int i = 0; i < json.size(); i++ ) {
				String modelName = json.get( i ).getAsString();
				String blob = db.getModelBlob( modelName );
				if( blob == null ) continue;
				// Each model is loaded into its own fresh engine instance.
				RiskAnalysisEngine rae = ReasoningLibrary.get().createRiskAnalysisEngine();
				rae.loadModel( blob );
				for( Chunk c : rae.queryModel( ModelSlice.INPUT_DATA ) ) {
					JChunkItem jchunk = new JChunkItem();
					jchunk.setId( c.getId() );
					jchunk.setLabel( getProperty( rae, c, FieldType.LABEL, c.getId() ) );
					jchunk.setDescription( getProperty( rae, c, FieldType.DESCRIPTION, "" ) );
					jchunk.setType( chunkTypeOf( rae, c ) );
					switch( rae.getField( c, FieldType.INPUT_VALUE ).getDataType() ) {
					case DISTRIBUTION: {
						JChunkValue.JDistribution jd = new JChunkValue.JDistribution();
						Distribution d = rae.getField( c, FieldType.INPUT_VALUE ).getValue();
						jd.values.addAll( d.getValues() );
						jchunk.setValue( jd );
					}
					break;
					case EVIDENCE: {
						JChunkValue.JEvidence je = new JChunkValue.JEvidence();
						Evidence e = rae.getField( c, FieldType.INPUT_VALUE ).getValue();
						je.p = e.getPositive();
						je.m = e.getNegative();
						jchunk.setValue( je );
					}
					break;
					case INTEGER: {
						JChunkValue.JInteger ji = new JChunkValue.JInteger();
						// MIN/MAX are optional model fields; leave defaults when absent (NaN).
						if( rae.getField( c, FieldType.MIN ).getDataType() != DataType.NaN )
							ji.min = rae.getField( c, FieldType.MIN ).getValue();
						if( rae.getField( c, FieldType.MAX ).getDataType() != DataType.NaN )
							ji.max = rae.getField( c, FieldType.MAX ).getValue();
						ji.value = (int)rae.getField( c, FieldType.INPUT_VALUE ).getValue();
						jchunk.setValue( ji );
					}
					break;
					case NaN:
						break;
					case REAL: {
						JChunkValue.JReal ji = new JChunkValue.JReal();
						if( rae.getField( c, FieldType.MIN ).getDataType() != DataType.NaN )
							ji.min = rae.getField( c, FieldType.MIN ).getValue();
						if( rae.getField( c, FieldType.MAX ).getDataType() != DataType.NaN )
							ji.max = rae.getField( c, FieldType.MAX ).getValue();
						ji.value = (double)rae.getField( c, FieldType.INPUT_VALUE ).getValue();
						jchunk.setValue( ji );
					}
					break;
					case STRING:
						break;
					default:
						break;
					}
					list.inputs.add( jchunk );
				}
				for( Chunk c : rae.queryModel( ModelSlice.OUTPUT_DATA ) ) {
					JChunkItem jchunk = new JChunkItem();
					jchunk.setId( c.getId() );
					jchunk.setType( chunkTypeOf( rae, c ) );
					String label = c.getId();
					Field f = rae.getField( c, FieldType.LABEL );
					if( f != null ) {
						label = f.getValue();
					}
					jchunk.setLabel( label );
					f = rae.getField( c, FieldType.DESCRIPTION );
					if( f != null ) {
						jchunk.setDescription( (String)f.getValue() );
					}
					switch( rae.getField( c, FieldType.OUTPUT_VALUE ).getDataType() ) {
					case DISTRIBUTION: {
						Distribution d = rae.getField( c, FieldType.OUTPUT_VALUE ).getValue();
						JChunkValue.JDistribution jd = new JChunkValue.JDistribution();
						jd.values.addAll( d.getValues() );
						jchunk.setValue( jd );
					}
					break;
					case EVIDENCE: {
						JChunkValue.JEvidence je = new JChunkValue.JEvidence();
						Evidence e = rae.getField( c, FieldType.OUTPUT_VALUE ).getValue();
						je.p = e.getPositive();
						je.m = e.getNegative();
						jchunk.setValue( je );
					}
					break;
					case INTEGER: {
						JChunkValue.JInteger ji = new JChunkValue.JInteger();
						ji.value = (int)rae.getField( c, FieldType.OUTPUT_VALUE ).getValue();
						jchunk.setValue( ji );
					}
					break;
					case NaN:
						break;
					case REAL: {
						JChunkValue.JReal ji = new JChunkValue.JReal();
						ji.value = (double)rae.getField( c, FieldType.OUTPUT_VALUE ).getValue();
						jchunk.setValue( ji );
					}
					break;
					case STRING:
						break;
					default:
						break;
					}
					list.outputs.add( jchunk );
				}
			}
		}
		finally {
			DBConnector.closeDB( db );
		}
		String ret = gson.toJson( list );
		System.out.println( ret ); // NOTE(review): debug output left in; consider a proper logger
		return ret;
	}

	/**
	 * @param domain the work domain
	 * @param token the authentication token
	 * @param models a list of models (JsonArray)
	 * @return JSON object with "inputs" and "outputs" arrays
	 * @throws Exception on DB or parsing failure
	 */
	@POST
	@Path("/{domain}/chunks")
	@Info("Returns a list of input and output chunks for the specified models")
	//used in the whatifanalysis, and in the ModelsModule (for showing the content)
	//returns various info, but not the types.
	public String getModelChunks(
			@PathParam("domain") @Info("The work domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@Info("A JSONized array of model names") String models ) throws Exception {
		JsonArray json = (JsonArray)new JsonParser().parse( models );
		JsonObject ret = new JsonObject();
		JsonArray inputs = new JsonArray();
		JsonArray outputs = new JsonArray();
		ret.add( "inputs", inputs );
		ret.add( "outputs", outputs );
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			// Unlike getModelChunkList, ALL requested models are merged into one engine.
			RiskAnalysisEngine rae = ReasoningLibrary.get().createRiskAnalysisEngine();
			for( int i = 0; i < json.size(); i++ ) {
				String modelName = json.get( i ).getAsString();
				String blob = db.getModelBlob( modelName );
				if( blob != null ) {
					rae.loadModel( blob );
				}
			}
			// Deduplicate input chunks shared by multiple models.
			Set<String> set = new HashSet<>();
			for( Chunk c : rae.queryModel( ModelSlice.INPUT_DATA ) ) {
				JsonObject o = new JsonObject();
				o.addProperty( "id", c.getId() );
				o.addProperty( "label", getProperty( rae, c, FieldType.LABEL, c.getId() ) );
				o.addProperty( "description", getProperty( rae, c, FieldType.DESCRIPTION, "" ) );
				o.addProperty( "datatype", rae.getField( c, FieldType.INPUT_VALUE ).getDataType().name() );
				switch( rae.getField( c, FieldType.INPUT_VALUE ).getDataType() ) {
				case DISTRIBUTION: {
					o.addProperty( "min", "0" );
					o.addProperty( "max", "1" );
					Distribution d = rae.getField( c, FieldType.INPUT_VALUE ).getValue();
					o.add( "value", toJson( d ) );
				}
				break;
				case EVIDENCE: {
					Evidence e = rae.getField( c, FieldType.INPUT_VALUE ).getValue();
					o.add( "value", toJson( e ) );
				}
				break;
				case INTEGER: {
					// Defaults 0..100 apply when the model declares no explicit range.
					int min = 0;
					if( rae.getField( c, FieldType.MIN ).getDataType() != DataType.NaN )
						min = rae.getField( c, FieldType.MIN ).getValue();
					int max = 100;
					if( rae.getField( c, FieldType.MAX ).getDataType() != DataType.NaN )
						max = rae.getField( c, FieldType.MAX ).getValue();
					o.addProperty( "min", "" + min );
					o.addProperty( "max", "" + max );
					o.addProperty( "value", (int)rae.getField( c, FieldType.INPUT_VALUE ).getValue() );
				}
				break;
				case NaN:
					break;
				case REAL: {
					// Defaults 0..1 apply when the model declares no explicit range.
					double min = 0;
					if( rae.getField( c, FieldType.MIN ).getDataType() != DataType.NaN )
						min = rae.getField( c, FieldType.MIN ).getValue();
					double max = 1;
					if( rae.getField( c, FieldType.MAX ).getDataType() != DataType.NaN )
						max = rae.getField( c, FieldType.MAX ).getValue();
					o.addProperty( "min", "" + min );
					o.addProperty( "max", "" + max );
					o.addProperty( "value", (double)rae.getField( c, FieldType.INPUT_VALUE ).getValue() );
				}
				break;
				case STRING:
					break;
				default:
					break;
				}
				if( !set.contains( c.getId() ) ) {
					inputs.add( o );
					set.add( c.getId() );
				}
			}
			for( Chunk c : rae.queryModel( ModelSlice.OUTPUT_DATA ) ) {
				JsonObject o = new JsonObject();
				o.addProperty( "id", c.getId() );
				o.addProperty( "datatype", rae.getField( c, FieldType.OUTPUT_VALUE ).getDataType().name() );
				String label = c.getId();
				Field f = rae.getField( c, FieldType.LABEL );
				if( f != null ) {
					label = f.getValue();
				}
				o.addProperty( "label", label );
				f = rae.getField( c, FieldType.DESCRIPTION );
				if( f == null ) {
					o.addProperty( "description", "" );
				}
				else {
					o.addProperty( "description", (String)f.getValue() );
				}
				switch( rae.getField( c, FieldType.OUTPUT_VALUE ).getDataType() ) {
				case DISTRIBUTION: {
					Distribution d = rae.getField( c, FieldType.OUTPUT_VALUE ).getValue();
					o.add( "value", toJson( d ) );
				}
				break;
				case EVIDENCE: {
					Evidence e = rae.getField( c, FieldType.OUTPUT_VALUE ).getValue();
					o.add( "value", toJson( e ) );
				}
				break;
				case INTEGER: {
					o.addProperty( "value", (int)rae.getField( c, FieldType.OUTPUT_VALUE ).getValue() );
				}
				break;
				case NaN:
					break;
				case REAL: {
					o.addProperty( "value", (double)rae.getField( c, FieldType.OUTPUT_VALUE ).getValue() );
				}
				break;
				case STRING:
					break;
				default:
					break;
				}
				outputs.add( o );
			}
		}
		finally {
			DBConnector.closeDB( db );
		}
		System.out.println( ret.toString() ); // NOTE(review): debug output left in; consider a proper logger
		return ret.toString();
	}

	/** Maps a chunk's model-declared TYPE field onto the shared EChunkType enum. */
	private EChunkType chunkTypeOf( RiskAnalysisEngine rae, Chunk c ) {
		String type = rae.getField( c, FieldType.TYPE ).getValue();
		if( "Goal".equals( type ) ) return EChunkType.GOAL;
		if( "Risk".equals( type ) ) return EChunkType.RISK;
		if( "Indicator".equals( type ) ) return EChunkType.INDICATOR;
		return EChunkType.OTHER;
	}

	/** Serializes an Evidence pair as { "p": positive, "e": negative }. */
	// NOTE(review): the negative component is keyed "e" here but "m" in JChunkValue.JEvidence —
	// looks inconsistent, but clients may rely on "e"; confirm before changing.
	private JsonElement toJson( Evidence e ) {
		JsonObject o = new JsonObject();
		o.addProperty( "p", e.getPositive() );
		o.addProperty( "e", e.getNegative() );
		return o;
	}

	/** Serializes a Distribution as a JSON array of stringified values. */
	private JsonElement toJson( Distribution d ) {
		JsonArray a = new JsonArray();
		for( double val : d.getValues() ) {
			a.add( new JsonPrimitive( "" + val ) );
		}
		return a;
	}

	/** Returns the string value of the given chunk field, or {@code def} when the field or its value is absent. */
	private String getProperty( RiskAnalysisEngine rae, Chunk chunk, FieldType ft, String def ) {
		Field f = rae.getField( chunk, ft );
		if( f == null ) return def;
		if( f.getValue() == null ) return def;
		return f.getValue().toString();
	}

	@POST
	@Path("/{domain}/{model}/delete-documentation")
	@Info("Deletes the documentation of a model")
	public void deleteDocumentation(
			@PathParam("domain") @Info("The work domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@PathParam("model") @Info("The name of the model") String name ) throws Exception {
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			db.deleteModelDesc( name );
		}
		finally {
			DBConnector.closeDB( db );
		}
	}

	@GET
	@Path("/{domain}/{model}/get")
	@Info("Returns information about a stored model")
	public String getInfo(
			@PathParam("domain") @Info("The work domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@PathParam("model") @Info("The name of the model") String name ) throws Exception {
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			String filename = db.getModelFilename( name ); //modelfilename
			String descfile = db.getModelDescFielname( name );
			JsonObject o = new JsonObject();
			o.addProperty( "name", name );
			o.addProperty( "modelfilename", filename );
			o.addProperty( "modeldescfilename", descfile );
			return o.toString();
		}
		finally {
			DBConnector.closeDB( db );
		}
	}

	@GET
	@Path("/{domain}/{model}/blob")
	@Info("Return the actual content of a stored model, e.g., of a previously uploaded xml file")
	public String getBlob(
			@PathParam("domain") @Info("The work domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@PathParam("model") @Info("The model name") String name ) throws Exception {
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			String blob = db.getModelBlob( name );
			JsonObject o = new JsonObject();
			o.addProperty( "name", name );
			o.addProperty( "blob", blob );
			return o.toString();
		}
		finally {
			DBConnector.closeDB( db );
		}
	}

	@DELETE
	@Path("/{domain}/{model}/delete")
	@Info("Deletes a model from the database")
	public void deleteModel(
			@PathParam("domain") @Info("The work domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@PathParam("model") @Info("The model name") String name ) throws Exception {
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			db.removeModel( name );
		}
		finally {
			DBConnector.closeDB( db );
		}
	}

	@POST
	@Path("/{domain}/{model}/upload")
	@Info("Stores a model into the database")
	public void uploadModel(
			@HeaderParam("token") @Info("The authentication token") String token,
			@PathParam("domain") @Info("The work domain") String domain,
			@PathParam("model") @Info("The model name") String modelname,
			String content ) {
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			//attention:filename sanitation is not directly notified to the user
			modelname = RiscossUtil.sanitize( modelname );
			// NOTE(review): an existing model with the same name makes this a silent no-op;
			// the caller is not told the upload was skipped.
			for( String storedmodel : db.getModelList() ) {
				if( storedmodel.equals( modelname ) ) {
					return;
				}
			}
			db.storeModel( content, modelname );
		}
		catch( Exception e ) {
			// Wrap in the upload framework's exception type, preserving the cause.
			throw new UploadActionException( e );
		}
		finally {
			DBConnector.closeDB( db );
		}
	}

	@POST
	@Path("/{domain}/{model}/rename")
	@Info("Changes the name of a model")
	public void changeModelName(
			@PathParam("domain") @Info("The work domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@PathParam("model") @Info("The name of an existing model") String name,
			@QueryParam("newname") @Info("The new name of the model") String newName ) throws Exception {
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			// Reject the rename when the target name is already taken.
			for( String storedmodel : db.getModelList() ) {
				if( storedmodel.equals( newName ) ) {
					throw new RuntimeException( "Duplicate entry" );
				}
			}
			db.changeModelName( name, newName );
		}
		finally {
			DBConnector.closeDB( db );
		}
	}

	@GET
	@Path("/{domain}/{name}/content")
	@Info("Experimental. Returns a structured representation of the content of a RiskML model")
	public String getRiskModelContent(
			@PathParam("domain") @Info("The work domain") String domain,
			@HeaderParam("token") @Info("The authentication token") String token,
			@PathParam("name") @Info("The name of the model") String name ) throws Exception {
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			String blob = db.getModelBlob( name );
			FBKRiskAnalysisEngine fbk = new FBKRiskAnalysisEngine();
			fbk.loadModel( blob );
			JRiskModel jmodel = new JRiskModel();
			jmodel.name = name;
			for( Proposition p : fbk.getProgram().getModel().propositions() ) {
				JProposition jp = new JProposition();
				jp.id = p.getId();
				jp.stereotype = p.getStereotype();
				jmodel.add( jp );
			}
			for( Relation r : fbk.getProgram().getModel().relations() ) {
				JRelation jr = new JRelation();
				jr.stereotype = r.getStereotype();
				// Relations with an unknown target or no resolvable endpoint are skipped.
				Proposition target = r.getTarget();
				if( target == null ) continue;
				JProposition jtarget = jmodel.getProposition( target.getId() );
				if( jtarget == null ) continue;
				jr.setTarget( jtarget );
				for( Proposition source : r.getSources() ) {
					JProposition jsource = jmodel.getProposition( source.getId() );
					if( jsource == null ) continue;
					jr.addSource( jsource );
				}
				jmodel.add( jr );
			}
			return gson.toJson( jmodel );
		}
		finally {
			DBConnector.closeDB( db );
		}
	}

	@GET
	@Path("/{domain}/{model}/description")
	public String getDescription(
			@PathParam("domain") @Info("The selected domain") String domain,
			@PathParam("model") @Info("The selected model") String model,
			@HeaderParam("token") @Info("The authentication token") String token ) throws Exception {
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			return db.getProperty( RiscossElements.MODEL, model, "description", "" );
		}
		finally {
			DBConnector.closeDB( db );
		}
	}

	@POST
	@Path("/{domain}/{model}/description")
	public void setDescription(
			@PathParam("domain") @Info("The selected domain") String domain,
			@PathParam("model") @Info("The selected model") String model,
			@HeaderParam("token") @Info("The authentication token") String token,
			@Info("The description string to be set") String description ) throws Exception {
		RiscossDB db = null;
		try {
			db = DBConnector.openDB( domain, token );
			db.setProperty( RiscossElements.MODEL, model, "description", description );
		}
		finally {
			DBConnector.closeDB( db );
		}
	}
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.idea; import com.intellij.concurrency.IdeaForkJoinWorkerThreadFactory; import com.intellij.ide.cloudConfig.CloudConfigProvider; import com.intellij.ide.customize.CustomizeIDEWizardDialog; import com.intellij.ide.customize.CustomizeIDEWizardStepsProvider; import com.intellij.ide.plugins.PluginManagerCore; import com.intellij.ide.startupWizard.StartupWizard; import com.intellij.jna.JnaLoader; import com.intellij.openapi.application.ApplicationInfo; import com.intellij.openapi.application.ApplicationNamesInfo; import com.intellij.openapi.application.ConfigImportHelper; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.application.ex.ApplicationInfoEx; import com.intellij.openapi.application.impl.ApplicationInfoImpl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.ShutDownTracker; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.SystemInfoRt; import com.intellij.openapi.util.io.FileUtilRt; import com.intellij.openapi.util.io.win32.IdeaWin32; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.AppUIUtil; import com.intellij.util.Consumer; import com.intellij.util.EnvironmentUtil; import com.intellij.util.PlatformUtils; import com.intellij.util.ui.UIUtil; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.PatternLayout; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.io.BuiltInServer; import javax.swing.*; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.lang.management.ManagementFactory; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.List; import java.util.Locale; /** * @author yole */ public class StartupUtil { public static final 
String NO_SPLASH = "nosplash"; private static SocketLock ourSocketLock; private StartupUtil() { } public static boolean shouldShowSplash(final String[] args) { return !Arrays.asList(args).contains(NO_SPLASH); } public static synchronized void addExternalInstanceListener(@Nullable Consumer<List<String>> consumer) { // method called by app after startup if (ourSocketLock != null) { ourSocketLock.setExternalInstanceListener(consumer); } } @Nullable public static synchronized BuiltInServer getServer() { return ourSocketLock == null ? null : ourSocketLock.getServer(); } @FunctionalInterface interface AppStarter { void start(boolean newConfigFolder); default void beforeImportConfigs() {} } static void prepareAndStart(String[] args, AppStarter appStarter) { IdeaForkJoinWorkerThreadFactory.setupForkJoinCommonPool(Main.isHeadless(args)); boolean newConfigFolder = false; if (!Main.isHeadless()) { AppUIUtil.updateFrameClass(); newConfigFolder = !new File(PathManager.getConfigPath()).exists(); } if (!checkJdkVersion()) { System.exit(Main.JDK_CHECK_FAILED); } // avoiding "log4j:WARN No appenders could be found" System.setProperty("log4j.defaultInitOverride", "true"); try { org.apache.log4j.Logger root = org.apache.log4j.Logger.getRootLogger(); if (!root.getAllAppenders().hasMoreElements()) { root.setLevel(Level.WARN); root.addAppender(new ConsoleAppender(new PatternLayout(PatternLayout.DEFAULT_CONVERSION_PATTERN))); } } catch (Throwable e) { //noinspection CallToPrintStackTrace e.printStackTrace(); } // note: uses config folder! 
if (!checkSystemFolders()) { System.exit(Main.DIR_CHECK_FAILED); } ActivationResult result = lockSystemFolders(args); if (result == ActivationResult.ACTIVATED) { System.exit(0); } if (result != ActivationResult.STARTED) { System.exit(Main.INSTANCE_CHECK_FAILED); } // the log initialization should happen only after locking the system directory Logger.setFactory(LoggerFactory.class); Logger log = Logger.getInstance(Main.class); startLogging(log); loadSystemLibraries(log); fixProcessEnvironment(log); if (!Main.isHeadless()) { UIUtil.initDefaultLAF(); } if (newConfigFolder) { appStarter.beforeImportConfigs(); ConfigImportHelper.importConfigsTo(PathManager.getConfigPath()); } if (!Main.isHeadless()) { AppUIUtil.updateWindowIcon(JOptionPane.getRootFrame()); AppUIUtil.registerBundledFonts(); AppUIUtil.showUserAgreementAndConsentsIfNeeded(); } appStarter.start(newConfigFolder); } /** * Checks if the program can run under the JDK it was started with. */ private static boolean checkJdkVersion() { if ("true".equals(System.getProperty("idea.jre.check"))) { try { // try to find a class from tools.jar Class.forName("com.sun.jdi.Field", false, StartupUtil.class.getClassLoader()); } catch (ClassNotFoundException e) { String message = "'tools.jar' seems to be not in " + ApplicationNamesInfo.getInstance().getProductName() + " classpath.\n" + "Please ensure JAVA_HOME points to JDK rather than JRE."; Main.showMessage("JDK Required", message, true); return false; } catch (LinkageError e) { String message = "Cannot load a class from 'tools.jar': " + e.getMessage() + "\n" + "Please ensure JAVA_HOME points to JDK rather than JRE."; Main.showMessage("JDK Required", message, true); return false; } } if ("true".equals(System.getProperty("idea.64bit.check"))) { if (PlatformUtils.isCidr() && !SystemInfo.is64Bit) { String message = "32-bit JVM is not supported. 
Please install 64-bit version."; Main.showMessage("Unsupported JVM", message, true); return false; } } return true; } private static synchronized boolean checkSystemFolders() { String configPath = PathManager.getConfigPath(); PathManager.ensureConfigFolderExists(); if (!new File(configPath).isDirectory()) { String message = "Config path '" + configPath + "' is invalid.\n" + "If you have modified the '" + PathManager.PROPERTY_CONFIG_PATH + "' property please make sure it is correct,\n" + "otherwise please re-install the IDE."; Main.showMessage("Invalid Config Path", message, true); return false; } String systemPath = PathManager.getSystemPath(); if (!new File(systemPath).isDirectory()) { String message = "System path '" + systemPath + "' is invalid.\n" + "If you have modified the '" + PathManager.PROPERTY_SYSTEM_PATH + "' property please make sure it is correct,\n" + "otherwise please re-install the IDE."; Main.showMessage("Invalid System Path", message, true); return false; } File logDir = new File(PathManager.getLogPath()); boolean logOk = false; if (logDir.isDirectory() || logDir.mkdirs()) { try { File ideTempFile = new File(logDir, "idea_log_check.txt"); write(ideTempFile, "log check"); delete(ideTempFile); logOk = true; } catch (IOException ignored) { } } if (!logOk) { String message = "Log path '" + logDir.getPath() + "' is inaccessible.\n" + "If you have modified the '" + PathManager.PROPERTY_LOG_PATH + "' property please make sure it is correct,\n" + "otherwise please re-install the IDE."; Main.showMessage("Invalid Log Path", message, true); return false; } File ideTempDir = new File(PathManager.getTempPath()); String tempInaccessible; if (!ideTempDir.isDirectory() && !ideTempDir.mkdirs()) { tempInaccessible = "unable to create the directory"; } else { try { File ideTempFile = new File(ideTempDir, "idea_tmp_check.sh"); write(ideTempFile, "#!/bin/sh\nexit 0"); if (SystemInfo.isWindows || SystemInfo.isMac) { tempInaccessible = null; } else if 
(!ideTempFile.setExecutable(true, true)) { tempInaccessible = "cannot set executable permission"; } else if (new ProcessBuilder(ideTempFile.getAbsolutePath()).start().waitFor() != 0) { tempInaccessible = "cannot execute test script"; } else { tempInaccessible = null; } delete(ideTempFile); } catch (Exception e) { tempInaccessible = e.getClass().getSimpleName() + ": " + e.getMessage(); } } if (tempInaccessible != null) { String message = "Temp directory '" + ideTempDir + "' is inaccessible.\n" + "If you have modified the '" + PathManager.PROPERTY_SYSTEM_PATH + "' property please make sure it is correct,\n" + "otherwise please re-install the IDE.\n\nDetails: " + tempInaccessible; Main.showMessage("Invalid System Path", message, true); return false; } return true; } private static void write(File file, String content) throws IOException { try (FileWriter writer = new FileWriter(file)) { writer.write(content); } } @SuppressWarnings("SSBasedInspection") private static void delete(File ideTempFile) { if (!FileUtilRt.delete(ideTempFile)) { ideTempFile.deleteOnExit(); } } private enum ActivationResult { STARTED, ACTIVATED, FAILED } @NotNull private static synchronized ActivationResult lockSystemFolders(String[] args) { if (ourSocketLock != null) { throw new AssertionError(); } ourSocketLock = new SocketLock(PathManager.getConfigPath(), PathManager.getSystemPath()); SocketLock.ActivateStatus status; try { status = ourSocketLock.lock(args); } catch (Exception e) { Main.showMessage("Cannot Lock System Folders", e); return ActivationResult.FAILED; } if (status == SocketLock.ActivateStatus.NO_INSTANCE) { ShutDownTracker.getInstance().registerShutdownTask(() -> { //noinspection SynchronizeOnThis synchronized (StartupUtil.class) { ourSocketLock.dispose(); ourSocketLock = null; } }); return ActivationResult.STARTED; } if (status == SocketLock.ActivateStatus.ACTIVATED) { //noinspection UseOfSystemOutOrSystemErr System.out.println("Already running"); return 
ActivationResult.ACTIVATED; } if (Main.isHeadless() || status == SocketLock.ActivateStatus.CANNOT_ACTIVATE) { String message = "Only one instance of " + ApplicationNamesInfo.getInstance().getProductName() + " can be run at a time."; Main.showMessage("Too Many Instances", message, true); } return ActivationResult.FAILED; } private static void fixProcessEnvironment(Logger log) { if (!Main.isCommandLine()) { System.setProperty("__idea.mac.env.lock", "unlocked"); } boolean envReady = EnvironmentUtil.isEnvironmentReady(); // trigger environment loading if (!envReady) { log.info("initializing environment"); } } private static void loadSystemLibraries(final Logger log) { // load JNA in own temp directory - to avoid collisions and work around no-exec /tmp File ideTempDir = new File(PathManager.getTempPath()); if (!(ideTempDir.mkdirs() || ideTempDir.exists())) { throw new RuntimeException("Unable to create temp directory '" + ideTempDir + "'"); } if (System.getProperty("jna.tmpdir") == null) { System.setProperty("jna.tmpdir", ideTempDir.getPath()); } if (System.getProperty("jna.nosys") == null) { System.setProperty("jna.nosys", "true"); // prefer bundled JNA dispatcher lib } JnaLoader.load(log); if (SystemInfo.isWin2kOrNewer) { //noinspection ResultOfMethodCallIgnored IdeaWin32.isAvailable(); // logging is done there } if (SystemInfo.isWindows) { // WinP should not unpack .dll files into parent directory System.setProperty("winp.unpack.dll.to.parent.dir", "false"); } } private static void startLogging(final Logger log) { ShutDownTracker.getInstance().registerShutdownTask(() -> log.info("------------------------------------------------------ IDE SHUTDOWN ------------------------------------------------------")); log.info("------------------------------------------------------ IDE STARTED ------------------------------------------------------"); ApplicationInfo appInfo = ApplicationInfoImpl.getShadowInstance(); ApplicationNamesInfo namesInfo = 
ApplicationNamesInfo.getInstance(); String buildDate = new SimpleDateFormat("dd MMM yyyy HH:ss", Locale.US).format(appInfo.getBuildDate().getTime()); log.info("IDE: " + namesInfo.getFullProductName() + " (build #" + appInfo.getBuild().asString() + ", " + buildDate + ")"); log.info("OS: " + SystemInfoRt.OS_NAME + " (" + SystemInfoRt.OS_VERSION + ", " + SystemInfo.OS_ARCH + ")"); log.info("JRE: " + System.getProperty("java.runtime.version", "-") + " (" + System.getProperty("java.vendor", "-") + ")"); log.info("JVM: " + System.getProperty("java.vm.version", "-") + " (" + System.getProperty("java.vm.name", "-") + ")"); List<String> arguments = ManagementFactory.getRuntimeMXBean().getInputArguments(); if (arguments != null) { log.info("JVM Args: " + StringUtil.join(arguments, " ")); } String extDirs = System.getProperty("java.ext.dirs"); if (extDirs != null) { for (String dir : StringUtil.split(extDirs, File.pathSeparator)) { String[] content = new File(dir).list(); if (content != null && content.length > 0) { log.info("ext: " + dir + ": " + Arrays.toString(content)); } } } log.info("JNU charset: " + System.getProperty("sun.jnu.encoding")); } static void runStartupWizard() { ApplicationInfoEx appInfo = ApplicationInfoImpl.getShadowInstance(); String stepsProviderName = appInfo.getCustomizeIDEWizardStepsProvider(); if (stepsProviderName != null) { CustomizeIDEWizardStepsProvider provider; try { Class<?> providerClass = Class.forName(stepsProviderName); provider = (CustomizeIDEWizardStepsProvider)providerClass.newInstance(); } catch (Throwable e) { Main.showMessage("Configuration Wizard Failed", e); return; } CloudConfigProvider configProvider = CloudConfigProvider.getProvider(); if (configProvider != null) { configProvider.beforeStartupWizard(); } new CustomizeIDEWizardDialog(provider).show(); PluginManagerCore.invalidatePlugins(); if (configProvider != null) { configProvider.startupWizardFinished(); } return; } List<ApplicationInfoEx.PluginChooserPage> pages = 
appInfo.getPluginChooserPages(); if (!pages.isEmpty()) { new StartupWizard(pages).show(); PluginManagerCore.invalidatePlugins(); } } }
package com.worldly.graph; import android.content.Context; import android.content.res.Configuration; import android.content.res.Resources; import android.util.DisplayMetrics; import com.worldly.graph.data.GraphData; import com.worldly.graph.data.GraphDataColumn; import com.worldly.graph.data.GraphDataRow; import com.worldly.graph.exception.CannotBeNullException; import com.worldly.graph.exception.GraphDataSizeMismatchException; import com.worldly.graph.listener.GraphDataChangeListener; /** * Abstract class which all types of Graphs/Charts * have to extend. * * @author Marek Matejka * */ public abstract class Chart { /** * Used to get data about the current state of the application. */ private Context context; /** * Data for the graph. */ private GraphData data; /** * Listener which automatically updates the graph with new data, * each time the data is changed. */ private GraphDataChangeListener listener; /** * Title of the vertical axis. */ private String vAxisTitle; /** * Title of the horizontal axis. */ private String hAxisTitle; /** * Title of the chart. */ private String chartTitle; /** * Basic constructor which uses the fully prepared data (table) * to create the graph. * * @param data Already formatted data for the graph. * @param context Context of the current Activity. */ public Chart(GraphData data, Context context) { this.data = data; this.chartTitle = ""; this.hAxisTitle = ""; this.vAxisTitle = ""; this.context = context; } /** * Constructor which uses one Column as the initial data. * * @param names Column of row names. * @param values Column of data to be included in the data table. * @param context Context of the current Activity. * @throws CannotBeNullException Thrown if some value in the column is NULL * or if the column itself is NULL. 
*/ public Chart(GraphDataColumn names, GraphDataColumn values, Context context) throws CannotBeNullException { this.data = new GraphData(names, values); this.chartTitle = ""; this.hAxisTitle = ""; this.vAxisTitle = ""; this.context = context; } /** * Constructor with two rows - names and values. * * @param names Row with names. * @param values Row with values. * @param context Context of the current Activity. * @throws GraphDataSizeMismatchException If length of names and values does not match. */ public Chart(GraphDataRow names, GraphDataRow values, Context context) throws GraphDataSizeMismatchException { this.data = new GraphData(names, values); this.chartTitle = ""; this.hAxisTitle = ""; this.vAxisTitle = ""; this.context = context; } /** * Returns the HTML and Javascript content used * to load and display the graph in GraphView. * * @return HTML and Javascript content in a String * that can be read by Google Graphs. */ public abstract String getContent(); /** * Overwrites the current data in the graph. * * @param data Fully formatted data for the graph. */ public void setGraphData(GraphData data) { this.data = data; } /** * Returns the current data for the graph. * * @return Current graph's data. */ public GraphData getGraphData() { return this.data; } /** * Adds column to graph's current data. * * @param column Column of data which will be added. * @throws CannotBeNullException If column or any of its content is NULL exception is thrown. * @throws GraphDataSizeMismatchException If column does not have the correct length exception is thrown. */ void addColumn(GraphDataColumn column) throws CannotBeNullException, GraphDataSizeMismatchException { this.data.addColumn(column); listener.onGraphDataChanged(this); } /** * Adds row to graph's current data. * * @param row Row of data which will be added. * @throws CannotBeNullException If row of any of its content is NULL exception is thrown. 
* @throws GraphDataSizeMismatchException If the row's length is different to current length of row * in the GraphData exception is thrown. */ void addRow(GraphDataRow row) throws CannotBeNullException, GraphDataSizeMismatchException { this.data.addRow(row); listener.onGraphDataChanged(this); } /** * Removes a row from data on a given index. * Index starts with 0. * * @param index Index of row to be removed. */ void removeRow(int index) { this.data.removeRow(index); listener.onGraphDataChanged(this); } /** * Removes a column from data on a given index. * * @param index Index of a column to be removed. */ void removeColumn(int index) { this.data.removeColumn(index); listener.onGraphDataChanged(this); } /** * Sets {@link GraphDataChangeListener} for this Chart. * * @param listener Concrete instance of the listener. */ public void setGraphDataChangeListener(GraphDataChangeListener listener) { this.listener = listener; } /** * Returns the number of columns in the data for the graph. * * @return Number of columns in graph's data. */ int getNumberOfColumns() { return data.getNumberOfColumns(); } /** * Returns the number of rows in the data for the graph. * * @return Number of rows in graph's data. */ int getNumberOfRows() { return data.getNumberOfRows(); } /** * Returns the width of the chart adjusted to screen size. * * @return Screen adjusted width of the graph. */ public int getChartWidth() { DisplayMetrics metrics = Resources.getSystem().getDisplayMetrics(); return (int)((metrics.widthPixels/metrics.density)*0.9); } /** * Returns the height of the chart adjusted to screen size * * @return Screen adjusted height of the graph. 
*/ public int getChartHeight() { DisplayMetrics metrics = Resources.getSystem().getDisplayMetrics(); //based on the orientation if (context.getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) return (int)((metrics.heightPixels/metrics.density)*0.65); else return (int)(metrics.heightPixels/metrics.density/4); } /** * Returns the current title of vertical axis. * * @return Current vertical axis title. */ public String getVerticalAxisTitle() { return vAxisTitle; } /** * Sets the title of the vertical axis. * * @param vAxisTitle New title of the vertical axis. */ public void setVerticalAxisTitle(String vAxisTitle) { this.vAxisTitle = vAxisTitle; if (listener != null) listener.onGraphDataChanged(this); } /** * Returns the current title of horizontal axis. * * @return Current horizontal axis title. */ public String getHorizontalAxisTitle() { return hAxisTitle; } /** * Sets the title of the horizontal axis. * * @param hAxisTitle New title of the horizontal axis. */ public void setHorizontalAxisTitle(String hAxisTitle) { this.hAxisTitle = hAxisTitle; if (listener != null) listener.onGraphDataChanged(this); } /** * Returns the current title of the chart. * * @return Chart's title. */ public String getChartTitle() { return chartTitle; } /** * Sets the title of the chart. * * @param chartTitle New chart's title. */ public void setChartTitle(String chartTitle) { this.chartTitle = chartTitle; if (listener != null) listener.onGraphDataChanged(this); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.hyracks.api.rewriter.runtime;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Map.Entry;
import java.util.concurrent.Future;

import org.apache.commons.lang3.tuple.Pair;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.IActivity;
import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;

/**
 * The runtime of a SuperActivity, which internally executes a DAG of one-to-one
 * connected activities in a single thread.
 *
 * @author yingyib
 */
public class SuperActivityOperatorNodePushable implements IOperatorNodePushable {

    // Runtime pushable for each activity in the internal DAG, keyed by activity id.
    private final Map<ActivityId, IOperatorNodePushable> operatorNodePushables = new HashMap<ActivityId, IOperatorNodePushable>();

    // Same pushables in the BFS discovery order produced by init(); this order
    // is what initialize()/deinitialize() iterate over.
    private final List<IOperatorNodePushable> operatorNodePushablesBFSOrder = new ArrayList<IOperatorNodePushable>();

    // Source activities of the internal DAG (no incoming one-to-one edges).
    private final Map<ActivityId, IActivity> startActivities;
    private final SuperActivity parent;
    private final IHyracksTaskContext ctx;
    private final IRecordDescriptorProvider recordDescProvider;
    private final int partition;
    private final int nPartitions;

    // Aggregate input arity across all start activities (computed in init()).
    private int inputArity = 0;

    // Parallel to operatorNodePushablesBFSOrder: true once initialize() has
    // started for that operator, so deinitialize() only touches started ones.
    private boolean[] startedInitialization;

    /**
     * Builds the internal runtime DAG eagerly in the constructor.
     * init() failures surface as IllegalStateException since the
     * constructor cannot throw HyracksDataException.
     */
    public SuperActivityOperatorNodePushable(SuperActivity parent, Map<ActivityId, IActivity> startActivities,
            IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
        this.parent = parent;
        this.startActivities = startActivities;
        this.ctx = ctx;
        this.recordDescProvider = recordDescProvider;
        this.partition = partition;
        this.nPartitions = nPartitions;

        // Initialize the writer-relationship for the internal DAG of operator
        // node pushables.
        try {
            init();
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }

    @Override
    public void initialize() throws HyracksDataException {
        // Initializes all OperatorNodePushables in parallel; the started flag is
        // set BEFORE op.initialize() so a failed init is still deinitialized.
        runInParallel((op, index) -> {
            startedInitialization[index] = true;
            op.initialize();
        });
    }

    /**
     * Constructs the runtime execution DAG: creates a pushable per activity,
     * wires producer output channels to consumer input channels, and records
     * the BFS order used by initialize()/deinitialize().
     */
    private void init() throws HyracksDataException {
        Map<ActivityId, IOperatorNodePushable> startOperatorNodePushables = new HashMap<ActivityId, IOperatorNodePushable>();
        // Queue of edges: ((source activity, output channel), (dest activity, input channel)).
        Queue<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> childQueue = new LinkedList<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
        List<IConnectorDescriptor> outputConnectors = null;

        // Set up the source operators.
        for (Entry<ActivityId, IActivity> entry : startActivities.entrySet()) {
            IOperatorNodePushable opPushable = entry.getValue().createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
            startOperatorNodePushables.put(entry.getKey(), opPushable);
            operatorNodePushablesBFSOrder.add(opPushable);
            operatorNodePushables.put(entry.getKey(), opPushable);
            // Total input arity is the sum over the start activities only.
            inputArity += opPushable.getInputArity();
            outputConnectors = parent.getActivityOutputMap().get(entry.getKey());
            if (outputConnectors != null) {
                for (IConnectorDescriptor conn : outputConnectors) {
                    childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
                }
            }
        }

        // Using BFS (breadth-first search) to construct the runtime execution DAG.
        while (childQueue.size() > 0) {
            // Construct the source-to-destination information for this edge.
            Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> channel = childQueue.poll();
            ActivityId sourceId = channel.getLeft().getLeft().getActivityId();
            int outputChannel = channel.getLeft().getRight();
            ActivityId destId = channel.getRight().getLeft().getActivityId();
            int inputChannel = channel.getRight().getRight();
            IOperatorNodePushable sourceOp = operatorNodePushables.get(sourceId);
            IOperatorNodePushable destOp = operatorNodePushables.get(destId);
            // Lazily create the destination pushable the first time it is reached.
            if (destOp == null) {
                destOp = channel.getRight().getLeft().createPushRuntime(ctx, recordDescProvider, partition, nPartitions);
                operatorNodePushablesBFSOrder.add(destOp);
                operatorNodePushables.put(destId, destOp);
            }

            // Construct the dataflow connection from a producer to a consumer.
            sourceOp.setOutputFrameWriter(outputChannel, destOp.getInputFrameWriter(inputChannel),
                    recordDescProvider.getInputRecordDescriptor(destId, inputChannel));

            // Traverse to the children of the current activity: expand the
            // executing activities further downstream.
            outputConnectors = parent.getActivityOutputMap().get(destId);
            if (outputConnectors != null && outputConnectors.size() > 0) {
                for (IConnectorDescriptor conn : outputConnectors) {
                    if (conn != null) {
                        childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
                    }
                }
            }
        }

        // Sets the startedInitialization flags to false for every discovered operator.
        startedInitialization = new boolean[operatorNodePushablesBFSOrder.size()];
        Arrays.fill(startedInitialization, false);
    }

    @Override
    public void deinitialize() throws HyracksDataException {
        // Only deinitialize operators whose initialize() was actually started.
        runInParallel((op, index) -> {
            if (startedInitialization[index]) {
                op.deinitialize();
            }
        });
    }

    @Override
    public int getInputArity() {
        return inputArity;
    }

    @Override
    public void setOutputFrameWriter(int clusterOutputIndex, IFrameWriter writer, RecordDescriptor recordDesc)
            throws HyracksDataException {
        // Set the right output frame writer: map the cluster-level output index
        // to the (activity, channel) pair that owns it.
        Pair<ActivityId, Integer> activityIdOutputIndex = parent.getActivityIdOutputIndex(clusterOutputIndex);
        IOperatorNodePushable opPushable = operatorNodePushables.get(activityIdOutputIndex.getLeft());
        opPushable.setOutputFrameWriter(activityIdOutputIndex.getRight(), writer, recordDesc);
    }

    @Override
    public IFrameWriter getInputFrameWriter(final int index) {
        // Get the right IFrameWriter from the cluster input index.
        Pair<ActivityId, Integer> activityIdInputIndex = parent.getActivityIdInputIndex(index);
        IOperatorNodePushable operatorNodePushable = operatorNodePushables.get(activityIdInputIndex.getLeft());
        IFrameWriter writer = operatorNodePushable.getInputFrameWriter(activityIdInputIndex.getRight());
        return writer;
    }

    @Override
    public String getDisplayName() {
        return "Super Activity " + parent.getActivityMap().values().toString();
    }

    /** Action applied to each pushable (with its BFS index) by runInParallel. */
    interface OperatorNodePushableAction {
        void runAction(IOperatorNodePushable op, int opIndex) throws HyracksDataException;
    }

    /**
     * Runs one action for all OperatorNodePushables in parallel through the
     * task context's thread pool, then waits for all of them. On any failure
     * the remaining tasks are cancelled and the cause is rethrown wrapped in
     * a HyracksDataException.
     */
    private void runInParallel(OperatorNodePushableAction opAction) throws HyracksDataException {
        List<Future<Void>> initializationTasks = new ArrayList<>();
        try {
            int index = 0;
            // Run one action for all OperatorNodePushables in parallel through a thread pool.
            for (final IOperatorNodePushable op : operatorNodePushablesBFSOrder) {
                final int opIndex = index++;
                initializationTasks.add(ctx.getExecutorService().submit(() -> {
                    opAction.runAction(op, opIndex);
                    return null;
                }));
            }
            // Waits until all parallel actions finish.
            for (Future<Void> initializationTask : initializationTasks) {
                initializationTask.get();
            }
        } catch (Throwable th) {
            // Best-effort cancellation of the still-running siblings.
            for (Future<Void> initializationTask : initializationTasks) {
                initializationTask.cancel(true);
            }
            throw new HyracksDataException(th);
        }
    }
}
/**
 * Copyright (c) 2007-2014 The Apereo Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *             http://opensource.org/licenses/ecl2
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Educational
 * Community License, Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a copy of the
 * License at:
 *
 * http://opensource.org/licenses/ecl2.txt
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sakaiproject.signup.tool.entityproviders;

import java.util.ArrayList;
import java.util.Date;
import java.util.Hashtable;
import java.util.List;

import org.sakaiproject.signup.model.SignupTimeslot;
import org.sakaiproject.signup.restful.SignupEvent;
import org.sakaiproject.signup.restful.SignupParticipant;
import org.sakaiproject.signup.restful.SignupTimeslotItem;
import org.sakaiproject.signup.tool.util.Utilities;
import org.sakaiproject.tool.cover.SessionManager;

/**
 * <p>
 * This class provides a per-user session cache for restful signup events.
 * Cached event lists are keyed by a unique id (site id or user id) and are
 * considered stale after {@link #TIME_INTERVAL_TO_REFRESH} milliseconds.
 * </P>
 *
 * @author Peter Liu
 */
public class SignupRESTfulSessionManager {

	// Session attribute key under which the whole cache map is stored.
	private static final String USER_SESSION_DATA = "signup.user.session.data";

	/* 5 minutes: cache entries older than this are treated as stale. */
	private static final long TIME_INTERVAL_TO_REFRESH = 5 * 60 * 1000;

	SignupRESTfulSessionManager() {
	}

	/**
	 * Returns the cache map stored in the current user's session, or null if
	 * nothing has been cached yet.
	 */
	public UserSignupSessionData getStoredUserSessionData() {
		return (UserSignupSessionData) SessionManager.getCurrentSession().getAttribute(
				USER_SESSION_DATA);
	}

	/**
	 * Returns the cached events for the given unique id (site id or user id),
	 * or null when the id is null or nothing is cached.
	 */
	public SignupEventsCache getSignupEventsCache(String uniqueId) {
		if (uniqueId == null || getStoredUserSessionData() == null)
			return null;

		return getStoredUserSessionData().get(uniqueId);
	}

	/**
	 * Store the data in the cache
	 * 
	 * @param uniqueId
	 *            can be unique siteId or userId
	 * @param events
	 *            a list of SignupEvent objects
	 * @param viewRange
	 *            a numeric string
	 */
	public void StoreSignupEventsData(String uniqueId, List<SignupEvent> events, String viewRange) {
		SignupEventsCache signupEventsCache = new SignupEventsCache(uniqueId, events, viewRange);
		UserSignupSessionData userDataMap = getStoredUserSessionData();
		if (userDataMap == null) {
			userDataMap = new UserSignupSessionData();
		}
		userDataMap.add(uniqueId, signupEventsCache);
		storeUserDataInSession(userDataMap);
	}

	/**
	 * Stores an already-built cache entry under the given unique id, creating
	 * the session-level map on first use.
	 */
	public void StoreSignupEventsData(String uniqueId, SignupEventsCache cache) {
		UserSignupSessionData userDataMap = getStoredUserSessionData();
		if (userDataMap == null) {
			userDataMap = new UserSignupSessionData();
		}
		userDataMap.add(uniqueId, cache);
		storeUserDataInSession(userDataMap);
	}

	/**
	 * True when a cache entry exists for the id and it is still within the
	 * refresh window.
	 */
	public boolean isUpTodateDataAvailable(String uniqueId) {
		if (this.getSignupEventsCache(uniqueId) != null && !isRefresh(getSignupEventsCache(uniqueId))) {
			return true;
		}

		return false;
	}

	/**
	 * See if the view range is the same for the data set
	 * 
	 * @param id
	 *            can be unique siteId or userId
	 * @param viewRange
	 *            numeric number string
	 * @return boolean value
	 */
	public boolean isSameViewRange(String id, String viewRange) {
		SignupEventsCache sec = this.getSignupEventsCache(id);
		if (sec != null && sec.getViewRange().equals(viewRange)) {
			return true;
		}

		return false;
	}

	/**
	 * Looks up a single cached event by id, or returns null when the id has no
	 * cache entry or the event is not in it.
	 */
	public SignupEvent getExistedSignupEventInCache(String uniqueId, Long eventId) {
		if (this.getSignupEventsCache(uniqueId) != null) {
			return getSignupEventsCache(uniqueId).getSignupEvent(eventId);
		}

		return null;
	}

	/**
	 * Replaces one event inside the cache entry for the id. A null event means
	 * the event is gone: the whole entry is dropped so a fresh list is fetched
	 * later. No-op when there is no cache entry for the id.
	 */
	public void updateSignupEventsCache(String uniqueId, SignupEvent event) {
		SignupEventsCache eCache = getSignupEventsCache(uniqueId);
		if (eCache == null) {
			return; // don't update, force it to get a new list later.
		}

		if (event == null) {
			/* clean up and force it to get a new list later due to event-gone. */
			cleanSignupEventsCache(uniqueId);
			return;
		} else {
			eCache.updateSignupEvent(event);
		}

		StoreSignupEventsData(uniqueId, eCache);
	}

	/**
	 * Updates the per-user "my events" cache after a signup/cancel action.
	 * Cancelling removes the event from the cache; signing up populates the
	 * user's timeslot info and adds it. A null event drops the whole entry.
	 */
	public void updateMySignupEventsCache(String userId, SignupEvent event,String userAction) {
		SignupEventsCache eCache = getSignupEventsCache(userId);
		if (eCache == null) {
			return; // don't update, force it to get a new list later.
		}

		if (event == null) {
			/* clean up and force it to get a new list later due to event-gone. */
			cleanSignupEventsCache(userId);
			return;
		} else {
			if(SignupEvent.USER_CANCEL_SIGNUP.equals(userAction))
				eCache.removeEvent(event);
			else if (SignupEvent.USER_SIGNUP.equals(userAction)){
				populateMySignedUpEventData(event, userId);
				eCache.addEvent(event);
			}
		}

		StoreSignupEventsData(userId, eCache);
	}

	/**
	 * Copies the user's own timeslot (start/end time and signed-up status) onto
	 * the event by scanning the event's timeslots for an attendee matching the
	 * user id; stops at the first match.
	 */
	private void populateMySignedUpEventData(SignupEvent event, String userId){
		List<SignupTimeslotItem> tsList = event.getSignupTimeSlotItems();
		if(tsList !=null){
			for (SignupTimeslotItem ts : tsList) {
				List<SignupParticipant> attList = ts.getAttendees();
				if(attList !=null){
					for (SignupParticipant p : attList) {
						if(p.getAttendeeUserId().equals(userId)){
							event.setMyStartTime(ts.getStartTime());
							event.setMyEndTime(ts.getEndTime());
							event.setAvailableStatus(Utilities.rb.getString("event.youSignedUp"));
							return;
						}
					}
				}
			}
		}
	}

	// Writes the whole cache map back into the current session.
	private void storeUserDataInSession(UserSignupSessionData userSessionData) {
		SessionManager.getCurrentSession().setAttribute(USER_SESSION_DATA, userSessionData);
	}

	// Drops the cache entry for the given id. Callers only invoke this after a
	// successful cache lookup, so the session map is assumed non-null here.
	private void cleanSignupEventsCache(String uniqueId) {
		UserSignupSessionData userSnData = getStoredUserSessionData();
		userSnData.remove(uniqueId);
		storeUserDataInSession(userSnData);
	}

	// True when the entry is older than the refresh window and must be refetched.
	private boolean isRefresh(SignupEventsCache cache) {
		Date current = new Date();
		return (current.getTime() - cache.getLastUpdatedTime().getTime() > TIME_INTERVAL_TO_REFRESH);
	}

	/**
	 * Per-session map from unique id (site id or user id) to its cached events.
	 * Backed by a Hashtable, so individual operations are synchronized.
	 */
	class UserSignupSessionData {

		private Hashtable<String, SignupEventsCache> table = new Hashtable<String, SignupEventsCache>();

		public UserSignupSessionData() {
		}

		public void add(String uniqueId, SignupEventsCache eCache) {
			this.table.put(uniqueId, eCache);
		}

		public SignupEventsCache get(String uniqueId) {
			return this.table.get(uniqueId);
		}

		public void remove(String uniqueId) {
			this.table.remove(uniqueId);
		}

		public void update(String uniqueId, SignupEventsCache eCache) {
			this.table.put(uniqueId, eCache);
		}
	}

	/**
	 * One cache entry: a list of events for a unique id plus the view range
	 * they were fetched for and the time they were last updated (used by
	 * {@link SignupRESTfulSessionManager#isRefresh}).
	 */
	class SignupEventsCache {

		private Date lastUpdatedTime;

		private String uniqueId;

		private List<SignupEvent> events;

		private String viewRange = "";

		SignupEventsCache(String id, List<SignupEvent> events, String viewRange) {
			this.uniqueId = id;
			this.events = events;
			this.viewRange = viewRange;
			lastUpdatedTime = new Date();
		}

		SignupEventsCache(String id, SignupEvent event) {
			this.uniqueId = id;
			events = new ArrayList<SignupEvent>();
			events.add(event);
			lastUpdatedTime = new Date();
		}

		// Linear scan by event id; returns null when not found.
		public SignupEvent getSignupEvent(Long eventId) {
			SignupEvent event = null;
			if (this.events != null) {
				for (SignupEvent e : events) {
					if (e.getEventId().equals(eventId)) {
						event = e;
						break;
					}
				}
			}
			return event;
		}

		// Replaces the cached event having the same id, preserving its position.
		public void updateSignupEvent(SignupEvent event) {
			if (this.events != null) {
				for (int i = 0; i < events.size(); i++) {
					if (events.get(i).getEventId().equals(event.getEventId())) {
						events.remove(i);
						events.add(i, event);
						break;
					}
				}
			}
		}

		// Removes the cached event having the same id, if present.
		public void removeEvent(SignupEvent event){
			if (this.events != null) {
				for (int i = 0; i < events.size(); i++) {
					if (events.get(i).getEventId().equals(event.getEventId())) {
						events.remove(i);
						break;
					}
				}
			}
		}

		// Appends the event; no duplicate check is performed here.
		public void addEvent(SignupEvent event){
			if (this.events != null){
				events.add(event);
			}
		}

		public String getUniqueId() {
			return uniqueId;
		}

		public void setUniqueId(String uniqueId) {
			this.uniqueId = uniqueId;
		}

		public List<SignupEvent> getEvents() {
			return events;
		}

		public void setEvents(List<SignupEvent> events) {
			this.events = events;
		}

		public Date getLastUpdatedTime() {
			return lastUpdatedTime;
		}

		public void setLastUpdatedTime(Date lastUpdatedTime) {
			this.lastUpdatedTime = lastUpdatedTime;
		}

		public String getViewRange() {
			return viewRange;
		}

		public void setViewRange(String viewRange) {
			this.viewRange = viewRange;
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.tools; import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_CONNECT_TIMEOUT_MS; import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_CONNECT_TIMEOUT_MS_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_READ_TIMEOUT_MS; import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_READ_TIMEOUT_MS_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_RETRY_BACKOFF_MAX_MS; import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_RETRY_BACKOFF_MAX_MS_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_RETRY_BACKOFF_MS; import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_RETRY_BACKOFF_MS_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_CLOCK_SKEW_SECONDS; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_CLOCK_SKEW_SECONDS_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_EXPECTED_AUDIENCE; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_EXPECTED_AUDIENCE_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_EXPECTED_ISSUER; 
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_EXPECTED_ISSUER_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_REFRESH_MS; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_REFRESH_MS_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MAX_MS; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MAX_MS_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MS; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MS_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_URL; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_URL_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_SCOPE_CLAIM_NAME; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_SCOPE_CLAIM_NAME_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_SUB_CLAIM_NAME; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_SUB_CLAIM_NAME_DOC; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL; import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_CIPHER_SUITES_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_CIPHER_SUITES_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_ENABLED_PROTOCOLS_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG; import static 
org.apache.kafka.common.config.SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_ENGINE_FACTORY_CLASS_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYMANAGER_ALGORITHM_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_KEY_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_KEY_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_LOCATION_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_PASSWORD_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_TYPE_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_KEYSTORE_TYPE_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_KEY_PASSWORD_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_KEY_PASSWORD_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_PROTOCOL_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_PROTOCOL_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_PROVIDER_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_PROVIDER_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_DOC; import static 
org.apache.kafka.common.config.SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_TRUSTSTORE_LOCATION_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_TRUSTSTORE_PASSWORD_DOC; import static org.apache.kafka.common.config.SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG; import static org.apache.kafka.common.config.SslConfigs.SSL_TRUSTSTORE_TYPE_DOC; import static org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerLoginCallbackHandler.CLIENT_ID_CONFIG; import static org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerLoginCallbackHandler.CLIENT_ID_DOC; import static org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerLoginCallbackHandler.CLIENT_SECRET_CONFIG; import static org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerLoginCallbackHandler.CLIENT_SECRET_DOC; import static org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerLoginCallbackHandler.SCOPE_CONFIG; import static org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerLoginCallbackHandler.SCOPE_DOC; import java.util.HashMap; import java.util.List; import java.util.Map; import net.sourceforge.argparse4j.ArgumentParsers; import net.sourceforge.argparse4j.impl.Arguments; import net.sourceforge.argparse4j.inf.Argument; import net.sourceforge.argparse4j.inf.ArgumentParser; import net.sourceforge.argparse4j.inf.ArgumentParserException; import net.sourceforge.argparse4j.inf.Namespace; import org.apache.kafka.common.KafkaException; import 
org.apache.kafka.common.config.AbstractConfig; // completes the 'import' keyword left dangling at the end of the previous line
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.config.types.Password;
import org.apache.kafka.common.security.oauthbearer.secured.AccessTokenRetriever;
import org.apache.kafka.common.security.oauthbearer.secured.AccessTokenRetrieverFactory;
import org.apache.kafka.common.security.oauthbearer.secured.AccessTokenValidator;
import org.apache.kafka.common.security.oauthbearer.secured.AccessTokenValidatorFactory;
import org.apache.kafka.common.security.oauthbearer.secured.CloseableVerificationKeyResolver;
import org.apache.kafka.common.security.oauthbearer.secured.VerificationKeyResolverFactory;
import org.apache.kafka.common.utils.Exit;

/**
 * Command-line tool that verifies OAuth/OIDC provider compatibility with Kafka's
 * OAUTHBEARER support. It performs five checks, printing a "PASSED n/5" line for
 * each: client configuration, client JWT retrieval, client JWT validation,
 * broker configuration, and broker JWT validation. Exits with status 0 on
 * success and 1 on any failure (including argument-parsing errors).
 */
public class OAuthCompatibilityTool {

    /**
     * Entry point: parses command-line options, builds the configuration and
     * JAAS-option maps, then exercises the client-side retrieval/validation
     * path followed by the broker-side validation path.
     *
     * @param args command-line options; see {@link ArgsHandler#parseArgs}
     */
    public static void main(String[] args) {
        ArgsHandler argsHandler = new ArgsHandler();
        Namespace namespace;

        try {
            namespace = argsHandler.parseArgs(args);
        } catch (ArgumentParserException e) {
            // parseArgs already printed the parse error via parser.handleError.
            Exit.exit(1);
            return;
        }

        ConfigHandler configHandler = new ConfigHandler(namespace);
        Map<String, ?> configs = configHandler.getConfigs();
        Map<String, Object> jaasConfigs = configHandler.getJaasOptions();

        try {
            String accessToken;

            {
                // Client side...
                try (AccessTokenRetriever atr = AccessTokenRetrieverFactory.create(configs, jaasConfigs)) {
                    atr.init();
                    AccessTokenValidator atv = AccessTokenValidatorFactory.create(configs);
                    System.out.println("PASSED 1/5: client configuration");
                    accessToken = atr.retrieve();
                    System.out.println("PASSED 2/5: client JWT retrieval");
                    atv.validate(accessToken);
                    System.out.println("PASSED 3/5: client JWT validation");
                }
            }

            {
                // Broker side...
                try (CloseableVerificationKeyResolver vkr = VerificationKeyResolverFactory.create(configs, jaasConfigs)) {
                    vkr.init();
                    AccessTokenValidator atv = AccessTokenValidatorFactory.create(configs, vkr);
                    System.out.println("PASSED 4/5: broker configuration");
                    // Re-validate the token retrieved on the client side, this
                    // time resolving keys the way a broker would (via the JWKS).
                    atv.validate(accessToken);
                    System.out.println("PASSED 5/5: broker JWT validation");
                }
            }

            System.out.println("SUCCESS");
            Exit.exit(0);
        } catch (Throwable t) {
            System.out.println("FAILED:");
            t.printStackTrace();

            if (t instanceof ConfigException) {
                // Misconfiguration: remind the user what options are available.
                System.out.printf("%n");
                argsHandler.parser.printHelp();
            }

            Exit.exit(1);
        }
    }

    /**
     * Declares and parses the command-line options. Each supported Kafka
     * configuration key (SASL/OAuth, SSL, and JAAS) is exposed as a
     * {@code --<config.name>} option whose destination is the config key itself.
     */
    private static class ArgsHandler {

        private static final String DESCRIPTION = String.format(
            "This tool is used to verify OAuth/OIDC provider compatibility.%n%n" +
            "Run the following script to determine the configuration options:%n%n" +
            " ./bin/kafka-run-class.sh %s --help",
            OAuthCompatibilityTool.class.getName());

        private final ArgumentParser parser;

        private ArgsHandler() {
            this.parser = ArgumentParsers
                .newArgumentParser("oauth-compatibility-tool")
                .defaultHelp(true)
                .description(DESCRIPTION);
        }

        /**
         * Registers all supported options and parses {@code args}.
         *
         * @throws ArgumentParserException on invalid input; the error is printed
         *         via {@code parser.handleError} before the exception is rethrown
         */
        private Namespace parseArgs(String[] args) throws ArgumentParserException {
            // SASL/OAuth
            addArgument(SASL_LOGIN_CONNECT_TIMEOUT_MS, SASL_LOGIN_CONNECT_TIMEOUT_MS_DOC, Integer.class);
            addArgument(SASL_LOGIN_READ_TIMEOUT_MS, SASL_LOGIN_READ_TIMEOUT_MS_DOC, Integer.class);
            addArgument(SASL_LOGIN_RETRY_BACKOFF_MAX_MS, SASL_LOGIN_RETRY_BACKOFF_MAX_MS_DOC, Long.class);
            addArgument(SASL_LOGIN_RETRY_BACKOFF_MS, SASL_LOGIN_RETRY_BACKOFF_MS_DOC, Long.class);
            addArgument(SASL_OAUTHBEARER_CLOCK_SKEW_SECONDS, SASL_OAUTHBEARER_CLOCK_SKEW_SECONDS_DOC, Integer.class);
            addArgument(SASL_OAUTHBEARER_EXPECTED_AUDIENCE, SASL_OAUTHBEARER_EXPECTED_AUDIENCE_DOC)
                .action(Arguments.append());    // repeatable: collects a list of audiences
            addArgument(SASL_OAUTHBEARER_EXPECTED_ISSUER, SASL_OAUTHBEARER_EXPECTED_ISSUER_DOC);
            addArgument(SASL_OAUTHBEARER_JWKS_ENDPOINT_REFRESH_MS, SASL_OAUTHBEARER_JWKS_ENDPOINT_REFRESH_MS_DOC, Long.class);
            addArgument(SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MAX_MS, SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MAX_MS_DOC, Long.class);
            addArgument(SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MS, SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MS_DOC, Long.class);
            addArgument(SASL_OAUTHBEARER_JWKS_ENDPOINT_URL, SASL_OAUTHBEARER_JWKS_ENDPOINT_URL_DOC);
            addArgument(SASL_OAUTHBEARER_SCOPE_CLAIM_NAME, SASL_OAUTHBEARER_SCOPE_CLAIM_NAME_DOC);
            addArgument(SASL_OAUTHBEARER_SUB_CLAIM_NAME, SASL_OAUTHBEARER_SUB_CLAIM_NAME_DOC);
            addArgument(SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL, SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL_DOC);

            // SSL
            addArgument(SSL_CIPHER_SUITES_CONFIG, SSL_CIPHER_SUITES_DOC)
                .action(Arguments.append());    // repeatable
            addArgument(SSL_ENABLED_PROTOCOLS_CONFIG, SSL_ENABLED_PROTOCOLS_DOC)
                .action(Arguments.append());    // repeatable
            addArgument(SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_DOC);
            addArgument(SSL_ENGINE_FACTORY_CLASS_CONFIG, SSL_ENGINE_FACTORY_CLASS_DOC);
            addArgument(SSL_KEYMANAGER_ALGORITHM_CONFIG, SSL_KEYMANAGER_ALGORITHM_DOC);
            addArgument(SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG, SSL_KEYSTORE_CERTIFICATE_CHAIN_DOC);
            addArgument(SSL_KEYSTORE_KEY_CONFIG, SSL_KEYSTORE_KEY_DOC);
            addArgument(SSL_KEYSTORE_LOCATION_CONFIG, SSL_KEYSTORE_LOCATION_DOC);
            addArgument(SSL_KEYSTORE_PASSWORD_CONFIG, SSL_KEYSTORE_PASSWORD_DOC);
            addArgument(SSL_KEYSTORE_TYPE_CONFIG, SSL_KEYSTORE_TYPE_DOC);
            addArgument(SSL_KEY_PASSWORD_CONFIG, SSL_KEY_PASSWORD_DOC);
            addArgument(SSL_PROTOCOL_CONFIG, SSL_PROTOCOL_DOC);
            addArgument(SSL_PROVIDER_CONFIG, SSL_PROVIDER_DOC);
            addArgument(SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG, SSL_SECURE_RANDOM_IMPLEMENTATION_DOC);
            addArgument(SSL_TRUSTMANAGER_ALGORITHM_CONFIG, SSL_TRUSTMANAGER_ALGORITHM_DOC);
            addArgument(SSL_TRUSTSTORE_CERTIFICATES_CONFIG, SSL_TRUSTSTORE_CERTIFICATES_DOC);
            addArgument(SSL_TRUSTSTORE_LOCATION_CONFIG, SSL_TRUSTSTORE_LOCATION_DOC);
            addArgument(SSL_TRUSTSTORE_PASSWORD_CONFIG, SSL_TRUSTSTORE_PASSWORD_DOC);
            addArgument(SSL_TRUSTSTORE_TYPE_CONFIG, SSL_TRUSTSTORE_TYPE_DOC);

            // JAAS options...
            addArgument(CLIENT_ID_CONFIG, CLIENT_ID_DOC);
            addArgument(CLIENT_SECRET_CONFIG, CLIENT_SECRET_DOC);
            addArgument(SCOPE_CONFIG, SCOPE_DOC);

            try {
                return parser.parseArgs(args);
            } catch (ArgumentParserException e) {
                parser.handleError(e);
                throw e;
            }
        }

        // Convenience overload: options without an explicit type are strings.
        private Argument addArgument(String option, String help) {
            return addArgument(option, help, String.class);
        }

        /**
         * Registers a single option whose flag, metavar, and destination are
         * all derived from the Kafka config key.
         */
        private Argument addArgument(String option, String help, Class<?> clazz) {
            // Change foo.bar into --foo.bar.
            String name = "--" + option;

            return parser.addArgument(name)
                .type(clazz)
                .metavar(option)
                .dest(option)
                .help(help);
        }

    }

    /**
     * Turns the parsed {@link Namespace} into the two maps the OAuth factories
     * expect: the general client configuration map and the JAAS option map.
     */
    private static class ConfigHandler {

        private final Namespace namespace;

        private ConfigHandler(Namespace namespace) {
            this.namespace = namespace;
        }

        /**
         * Builds the SASL/OAuth client configuration from the options that were
         * actually supplied, then runs the map through {@link AbstractConfig}
         * with Kafka's client SASL/SSL {@link ConfigDef}s so unspecified keys
         * pick up their defaults.
         */
        private Map<String, ?> getConfigs() {
            Map<String, Object> m = new HashMap<>();

            // SASL/OAuth
            maybeAddInt(m, SASL_LOGIN_CONNECT_TIMEOUT_MS);
            maybeAddInt(m, SASL_LOGIN_READ_TIMEOUT_MS);
            maybeAddLong(m, SASL_LOGIN_RETRY_BACKOFF_MS);
            maybeAddLong(m, SASL_LOGIN_RETRY_BACKOFF_MAX_MS);
            maybeAddString(m, SASL_OAUTHBEARER_SCOPE_CLAIM_NAME);
            maybeAddString(m, SASL_OAUTHBEARER_SUB_CLAIM_NAME);
            maybeAddString(m, SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL);
            maybeAddString(m, SASL_OAUTHBEARER_JWKS_ENDPOINT_URL);
            maybeAddLong(m, SASL_OAUTHBEARER_JWKS_ENDPOINT_REFRESH_MS);
            maybeAddLong(m, SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MAX_MS);
            maybeAddLong(m, SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MS);
            maybeAddInt(m, SASL_OAUTHBEARER_CLOCK_SKEW_SECONDS);
            maybeAddStringList(m, SASL_OAUTHBEARER_EXPECTED_AUDIENCE);
            maybeAddString(m, SASL_OAUTHBEARER_EXPECTED_ISSUER);

            // This here is going to fill in all the defaults for the values we don't specify...
            ConfigDef cd = new ConfigDef();
            SaslConfigs.addClientSaslSupport(cd);
            SslConfigs.addClientSslSupport(cd);
            AbstractConfig config = new AbstractConfig(cd, m);
            return config.values();
        }

        /**
         * Collects the JAAS options (client credentials and scope) together
         * with any SSL settings supplied on the command line. Key material and
         * passwords are wrapped in {@link Password}.
         */
        private Map<String, Object> getJaasOptions() {
            Map<String, Object> m = new HashMap<>();

            // SASL/OAuth
            maybeAddString(m, CLIENT_ID_CONFIG);
            maybeAddString(m, CLIENT_SECRET_CONFIG);
            maybeAddString(m, SCOPE_CONFIG);

            // SSL
            maybeAddStringList(m, SSL_CIPHER_SUITES_CONFIG);
            maybeAddStringList(m, SSL_ENABLED_PROTOCOLS_CONFIG);
            maybeAddString(m, SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG);
            maybeAddClass(m, SSL_ENGINE_FACTORY_CLASS_CONFIG);
            maybeAddString(m, SSL_KEYMANAGER_ALGORITHM_CONFIG);
            maybeAddPassword(m, SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG);
            maybeAddPassword(m, SSL_KEYSTORE_KEY_CONFIG);
            maybeAddString(m, SSL_KEYSTORE_LOCATION_CONFIG);
            maybeAddPassword(m, SSL_KEYSTORE_PASSWORD_CONFIG);
            maybeAddString(m, SSL_KEYSTORE_TYPE_CONFIG);
            maybeAddPassword(m, SSL_KEY_PASSWORD_CONFIG);
            maybeAddString(m, SSL_PROTOCOL_CONFIG);
            maybeAddString(m, SSL_PROVIDER_CONFIG);
            maybeAddString(m, SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG);
            maybeAddString(m, SSL_TRUSTMANAGER_ALGORITHM_CONFIG);
            maybeAddPassword(m, SSL_TRUSTSTORE_CERTIFICATES_CONFIG);
            maybeAddString(m, SSL_TRUSTSTORE_LOCATION_CONFIG);
            maybeAddPassword(m, SSL_TRUSTSTORE_PASSWORD_CONFIG);
            maybeAddString(m, SSL_TRUSTSTORE_TYPE_CONFIG);

            return m;
        }

        // Each maybeAddX helper copies the option into the map only when the
        // user actually supplied it on the command line (null means "absent").

        private void maybeAddInt(Map<String, Object> m, String option) {
            Integer value = namespace.getInt(option);

            if (value != null)
                m.put(option, value);
        }

        private void maybeAddLong(Map<String, Object> m, String option) {
            Long value = namespace.getLong(option);

            if (value != null)
                m.put(option, value);
        }

        private void maybeAddString(Map<String, Object> m, String option) {
            String value = namespace.getString(option);

            if (value != null)
                m.put(option, value);
        }

        private void maybeAddPassword(Map<String, Object> m, String option) {
            String value = namespace.getString(option);

            if (value != null)
                m.put(option, new Password(value));
        }

        private void maybeAddClass(Map<String, Object> m, String option) {
            String value = namespace.getString(option);

            if (value != null) {
                try {
                    m.put(option, Class.forName(value));
                } catch (ClassNotFoundException e) {
                    throw new KafkaException("Could not find class for " + option, e);
                }
            }
        }

        private void maybeAddStringList(Map<String, Object> m, String option) {
            List<String> value = namespace.getList(option);

            if (value != null)
                m.put(option, value);
        }

    }

}
/*
 * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.apimgt.impl.workflow;

import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.impl.builder.StAXOMBuilder;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.impl.config.APIMConfigService;
import org.wso2.carbon.apimgt.impl.dto.WorkflowDTO;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.registry.core.exceptions.RegistryException;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;

import javax.xml.stream.XMLStreamException;

/**
 * TenantWorkflowConfigHolder test cases.
 *
 * Each test stubs APIMConfigService.getWorkFlowConfig(tenantDomain) with a
 * workflow-extensions XML document (either loaded from the test resources or
 * built inline) and then drives TenantWorkflowConfigHolder.load(), asserting
 * either on the registered executors or on the WorkflowException raised.
 *
 * NOTE(review): the Assert.assertEquals calls below pass arguments as
 * (actual, expected), the reverse of JUnit's (expected, actual) convention —
 * failure messages will show the values swapped.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({ServiceReferenceHolder.class, TenantWorkflowConfigHolder.class})
public class TenantWorkflowConfigHolderTest {

    // Super-tenant identifiers used by every test.
    private int tenantID = -1234;
    private String tenantDomain = "carbon.super";
    private APIMConfigService apimConfigService = Mockito.mock(APIMConfigService.class);

    /**
     * Wires the static ServiceReferenceHolder singleton to a mock that hands
     * out the mocked APIMConfigService used by all tests.
     */
    @Before
    public void init() throws RegistryException {
        PowerMockito.mockStatic(ServiceReferenceHolder.class);
        ServiceReferenceHolder serviceReferenceHolder = Mockito.mock(ServiceReferenceHolder.class);
        PowerMockito.when(ServiceReferenceHolder.getInstance()).thenReturn(serviceReferenceHolder);
        Mockito.when(serviceReferenceHolder.getApimConfigService()).thenReturn(apimConfigService);
    }

    /**
     * Loading the default workflow configuration registers an executor for
     * every standard workflow type.
     */
    @Test
    public void testLoadingDefaultTenantWorkflowConfig() throws IOException, XMLStreamException,
            RegistryException, APIManagementException {
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        File defaultWFConfigFile = new File(Thread.currentThread().getContextClassLoader().
                getResource("workflow-configs/default-workflow-extensions.xml").getFile());
        InputStream defaultWFConfigContent = new FileInputStream(defaultWFConfigFile);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(IOUtils.toString(defaultWFConfigContent));
        try {
            tenantWorkflowConfigHolder.load();
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_APPLICATION_CREATION"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor
                    ("AM_APPLICATION_REGISTRATION_PRODUCTION"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor
                    ("AM_APPLICATION_REGISTRATION_SANDBOX"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_USER_SIGNUP"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_SUBSCRIPTION_CREATION"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_SUBSCRIPTION_DELETION"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_API_STATE"));
        } catch (WorkflowException e) {
            Assert.fail("Unexpected WorkflowException occurred while loading default tenant workflow configuration");
        }
    }

    /**
     * Same as the default-config test but using the extended
     * workflow-extensions.xml resource.
     */
    @Test
    public void testLoadingExtendedTenantWorkflowConfig() throws IOException, XMLStreamException,
            RegistryException, APIManagementException {
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        File defaultWFConfigFile = new File(Thread.currentThread().getContextClassLoader().
                getResource("workflow-configs/workflow-extensions.xml").getFile());
        InputStream defaultWFConfigContent = new FileInputStream(defaultWFConfigFile);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(IOUtils.toString(defaultWFConfigContent));
        try {
            tenantWorkflowConfigHolder.load();
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_APPLICATION_CREATION"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor
                    ("AM_APPLICATION_REGISTRATION_PRODUCTION"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor
                    ("AM_APPLICATION_REGISTRATION_SANDBOX"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_USER_SIGNUP"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_SUBSCRIPTION_CREATION"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_SUBSCRIPTION_DELETION"));
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_API_STATE"));
        } catch (WorkflowException e) {
            Assert.fail("Unexpected WorkflowException occurred while loading extended tenant workflow configuration");
        }
    }

    /**
     * When the config service itself fails, load() must wrap the failure in a
     * WorkflowException mentioning the retrieval problem.
     */
    @Test
    public void testFailureToLoadTenantWFConfigWhenErrorWhileLoadingRegistryResource() throws FileNotFoundException,
            XMLStreamException, RegistryException, APIManagementException {
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenThrow(APIManagementException.class);
        try {
            tenantWorkflowConfigHolder.load();
            Assert.fail("Expected WorkflowException has not been thrown when registry resource loading failed");
        } catch (WorkflowException e) {
            Assert.assertTrue(e.getMessage().contains("Unable to retrieve workflow configurations"));
        }
    }

    @Test
    public void testFailureToLoadTenantWFConfigWhenWFExecutorClassNotFound() throws Exception {
        //Workflow executor is an non existing class so that ClassNotFoundException will be thrown
        String invalidWFExecutor = "<WorkFlowExtensions>\n" +
                " <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".TestExecutor\"/></WorkFlowExtensions>";
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(invalidWFExecutor);
        try {
            tenantWorkflowConfigHolder.load();
            Assert.fail("Expected WorkflowException has not been thrown when workflow executor class not found");
        } catch (WorkflowException e) {
            Assert.assertEquals(e.getMessage(), "Unable to find class");
        }
    }

    @Test
    public void testFailureToLoadTenantWFConfigWhenWFExecutorClassCannotBeInstantiated() throws Exception {
        //Workflow executor is an abstract class so that InstantiationException will be thrown
        String invalidWFExecutor = "<WorkFlowExtensions>\n" +
                " <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".WorkflowExecutor\"/></WorkFlowExtensions>";
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(invalidWFExecutor);
        try {
            tenantWorkflowConfigHolder.load();
            Assert.fail("Expected WorkflowException has not been thrown when workflow executor class cannot be " +
                    "instantiate");
        } catch (WorkflowException e) {
            Assert.assertEquals(e.getMessage(), "Unable to instantiate class");
        }
    }

    @Test
    public void testFailureToLoadTenantWFConfigWhenXMLStreamExceptionOccurredWhileParsingConfig() throws Exception {
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        File defaultWFConfigFile = new File(Thread.currentThread().getContextClassLoader().
                getResource("workflow-configs/workflow-extensions.xml").getFile());
        InputStream defaultWFConfigContent = new FileInputStream(defaultWFConfigFile);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(IOUtils.toString(defaultWFConfigContent));
        //XMLStreamException will be thrown while building workflow config
        PowerMockito.whenNew(StAXOMBuilder.class).withParameterTypes(InputStream.class).
                withArguments(Mockito.any(InputStream.class)).thenThrow(new XMLStreamException(""));
        try {
            tenantWorkflowConfigHolder.load();
            Assert.fail("Expected WorkflowException has not been thrown when XMLStreamException occurred while " +
                    "processing workflow config");
        } catch (WorkflowException e) {
            Assert.assertEquals(e.getMessage(), "Error building xml");
        }
    }

    @Test
    public void testFailureToLoadTenantWFConfigWhenWFExecutorClassCannotAccessible() throws Exception {
        //Workflow executor class is a singleton class with private constructor, so that IllegalAccessException will
        // be thrown while instantiation
        String invalidWFExecutor = "<WorkFlowExtensions>\n" +
                " <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".InvalidWorkFlowExecutor1\"/></WorkFlowExtensions>";
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(invalidWFExecutor);
        try {
            tenantWorkflowConfigHolder.load();
            Assert.fail("Expected WorkflowException has not been thrown when workflow executor class cannot be " +
                    "accessible");
        } catch (WorkflowException e) {
            Assert.assertEquals(e.getMessage(), "Illegal attempt to invoke class methods");
        }
    }

    @Test
    public void testFailureToLoadTenantWFConfigWhenWFExecutorPropertyNameNotFound() throws Exception {
        // The <Property/> element below carries no 'name' attribute, so executor
        // initialisation fails while applying properties.
        String invalidWFExecutor = "<WorkFlowExtensions>\n" +
                " <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".ApplicationCreationWSWorkflowExecutor\">\n" +
                " <Property/>\n" +
                " </ApplicationCreation>\n" +
                "</WorkFlowExtensions>\n";
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(invalidWFExecutor);
        try {
            tenantWorkflowConfigHolder.load();
            Assert.fail("Expected WorkflowException has not been thrown when workflow executor property 'name' " +
                    "attribute not found");
        } catch (WorkflowException e) {
            Assert.assertEquals(e.getMessage(), "Unable to load workflow executor class");
        }
    }

    @Test
    public void testFailureToLoadTenantWFConfigWhenWFExecutorPropertySetterNotDefined() throws Exception {
        //Workflow executor class does not have setter method for 'testParam'
        String invalidWFExecutor = "<WorkFlowExtensions>\n" +
                " <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".ApplicationCreationWSWorkflowExecutor\">\n" +
                " <Property name=\"testParam\">test</Property>\n" +
                " </ApplicationCreation>\n" +
                "</WorkFlowExtensions>\n";
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(invalidWFExecutor);
        try {
            tenantWorkflowConfigHolder.load();
            Assert.fail("Expected WorkflowException has not been thrown when workflow executor property setter method" +
                    " cannot be found");
        } catch (WorkflowException e) {
            Assert.assertEquals(e.getMessage(), "Unable to load workflow executor class");
            Assert.assertEquals(e.getCause().getMessage(), "Error invoking setter method named : setTestParam() " +
                    "that takes a single String, int, long, float, double or boolean parameter");
        }
    }

    @Test
    public void testFailureToLoadTenantWFConfigWhenWFExecutorPropertySetterInInvalid() throws Exception {
        //Workflow executor class setter method is invalid since it has multiple parameter types
        String invalidWFExecutor = "<WorkFlowExtensions>\n" +
                " <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".InvalidWorkFlowExecutor2\">\n" +
                " <Property name=\"username\">admin</Property>\n" +
                " </ApplicationCreation>\n" +
                "</WorkFlowExtensions>\n";
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(invalidWFExecutor);
        try {
            tenantWorkflowConfigHolder.load();
            Assert.fail("Expected WorkflowException has not been thrown when workflow executor property setter method" +
                    " is invalid");
        } catch (WorkflowException e) {
            Assert.assertEquals(e.getMessage(), "Unable to load workflow executor class");
            Assert.assertEquals(e.getCause().getMessage(), "Error invoking setter method named : setUsername() " +
                    "that takes a single String, int, long, float, double or boolean parameter");
        }
    }

    /**
     * Positive case: properties of every supported type (String, int, boolean,
     * long, double, float, OMElement) are injected into the executor.
     */
    @Test
    public void testFailureToLoadTenantWFConfigWhenWFExecutorHasMultipleParamTypes() throws Exception {
        //Workflow executor class setter methods are available for different parameter types
        String invalidWFExecutor = "<WorkFlowExtensions>\n" +
                " <ApplicationCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".WorkflowExecutorWithMultipleParamTypes\">\n" +
                " <Property name=\"stringParam\">admin</Property>\n" +
                " <Property name=\"intParam\">1</Property>\n" +
                " <Property name=\"booleanParam\">true</Property>\n" +
                " <Property name=\"longParam\">10000000</Property>\n" +
                " <Property name=\"doubleParam\">10.1000000000</Property>\n" +
                " <Property name=\"floatParam\">10.1</Property>\n" +
                " <Property name=\"omElement\">" +
                " <omElement>test</omElement>" +
                " </Property>\n" +
                " </ApplicationCreation>\n" +
                " <ProductionApplicationRegistration executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".ApplicationRegistrationSimpleWorkflowExecutor\"/>" +
                " <SandboxApplicationRegistration executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".ApplicationRegistrationSimpleWorkflowExecutor\"/>\n" +
                " <SubscriptionCreation executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".SubscriptionCreationSimpleWorkflowExecutor\"/>\n" +
                " <SubscriptionUpdate executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".SubscriptionUpdateSimpleWorkflowExecutor\"/>\n" +
                " <UserSignUp executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".UserSignUpSimpleWorkflowExecutor\"/>\n" +
                " <SubscriptionDeletion executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".SubscriptionDeletionSimpleWorkflowExecutor\"/>\n" +
                " <ApplicationDeletion executor=\"org.wso2.carbon.apimgt.impl.workflow" +
                ".ApplicationDeletionSimpleWorkflowExecutor\"/>\n" +
                "</WorkFlowExtensions>\n";
        TenantWorkflowConfigHolder tenantWorkflowConfigHolder = new TenantWorkflowConfigHolder(tenantDomain, tenantID);
        Mockito.when(apimConfigService.getWorkFlowConfig(tenantDomain)).thenReturn(invalidWFExecutor);
        try {
            tenantWorkflowConfigHolder.load();
            Assert.assertNotNull(tenantWorkflowConfigHolder.getWorkflowExecutor("AM_APPLICATION_CREATION"));
        } catch (WorkflowException e) {
            Assert.fail("Unexpected WorkflowException has been thrown while loading workflow executor for different " +
                    "param types");
        }
    }
}

/**
 * This WorkflowExecutor is a singleton class and cannot be instantiated
 */
class InvalidWorkFlowExecutor1 {

    private InvalidWorkFlowExecutor1() {
    }
}

/**
 * This WorkflowExecutor has invalid setter method with multiple input types
 */
class InvalidWorkFlowExecutor2 extends WorkflowExecutor{

    // Invalid: takes two parameters, so the property-injection reflection fails.
    public void setUsername(String username, int id){
    }

    @Override
    public String getWorkflowType() {
        return null;
    }

    @Override
    public List<WorkflowDTO> getWorkflowDetails(String workflowStatus) throws WorkflowException {
        return null;
    }
}

// Executor exposing a setter for each supported property type, used by the
// multiple-param-types test above.
class WorkflowExecutorWithMultipleParamTypes extends WorkflowExecutor{

    @Override
    public String getWorkflowType() {
        return null;
    }

    @Override
    public List<WorkflowDTO> getWorkflowDetails(String workflowStatus) throws WorkflowException {
        return null;
    }

    public void setStringParam(String stringParam){}

    public void setIntParam(int intParam){}

    public void setLongParam(long longParam){}

    public void setFloatParam(float floatParam){}

    public void setDoubleParam(double doubleParam){}

    public void setBooleanParam(boolean booleanParam){}

    public void setOmElement(OMElement omElement){}
}
package com.hortonworks.streaming.impl.topologies;

import org.apache.log4j.Logger;
import org.apache.storm.hdfs.bolt.HdfsBolt;
import org.apache.storm.hdfs.bolt.format.DefaultFileNameFormat;
import org.apache.storm.hdfs.bolt.format.DelimitedRecordFormat;
import org.apache.storm.hdfs.bolt.format.FileNameFormat;
import org.apache.storm.hdfs.bolt.format.RecordFormat;
import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy;
import org.apache.storm.hdfs.bolt.rotation.FileSizeRotationPolicy.Units;
import org.apache.storm.hdfs.bolt.sync.CountSyncPolicy;
import org.apache.storm.hdfs.bolt.sync.SyncPolicy;
import org.apache.storm.hdfs.common.rotation.MoveFileAction;

import storm.kafka.BrokerHosts;
import storm.kafka.KafkaSpout;
import storm.kafka.SpoutConfig;
import storm.kafka.ZkHosts;
import backtype.storm.Config;
import backtype.storm.StormSubmitter;
import backtype.storm.spout.SchemeAsMultiScheme;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;

import com.hortonworks.streaming.impl.bolts.TruckHBaseBolt;
import com.hortonworks.streaming.impl.bolts.TruckEventRuleBolt;
import com.hortonworks.streaming.impl.bolts.TruckPhoenixHBaseBolt;
import com.hortonworks.streaming.impl.bolts.WebSocketBolt;
import com.hortonworks.streaming.impl.bolts.hdfs.FileTimeRotationPolicy;
import com.hortonworks.streaming.impl.bolts.hive.HiveTablePartitionAction;
import com.hortonworks.streaming.impl.bolts.solr.SolrIndexingBolt;
import com.hortonworks.streaming.impl.kafka.TruckScheme2;

/**
 * Storm topology that reads truck events from a Kafka topic and fans them out
 * to HDFS (with Hive partition registration on rotation), Solr indexing,
 * Phoenix/HBase persistence, a per-driver monitoring bolt, and optionally a
 * WebSocket notification bolt. All wiring parameters come from the properties
 * file passed on the command line (loaded by BaseTruckEventTopology).
 */
public class TruckEventProcessorKafkaTopology extends BaseTruckEventTopology {

	private static final Logger LOG = Logger.getLogger(TruckEventProcessorKafkaTopology.class);

	/**
	 * @param configFileLocation path to the topology properties file,
	 *        handed to BaseTruckEventTopology for loading
	 */
	public TruckEventProcessorKafkaTopology(String configFileLocation) throws Exception {
		super(configFileLocation);
	}

	/**
	 * Assembles all spouts/bolts and submits the "truck-event-processor"
	 * topology to the cluster. Submission errors are logged, not rethrown.
	 */
	private void buildAndSubmit() throws Exception {
		TopologyBuilder builder = new TopologyBuilder();

		/* Set up Kafka Spout to ingest from */
		configureKafkaSpout(builder);

		/* Set up HDFSBOlt to send every truck event to HDFS */
		configureHDFSBolt(builder);

		/* configure Solr indexing bolt */
		configureSolrIndexingBolt(builder);

		/* Setup Monitoring Bolt to track number of alerts per truck driver */
		configureMonitoringBolt(builder);

		/* Setup HBse Bolt for to persist violations and all events (if configured to do so)*/
		//configureHBaseBolt(builder);
		configurePhoenixHBaseBolt(builder);

		/* Setup WebSocket Bolt for alerts and notifications */
		configureWebSocketBolt(builder);

		/* This conf is for Storm and it needs be configured with things like the following:
		 * Zookeeper server, nimbus server, ports, etc... All of this configuration will be picked up
		 * in the ~/.storm/storm.yaml file that will be located on each storm node.
		 */
		Config conf = new Config();
		conf.setDebug(true); // NOTE(review): debug mode is hard-coded on — consider making configurable

		/* Set the number of workers that will be spun up for this topology.
		 * Each worker represents a JVM where executor thread will be spawned from
		 */
		Integer topologyWorkers = Integer.valueOf(topologyConfig.getProperty("storm.trucker.topology.workers"));
		conf.put(Config.TOPOLOGY_WORKERS, topologyWorkers);

		try {
			StormSubmitter.submitTopology("truck-event-processor", conf, builder.createTopology());
		} catch (Exception e) {
			LOG.error("Error submiting Topology", e);
		}
	}

	/**
	 * Adds the Solr indexing bolt (fed from the Kafka spout) when
	 * "solr.index.enable" is true; otherwise only logs that it is off.
	 */
	private void configureSolrIndexingBolt(TopologyBuilder builder) {
		boolean isIndexingEnabled = Boolean.valueOf(topologyConfig.getProperty("solr.index.enable")).booleanValue();
		if(isIndexingEnabled) {
			LOG.info("Solr indexing enabled");
			int solrBoltCount = Integer.valueOf(topologyConfig.getProperty("solr.bolt.thread.count"));
			SolrIndexingBolt solrBolt = new SolrIndexingBolt(topologyConfig);
			builder.setBolt("solr_indexer_bolt", solrBolt, solrBoltCount).shuffleGrouping("kafkaSpout");
		} else {
			LOG.info("Solr indexing turned off");
		}
	}

	/**
	 * Conditionally adds the WebSocket bolt, fed from the Phoenix/HBase bolt.
	 */
	public void configureWebSocketBolt(TopologyBuilder builder) {
		// NOTE(review): this parses the "notification.topic" property as a boolean
		// enable flag. An actual topic *name* would parse as false, so the bolt
		// would never be wired in — confirm whether a dedicated enable property
		// (or a different key) was intended here.
		boolean configureWebSocketBolt = Boolean.valueOf(topologyConfig.getProperty("notification.topic")).booleanValue();
		if(configureWebSocketBolt) {
			WebSocketBolt webSocketBolt = new WebSocketBolt(topologyConfig);
			builder.setBolt("web_sockets_bolt", webSocketBolt, 4).shuffleGrouping("phoenix_hbase_bolt");
		}
	}

//	public void configureHBaseBolt(TopologyBuilder builder) {
//		TruckHBaseBolt hbaseBolt = new TruckHBaseBolt(topologyConfig);
//		builder.setBolt("hbase_bolt", hbaseBolt, 2 ).shuffleGrouping("kafkaSpout");
//	}

	/** Adds the Phoenix/HBase persistence bolt, fed from the Kafka spout. */
	public void configurePhoenixHBaseBolt(TopologyBuilder builder) {
		TruckPhoenixHBaseBolt hbaseBolt = new TruckPhoenixHBaseBolt(topologyConfig);
		builder.setBolt("phoenix_hbase_bolt", hbaseBolt, 2 ).shuffleGrouping("kafkaSpout");
	}

	/**
	 * Send truckEvents from same driver to the same bolt instances to maintain accuracy of eventCount per truck/driver
	 * @param builder
	 */
	public void configureMonitoringBolt(TopologyBuilder builder) {
		int boltCount = Integer.valueOf(topologyConfig.getProperty("bolt.thread.count"));
		// fieldsGrouping on driverId keeps each driver's events on one executor.
		builder.setBolt("monitoring_bolt", new TruckEventRuleBolt(topologyConfig), boltCount)
				.fieldsGrouping("kafkaSpout", new Fields("driverId"));
	}

	/**
	 * Adds the HDFS bolt: comma-delimited records, sync every 1000 tuples,
	 * time-based file rotation, and a Hive partition registered on each
	 * rotation.
	 */
	public void configureHDFSBolt(TopologyBuilder builder) {
		// Use pipe as record boundary
		String rootPath = topologyConfig.getProperty("hdfs.path");
		String prefix = topologyConfig.getProperty("hdfs.file.prefix");
		String fsUrl = topologyConfig.getProperty("hdfs.url");
		String sourceMetastoreUrl = topologyConfig.getProperty("hive.metastore.url");
		String hiveStagingTableName = topologyConfig.getProperty("hive.staging.table.name");
		String databaseName = topologyConfig.getProperty("hive.database.name");
		Float rotationTimeInMinutes = Float.valueOf(topologyConfig.getProperty("hdfs.file.rotation.time.minutes"));

		RecordFormat format = new DelimitedRecordFormat().withFieldDelimiter(",");

		//Synchronize data buffer with the filesystem every 1000 tuples
		SyncPolicy syncPolicy = new CountSyncPolicy(1000);

		// Rotate data files when they reach five MB
		//FileRotationPolicy rotationPolicy = new FileSizeRotationPolicy(5.0f, Units.MB);

		//Rotate every X minutes
		FileTimeRotationPolicy rotationPolicy = new FileTimeRotationPolicy(rotationTimeInMinutes,
				FileTimeRotationPolicy.Units.MINUTES);

		//Hive Partition Action
		HiveTablePartitionAction hivePartitionAction = new HiveTablePartitionAction(sourceMetastoreUrl,
				hiveStagingTableName, databaseName, fsUrl);

		//MoveFileAction moveFileAction = new MoveFileAction().toDestination(rootPath + "/working");

		FileNameFormat fileNameFormat = new DefaultFileNameFormat()
				.withPath(rootPath + "/staging")
				.withPrefix(prefix);

		// Instantiate the HdfsBolt
		HdfsBolt hdfsBolt = new HdfsBolt()
				.withFsUrl(fsUrl)
				.withFileNameFormat(fileNameFormat)
				.withRecordFormat(format)
				.withRotationPolicy(rotationPolicy)
				.withSyncPolicy(syncPolicy)
				.addRotationAction(hivePartitionAction);

		int hdfsBoltCount = Integer.valueOf(topologyConfig.getProperty("hdfsbolt.thread.count"));
		builder.setBolt("hdfs_bolt", hdfsBolt, hdfsBoltCount).shuffleGrouping("kafkaSpout");
	}

	/**
	 * Registers the Kafka spout with "spout.thread.count" executors.
	 *
	 * @return the value of "bolt.thread.count"
	 *         NOTE(review): this value is read but never used to configure the
	 *         spout itself — verify whether any caller relies on the return.
	 */
	public int configureKafkaSpout(TopologyBuilder builder) {
		KafkaSpout kafkaSpout = constructKafkaSpout();

		int spoutCount = Integer.valueOf(topologyConfig.getProperty("spout.thread.count"));
		int boltCount = Integer.valueOf(topologyConfig.getProperty("bolt.thread.count"));

		builder.setSpout("kafkaSpout", kafkaSpout, spoutCount);
		return boltCount;
	}

	/**
	 * Construct the KafkaSpout which comes from the jar storm-kafka-0.8-plus
	 * @return
	 */
	private KafkaSpout constructKafkaSpout() {
		KafkaSpout kafkaSpout = new KafkaSpout(constructKafkaSpoutConf());
		return kafkaSpout;
	}

	/**
	 * Construct the SpoutConfig from the kafka.* properties (ZooKeeper hosts,
	 * topic, zkRoot, consumer group id) and attach the TruckScheme2 scheme.
	 * @return
	 */
	private SpoutConfig constructKafkaSpoutConf() {
		BrokerHosts hosts = new ZkHosts(topologyConfig.getProperty("kafka.zookeeper.host.port"));
		String topic = topologyConfig.getProperty("kafka.topic");
		String zkRoot = topologyConfig.getProperty("kafka.zkRoot");
		String consumerGroupId = topologyConfig.getProperty("kafka.consumer.group.id");

		SpoutConfig spoutConfig = new SpoutConfig(hosts, topic, zkRoot, consumerGroupId);

		/* Custom TruckScheme that will take Kafka message of single truckEvent
		 * and emit a 2-tuple consisting of truckId and truckEvent. This driverId
		 * is required to do a fieldsSorting so that all driver events are sent to the set of bolts
		 */
		spoutConfig.scheme = new SchemeAsMultiScheme(new TruckScheme2());

		return spoutConfig;
	}

	/** CLI entry point: args[0] is the topology properties file location. */
	public static void main(String[] args) throws Exception {
		String configFileLocation = args[0];
		TruckEventProcessorKafkaTopology truckTopology = new TruckEventProcessorKafkaTopology(configFileLocation);
		truckTopology.buildAndSubmit();
	}
}
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rx.internal.operators;

import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.MockitoAnnotations.initMocks;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import org.junit.*;
import org.mockito.InOrder;
import org.mockito.Mock;

import rx.Notification;
import rx.Observable;
import rx.Observer;
import rx.Subscription;
import rx.exceptions.TestException;
import rx.functions.*;
import rx.internal.util.RxRingBuffer;
import rx.observers.TestObserver;
import rx.observers.TestSubscriber;
import rx.schedulers.Schedulers;
import rx.schedulers.TestScheduler;
import rx.subjects.PublishSubject;

/**
 * Tests for the delay()/delaySubscription() operators: timed delays, per-item
 * Observable-driven delays, subscription delays, error propagation and backpressure.
 * Most tests use a TestScheduler so virtual time is advanced deterministically;
 * the Mockito InOrder verifications are strictly order-sensitive.
 */
public class OperatorDelayTest {
    @Mock
    private Observer<Long> observer;
    @Mock
    private Observer<Long> observer2;

    private TestScheduler scheduler;

    // Fresh mocks and a fresh virtual-time scheduler per test.
    @Before
    public void before() {
        initMocks(this);
        scheduler = new TestScheduler();
    }

    // Each interval tick (1s, 2s, 3s) is shifted by 500ms; nothing is emitted early.
    @Test
    public void testDelay() {
        Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler).take(3);
        Observable<Long> delayed = source.delay(500L, TimeUnit.MILLISECONDS, scheduler);
        delayed.subscribe(observer);

        InOrder inOrder = inOrder(observer);
        scheduler.advanceTimeTo(1499L, TimeUnit.MILLISECONDS);
        verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(1500L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(0L);
        inOrder.verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(2400L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(2500L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(1L);
        inOrder.verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(3400L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(3500L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(2L);
        verify(observer, times(1)).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));
    }

    // Same as above with a 5s delay: values surface at 6s, 7s, 8s, then completion.
    @Test
    public void testLongDelay() {
        Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler).take(3);
        Observable<Long> delayed = source.delay(5L, TimeUnit.SECONDS, scheduler);
        delayed.subscribe(observer);

        InOrder inOrder = inOrder(observer);
        scheduler.advanceTimeTo(5999L, TimeUnit.MILLISECONDS);
        verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));
        scheduler.advanceTimeTo(6000L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(0L);
        scheduler.advanceTimeTo(6999L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, never()).onNext(anyLong());
        scheduler.advanceTimeTo(7000L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(1L);
        scheduler.advanceTimeTo(7999L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, never()).onNext(anyLong());
        scheduler.advanceTimeTo(8000L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(2L);
        inOrder.verify(observer, times(1)).onCompleted();
        inOrder.verify(observer, never()).onNext(anyLong());
        inOrder.verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));
    }

    // The source throws at the second tick (t=2s); the error passes through without the timed delay.
    @Test
    public void testDelayWithError() {
        Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler).map(new Func1<Long, Long>() {
            @Override
            public Long call(Long value) {
                if (value == 1L) {
                    throw new RuntimeException("error!");
                }
                return value;
            }
        });
        Observable<Long> delayed = source.delay(1L, TimeUnit.SECONDS, scheduler);
        delayed.subscribe(observer);

        InOrder inOrder = inOrder(observer);
        scheduler.advanceTimeTo(1999L, TimeUnit.MILLISECONDS);
        verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(2000L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onError(any(Throwable.class));
        inOrder.verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();

        scheduler.advanceTimeTo(5000L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, never()).onNext(anyLong());
        inOrder.verify(observer, never()).onError(any(Throwable.class));
        verify(observer, never()).onCompleted();
    }

    // Two independent subscribers to the same delayed observable see identical timing.
    @Test
    public void testDelayWithMultipleSubscriptions() {
        Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler).take(3);
        Observable<Long> delayed = source.delay(500L, TimeUnit.MILLISECONDS, scheduler);
        delayed.subscribe(observer);
        delayed.subscribe(observer2);

        InOrder inOrder = inOrder(observer);
        InOrder inOrder2 = inOrder(observer2);

        scheduler.advanceTimeTo(1499L, TimeUnit.MILLISECONDS);
        verify(observer, never()).onNext(anyLong());
        verify(observer2, never()).onNext(anyLong());

        scheduler.advanceTimeTo(1500L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(0L);
        inOrder2.verify(observer2, times(1)).onNext(0L);

        scheduler.advanceTimeTo(2499L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, never()).onNext(anyLong());
        inOrder2.verify(observer2, never()).onNext(anyLong());

        scheduler.advanceTimeTo(2500L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(1L);
        inOrder2.verify(observer2, times(1)).onNext(1L);

        verify(observer, never()).onCompleted();
        verify(observer2, never()).onCompleted();

        scheduler.advanceTimeTo(3500L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(2L);
        inOrder2.verify(observer2, times(1)).onNext(2L);
        inOrder.verify(observer, never()).onNext(anyLong());
        inOrder2.verify(observer2, never()).onNext(anyLong());
        inOrder.verify(observer, times(1)).onCompleted();
        inOrder2.verify(observer2, times(1)).onCompleted();

        verify(observer, never()).onError(any(Throwable.class));
        verify(observer2, never()).onError(any(Throwable.class));
    }

    // delaySubscription defers subscribing to the source; all values arrive once time elapses.
    @Test
    public void testDelaySubscription() {
        Observable<Integer> result = Observable.just(1, 2, 3).delaySubscription(100, TimeUnit.MILLISECONDS, scheduler);

        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        result.subscribe(o);

        inOrder.verify(o, never()).onNext(any());
        inOrder.verify(o, never()).onCompleted();

        scheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);

        inOrder.verify(o, times(1)).onNext(1);
        inOrder.verify(o, times(1)).onNext(2);
        inOrder.verify(o, times(1)).onNext(3);
        inOrder.verify(o, times(1)).onCompleted();

        verify(o, never()).onError(any(Throwable.class));
    }

    // Unsubscribing before the subscription delay elapses means the source is never observed.
    @Test
    public void testDelaySubscriptionCancelBeforeTime() {
        Observable<Integer> result = Observable.just(1, 2, 3).delaySubscription(100, TimeUnit.MILLISECONDS, scheduler);

        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);

        Subscription s = result.subscribe(o);
        s.unsubscribe();

        scheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);

        verify(o, never()).onNext(any());
        verify(o, never()).onCompleted();
        verify(o, never()).onError(any(Throwable.class));
    }

    // Each item is held until its own delay subject fires; releasing in order preserves order.
    @Test
    public void testDelayWithObservableNormal1() {
        PublishSubject<Integer> source = PublishSubject.create();
        final List<PublishSubject<Integer>> delays = new ArrayList<PublishSubject<Integer>>();
        final int n = 10;
        for (int i = 0; i < n; i++) {
            PublishSubject<Integer> delay = PublishSubject.create();
            delays.add(delay);
        }

        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return delays.get(t1);
            }
        };

        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(delayFunc).subscribe(o);

        for (int i = 0; i < n; i++) {
            source.onNext(i);
            delays.get(i).onNext(i);
            inOrder.verify(o).onNext(i);
        }
        source.onCompleted();

        inOrder.verify(o).onCompleted();
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onError(any(Throwable.class));
    }

    // A delay observable emitting twice still releases the held item only once.
    @Test
    public void testDelayWithObservableSingleSend1() {
        PublishSubject<Integer> source = PublishSubject.create();
        final PublishSubject<Integer> delay = PublishSubject.create();

        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return delay;
            }
        };

        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(delayFunc).subscribe(o);

        source.onNext(1);
        delay.onNext(1);
        delay.onNext(2);

        inOrder.verify(o).onNext(1);
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onError(any(Throwable.class));
    }

    // A source error propagates immediately and drops the still-pending delayed item.
    @Test
    public void testDelayWithObservableSourceThrows() {
        PublishSubject<Integer> source = PublishSubject.create();
        final PublishSubject<Integer> delay = PublishSubject.create();
        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return delay;
            }
        };
        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(delayFunc).subscribe(o);

        source.onNext(1);
        source.onError(new TestException());
        delay.onNext(1);

        inOrder.verify(o).onError(any(TestException.class));
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onNext(any());
        verify(o, never()).onCompleted();
    }

    // A throwing delay-selector function fails the sequence with its exception.
    @Test
    public void testDelayWithObservableDelayFunctionThrows() {
        PublishSubject<Integer> source = PublishSubject.create();
        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                throw new TestException();
            }
        };
        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(delayFunc).subscribe(o);

        source.onNext(1);

        inOrder.verify(o).onError(any(TestException.class));
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onNext(any());
        verify(o, never()).onCompleted();
    }

    // An error on the per-item delay observable fails the whole sequence.
    @Test
    public void testDelayWithObservableDelayThrows() {
        PublishSubject<Integer> source = PublishSubject.create();
        final PublishSubject<Integer> delay = PublishSubject.create();
        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return delay;
            }
        };
        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(delayFunc).subscribe(o);

        source.onNext(1);
        delay.onError(new TestException());

        inOrder.verify(o).onError(any(TestException.class));
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onNext(any());
        verify(o, never()).onCompleted();
    }

    // delay(subFunc, delayFunc): items emitted before the subscription delay fires (1) are
    // dropped; items after it (2) are delivered once their own delay fires.
    @Test
    public void testDelayWithObservableSubscriptionNormal() {
        PublishSubject<Integer> source = PublishSubject.create();
        final PublishSubject<Integer> delay = PublishSubject.create();
        Func0<Observable<Integer>> subFunc = new Func0<Observable<Integer>>() {
            @Override
            public Observable<Integer> call() {
                return delay;
            }
        };
        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return delay;
            }
        };

        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(subFunc, delayFunc).subscribe(o);

        source.onNext(1);
        delay.onNext(1);

        source.onNext(2);
        delay.onNext(2);

        inOrder.verify(o).onNext(2);
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onError(any(Throwable.class));
        verify(o, never()).onCompleted();
    }

    // A throwing subscription-delay function fails the sequence immediately.
    @Test
    public void testDelayWithObservableSubscriptionFunctionThrows() {
        PublishSubject<Integer> source = PublishSubject.create();
        final PublishSubject<Integer> delay = PublishSubject.create();
        Func0<Observable<Integer>> subFunc = new Func0<Observable<Integer>>() {
            @Override
            public Observable<Integer> call() {
                throw new TestException();
            }
        };
        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return delay;
            }
        };

        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(subFunc, delayFunc).subscribe(o);

        source.onNext(1);
        delay.onNext(1);

        source.onNext(2);

        inOrder.verify(o).onError(any(TestException.class));
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onNext(any());
        verify(o, never()).onCompleted();
    }

    // An error on the subscription-delay observable fails the sequence.
    @Test
    public void testDelayWithObservableSubscriptionThrows() {
        PublishSubject<Integer> source = PublishSubject.create();
        final PublishSubject<Integer> delay = PublishSubject.create();
        Func0<Observable<Integer>> subFunc = new Func0<Observable<Integer>>() {
            @Override
            public Observable<Integer> call() {
                return delay;
            }
        };
        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return delay;
            }
        };

        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(subFunc, delayFunc).subscribe(o);

        source.onNext(1);
        delay.onError(new TestException());

        source.onNext(2);

        inOrder.verify(o).onError(any(TestException.class));
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onNext(any());
        verify(o, never()).onCompleted();
    }

    // An empty per-item delay observable releases the item immediately on completion.
    @Test
    public void testDelayWithObservableEmptyDelayer() {
        PublishSubject<Integer> source = PublishSubject.create();
        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return Observable.empty();
            }
        };
        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(delayFunc).subscribe(o);

        source.onNext(1);
        source.onCompleted();

        inOrder.verify(o).onNext(1);
        inOrder.verify(o).onCompleted();
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onError(any(Throwable.class));
    }

    // Completion (not emission) of the subscription-delay observable also triggers subscription.
    @Test
    public void testDelayWithObservableSubscriptionRunCompletion() {
        PublishSubject<Integer> source = PublishSubject.create();
        final PublishSubject<Integer> sdelay = PublishSubject.create();
        final PublishSubject<Integer> delay = PublishSubject.create();
        Func0<Observable<Integer>> subFunc = new Func0<Observable<Integer>>() {
            @Override
            public Observable<Integer> call() {
                return sdelay;
            }
        };
        Func1<Integer, Observable<Integer>> delayFunc = new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return delay;
            }
        };

        @SuppressWarnings("unchecked")
        Observer<Object> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        source.delay(subFunc, delayFunc).subscribe(o);

        source.onNext(1);
        sdelay.onCompleted();

        source.onNext(2);
        delay.onNext(2);

        inOrder.verify(o).onNext(2);
        inOrder.verifyNoMoreInteractions();
        verify(o, never()).onError(any(Throwable.class));
        verify(o, never()).onCompleted();
    }

    // Using a timer observable as the per-item delay reproduces the timed-delay behavior of testDelay.
    @Test
    public void testDelayWithObservableAsTimed() {
        Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler).take(3);

        final Observable<Long> delayer = Observable.timer(500L, TimeUnit.MILLISECONDS, scheduler);

        Func1<Long, Observable<Long>> delayFunc = new Func1<Long, Observable<Long>>() {
            @Override
            public Observable<Long> call(Long t1) {
                return delayer;
            }
        };

        Observable<Long> delayed = source.delay(delayFunc);
        delayed.subscribe(observer);

        InOrder inOrder = inOrder(observer);
        scheduler.advanceTimeTo(1499L, TimeUnit.MILLISECONDS);
        verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(1500L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(0L);
        inOrder.verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(2400L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(2500L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(1L);
        inOrder.verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(3400L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, never()).onNext(anyLong());
        verify(observer, never()).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));

        scheduler.advanceTimeTo(3500L, TimeUnit.MILLISECONDS);
        inOrder.verify(observer, times(1)).onNext(2L);
        verify(observer, times(1)).onCompleted();
        verify(observer, never()).onError(any(Throwable.class));
    }

    // Completing the per-item delay subjects in reverse order reorders the emissions accordingly.
    @Test
    public void testDelayWithObservableReorder() {
        int n = 3;

        PublishSubject<Integer> source = PublishSubject.create();
        final List<PublishSubject<Integer>> subjects = new ArrayList<PublishSubject<Integer>>();
        for (int i = 0; i < n; i++) {
            subjects.add(PublishSubject.<Integer> create());
        }

        Observable<Integer> result = source.delay(new Func1<Integer, Observable<Integer>>() {
            @Override
            public Observable<Integer> call(Integer t1) {
                return subjects.get(t1);
            }
        });

        @SuppressWarnings("unchecked")
        Observer<Integer> o = mock(Observer.class);
        InOrder inOrder = inOrder(o);

        result.subscribe(o);

        for (int i = 0; i < n; i++) {
            source.onNext(i);
        }
        source.onCompleted();

        inOrder.verify(o, never()).onNext(anyInt());
        inOrder.verify(o, never()).onCompleted();

        for (int i = n - 1; i >= 0; i--) {
            subjects.get(i).onCompleted();
            inOrder.verify(o).onNext(i);
        }

        inOrder.verify(o).onCompleted();

        verify(o, never()).onError(any(Throwable.class));
    }

    // A synchronous range delayed by 500ms arrives in full once 500ms elapses.
    @Test
    public void testDelayEmitsEverything() {
        Observable<Integer> source = Observable.range(1, 5);
        Observable<Integer> delayed = source.delay(500L, TimeUnit.MILLISECONDS, scheduler);
        delayed = delayed.doOnEach(new Action1<Notification<? super Integer>>() {

            @Override
            public void call(Notification<? super Integer> t1) {
                System.out.println(t1);
            }

        });
        TestObserver<Integer> observer = new TestObserver<Integer>();
        delayed.subscribe(observer);
        // all will be delivered after 500ms since range does not delay between them
        scheduler.advanceTimeBy(500L, TimeUnit.MILLISECONDS);
        observer.assertReceivedOnNext(Arrays.asList(1, 2, 3, 4, 5));
    }

    // A slow consumer (first item sleeps 500ms) must not lose any of the 2x ring-buffer items.
    @Test
    public void testBackpressureWithTimedDelay() {
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
        Observable.range(1, RxRingBuffer.SIZE * 2)
                .delay(100, TimeUnit.MILLISECONDS)
                .observeOn(Schedulers.computation())
                .map(new Func1<Integer, Integer>() {

                    int c = 0;

                    @Override
                    public Integer call(Integer t) {
                        if (c++ <= 0) {
                            try {
                                Thread.sleep(500);
                            } catch (InterruptedException e) {
                            }
                        }
                        return t;
                    }

                }).subscribe(ts);

        ts.awaitTerminalEvent();
        ts.assertNoErrors();
        assertEquals(RxRingBuffer.SIZE * 2, ts.getOnNextEvents().size());
    }

    // Same backpressure check with a subscription delay stacked before the timed delay.
    @Test
    public void testBackpressureWithSubscriptionTimedDelay() {
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
        Observable.range(1, RxRingBuffer.SIZE * 2)
                .delaySubscription(100, TimeUnit.MILLISECONDS)
                .delay(100, TimeUnit.MILLISECONDS)
                .observeOn(Schedulers.computation())
                .map(new Func1<Integer, Integer>() {

                    int c = 0;

                    @Override
                    public Integer call(Integer t) {
                        if (c++ <= 0) {
                            try {
                                Thread.sleep(500);
                            } catch (InterruptedException e) {
                            }
                        }
                        return t;
                    }

                }).subscribe(ts);

        ts.awaitTerminalEvent();
        ts.assertNoErrors();
        assertEquals(RxRingBuffer.SIZE * 2, ts.getOnNextEvents().size());
    }

    // Same backpressure check with a per-item Observable delay (timer).
    @Test
    public void testBackpressureWithSelectorDelay() {
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
        Observable.range(1, RxRingBuffer.SIZE * 2)
                .delay(new Func1<Integer, Observable<Long>>() {

                    @Override
                    public Observable<Long> call(Integer i) {
                        return Observable.timer(100, TimeUnit.MILLISECONDS);
                    }

                })
                .observeOn(Schedulers.computation())
                .map(new Func1<Integer, Integer>() {

                    int c = 0;

                    @Override
                    public Integer call(Integer t) {
                        if (c++ <= 0) {
                            try {
                                Thread.sleep(500);
                            } catch (InterruptedException e) {
                            }
                        }
                        return t;
                    }

                }).subscribe(ts);

        ts.awaitTerminalEvent();
        ts.assertNoErrors();
        assertEquals(RxRingBuffer.SIZE * 2, ts.getOnNextEvents().size());
    }

    // Same backpressure check combining a subscription-delay timer and a per-item delay timer.
    @Test
    public void testBackpressureWithSelectorDelayAndSubscriptionDelay() {
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
        Observable.range(1, RxRingBuffer.SIZE * 2)
                .delay(new Func0<Observable<Long>>() {

                    @Override
                    public Observable<Long> call() {
                        return Observable.timer(500, TimeUnit.MILLISECONDS);
                    }

                }, new Func1<Integer, Observable<Long>>() {

                    @Override
                    public Observable<Long> call(Integer i) {
                        return Observable.timer(100, TimeUnit.MILLISECONDS);
                    }

                })
                .observeOn(Schedulers.computation())
                .map(new Func1<Integer, Integer>() {

                    int c = 0;

                    @Override
                    public Integer call(Integer t) {
                        if (c++ <= 0) {
                            try {
                                Thread.sleep(500);
                            } catch (InterruptedException e) {
                            }
                        }
                        return t;
                    }

                }).subscribe(ts);

        ts.awaitTerminalEvent();
        ts.assertNoErrors();
        assertEquals(RxRingBuffer.SIZE * 2, ts.getOnNextEvents().size());
    }

    // An error arriving mid-delay preempts the still-pending delayed value: no values, just the error.
    @Test
    public void testErrorRunsBeforeOnNext() {
        TestScheduler test = Schedulers.test();

        PublishSubject<Integer> ps = PublishSubject.create();

        TestSubscriber<Integer> ts = TestSubscriber.create();

        ps.delay(1, TimeUnit.SECONDS, test).subscribe(ts);

        ps.onNext(1);

        test.advanceTimeBy(500, TimeUnit.MILLISECONDS);

        ps.onError(new TestException());

        test.advanceTimeBy(1, TimeUnit.SECONDS);

        ts.assertNoValues();
        ts.assertError(TestException.class);
        ts.assertNotCompleted();
    }

    // The source must never be subscribed when the subscriber cancels before the delay elapses.
    @Test
    public void delaySubscriptionCancelBeforeTime() {
        PublishSubject<Integer> source = PublishSubject.create();

        TestSubscriber<Integer> ts = new TestSubscriber<Integer>();

        source.delaySubscription(100, TimeUnit.MILLISECONDS, scheduler).subscribe(ts);

        Assert.assertFalse("source subscribed?", source.hasObservers());

        ts.unsubscribe();

        Assert.assertFalse("source subscribed?", source.hasObservers());

        scheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);

        Assert.assertFalse("source subscribed?", source.hasObservers());
    }

    // takeUntil firing before the subscription delay means the upstream is never subscribed.
    @Test
    public void delayAndTakeUntilNeverSubscribeToSource() {
        PublishSubject<Integer> interrupt = PublishSubject.create();
        final AtomicBoolean subscribed = new AtomicBoolean(false);
        TestScheduler testScheduler = new TestScheduler();

        Observable.just(1)
                .doOnSubscribe(new Action0() {
                    @Override
                    public void call() {
                        subscribed.set(true);
                    }
                })
                .delaySubscription(1, TimeUnit.SECONDS, testScheduler)
                .takeUntil(interrupt)
                .subscribe();

        interrupt.onNext(9000);

        testScheduler.advanceTimeBy(1, TimeUnit.SECONDS);

        Assert.assertFalse(subscribed.get());
    }
}
package cli;

import auth.User;
import utils.ImportantMethods;
import classes.POSRegister;
import config.POSConfig;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.util.Scanner;
import java.util.ArrayList;

/**
 * Interactive register CLI: authenticates a user, then runs a top-level prompt
 * offering "newsale" (opens the register sub-prompt) and "exit".
 */
public class regcli {

    /**
     * Entry point. Prompts for a username, authenticates (admins additionally get
     * the elevated prompt), then loops on the top-level command prompt.
     *
     * Fix vs. original: the dispatch loop iterated over every known command and
     * printed "Command ... is invalid." for each NON-matching entry, so even a
     * valid command produced an error line. Dispatch is now a single membership check.
     *
     * @param args forwarded to the elevated admin prompt for admin users
     * @throws SQLException           propagated from the register/database layer
     * @throws ClassNotFoundException propagated from the register/database layer
     */
    public static void main(String[] args) throws SQLException, ClassNotFoundException {
        Scanner in = new Scanner(System.in);
        ArrayList<String> commands = new ArrayList<String>();
        commands.add("newsale");
        commands.add("exit");

        authenticate(in, args);

        while (true) {
            System.out.print("input>> ");
            String command = in.next().toLowerCase();
            if (!commands.contains(command)) {
                System.out.println("Command '" + command + "' is invalid.");
            } else if (command.equals("newsale")) {
                newsale();
            } else if (command.equals("exit")) {
                System.exit(0);
            }
        }
    }

    /**
     * Prompts for a username and authenticates against "<username>.usrconf".
     * Exits the JVM when the user file cannot be loaded; retries auth on IOException.
     * Admin users are handed to the elevated prompt before the regular loop.
     *
     * @return the authenticated user
     */
    private static User authenticate(Scanner in, String[] args)
            throws SQLException, ClassNotFoundException {
        System.out.print("Username: ");
        String username = in.next();
        User f = null;
        boolean isauthorized = false;
        while (!isauthorized) {
            try {
                f = new User(username + ".usrconf");
            } catch (FileNotFoundException e) {
                System.err.println("An error occurred. The user does not exist.");
                System.exit(1);
            } catch (IOException e) {
                System.err.println("An error occurred. The user could not be accessed.");
                System.exit(1);
            }
            try {
                f.auth();
                if (f.isAuthorized()) {
                    if (f.isAdmin()) {
                        elevated_cli.initAdminPrompt(args, f);
                    }
                    isauthorized = true;
                }
            } catch (IOException e) {
                System.err.println("The user specified does not exist.");
            }
        }
        return f;
    }

    /**
     * Runs the interactive "posregister>>" sub-prompt for a single sale:
     * additem/removeitem (multi-word item names), print/export commands, and
     * "return" to go back to the top-level prompt.
     *
     * Fixes vs. original: (1) if the POSRegister could not be constructed, the
     * original printed a message but fell through and NPE'd on the first command —
     * it now returns to the caller; (2) the reflection-built command list drove a
     * loop whose every branch broke on the first iteration (dead scaffolding) —
     * dispatch is now direct; (3) the duplicated argument-joining code is shared
     * in {@link #joinArgs(String[])}.
     *
     * @throws SQLException           propagated from POSRegister lookups
     * @throws ClassNotFoundException propagated from POSRegister lookups
     */
    public static void newsale() throws SQLException, ClassNotFoundException {
        ArrayList<String> items = new ArrayList<String>();
        System.out.println("NewSale Initiated");
        Scanner in = new Scanner(System.in);
        // NOTE(review): these constructor arguments look like placeholders — confirm
        // the intended POSConfig source.
        POSConfig c = new POSConfig("i", 2, "e", "e");
        String storename = c.getStoreName();
        POSRegister reg = null;
        try {
            reg = new POSRegister(storename, c.getTax());
        } catch (FileNotFoundException e) {
            System.out.println("Utils folder is damaged. Please fix your POSReady Installation.");
            return; // without a register nothing below can run (previously NPE'd)
        } catch (IOException e) {
            System.out.println("Cannot read standard files.");
            return;
        }

        while (true) {
            System.out.print("posregister>> ");
            String[] e = in.nextLine().split(" ");
            e[0] = e[0].toLowerCase();

            if (e[0].equals("additem")) {
                String f = joinArgs(e);
                // Price 0 is the register's "unknown item" sentinel.
                if (reg.getPriceOf(f) != 0) {
                    reg.addItem(f);
                    items.add(f);
                    System.out.println(f + "\t\t" + reg.getPriceOf(f));
                } else {
                    System.out.println("Item '" + f + "' is invalid.");
                }
            } else if (e[0].equals("printstorename")) {
                reg.printStoreName();
            } else if (e[0].equals("finishsale")) {
                reg.calculateTax();
                reg.calculateTotalPrice();
            } else if (e[0].equals("recieptprint")) {
                reg.calculateTax();
                reg.calculateTotalPrice();
                reg.printSale();
            } else if (e[0].equals("printtotal")) {
                reg.printTotal();
            } else if (e[0].equals("printsubtotal")) {
                reg.printSubTotal();
            } else if (e[0].equals("exportsaleinfo")) {
                reg.calculateTax();
                reg.calculateTotalPrice();
                if (e.length < 2 || e[1].equals("")) {
                    System.out.println("No file argument given");
                } else {
                    try {
                        reg.exportSaleInfo(e[1]);
                    } catch (FileNotFoundException e1) {
                        System.out.println("POSReady could not handle the request.");
                    }
                }
            } else if (e[0].equals("removeitem")) {
                reg.removeItem(joinArgs(e));
            } else if (e[0].equals("return")) {
                return;
            } else {
                System.out.println("Command '" + e[0] + "' is invalid.");
            }
        }
    }

    /**
     * Joins every token after the command word back into a single space-separated
     * argument (multi-word item names), with no trailing space.
     */
    private static String joinArgs(String[] tokens) {
        StringBuilder sb = new StringBuilder();
        for (int i = 1; i < tokens.length; i++) {
            if (i > 1) {
                sb.append(' ');
            }
            sb.append(tokens[i]);
        }
        return sb.toString();
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.tests.product.launcher.cli;

import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.inject.Module;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
import io.prestosql.tests.product.launcher.Extensions;
import io.prestosql.tests.product.launcher.LauncherModule;
import io.prestosql.tests.product.launcher.env.EnvironmentConfig;
import io.prestosql.tests.product.launcher.env.EnvironmentConfigFactory;
import io.prestosql.tests.product.launcher.env.EnvironmentFactory;
import io.prestosql.tests.product.launcher.env.EnvironmentModule;
import io.prestosql.tests.product.launcher.env.EnvironmentOptions;
import io.prestosql.tests.product.launcher.suite.Suite;
import io.prestosql.tests.product.launcher.suite.SuiteFactory;
import io.prestosql.tests.product.launcher.suite.SuiteModule;
import io.prestosql.tests.product.launcher.suite.SuiteTestRun;
import picocli.CommandLine.Command;
import picocli.CommandLine.Mixin;
import picocli.CommandLine.Option;

import javax.inject.Inject;

import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Optional;

import static com.google.common.base.MoreObjects.toStringHelper;
import static io.airlift.units.Duration.nanosSince;
import static io.prestosql.tests.product.launcher.cli.Commands.runCommand;
import static java.lang.String.format;
import static java.lang.System.exit;
import static java.util.Objects.requireNonNull;

/**
 * CLI command {@code suite run}: resolves a named {@link Suite}, executes each of its
 * {@link SuiteTestRun}s sequentially via {@link TestRun.Execution}, and prints a pass/fail
 * summary before exiting the JVM with 0 (all passed) or 1 (any failure).
 */
@Command(
        name = "run",
        description = "Run suite tests",
        usageHelpAutoWidth = true)
public class SuiteRun
        implements Runnable
{
    private static final Logger log = Logger.get(SuiteRun.class);

    // Extra Guice modules contributed by the embedding distribution (see Extensions).
    private final Module additionalEnvironments;
    private final Module additionalSuites;

    @Option(names = {"-h", "--help"}, usageHelp = true, description = "Show this help message and exit")
    public boolean usageHelpRequested;

    @Mixin
    public SuiteRunOptions suiteRunOptions = new SuiteRunOptions();

    @Mixin
    public EnvironmentOptions environmentOptions = new EnvironmentOptions();

    public SuiteRun(Extensions extensions)
    {
        this.additionalEnvironments = requireNonNull(extensions, "extensions is null").getAdditionalEnvironments();
        this.additionalSuites = requireNonNull(extensions, "extensions is null").getAdditionalSuites();
    }

    /**
     * Assembles the Guice injector (launcher + suite + environment modules plus the parsed
     * CLI options) and delegates the actual work to {@link Execution}.
     */
    @Override
    public void run()
    {
        runCommand(
                ImmutableList.<Module>builder()
                        .add(new LauncherModule())
                        .add(new SuiteModule(additionalSuites))
                        .add(new EnvironmentModule(environmentOptions, additionalEnvironments))
                        .add(suiteRunOptions.toModule())
                        .build(),
                Execution.class);
    }

    /**
     * picocli option holder for the {@code suite run} command; also exposes itself as a
     * Guice module so {@link Execution} can have it injected.
     */
    public static class SuiteRunOptions
    {
        private static final String DEFAULT_VALUE = "(default: ${DEFAULT-VALUE})";

        @Option(names = "--suite", paramLabel = "<suite>", description = "Name of the suite to run", required = true)
        public String suite;

        @Option(names = "--test-jar", paramLabel = "<jar>", description = "Path to test JAR " + DEFAULT_VALUE, defaultValue = "${product-tests.module}/target/${product-tests.module}-${project.version}-executable.jar")
        public File testJar;

        @Option(names = "--logs-dir", paramLabel = "<dir>", description = "Location of the exported logs directory " + DEFAULT_VALUE, converter = OptionalPathConverter.class, defaultValue = "")
        public Optional<Path> logsDirBase;

        // Binds this instance so it can be @Inject-ed into Execution.
        public Module toModule()
        {
            return binder -> binder.bind(SuiteRunOptions.class).toInstance(this);
        }
    }

    /**
     * Injected worker that runs every test run of the selected suite and reports results.
     * NOTE(review): {@link #printTestRunsSummary} calls {@code exit(...)}, so {@link #run()}
     * never returns normally.
     */
    public static class Execution
            implements Runnable
    {
        private final SuiteRunOptions suiteRunOptions;
        private final EnvironmentOptions environmentOptions;
        private final SuiteFactory suiteFactory;
        private final EnvironmentFactory environmentFactory;
        private final EnvironmentConfigFactory configFactory;

        @Inject
        public Execution(
                SuiteRunOptions suiteRunOptions,
                EnvironmentOptions environmentOptions,
                SuiteFactory suiteFactory,
                EnvironmentFactory environmentFactory,
                EnvironmentConfigFactory configFactory)
        {
            this.suiteRunOptions = requireNonNull(suiteRunOptions, "suiteRunOptions is null");
            this.environmentOptions = requireNonNull(environmentOptions, "environmentOptions is null");
            this.suiteFactory = requireNonNull(suiteFactory, "suiteFactory is null");
            this.environmentFactory = requireNonNull(environmentFactory, "environmentFactory is null");
            this.configFactory = requireNonNull(configFactory, "configFactory is null");
        }

        @Override
        public void run()
        {
            String suiteName = requireNonNull(suiteRunOptions.suite, "suiteRunOptions.suite is null");
            Suite suite = suiteFactory.getSuite(suiteName);
            EnvironmentConfig environmentConfig = configFactory.getConfig(environmentOptions.config);
            List<SuiteTestRun> suiteTestRuns = suite.getTestRuns(environmentConfig);

            // Log the planned runs up front (1-based numbering) before executing any of them.
            log.info("Starting suite '%s' with config '%s' and test runs: ", suiteName, environmentConfig.getConfigName());
            for (int runId = 0; runId < suiteTestRuns.size(); runId++) {
                SuiteTestRun testRun = suiteTestRuns.get(runId);
                log.info("#%02d %s - groups: %s, excluded groups: %s, tests: %s, excluded tests: %s",
                        runId + 1,
                        testRun.getEnvironmentName(),
                        testRun.getGroups(),
                        testRun.getExcludedGroups(),
                        testRun.getTests(),
                        testRun.getExcludedTests());
            }

            long suiteStartTime = System.nanoTime();
            ImmutableList.Builder<TestRunResult> results = ImmutableList.builder();
            // Runs execute sequentially; a failure is captured in its TestRunResult and does
            // not stop subsequent runs (see executeSuiteTestRun).
            for (int runId = 0; runId < suiteTestRuns.size(); runId++) {
                results.add(executeSuiteTestRun(runId + 1, suiteName, suiteTestRuns.get(runId), environmentConfig));
            }
            List<TestRunResult> testRunsResults = results.build();
            printTestRunsSummary(suiteStartTime, suiteName, testRunsResults);
        }

        // Prints per-run summaries (successes first, then failures) and terminates the JVM
        // with exit code 0 when every run passed, 1 otherwise.
        private static void printTestRunsSummary(long startTime, String suiteName, List<TestRunResult> results)
        {
            long failedRuns = results.stream()
                    .filter(TestRunResult::hasFailed)
                    .count();

            if (failedRuns > 0) {
                log.info("Suite %s failed in %s (%d passed, %d failed): ", suiteName, nanosSince(startTime), results.size() - failedRuns, failedRuns);
            }
            else {
                log.info("Suite %s succeeded in %s: ", suiteName, nanosSince(startTime));
            }

            results.stream()
                    .filter(TestRunResult::isSuccessful)
                    .forEach(Execution::printTestRunSummary);

            results.stream()
                    .filter(TestRunResult::hasFailed)
                    .forEach(Execution::printTestRunSummary);

            exit(failedRuns == 0L ? 0 : 1);
        }

        private static void printTestRunSummary(TestRunResult result)
        {
            if (result.isSuccessful()) {
                log.info("PASSED %s with %s [took %s]", result.getSuiteRun(), result.getSuiteConfig(), result.getDuration());
            }
            else {
                log.info("FAILED %s with %s [took %s]:\n%s", result.getSuiteRun(), result.getSuiteConfig(), result.getDuration(), result.getThrowable().map(Throwables::getStackTraceAsString).orElse("no stacktrace"));
            }
        }

        /**
         * Executes a single test run and converts any exception into a failed
         * {@link TestRunResult} instead of propagating it, so the remaining runs still execute.
         */
        public TestRunResult executeSuiteTestRun(int runId, String suiteName, SuiteTestRun suiteTestRun, EnvironmentConfig environmentConfig)
        {
            log.info("Starting test run #%02d %s with config %s", runId, suiteTestRun, environmentConfig);
            long startTime = System.nanoTime();

            Throwable t = null;
            try {
                TestRun.TestRunOptions testRunOptions = createTestRunOptions(runId, suiteName, suiteTestRun, environmentConfig, suiteRunOptions.logsDirBase);
                // Echo an equivalent standalone command so a single run can be reproduced by hand.
                log.info("Execute this test run using:\npresto-product-tests-launcher/bin/run-launcher test run %s", OptionsPrinter.format(environmentOptions, testRunOptions));
                new TestRun.Execution(environmentFactory, environmentOptions, environmentConfig, testRunOptions).run();
            }
            catch (Exception e) {
                t = e;
                log.error("Failed to execute test run %s", suiteTestRun, e);
            }

            return new TestRunResult(runId, suiteTestRun, environmentConfig, nanosSince(startTime), Optional.ofNullable(t));
        }

        // Translates suite-level options into the per-run TestRun.TestRunOptions, giving each
        // run its own reports/logs subdirectory keyed by suiteRunId.
        private TestRun.TestRunOptions createTestRunOptions(int runId, String suiteName, SuiteTestRun suiteTestRun, EnvironmentConfig environmentConfig, Optional<Path> logsDirBase)
        {
            TestRun.TestRunOptions testRunOptions = new TestRun.TestRunOptions();
            testRunOptions.environment = suiteTestRun.getEnvironmentName();
            testRunOptions.testArguments = suiteTestRun.getTemptoRunArguments(environmentConfig);
            testRunOptions.testJar = suiteRunOptions.testJar;
            String suiteRunId = suiteRunId(runId, suiteName, suiteTestRun, environmentConfig);
            testRunOptions.reportsDir = Paths.get("presto-product-tests/target/reports/" + suiteRunId);
            testRunOptions.logsDirBase = logsDirBase.map(dir -> dir.resolve(suiteRunId));
            return testRunOptions;
        }

        // Unique id per run: <suite>-<environment>-<config>-<two-digit run number>.
        private static String suiteRunId(int runId, String suiteName, SuiteTestRun suiteTestRun, EnvironmentConfig environmentConfig)
        {
            return format("%s-%s-%s-%02d", suiteName, suiteTestRun.getEnvironmentName(), environmentConfig.getConfigName(), runId);
        }
    }

    /**
     * Immutable outcome of one test run: the run's identity, its duration, and the throwable
     * (if any) that caused it to fail.
     */
    private static class TestRunResult
    {
        private final int runId;
        private final SuiteTestRun suiteRun;
        private final EnvironmentConfig environmentConfig;
        private final Duration duration;
        private final Optional<Throwable> throwable;

        public TestRunResult(int runId, SuiteTestRun suiteRun, EnvironmentConfig environmentConfig, Duration duration, Optional<Throwable> throwable)
        {
            this.runId = runId;
            this.suiteRun = requireNonNull(suiteRun, "suiteRun is null");
            this.environmentConfig = requireNonNull(environmentConfig, "suiteConfig is null");
            this.duration = requireNonNull(duration, "duration is null");
            this.throwable = requireNonNull(throwable, "throwable is null");
        }

        public int getRunId()
        {
            return this.runId;
        }

        public SuiteTestRun getSuiteRun()
        {
            return this.suiteRun;
        }

        public EnvironmentConfig getSuiteConfig()
        {
            return this.environmentConfig;
        }

        public Duration getDuration()
        {
            return this.duration;
        }

        // Success is defined as "no throwable captured".
        public boolean isSuccessful()
        {
            return this.throwable.isEmpty();
        }

        public boolean hasFailed()
        {
            return this.throwable.isPresent();
        }

        public Optional<Throwable> getThrowable()
        {
            return this.throwable;
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .add("runId", runId)
                    .add("suiteRun", suiteRun)
                    .add("suiteConfig", environmentConfig)
                    .add("duration", duration)
                    .add("throwable", throwable)
                    .toString();
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.test;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.expression.parser.ExprLexer;
import org.apache.drill.common.expression.parser.ExprParser;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.expr.fn.impl.DateUtility;
import org.apache.drill.exec.proto.UserBitShared;
import org.apache.drill.exec.proto.UserBitShared.QueryType;
import org.apache.drill.exec.proto.UserProtos.PreparedStatementHandle;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.util.JsonStringArrayList;
import org.apache.drill.exec.util.JsonStringHashMap;
import org.apache.drill.exec.util.Text;
import org.apache.drill.test.DrillTestWrapper.TestServices;
import org.joda.time.DateTimeZone;

import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;

/**
 * Fluent builder for Drill query tests. A test is configured by chaining calls
 * (query, ordering, baseline source, expected types/columns/values) and then executed
 * with {@link #go()}, which builds and runs a {@link DrillTestWrapper}. Baselines can
 * come from a JSON/CSV file, another query, inline records, or an expected schema —
 * each provided by one of the inner subclass builders.
 */
public class TestBuilder {

  /**
   * Test query to run. Type of object depends on the {@link #queryType}
   */
  private Object query;
  // the type of query for the test
  private UserBitShared.QueryType queryType;
  // should the validation enforce ordering
  private Boolean ordered;
  private boolean approximateEquality;
  private TestServices services;
  // Used to pass the type information associated with particular column names rather than relying on the
  // ordering of the columns in the CSV file, or the default type inferences when reading JSON, this is used for the
  // case where results of the test query are adding type casts to the baseline queries, this saves a little bit of
  // setup in cases where strict type enforcement is not necessary for a given test
  protected Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap;
  // queries to run before the baseline or test queries, can be used to set options
  private String baselineOptionSettingQueries;
  private String testOptionSettingQueries;
  // two different methods are available for comparing ordered results, the default reads all of the records
  // into giant lists of objects, like one giant on-heap batch of 'vectors'
  // this flag enables the other approach which iterates through a hyper batch for the test query results and baseline
  // while this does work faster and use less memory, it can be harder to debug as all of the elements are not in a
  // single list
  private boolean highPerformanceComparison;
  // column names for use with the baseline values
  protected String[] baselineColumns;
  // In cases where we need to verify larger datasets without the risk of running the baseline data through
  // the drill engine, results can be provided in a list of maps. While this model does make a lot of sense, there is a
  // lot of work to make the type handling/casting work correctly, and making robust complex type handling work completely outside
  // of the drill engine for generating baselines would likely be more work than it would be worth. For now we will be
  // going with an approach of using this facility to validate the parts of the drill engine that could break in ways
  // that would affect the reading of baseline files (i.e. we need robust test for storage engines, project and casting that
  // use this interface) and then rely on the engine for the rest of the tests that will use the baseline queries.
  private List<Map<String, Object>> baselineRecords;

  // Sentinel defaults mean "not checked" (see DrillTestWrapper constants).
  private int expectedNumBatches = DrillTestWrapper.EXPECTED_BATCH_COUNT_NOT_SET;
  private int expectedNumRecords = DrillTestWrapper.EXPECTED_NUM_RECORDS_NOT_SET;

  public TestBuilder(TestServices services) {
    this.services = services;
    reset();
  }

  // Full constructor used by the inner baseline builders; ordering must already be decided.
  public TestBuilder(TestServices services, Object query, UserBitShared.QueryType queryType, Boolean ordered,
                     boolean approximateEquality, Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap,
                     String baselineOptionSettingQueries, String testOptionSettingQueries,
                     boolean highPerformanceComparison, int expectedNumBatches) {
    this(services);
    if (ordered == null) {
      throw new RuntimeException("Ordering not set, when using a baseline file or query you must explicitly call the ordered() or unOrdered() method on the " + this.getClass().getSimpleName());
    }
    this.query = query;
    this.queryType = queryType;
    this.ordered = ordered;
    this.approximateEquality = approximateEquality;
    this.baselineTypeMap = baselineTypeMap;
    this.baselineOptionSettingQueries = baselineOptionSettingQueries;
    this.testOptionSettingQueries = testOptionSettingQueries;
    this.highPerformanceComparison = highPerformanceComparison;
    this.expectedNumBatches = expectedNumBatches;
  }

  // Restores the builder to its default (unconfigured) state.
  protected TestBuilder reset() {
    query = "";
    ordered = null;
    approximateEquality = false;
    highPerformanceComparison = false;
    testOptionSettingQueries = "";
    baselineOptionSettingQueries = "";
    baselineRecords = null;
    return this;
  }

  // Materializes the configured test into a runnable wrapper.
  public DrillTestWrapper build() {
    return new DrillTestWrapper(this, services, query, queryType, baselineOptionSettingQueries, testOptionSettingQueries,
        getValidationQueryType(), ordered, highPerformanceComparison, baselineColumns, baselineRecords, expectedNumBatches, expectedNumRecords);
  }

  // Base builder has no expected schema; SchemaTestBuilder overrides this.
  public List<Pair<SchemaPath, TypeProtos.MajorType>> getExpectedSchema() {
    return null;
  }

  // Builds and runs the test; the usual terminal call of a builder chain.
  public void go() throws Exception {
    build().run();
  }

  public TestBuilder sqlQuery(String query) {
    this.query = QueryTestUtil.normalizeQuery(query);
    this.queryType = UserBitShared.QueryType.SQL;
    return this;
  }

  // printf-style variant of sqlQuery.
  public TestBuilder sqlQuery(String query, Object... replacements) {
    return sqlQuery(String.format(query, replacements));
  }

  public TestBuilder preparedStatement(PreparedStatementHandle preparedStatementHandle) {
    queryType = QueryType.PREPARED_STATEMENT;
    query = preparedStatementHandle;
    return this;
  }

  public TestBuilder sqlQueryFromFile(String queryFile) throws IOException {
    String query = BaseTestQuery.getFile(queryFile);
    this.query = query;
    queryType = UserBitShared.QueryType.SQL;
    return this;
  }

  public TestBuilder physicalPlanFromFile(String queryFile) throws IOException {
    String query = BaseTestQuery.getFile(queryFile);
    this.query = query;
    queryType = UserBitShared.QueryType.PHYSICAL;
    return this;
  }

  // Validation will compare results in order.
  public TestBuilder ordered() {
    ordered = true;
    return this;
  }

  // Validation will compare results as an unordered multiset.
  public TestBuilder unOrdered() {
    ordered = false;
    return this;
  }

  // this can only be used with ordered verifications, it does run faster and use less memory but may be
  // a little harder to debug as it iterates over a hyper batch rather than reading all of the values into
  // large on-heap lists
  public TestBuilder highPerformanceComparison() throws Exception {
    highPerformanceComparison = true;
    return this;
  }

  // list of queries to run before the baseline query, can be used to set several options
  // list takes the form of a semi-colon separated list
  public TestBuilder optionSettingQueriesForBaseline(String queries) {
    baselineOptionSettingQueries = queries;
    return this;
  }

  // printf-style variant of optionSettingQueriesForBaseline.
  public TestBuilder optionSettingQueriesForBaseline(String queries, Object... args) {
    baselineOptionSettingQueries = String.format(queries, args);
    return this;
  }

  /**
   * list of queries to run before the test query, can be used to set several options
   * list takes the form of a semi-colon separated list.
   * @param queries queries that set session and system options
   * @return this test builder
   */
  public TestBuilder optionSettingQueriesForTestQuery(String queries) {
    testOptionSettingQueries = queries;
    return this;
  }

  public TestBuilder optionSettingQueriesForTestQuery(String query, Object... args) throws Exception {
    testOptionSettingQueries = String.format(query, args);
    return this;
  }

  // Allow tolerance when comparing floating point results.
  public TestBuilder approximateEquality() {
    approximateEquality = true;
    return this;
  }

  // modified code from SchemaPath.De class. This should be used sparingly and only in tests if absolutely needed.
  public static SchemaPath parsePath(String path) {
    try {
      ExprLexer lexer = new ExprLexer(new ANTLRStringStream(path));
      CommonTokenStream tokens = new CommonTokenStream(lexer);
      ExprParser parser = new ExprParser(tokens);

      ExprParser.parse_return ret = parser.parse();

      if (ret.e instanceof SchemaPath) {
        return (SchemaPath) ret.e;
      } else {
        throw new IllegalStateException("Schema path is not a valid format.");
      }
    } catch (RecognitionException e) {
      throw new RuntimeException(e);
    }
  }

  // Base builder has no baseline source; subclasses override with a file-backed or literal query.
  Object getValidationQuery() throws Exception {
    throw new RuntimeException("Must provide some kind of baseline, either a baseline file or another query");
  }

  protected UserBitShared.QueryType getValidationQueryType() {
    return null;
  }

  // Switch to a JSON-file baseline.
  public JSONTestBuilder jsonBaselineFile(String filePath) {
    return new JSONTestBuilder(filePath, services, query, queryType, ordered, approximateEquality,
        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison,
        expectedNumBatches);
  }

  // Switch to a CSV-file baseline (requires column names and type info; see CSVTestBuilder).
  public CSVTestBuilder csvBaselineFile(String filePath) {
    return new CSVTestBuilder(filePath, services, query, queryType, ordered, approximateEquality,
        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison,
        expectedNumBatches);
  }

  // Switch to schema-only validation, deriving the expected schema from a BatchSchema.
  public SchemaTestBuilder schemaBaseLine(BatchSchema batchSchema) {
    List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = new ArrayList<>();
    for (final MaterializedField field : batchSchema) {
      expectedSchema.add(Pair.of(SchemaPath.getSimplePath(field.getName()), field.getType()));
    }
    return schemaBaseLine(expectedSchema);
  }

  public SchemaTestBuilder schemaBaseLine(List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema) {
    assert expectedSchema != null : "The expected schema can be provided once";
    assert baselineColumns == null : "The column information should be captured in expected schema, not baselineColumns";

    return new SchemaTestBuilder(
        services,
        query,
        queryType,
        baselineOptionSettingQueries,
        testOptionSettingQueries,
        expectedSchema);
  }

  public TestBuilder baselineTypes(Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap) {
    this.baselineTypeMap = baselineTypeMap;
    return this;
  }

  // True when per-column type information has been supplied.
  boolean typeInfoSet() {
    if (baselineTypeMap != null) {
      return true;
    } else {
      return false;
    }
  }

  /**
   * Indicate that the tests query should be checked for an empty result set.
   * @return the test builder
   */
  public TestBuilder expectsEmptyResultSet() {
    unOrdered();
    baselineRecords = new ArrayList<>();
    return this;
  }

  /**
   * Sets the number of expected batch count for this query. The test will fail if the query returns a different number
   * of batches
   *
   * @param expectedNumBatches expected batch count
   * @return this test builder
   */
  public TestBuilder expectsNumBatches(int expectedNumBatches) {
    this.expectedNumBatches = expectedNumBatches;
    return this;
  }

  // Count-only validation; forces unordered comparison.
  public TestBuilder expectsNumRecords(int expectedNumRecords) {
    this.expectedNumRecords = expectedNumRecords;
    this.ordered = false;
    return this;
  }

  /**
   * This method is used to pass in a simple list of values for a single record verification without
   * the need to create a CSV or JSON file to store the baseline.
   *
   * This can be called repeatedly to pass a list of records to verify. It works for both ordered and unordered
   * checks.
   *
   * @param baselineValues - the baseline values to validate
   * @return the test builder
   */
  public TestBuilder baselineValues(Object... baselineValues) {
    assert getExpectedSchema() == null : "The expected schema is not needed when baselineValues are provided ";
    if (ordered == null) {
      throw new RuntimeException("Ordering not set, before specifying baseline data you must explicitly call the ordered() or unOrdered() method on the " + this.getClass().getSimpleName());
    }
    if (baselineRecords == null) {
      baselineRecords = new ArrayList<>();
    }
    Map<String, Object> ret = new HashMap<>();
    int i = 0;
    // baselineColumns must have been set first via baselineColumns(...).
    assertEquals("Must supply the same number of baseline values as columns.", baselineValues.length, baselineColumns.length);
    for (String s : baselineColumns) {
      ret.put(s, baselineValues[i]);
      i++;
    }
    this.baselineRecords.add(ret);
    return this;
  }

  /**
   * This can be used in cases where we want to avoid issues with the assumptions made by the test framework.
   * Most of the methods for verification in the framework run drill queries to generate the read baseline files or
   * execute alternative baseline queries. This model relies on basic functionality of reading files with storage
   * plugins and applying casts/projects to be stable.
   *
   * This method can be used to verify the engine for these cases and any other future execution paths that would
   * be used by both the test query and baseline. Without tests like this it is possible that some tests
   * could falsely report as passing, as both the test query and baseline query could run into the same problem
   * with an assumed stable code path and produce the same erroneous result.
   *
   * @param materializedRecords - a list of maps representing materialized results
   * @return the test builder
   */
  public TestBuilder baselineRecords(List<Map<String, Object>> materializedRecords) {
    this.baselineRecords = materializedRecords;
    return this;
  }

  /**
   * This setting has a slightly different impact on the test depending on some of the other
   * configuration options are set.
   *
   * If a JSON baseline file is given, this list will act as a project list to verify the
   * test query against a subset of the columns in the file.
   *
   * For a CSV baseline file, these will act as aliases for columns [0 .. n] in the repeated
   * varchar column that is read out of CSV.
   *
   * For a baseline sql query, this currently has no effect.
   *
   * For explicit baseline values given in java code with the baselineValues() method, these will
   * be used to create a map for the one record verification.
   */
  public TestBuilder baselineColumns(String... columns) {
    assert getExpectedSchema() == null : "The expected schema is not needed when baselineColumns are provided ";
    // Normalize each name through the expression parser so e.g. nested paths compare consistently.
    for (int i = 0; i < columns.length; i++) {
      columns[i] = parsePath(columns[i]).toExpr();
    }
    this.baselineColumns = columns;
    return this;
  }

  private boolean singleExplicitBaselineRecord() {
    return baselineRecords != null;
  }

  /**
   * Provide a SQL query to validate against.
   * @param baselineQuery
   * @return the test builder
   */
  public BaselineQueryTestBuilder sqlBaselineQuery(Object baselineQuery) {
    return new BaselineQueryTestBuilder(baselineQuery, UserBitShared.QueryType.SQL, services, query, queryType, ordered, approximateEquality,
        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
  }

  public BaselineQueryTestBuilder sqlBaselineQuery(String query, String... replacements) {
    return sqlBaselineQuery(String.format(query, (Object[]) replacements));
  }

  // provide a path to a file containing a SQL query to use as a baseline
  public BaselineQueryTestBuilder sqlBaselineQueryFromFile(String baselineQueryFilename) throws IOException {
    String baselineQuery = BaseTestQuery.getFile(baselineQueryFilename);
    return new BaselineQueryTestBuilder(baselineQuery, UserBitShared.QueryType.SQL, services, query, queryType, ordered, approximateEquality,
        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
  }

  // as physical plans are verbose, this is the only option provided for specifying them, we should enforce
  // that physical plans, or any large JSON strings do not live in the Java source as literals
  public BaselineQueryTestBuilder physicalPlanBaselineQueryFromFile(String baselinePhysicalPlanPath) throws IOException {
    String baselineQuery = BaseTestQuery.getFile(baselinePhysicalPlanPath);
    return new BaselineQueryTestBuilder(baselineQuery, UserBitShared.QueryType.PHYSICAL, services, query, queryType, ordered, approximateEquality,
        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
  }

  // Returns "(precision,scale)" for decimal types, empty string for everything else;
  // used when building cast expressions for CSV baselines.
  private String getDecimalPrecisionScaleInfo(TypeProtos.MajorType type) {
    String precision = "";
    switch(type.getMinorType()) {
      case VARDECIMAL:
      case DECIMAL18:
      case DECIMAL28SPARSE:
      case DECIMAL38SPARSE:
      case DECIMAL38DENSE:
      case DECIMAL28DENSE:
      case DECIMAL9:
        precision = String.format("(%d,%d)", type.getPrecision(), type.getScale());
        break;
      default:
        // do nothing empty string set above
    }
    return precision;
  }

  /**
   * Baseline read from a CSV file. CSV comes in as a single repeated-varchar column, so
   * a validation query is generated that aliases and casts columns[i] to the expected types.
   */
  public class CSVTestBuilder extends TestBuilder {

    // path to the baseline file that will be inserted into the validation query
    private String baselineFilePath;
    // use to cast the baseline file columns, if not set the types
    // that come out of the test query drive interpretation of baseline
    private TypeProtos.MajorType[] baselineTypes;

    CSVTestBuilder(String baselineFile, TestServices services, Object query, UserBitShared.QueryType queryType, Boolean ordered,
                   boolean approximateEquality, Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap,
                   String baselineOptionSettingQueries, String testOptionSettingQueries, boolean highPerformanceComparison,
                   int expectedNumBatches) {
      super(services, query, queryType, ordered, approximateEquality, baselineTypeMap, baselineOptionSettingQueries,
          testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
      this.baselineFilePath = baselineFile;
    }

    // Positional type list; mutually exclusive with the name-keyed baselineTypeMap.
    public CSVTestBuilder baselineTypes(TypeProtos.MajorType... baselineTypes) {
      this.baselineTypes = baselineTypes;
      this.baselineTypeMap = null;
      return this;
    }

    // convenience method to convert minor types to major types if no decimals with precisions are needed
    public CSVTestBuilder baselineTypes(TypeProtos.MinorType... baselineTypes) {
      TypeProtos.MajorType[] majorTypes = new TypeProtos.MajorType[baselineTypes.length];
      int i = 0;
      for(TypeProtos.MinorType minorType : baselineTypes) {
        majorTypes[i] = Types.required(minorType);
        i++;
      }
      this.baselineTypes = majorTypes;
      this.baselineTypeMap = null;
      return this;
    }

    @Override
    protected TestBuilder reset() {
      super.reset();
      baselineTypeMap = null;
      baselineTypes = null;
      baselineFilePath = null;
      return this;
    }

    @Override
    boolean typeInfoSet() {
      if (super.typeInfoSet() || baselineTypes != null) {
        return true;
      } else {
        return false;
      }
    }

    // Generates the SELECT that reads the CSV via the classpath storage plugin and casts
    // columns[i] to the expected type, aliased to the baseline column names.
    @Override
    String getValidationQuery() throws Exception {
      if (baselineColumns.length == 0) {
        throw new Exception("Baseline CSV files require passing column names, please call the baselineColumns() method on the test builder.");
      }

      if (baselineTypes != null) {
        assertEquals("Must pass the same number of types as column names if types are provided.", baselineTypes.length, baselineColumns.length);
      }

      String[] aliasedExpectedColumns = new String[baselineColumns.length];
      for (int i = 0; i < baselineColumns.length; i++) {
        aliasedExpectedColumns[i] = "columns[" + i + "] ";
        TypeProtos.MajorType majorType;
        if (baselineTypes != null) {
          majorType = baselineTypes[i];
        } else if (baselineTypeMap != null) {
          majorType = baselineTypeMap.get(parsePath(baselineColumns[i]));
        } else {
          throw new Exception("Type information not set for interpreting csv baseline file.");
        }
        String precision = getDecimalPrecisionScaleInfo(majorType);
        // TODO - determine if there is a better behavior here, if we do not specify a length the default behavior is
        // to cast to varchar with length 1
        // set default cast size for varchar, the cast function will take the lesser of this passed value and the
        // length of the incoming data when choosing the length for the outgoing data
        if (majorType.getMinorType() == TypeProtos.MinorType.VARCHAR
            || majorType.getMinorType() == TypeProtos.MinorType.VARBINARY) {
          precision = "(65000)";
        }
        aliasedExpectedColumns[i] = "cast(" + aliasedExpectedColumns[i] + " as " + Types.getNameOfMinorType(majorType.getMinorType()) + precision + " ) " + baselineColumns[i];
      }
      String query = "select " + Joiner.on(", ").join(aliasedExpectedColumns) + " from cp.`" + baselineFilePath + "`";
      return query;
    }

    @Override
    protected UserBitShared.QueryType getValidationQueryType() {
      return UserBitShared.QueryType.SQL;
    }
  }

  /**
   * Validates only the schema of the result; record-level baseline methods are disabled
   * (the constructor forces an empty expected result set).
   */
  public class SchemaTestBuilder extends TestBuilder {
    private List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema;

    SchemaTestBuilder(TestServices services, Object query, UserBitShared.QueryType queryType,
        String baselineOptionSettingQueries, String testOptionSettingQueries, List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema) {
      super(services, query, queryType, false, false, null, baselineOptionSettingQueries, testOptionSettingQueries, false, -1);
      expectsEmptyResultSet();
      this.expectedSchema = expectedSchema;
    }

    @Override
    public TestBuilder baselineColumns(String... columns) {
      assert false : "The column information should be captured in expected scheme, not baselineColumns";
      return this;
    }

    @Override
    public TestBuilder baselineRecords(List<Map<String, Object>> materializedRecords) {
      assert false : "Since only schema will be compared in this test, no record is expected";
      return this;
    }

    @Override
    public TestBuilder baselineValues(Object... objects) {
      assert false : "Since only schema will be compared in this test, no record is expected";
      return this;
    }

    @Override
    protected UserBitShared.QueryType getValidationQueryType() {
      return null;
    }

    @Override
    public List<Pair<SchemaPath, TypeProtos.MajorType>> getExpectedSchema() {
      return expectedSchema;
    }
  }

  /**
   * Baseline read from a JSON file; defaults to projecting all columns ("*") unless
   * baselineColumns(...) narrows the projection.
   */
  public class JSONTestBuilder extends TestBuilder {

    // path to the baseline file that will be inserted into the validation query
    private String baselineFilePath;

    JSONTestBuilder(String baselineFile, TestServices services, Object query, UserBitShared.QueryType queryType, Boolean ordered,
                    boolean approximateEquality, Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap,
                    String baselineOptionSettingQueries, String testOptionSettingQueries, boolean highPerformanceComparison,
                    int expectedNumBatches) {
      super(services, query, queryType, ordered, approximateEquality, baselineTypeMap, baselineOptionSettingQueries,
          testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
      this.baselineFilePath = baselineFile;
      this.baselineColumns = new String[] {"*"};
    }

    @Override
    String getValidationQuery() {
      return "select " + Joiner.on(", ").join(baselineColumns) + " from cp.`" + baselineFilePath + "`";
    }

    @Override
    protected UserBitShared.QueryType getValidationQueryType() {
      return UserBitShared.QueryType.SQL;
    }
  }

  /**
   * Baseline produced by running another query (SQL or physical plan).
   */
  public class BaselineQueryTestBuilder extends TestBuilder {

    /**
     * Baseline query. Type of object depends on {@link #baselineQueryType}
     */
    private Object baselineQuery;
    private UserBitShared.QueryType baselineQueryType;

    BaselineQueryTestBuilder(Object baselineQuery, UserBitShared.QueryType baselineQueryType, TestServices services,
                             Object query, UserBitShared.QueryType queryType, Boolean ordered,
                             boolean approximateEquality, Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap,
                             String baselineOptionSettingQueries, String testOptionSettingQueries, boolean highPerformanceComparison,
                             int expectedNumBatches) {
      super(services, query, queryType, ordered, approximateEquality, baselineTypeMap, baselineOptionSettingQueries,
          testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
      this.baselineQuery = baselineQuery;
      this.baselineQueryType = baselineQueryType;
    }

    @Override
    Object getValidationQuery() {
      return baselineQuery;
    }

    @Override
    protected UserBitShared.QueryType getValidationQueryType() {
      return baselineQueryType;
    }

    // This currently assumes that all explicit baseline queries will have fully qualified type information
    // if this changes, the baseline query can be run in a sub query with the implicit or explicit type passing
    // added on top of it, as is currently when done when reading a baseline file
    @Override
    boolean typeInfoSet() {
      return true;
    }
  }

  /**
   * Convenience method to create a {@link JsonStringArrayList list} from the given values.
   */
  public static JsonStringArrayList<Object> listOf(Object... values) {
    final JsonStringArrayList<Object> list = new JsonStringArrayList<>();
    for (Object value:values) {
      if (value instanceof CharSequence) {
        // Strings are stored as Drill Text objects to match what the engine produces.
        list.add(new Text(value.toString()));
      } else {
        list.add(value);
      }
    }
    return list;
  }

  /**
   * Convenience method to create a {@link JsonStringHashMap<String, Object> map} instance with the given key value sequence.
   *
   * Key value sequence consists of key - value pairs such that a key precedes its value. For instance:
   *
   * mapOf("name", "Adam", "age", 41) corresponds to {"name": "Adam", "age": 41} in JSON.
   */
  public static JsonStringHashMap<String, Object> mapOf(Object... keyValueSequence) {
    Preconditions.checkArgument(keyValueSequence.length%2==0, "Length of key value sequence must be even");
    final JsonStringHashMap<String, Object> map = new JsonStringHashMap<>();
    for (int i=0; i<keyValueSequence.length; i+=2) {
      Object value = keyValueSequence[i+1];
      if (value instanceof CharSequence) {
        // Same Text conversion as listOf, for consistency with engine output.
        value = new Text(value.toString());
      }
      map.put(String.class.cast(keyValueSequence[i]), value);
    }
    return map;
  }

  /**
   * Helper method for the timestamp values that depend on the local timezone
   * @param value expected timestamp value in UTC
   * @return timestamp value for the local timezone
   */
  public static Timestamp convertToLocalTimestamp(String value) {
    long UTCTimestamp = Timestamp.valueOf(value).getTime();
    return new Timestamp(DateTimeZone.getDefault().convertUTCToLocal(UTCTimestamp));
  }

  /**
   * Helper method for the timestamp values that depend on the local timezone
   * @param value expected timestamp value in UTC
   * @return LocalDateTime value for the local timezone
   */
  public static LocalDateTime convertToLocalDateTime(String value) {
    OffsetDateTime utcDateTime = LocalDateTime.parse(value, DateUtility.getDateTimeFormatter()).atOffset(ZoneOffset.UTC);
    // ZoneOffset.systemDefault() resolves via ZoneId; converts the UTC instant to wall-clock local time.
    return utcDateTime.atZoneSameInstant(ZoneOffset.systemDefault()).toLocalDateTime();
  }
}
package com.jwetherell.algorithms.mathematics.timing; import java.text.DecimalFormat; import com.jwetherell.algorithms.mathematics.Division; import com.jwetherell.algorithms.mathematics.Multiplication; public class MathematicsTiming { private static final DecimalFormat FORMAT = new DecimalFormat("#.######"); public static void main(String[] args) { // MULTIPLICATION { final int a = 1200; final int b = 1400; System.out.println("Multiplication using a loop."); long before = System.nanoTime(); long result = Multiplication.multiplyUsingLoop(a, b); long after = System.nanoTime(); long check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using recursion."); before = System.nanoTime(); result = Multiplication.multiplyUsingRecursion(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using shifts."); before = System.nanoTime(); result = Multiplication.multiplyUsingShift(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using logs."); before = System.nanoTime(); result = Multiplication.multiplyUsingLogs(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); 
System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using FFT."); before = System.nanoTime(); result = Long.parseLong(Multiplication.multiplyUsingFFT(String.valueOf(a), String.valueOf(b))); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using loop, String input."); before = System.nanoTime(); result = Long.parseLong(Multiplication.multiplyUsingLoopWithStringInput(String.valueOf(a), String.valueOf(b))); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using loop, Integer input."); before = System.nanoTime(); result = Multiplication.multiplyUsingLoopWithIntegerInput(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.out.println(); System.gc(); } { final int a = -7400; final int b = 6200; System.out.println("Multiplication using a loop."); long before = System.nanoTime(); long result = Multiplication.multiplyUsingLoop(a, b); long after = System.nanoTime(); long check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using 
recursion."); before = System.nanoTime(); result = Multiplication.multiplyUsingRecursion(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using shifts."); before = System.nanoTime(); result = Multiplication.multiplyUsingShift(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using logs."); before = System.nanoTime(); result = Multiplication.multiplyUsingLogs(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using FFT."); before = System.nanoTime(); result = Long.parseLong(Multiplication.multiplyUsingFFT(String.valueOf(a), String.valueOf(b))); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using loop, String input."); before = System.nanoTime(); result = Long.parseLong(Multiplication.multiplyUsingLoopWithStringInput(String.valueOf(a), String.valueOf(b))); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + 
result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using loop, Integer input."); before = System.nanoTime(); result = Multiplication.multiplyUsingLoopWithIntegerInput(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.out.println(); System.gc(); } { final int a = 8400; final int b = -2900; System.out.println("Multiplication using a loop."); long before = System.nanoTime(); long result = Multiplication.multiplyUsingLoop(a, b); long after = System.nanoTime(); long check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using recursion."); before = System.nanoTime(); result = Multiplication.multiplyUsingRecursion(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using shifts."); before = System.nanoTime(); result = Multiplication.multiplyUsingShift(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using logs."); before = System.nanoTime(); result = Multiplication.multiplyUsingLogs(a, b); after 
= System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using FFT."); before = System.nanoTime(); result = Long.parseLong(Multiplication.multiplyUsingFFT(String.valueOf(a), String.valueOf(b))); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using loop, String input."); before = System.nanoTime(); result = Long.parseLong(Multiplication.multiplyUsingLoopWithStringInput(String.valueOf(a), String.valueOf(b))); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using loop, Integer input."); before = System.nanoTime(); result = Multiplication.multiplyUsingLoopWithIntegerInput(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.out.println(); System.gc(); } { final int a = -9200; final int b = -3700; System.out.println("Multiplication using a loop."); long before = System.nanoTime(); long result = Multiplication.multiplyUsingLoop(a, b); long after = System.nanoTime(); long check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + 
" result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using recursion."); before = System.nanoTime(); result = Multiplication.multiplyUsingRecursion(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using shifts."); before = System.nanoTime(); result = Multiplication.multiplyUsingShift(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using logs."); before = System.nanoTime(); result = Multiplication.multiplyUsingLogs(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using FFT."); before = System.nanoTime(); result = Long.parseLong(Multiplication.multiplyUsingFFT(String.valueOf(a), String.valueOf(b))); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using loop, String input."); before = System.nanoTime(); result = Long.parseLong(Multiplication.multiplyUsingLoopWithStringInput(String.valueOf(a), 
String.valueOf(b))); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Multiplication using loop, Integer input."); before = System.nanoTime(); result = Multiplication.multiplyUsingLoopWithIntegerInput(a, b); after = System.nanoTime(); check = Multiplication.multiplication(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.out.println(); System.gc(); } // DIVISION { final int a = 9; final int b = 3; System.out.println("Division using a loop."); long before = System.nanoTime(); long result = Division.divisionUsingLoop(a, b); long after = System.nanoTime(); long check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using recursion."); before = System.nanoTime(); result = Division.divisionUsingRecursion(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using shifts."); before = System.nanoTime(); result = Division.divisionUsingShift(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); 
System.out.println("Division using logs."); before = System.nanoTime(); result = Division.divisionUsingLogs(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using multiplication."); before = System.nanoTime(); result = Division.divisionUsingMultiplication(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.out.println(); System.gc(); } { final int a = -54; final int b = 6; System.out.println("Division using a loop."); long before = System.nanoTime(); long result = Division.divisionUsingLoop(a, b); long after = System.nanoTime(); long check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using recursion."); before = System.nanoTime(); result = Division.divisionUsingRecursion(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using shifts."); before = System.nanoTime(); result = Division.divisionUsingShift(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " 
ns"); System.gc(); System.out.println("Division using logs."); before = System.nanoTime(); result = Division.divisionUsingLogs(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using multiplication."); before = System.nanoTime(); result = Division.divisionUsingMultiplication(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.out.println(); System.gc(); } { final int a = 98; final int b = -7; System.out.println("Division using a loop."); long before = System.nanoTime(); long result = Division.divisionUsingLoop(a, b); long after = System.nanoTime(); long check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using recursion."); before = System.nanoTime(); result = Division.divisionUsingRecursion(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using shifts."); before = System.nanoTime(); result = Division.divisionUsingShift(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + 
FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using logs."); before = System.nanoTime(); result = Division.divisionUsingLogs(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using multiplication."); before = System.nanoTime(); result = Division.divisionUsingMultiplication(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.out.println(); System.gc(); } { final int a = -568; final int b = -15; System.out.println("Division using a loop."); long before = System.nanoTime(); long result = Division.divisionUsingLoop(a, b); long after = System.nanoTime(); long check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using recursion."); before = System.nanoTime(); result = Division.divisionUsingRecursion(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using shifts."); before = System.nanoTime(); result = Division.divisionUsingShift(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); 
System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using logs."); before = System.nanoTime(); result = Division.divisionUsingLogs(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.gc(); System.out.println("Division using multiplication."); before = System.nanoTime(); result = Division.divisionUsingMultiplication(a, b); after = System.nanoTime(); check = Division.division(a, b); if (result != check) System.out.println("ERROR with a=" + a + " b=" + b + " result=" + result + " check=" + check); System.out.println("Computed in " + FORMAT.format(after - before) + " ns"); System.out.println(); System.gc(); } } }
package org.magcode.sunricher.api; import java.awt.Color; import java.io.IOException; import java.util.List; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.magcode.sunricher.mqtt.TcpClient; public class WifiHandlerImpl implements WifiHandler { private static Logger logger = LogManager.getLogger(WifiHandlerImpl.class); private int repeat = 4; private TcpClient tcpClient; private final int SLEEP = 8; private final int SLEEP_AT_END = 7; public WifiHandlerImpl(TcpClient aTcpClient, int repeat) { this.repeat = repeat; this.tcpClient = aTcpClient; } @Override public void setRGB(List<Integer> zones, int r, int g, int b) { try { this.setR(zones, r); Thread.sleep(SLEEP); this.setG(zones, g); Thread.sleep(SLEEP); this.setB(zones, b); } catch (InterruptedException e) { logger.error(e); } } @Override public void setHSB(List<Integer> zones, int h, int s, int v) { Color color = Color.getHSBColor((float) h / 360, (float) s / 100, (float) v / 100); this.setRGB(zones, color.getRed(), color.getGreen(), color.getBlue()); } @Override public void setR(List<Integer> zones, int value) { value = cleanValue(value, 0, 255); byte[] data = Constants.DATA_RGB_R; send(this.getMessage(zones, data[0], data[1], (byte) value)); } private int cleanValue(int value, int min, int max) { int ret = value; if (value > max) { ret = max; } if (value < min) { ret = min; } return ret; } @Override public void setG(List<Integer> zones, int value) { value = cleanValue(value, 0, 255); byte[] data = Constants.DATA_RGB_G; send(this.getMessage(zones, data[0], data[1], (byte) value)); } @Override public void setB(List<Integer> zones, int value) { value = cleanValue(value, 0, 255); byte[] data = Constants.DATA_RGB_B; send(this.getMessage(zones, data[0], data[1], (byte) value)); } @Override public void setW(List<Integer> zones, int value) { value = cleanValue(value, 0, 255); byte[] data = Constants.DATA_RGB_W; send(this.getMessage(zones, data[0], data[1], (byte) value)); 
} @Override public void setRGBFadeProgram(List<Integer> zones, int value) { value = cleanValue(value, 0, 10); // 0 -> turn off // 1-10 -> select and start program if (value == 0) { byte[] data = Constants.DATA_RGB_PRG_OFF; send(this.getMessage(zones, data[0], data[1], data[2])); } else { byte[] data = Constants.DATA_RGB_PRG_ON; // we need to add +20 to program. The controller expects 21 to 22 it seems. send(this.getMessage(zones, data[0], data[1], (byte) (value + 20))); } } @Override public void setRGBFadeSpeed(List<Integer> zones, int value) { value = cleanValue(value, 1, 10); byte[] data = Constants.DATA_RGB_PRG_SPEED; send(this.getMessage(zones, data[0], data[1], (byte) (value))); } @Override public void togglePower(List<Integer> zones, boolean powerState) { // works byte[] data = null; for (int zone : zones) { switch (zone) { case 1: if (powerState) { data = Constants.DATA_ROOM1_ON; } else { data = Constants.DATA_ROOM1_OFF; } break; case 2: if (powerState) { data = Constants.DATA_ROOM2_ON; } else { data = Constants.DATA_ROOM2_OFF; } break; case 3: if (powerState) { data = Constants.DATA_ROOM3_ON; } else { data = Constants.DATA_ROOM3_OFF; } break; case 4: if (powerState) { data = Constants.DATA_ROOM4_ON; } else { data = Constants.DATA_ROOM4_OFF; } break; case 5: if (powerState) { data = Constants.DATA_ROOM5_ON; } else { data = Constants.DATA_ROOM5_OFF; } break; case 6: if (powerState) { data = Constants.DATA_ROOM6_ON; } else { data = Constants.DATA_ROOM6_OFF; } break; case 7: if (powerState) { data = Constants.DATA_ROOM7_ON; } else { data = Constants.DATA_ROOM7_OFF; } break; case 8: if (powerState) { data = Constants.DATA_ROOM8_ON; } else { data = Constants.DATA_ROOM8_OFF; } break; } } if (null != data) { send(this.getMessage(zones, data[0], data[1], data[2])); } } @Override public void setBrightness(List<Integer> zones, int value) { value = cleanValue(value, 0, 256); byte[] data = Constants.DATA_BRIGHT; send(this.getMessage(zones, data[0], data[1], (byte) 
value)); } @Override public void setRGBBrightness(List<Integer> zones, int value) { value = cleanValue(value, 0, 256); byte[] data = Constants.DATA_RGB_BRIGHT; send(this.getMessage(zones, data[0], data[1], (byte) value)); } /** * set bit for the corresponding zone number. If array is empty no bit will be * set. * * @param zones * @return */ private byte generateZoneByte(List<Integer> zones) { if (zones.size() == 0) return 0; byte result = 0; for (int currentZone : zones) { if (currentZone <= 0 || currentZone > 8) { continue; } result = (byte) (result | (1 << currentZone - 1)); } return result; } /** * see {@link #getMessage(List, byte, byte, int)} * * @param zone * @param category * @param channel * @param value * @return */ private byte[] getMessage(List<Integer> zones, byte category, byte channel, byte value) { byte[] result = new byte[12]; // remote identifier result[0] = 0x55; result[1] = 0x33; result[2] = 0x61; result[3] = 0x39; result[4] = 0x02; // zone result[5] = this.generateZoneByte(zones); // category - rgb vaules result[6] = category; // color channel result[7] = channel; // value result[8] = (byte) value; // checksum result[9] = (byte) (result[8] + result[7] + result[6] + result[5] + result[4]); // marker bytes result[10] = (byte) 0xaa; result[11] = (byte) 0xaa; return result; } private void send(byte[] bytes) { try { logger.trace("Sending {} times: {} - {} - {}", repeat, bytes[6], bytes[7], bytes[8]); for (int i = 1; i <= repeat; i++) { tcpClient.getOs().write(bytes); tcpClient.getOs().flush(); Thread.sleep(SLEEP_AT_END); } } catch (IOException e) { logger.error("Stream write error", e); tcpClient.connect(); tcpClient.getUpdClient().sendDisableWifi(); } catch (InterruptedException e) { logger.error("Interrupted", e); } } }
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.simpleworkflow.model; import java.io.Serializable; /** * <p> * Provides details of the <code>WorkflowExecutionTimedOut</code> event. * </p> */ public class WorkflowExecutionTimedOutEventAttributes implements Serializable, Cloneable { /** * The type of timeout that caused this event. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>START_TO_CLOSE */ private String timeoutType; /** * The policy used for the child workflow executions of this workflow * execution. <p>The supported child policies are: <ul> * <li><b>TERMINATE:</b> the child executions will be terminated.</li> * <li><b>REQUEST_CANCEL:</b> a request to cancel will be attempted for * each child execution by recording a * <code>WorkflowExecutionCancelRequested</code> event in its history. It * is up to the decider to take appropriate actions when it receives an * execution history with this event.</li> <li><b>ABANDON:</b> no action * will be taken. The child executions will continue to run.</li> </ul> * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>TERMINATE, REQUEST_CANCEL, ABANDON */ private String childPolicy; /** * The type of timeout that caused this event. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>START_TO_CLOSE * * @return The type of timeout that caused this event. 
     *
     * @see WorkflowExecutionTimeoutType
     */
    public String getTimeoutType() {
        return timeoutType;
    }

    /**
     * Sets the type of timeout that caused this event.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>START_TO_CLOSE
     *
     * @param timeoutType The type of timeout that caused this event.
     *
     * @see WorkflowExecutionTimeoutType
     */
    public void setTimeoutType(String timeoutType) {
        this.timeoutType = timeoutType;
    }

    /**
     * Fluent variant of {@link #setTimeoutType(String)}.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>START_TO_CLOSE
     *
     * @param timeoutType The type of timeout that caused this event.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see WorkflowExecutionTimeoutType
     */
    public WorkflowExecutionTimedOutEventAttributes withTimeoutType(String timeoutType) {
        this.timeoutType = timeoutType;
        return this;
    }

    /**
     * Enum overload of {@link #setTimeoutType(String)}; stores the enum's string form.
     * <p>
     * NOTE(review): calls {@code timeoutType.toString()}, so a {@code null} argument
     * throws {@link NullPointerException} — confirm callers never pass null.
     *
     * @param timeoutType The type of timeout that caused this event.
     *
     * @see WorkflowExecutionTimeoutType
     */
    public void setTimeoutType(WorkflowExecutionTimeoutType timeoutType) {
        this.timeoutType = timeoutType.toString();
    }

    /**
     * Fluent variant of {@link #setTimeoutType(WorkflowExecutionTimeoutType)}.
     *
     * @param timeoutType The type of timeout that caused this event.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see WorkflowExecutionTimeoutType
     */
    public WorkflowExecutionTimedOutEventAttributes withTimeoutType(WorkflowExecutionTimeoutType timeoutType) {
        this.timeoutType = timeoutType.toString();
        return this;
    }

    /**
     * Returns the policy applied to the child workflow executions of this workflow
     * execution. <p>The supported child policies are: <ul>
     * <li><b>TERMINATE:</b> the child executions will be terminated.</li>
     * <li><b>REQUEST_CANCEL:</b> a request to cancel will be attempted for each child
     * execution by recording a <code>WorkflowExecutionCancelRequested</code> event in
     * its history. It is up to the decider to take appropriate actions when it
     * receives an execution history with this event.</li>
     * <li><b>ABANDON:</b> no action will be taken; the child executions will continue
     * to run.</li> </ul>
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>TERMINATE, REQUEST_CANCEL, ABANDON
     *
     * @return The policy used for the child workflow executions of this workflow
     *         execution.
     *
     * @see ChildPolicy
     */
    public String getChildPolicy() {
        return childPolicy;
    }

    /**
     * Sets the policy used for the child workflow executions of this workflow
     * execution (see {@link #getChildPolicy()} for the meaning of each policy).
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>TERMINATE, REQUEST_CANCEL, ABANDON
     *
     * @param childPolicy The policy used for the child workflow executions of this
     *        workflow execution.
     *
     * @see ChildPolicy
     */
    public void setChildPolicy(String childPolicy) {
        this.childPolicy = childPolicy;
    }

    /**
     * Fluent variant of {@link #setChildPolicy(String)}.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>TERMINATE, REQUEST_CANCEL, ABANDON
     *
     * @param childPolicy The policy used for the child workflow executions of this
     *        workflow execution.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see ChildPolicy
     */
    public WorkflowExecutionTimedOutEventAttributes withChildPolicy(String childPolicy) {
        this.childPolicy = childPolicy;
        return this;
    }

    /**
     * Enum overload of {@link #setChildPolicy(String)}; stores the enum's string form.
     * <p>
     * NOTE(review): calls {@code childPolicy.toString()}, so a {@code null} argument
     * throws {@link NullPointerException} — confirm callers never pass null.
     *
     * @param childPolicy The policy used for the child workflow executions of this
     *        workflow execution.
     *
     * @see ChildPolicy
     */
    public void setChildPolicy(ChildPolicy childPolicy) {
        this.childPolicy = childPolicy.toString();
    }

    /**
     * Fluent variant of {@link #setChildPolicy(ChildPolicy)}.
     *
     * @param childPolicy The policy used for the child workflow executions of this
     *        workflow execution.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     *
     * @see ChildPolicy
     */
    public WorkflowExecutionTimedOutEventAttributes withChildPolicy(ChildPolicy childPolicy) {
        this.childPolicy = childPolicy.toString();
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null attributes are included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        // Note: a trailing comma is emitted after TimeoutType whether or not
        // ChildPolicy follows — this mirrors the generator's output format.
        if (getTimeoutType() != null) sb.append("TimeoutType: " + getTimeoutType() + ",");
        if (getChildPolicy() != null) sb.append("ChildPolicy: " + getChildPolicy() );
        sb.append("}");
        return sb.toString();
    }

    /** Hash over both attributes with the standard 31-prime accumulation. */
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getTimeoutType() == null) ? 0 : getTimeoutType().hashCode());
        hashCode = prime * hashCode + ((getChildPolicy() == null) ? 0 : getChildPolicy().hashCode());
        return hashCode;
    }

    /** Value equality over timeoutType and childPolicy (null-safe, type-checked). */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;

        if (obj instanceof WorkflowExecutionTimedOutEventAttributes == false) return false;
        WorkflowExecutionTimedOutEventAttributes other = (WorkflowExecutionTimedOutEventAttributes)obj;

        // XOR: exactly one of the two values is null -> not equal.
        if (other.getTimeoutType() == null ^ this.getTimeoutType() == null) return false;
        if (other.getTimeoutType() != null && other.getTimeoutType().equals(this.getTimeoutType()) == false) return false;
        if (other.getChildPolicy() == null ^ this.getChildPolicy() == null) return false;
        if (other.getChildPolicy() != null && other.getChildPolicy().equals(this.getChildPolicy()) == false) return false;
        return true;
    }

    /** Shallow copy via Object.clone(); both fields are immutable Strings. */
    @Override
    public WorkflowExecutionTimedOutEventAttributes clone() {
        try {
            return (WorkflowExecutionTimedOutEventAttributes) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: the class declares Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }
    }

}
/*
 * Copyright 2012 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.decisiontable.parser;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.drools.decisiontable.parser.RuleSheetParserUtil;
import org.drools.decisiontable.parser.xls.PropertiesSheetListener;
import org.drools.decisiontable.parser.xls.PropertiesSheetListener.CaseInsensitiveMap;
import org.drools.template.model.Condition;
import org.drools.template.model.Consequence;
import org.drools.template.model.Global;
import org.drools.template.model.Import;
import org.drools.template.model.Package;
import org.drools.template.model.Rule;
import org.drools.template.parser.DecisionTableParseException;

import static org.drools.decisiontable.parser.ActionType.Code;

/**
 * An object of this class is prepared to receive calls passing it the
 * contents of a spreadsheet containing one or more decision tables.
 * Each of these tables is then expanded into a set of similar rules,
 * varying to a degree with respect to the patterns and actions.
 *
 * A "rule set" starts with some overall definitions such as imports,
 * globals, functions and queries.
 *
 * A table is identified by a cell beginning with the text "RuleTable". The first
 * row after the table identifier defines the column type: either a pattern of
 * the condition or an action for the consequence, or an attribute.
 *
 * The second row contains optional pattern type declarations. If cells in
 * this row are merged, then all snippets below the merged stretch become part of
 * the same pattern, as separate constraints.
 *
 * The third row identifies the java code block associated with the condition
 * or consequence. This code block should include one or more parameter markers
 * for the insertion of values defined in cells of that column.
 *
 * The fourth row is available for comments on the purpose of the column.
 *
 * All subsequent rows identify rules within the set, providing values to be
 * inserted where there are markers in the code snippets defined in the third
 * row, or for the attribute identified by the column header.
 *
 * @author <a href="mailto:michael.neale@gmail.com">Michael Neale</a>
 */
public class DefaultRuleSheetListener
    implements
    RuleSheetListener {

    // Keywords recognized in the property (non-table) area of the sheet.
    public static final String QUERIES_TAG = "Queries";
    public static final String FUNCTIONS_TAG = "Functions";
    public static final String DECLARES_TAG = "Declare";
    public static final String IMPORT_TAG = "Import";
    public static final String SEQUENTIAL_FLAG = "Sequential";
    public static final String ESCAPE_QUOTES_FLAG = "EscapeQuotes";
    public static final String VARIABLES_TAG = "Variables";
    public static final String RULE_TABLE_TAG = "ruletable";
    public static final String RULESET_TAG = "RuleSet";

    // Row offsets (relative to the "RuleTable" header row) of the four
    // structural rows described in the class javadoc.
    private static final int ACTION_ROW = 1;
    private static final int OBJECT_TYPE_ROW = 2;
    private static final int CODE_ROW = 3;
    private static final int LABEL_ROW = 4;

    // --- State machine variables for this parser ---
    private boolean _isInRuleTable = false;       // true while cells belong to a RuleTable
    private int _ruleRow;                         // spreadsheet row of the rule currently being filled
    private int _ruleStartColumn;                 // column of the "RuleTable" header cell
    private int _ruleStartRow;                    // row of the "RuleTable" header cell
    private Rule _currentRule;                    // rule being accumulated from the current data row
    private String _currentRulePrefix;            // base name for generated rule names
    private boolean _currentSequentialFlag = false; // indicates that we are in sequential mode
    private boolean _currentEscapeQuotesFlag = true; // indicates that we are escaping quotes

    // --- Accumulated output ---
    private Map<Integer, ActionType> _actions;    // column -> column header action type, per table
    private final HashMap<Integer, String> _cellComments = new HashMap<Integer, String>();
    private final List<Rule> _ruleList = new LinkedList<Rule>();

    // Need to keep an ordered list of these to make conditions appear in the right order.
    private List<SourceBuilder> sourceBuilders = new ArrayList<SourceBuilder>();

    private final PropertiesSheetListener _propertiesListener = new PropertiesSheetListener();

    private boolean showPackage;
    private String worksheetName = null;

    /**
     * Constructor.
     */
    public DefaultRuleSheetListener() {
        this( true );
    }

    /**
     * Constructor.
     * @param showPackage if true, the rule set name is passed to the resulting package
     */
    public DefaultRuleSheetListener(boolean showPackage) {
        this.showPackage = showPackage;
    }

    // When set, the worksheet name is appended to the rule-name prefix (see initRuleTable).
    public void setWorksheetName(String worksheetName) {
        this.worksheetName = worksheetName;
    }

    /* (non-Javadoc)
     * @see org.kie.decisiontable.parser.RuleSheetListener#getProperties()
     */
    public CaseInsensitiveMap getProperties() {
        return this._propertiesListener.getProperties();
    }

    /* (non-Javadoc)
     * @see org.kie.decisiontable.parser.RuleSheetListener#getRuleSet()
     *
     * Builds and returns the Package; fails if no RuleTable was ever seen.
     */
    public Package getRuleSet() {
        if ( this._ruleList.isEmpty() ) {
            throw new DecisionTableParseException( "No RuleTable cells in spreadsheet." );
        }
        final Package ruleset = buildRuleSet();
        return ruleset;
    }

    /**
     * Add a new rule to the current list of rules
     * @param newRule
     */
    protected void addRule(final Rule newRule) {
        this._ruleList.add( newRule );
    }

    /**
     * Assembles the Package from everything accumulated so far: the rules,
     * plus the sheet-level properties (imports, globals, functions, queries,
     * declared types and rule-set-wide attributes).
     */
    private Package buildRuleSet() {
        final String defaultPackageName = "rule_table";
        final String rulesetName =
                getProperties().getSingleProperty( RULESET_TAG, defaultPackageName );

        // Package name is suppressed (null) when showPackage is false.
        final Package ruleset = new Package( (showPackage) ? rulesetName : null );
        for ( Rule rule : this._ruleList ) {
            ruleset.addRule( rule );
        }

        final List<Import> importList = RuleSheetParserUtil.getImportList( getProperties().getProperty( IMPORT_TAG ) );
        for ( Import import1 : importList ) {
            ruleset.addImport( import1 );
        }
        final List<Global> variableList = RuleSheetParserUtil.getVariableList( getProperties().getProperty( VARIABLES_TAG ) );
        for ( Global global : variableList ) {
            ruleset.addVariable( global );
        }

        final List<String> functions = getProperties().getProperty( FUNCTIONS_TAG );
        if( functions != null ){
            for( String function: functions ){
                ruleset.addFunctions( function );
            }
        }
        final List<String> queries = getProperties().getProperty( QUERIES_TAG );
        if( queries != null ){
            for( String query: queries ){
                ruleset.addQueries( query );
            }
        }
        final List<String> declarations = getProperties().getProperty( DECLARES_TAG );
        if( declarations != null ){
            for( String declaration: declarations ){
                ruleset.addDeclaredType( declaration );
            }
        }

        // Rule-set-wide attributes: each may appear at most once in the sheet.
        for( Code code: ActionType.ATTRIBUTE_CODE_SET ){
            List<String> values = getProperties().getProperty( code.getColHeader() );
            if( values != null ){
                if( values.size() > 1 ){
                    List<String> cells = getProperties().getPropertyCells( code.getColHeader() );
                    throw new DecisionTableParseException( "Multiple values for " + code.getColHeader() +
                            " in cells " + cells.toString() );
                }
                String value = values.get( 0 );
                switch( code ){
                case SALIENCE:
                    try {
                        ruleset.setSalience( new Integer( value ) );
                    } catch( NumberFormatException nfe ){
                        throw new DecisionTableParseException( "Priority is not an integer literal, in cell " +
                                getProperties().getSinglePropertyCell( code.getColHeader() ) );
                    }
                    break;
                case DURATION:
                    try {
                        ruleset.setDuration( new Long( value ) );
                    } catch( NumberFormatException nfe ){
                        throw new DecisionTableParseException( "Duration is not an integer literal, in cell " +
                                getProperties().getSinglePropertyCell( code.getColHeader() ) );
                    }
                    break;
                case TIMER:
                    ruleset.setTimer( value );
                    break;
                case ENABLED:
                    ruleset.setEnabled( RuleSheetParserUtil.isStringMeaningTrue( value ) );
                    break;
                case CALENDARS:
                    ruleset.setCalendars( value );
                    break;
                case NOLOOP:
                    ruleset.setNoLoop( RuleSheetParserUtil.isStringMeaningTrue( value ) );
                    break;
                case LOCKONACTIVE:
                    ruleset.setLockOnActive( RuleSheetParserUtil.isStringMeaningTrue( value ) );
                    break;
                case AUTOFOCUS:
                    ruleset.setAutoFocus( RuleSheetParserUtil.isStringMeaningTrue( value ) );
                    break;
                case ACTIVATIONGROUP:
                    ruleset.setActivationGroup( value );
                    break;
                case AGENDAGROUP:
                    ruleset.setAgendaGroup( value );
                    break;
                case RULEFLOWGROUP:
                    ruleset.setRuleFlowGroup( value );
                    break;
                case DATEEFFECTIVE:
                    ruleset.setDateEffective( value );
                    break;
                case DATEEXPIRES:
                    ruleset.setDateExpires( value );
                    break;
                }
            }
        }
        return ruleset;
    }

    /*
     * (non-Javadoc)
     *
     * @see my.hssf.util.SheetListener#startSheet(java.lang.String)
     */
    public void startSheet(final String name) {
        // nothing to see here... move along..
    }

    /*
     * (non-Javadoc)
     *
     * @see my.hssf.util.SheetListener#finishSheet()
     *
     * Flushes both the property listener and any rule still under construction.
     */
    public void finishSheet() {
        this._propertiesListener.finishSheet();
        finishRuleTable();
        flushRule();
    }

    /*
     * (non-Javadoc)
     *
     * @see my.hssf.util.SheetListener#newRow()
     *
     * The completed rule (if any) is flushed at each row boundary.
     */
    public void newRow(final int rowNumber,
                       final int columns) {
        if ( _currentRule != null ) flushRule();
        // nothing to see here... these aren't the droids you're looking for..
        // move along...
    }

    /**
     * This makes sure that the rules have all their components added.
     * As when there are merged/spanned cells, they may be left out.
     */
    private void flushRule() {
        for ( Iterator<SourceBuilder> iter = sourceBuilders.iterator(); iter.hasNext(); ) {
            SourceBuilder src = iter.next();
            if ( src.hasValues() ) {
                switch( src.getActionTypeCode() ){
                case CONDITION:
                    Condition cond = new Condition();
                    cond.setSnippet( src.getResult() );
                    _currentRule.addCondition( cond );
                    break;
                case ACTION:
                    Consequence cons = new Consequence();
                    cons.setSnippet( src.getResult() );
                    _currentRule.addConsequence( cons );
                    break;
                case METADATA:
                    _currentRule.addMetadata( src.getResult() );
                    break;
                }
                // Reset the builder so the next rule row starts clean.
                src.clearValues();
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see my.hssf.util.SheetListener#newCell(int, int, java.lang.String)
     *
     * Dispatches a cell either into the current RuleTable or to the
     * sheet-level properties listener, skipping empty cells and any further
     * cells on a table's header row.
     */
    public void newCell(final int row,
                        final int column,
                        final String value,
                        int mergedColStart) {
        if ( isCellValueEmpty( value ) ) {
            return;
        }
        if ( _isInRuleTable && row == this._ruleStartRow ) {
            return;
        }
        if ( this._isInRuleTable ) {
            processRuleCell( row, column, value, mergedColStart );
        } else {
            processNonRuleCell( row, column, value );
        }
    }

    /**
     * This gets called each time a "new" rule table is found.
     */
    private void initRuleTable(final int row,
                               final int column,
                               final String value) {
        preInitRuleTable( row, column, value );
        this._isInRuleTable = true;
        this._actions = new HashMap<Integer, ActionType>();
        this.sourceBuilders = new ArrayList<SourceBuilder>();
        this._ruleStartColumn = column;
        this._ruleStartRow = row;
        // First data row sits below the four structural rows (LABEL_ROW == 4).
        this._ruleRow = row + LABEL_ROW + 1;

        // Set up state for the rules to come (the order of these steps is important!).
        this._currentRulePrefix = RuleSheetParserUtil.getRuleName( value );
        if (this.worksheetName != null) {
            this._currentRulePrefix += " " + worksheetName;
        }
        this._currentSequentialFlag = getSequentialFlag();
        this._currentEscapeQuotesFlag = getEscapeQuotesFlag();

        String headCell = RuleSheetParserUtil.rc2name( this._ruleStartRow, this._ruleStartColumn );
        String ruleCell = RuleSheetParserUtil.rc2name( this._ruleRow, this._ruleStartColumn );
        this._currentRule = createNewRuleForRow( this._ruleRow, headCell, ruleCell );

        this._ruleList.add( this._currentRule );
        postInitRuleTable( row, column, value );
    }

    /**
     * Called before rule table initialisation. Subclasses may
     * override this method to do additional processing.
     */
    protected void preInitRuleTable(int row,
                                    int column,
                                    String value) {
    }

    protected Rule getCurrentRule() {
        return _currentRule;
    }

    /**
     * Called after rule table initialisation. Subclasses may
     * override this method to do additional processing.
     */
    protected void postInitRuleTable(int row,
                                     int column,
                                     String value) {
    }

    // Reads the sheet-level "Sequential" property; defaults to false.
    private boolean getSequentialFlag() {
        final String seqFlag = getProperties().getSingleProperty( SEQUENTIAL_FLAG, "false" );
        return RuleSheetParserUtil.isStringMeaningTrue( seqFlag );
    }

    // Reads the sheet-level "EscapeQuotes" property; defaults to true.
    private boolean getEscapeQuotesFlag() {
        final String escFlag = getProperties().getSingleProperty( ESCAPE_QUOTES_FLAG, "true" );
        return RuleSheetParserUtil.isStringMeaningTrue( escFlag );
    }

    // Leaves the "inside a RuleTable" state; safe to call when already outside.
    private void finishRuleTable() {
        if ( this._isInRuleTable ) {
            this._currentSequentialFlag = false;
            this._isInRuleTable = false;
        }
    }

    // Outside a RuleTable: a "RuleTable" cell starts a new table; anything else
    // is treated as a sheet-level property.
    private void processNonRuleCell(final int row,
                                    final int column,
                                    final String value) {
        String testVal = value.trim().toLowerCase();
        if ( testVal.startsWith( RULE_TABLE_TAG ) ) {
            initRuleTable( row, column, value.trim() );
        } else {
            this._propertiesListener.newCell( row, column, value, RuleSheetListener.NON_MERGED );
        }
    }

    // Inside a RuleTable: route the cell to the handler for its structural row
    // (action/object-type/code/label) or treat it as a rule data cell.
    private void processRuleCell(final int row,
                                 final int column,
                                 final String value,
                                 final int mergedColStart) {
        String trimVal = value.trim();
        String testVal = trimVal.toLowerCase();
        if ( testVal.startsWith( RULE_TABLE_TAG ) ) {
            // A new table header ends the current table immediately.
            finishRuleTable();
            initRuleTable( row, column, trimVal );
            return;
        }

        // Ignore any comments cells preceding the first rule table column
        if ( column < this._ruleStartColumn ) {
            return;
        }

        // Ignore any further cells from the rule def row
        if ( row == this._ruleStartRow ) {
            return;
        }

        switch ( row - this._ruleStartRow ) {
            case ACTION_ROW :
                ActionType.addNewActionType( this._actions, trimVal, column, row );
                break;
            case OBJECT_TYPE_ROW :
                objectTypeRow( row, column, trimVal, mergedColStart );
                break;
            case CODE_ROW :
                codeRow( row, column, trimVal );
                break;
            case LABEL_ROW :
                labelRow( row, column, trimVal );
                break;
            default :
                nextDataCell( row, column, trimVal );
                break;
        }
    }

    /**
     * This is for handling a row where an object declaration may appear,
     * this is the row immediately above the snippets.
     * It may be blank, but there has to be a row here.
     *
     * Merged cells have "special meaning" which is why this is so freaking hard.
     * A future refactor may be to move away from an "event" based listener.
     */
    private void objectTypeRow(final int row,
                               final int column,
                               final String value,
                               final int mergedColStart) {
        // Snippet markers here mean the user skipped the declaration row entirely.
        if ( value.indexOf( "$param" ) > -1 || value.indexOf( "$1" ) > -1 ) {
            throw new DecisionTableParseException( "It looks like you have snippets in the row that is " +
                    "meant for object declarations." + " Please insert an additional row before the snippets, " +
                    "at cell " + RuleSheetParserUtil.rc2name( row, column ) );
        }
        ActionType action = getActionForColumn( row, column );
        if ( mergedColStart == RuleSheetListener.NON_MERGED ) {
            if ( action.getCode() == Code.CONDITION ) {
                SourceBuilder src = new LhsBuilder( row-1, column, value );
                action.setSourceBuilder( src );
                this.sourceBuilders.add( src );

            } else if ( action.getCode() == Code.ACTION ) {
                SourceBuilder src = new RhsBuilder( Code.ACTION, row-1, column, value );
                action.setSourceBuilder( src );
                this.sourceBuilders.add( src );
            }
        } else {
            // Merged stretch: the first column owns the builder; the rest share it.
            if ( column == mergedColStart ) {
                if ( action.getCode() == Code.CONDITION ) {
                    action.setSourceBuilder( new LhsBuilder( row-1, column, value ) );
                    this.sourceBuilders.add( action.getSourceBuilder() );
                } else if ( action.getCode() == Code.ACTION ) {
                    action.setSourceBuilder( new RhsBuilder( Code.ACTION, row-1, column, value ) );
                    this.sourceBuilders.add( action.getSourceBuilder() );
                }
            } else {
                ActionType startOfMergeAction = getActionForColumn( row, mergedColStart );
                action.setSourceBuilder( startOfMergeAction.getSourceBuilder() );
            }
        }
    }

    // Handles the code-snippet row; creates a default builder for columns whose
    // object-type row was blank, then records the snippet template.
    private void codeRow(final int row,
                         final int column,
                         final String value) {
        final ActionType actionType = getActionForColumn( row, column );
        if ( actionType.getSourceBuilder() == null ) {
            if ( actionType.getCode() == Code.CONDITION ) {
                actionType.setSourceBuilder( new LhsBuilder( row-2, column, null ) );
                this.sourceBuilders.add( actionType.getSourceBuilder() );
            } else if ( actionType.getCode() == Code.ACTION ) {
                actionType.setSourceBuilder( new RhsBuilder( Code.ACTION, row-2, column, null ) );
                this.sourceBuilders.add( actionType.getSourceBuilder() );
            } else if ( actionType.getCode() == Code.SALIENCE ) {
                actionType.setSourceBuilder( new LhsBuilder( row-2, column, null ) );
                this.sourceBuilders.add( actionType.getSourceBuilder() );
            } else if ( actionType.getCode() == Code.METADATA ) {
                actionType.setSourceBuilder( new RhsBuilder( Code.METADATA, row-2, column, null ) );
                this.sourceBuilders.add( actionType.getSourceBuilder() );
            }
        }

        // Condition/action/metadata columns must carry an actual snippet.
        if ( value.trim().equals( "" ) &&
             (actionType.getCode() == Code.ACTION ||
              actionType.getCode() == Code.CONDITION ||
              actionType.getCode() == Code.METADATA) ) {
            throw new DecisionTableParseException( "Code description in cell " +
                    RuleSheetParserUtil.rc2name( row, column ) +
                    " does not contain any code specification. It should!" );
        }

        actionType.addTemplate( row, column, value );
    }

    // Handles the comment/label row: stores the user's label, or a generated
    // "From cell:" placeholder when the label cell is blank.
    private void labelRow(final int row,
                          final int column,
                          final String value) {
        final ActionType actionType = getActionForColumn( row, column );

        if ( ! value.trim().equals( "" ) &&
             (actionType.getCode() == Code.ACTION ||
              actionType.getCode() == Code.CONDITION) ) {
            this._cellComments.put( new Integer( column ), value );
        } else {
            this._cellComments.put( new Integer( column ),
                    "From cell: " + RuleSheetParserUtil.rc2name( row, column ) );
        }
    }

    // Looks up the column's header action type; a data cell in a column with no
    // recognized header is a user error.
    private ActionType getActionForColumn(final int row,
                                          final int column) {
        final ActionType actionType = this._actions.get( new Integer( column ) );

        if ( actionType == null ) {
            throw new DecisionTableParseException( "Code description in cell " +
                    RuleSheetParserUtil.rc2name( row, column ) +
                    " does not have an 'ACTION' or 'CONDITION' column header." );
        }

        return actionType;
    }

    // Handles one cell of a rule data row: starts a new Rule when the row
    // advances, then applies the cell to the column's action type.
    private void nextDataCell(final int row,
                              final int column,
                              final String value) {
        final ActionType actionType = getActionForColumn( row, column );

        if ( row - this._ruleRow > 1 ) {
            // Encountered a row gap from the last rule.
            // This is not part of the ruleset.
            finishRuleTable();
            processNonRuleCell( row, column, value );
            return;
        }

        if ( row > this._ruleRow ) {
            // In a new row/rule
            String headCell = RuleSheetParserUtil.rc2name( this._ruleStartRow, this._ruleStartColumn );
            String ruleCell = RuleSheetParserUtil.rc2name( row, this._ruleStartColumn );
            this._currentRule = createNewRuleForRow( row, headCell, ruleCell );
            this._ruleList.add( this._currentRule );
            this._ruleRow++;
        }

        switch( actionType.getCode() ){
        case CONDITION:
        case ACTION:
        case METADATA:
            if (actionType.getSourceBuilder() == null) {
                throw new DecisionTableParseException( "Data cell " +
                        RuleSheetParserUtil.rc2name( row, column ) +
                        " has an empty column header." );
            }
            actionType.addCellValue( row, column, value, _currentEscapeQuotesFlag );
            break;
        case SALIENCE:
            // Only if rule set is not sequential!
            if( ! this._currentSequentialFlag ){
                // A parenthesized value is passed through verbatim (dynamic salience);
                // otherwise it must parse as an integer literal.
                if( value.startsWith( "(" ) && value.endsWith( ")" ) ){
                    this._currentRule.setSalience( value );
                } else {
                    try {
                        this._currentRule.setSalience( new Integer( value ) );
                    } catch( NumberFormatException nfe ){
                        throw new DecisionTableParseException( "Priority is not an integer literal, in cell " +
                                RuleSheetParserUtil.rc2name( row, column ) );
                    }
                }
            }
            break;
        case NAME:
            this._currentRule.setName( value );
            break;
        case DESCRIPTION:
            this._currentRule.setDescription( value );
            break;
        case ACTIVATIONGROUP:
            this._currentRule.setActivationGroup( value );
            break;
        case AGENDAGROUP:
            this._currentRule.setAgendaGroup( value );
            break;
        case RULEFLOWGROUP:
            this._currentRule.setRuleFlowGroup( value );
            break;
        case NOLOOP:
            this._currentRule.setNoLoop( RuleSheetParserUtil.isStringMeaningTrue( value ) );
            break;
        case LOCKONACTIVE:
            this._currentRule.setLockOnActive( RuleSheetParserUtil.isStringMeaningTrue( value ) );
            break;
        case AUTOFOCUS:
            this._currentRule.setAutoFocus( RuleSheetParserUtil.isStringMeaningTrue( value ) );
            break;
        case DURATION:
            try {
                this._currentRule.setDuration( new Long( value ) );
            } catch( NumberFormatException nfe ){
                throw new DecisionTableParseException( "Duration is not an integer literal, in cell " +
                        RuleSheetParserUtil.rc2name( row, column ) );
            }
            break;
        case TIMER:
            this._currentRule.setTimer( value );
            break;
        case ENABLED:
            this._currentRule.setEnabled( RuleSheetParserUtil.isStringMeaningTrue( value ) );
            break;
        case CALENDARS:
            this._currentRule.setCalendars( value );
            break;
        case DATEEFFECTIVE:
            this._currentRule.setDateEffective( value );
            break;
        case DATEEXPIRES:
            this._currentRule.setDateExpires( value );
            break;
        }
    }

    // Creates a Rule named "<prefix>_<spreadsheetRow>"; in sequential mode the
    // salience is derived from the row so earlier rows fire first.
    private Rule createNewRuleForRow(final int row,
                                     final String headCell,
                                     final String ruleCell ) {
        Integer salience = null;
        if ( this._currentSequentialFlag ) {
            salience = new Integer( Rule.calcSalience( row ) );
        }
        // Spreadsheet rows are displayed 1-based while the listener is 0-based.
        final int spreadsheetRow = row + 1;
        final String name = this._currentRulePrefix + "_" + spreadsheetRow;
        final Rule rule = new Rule( name, salience, spreadsheetRow );
        rule.setComment( " rule values at " + ruleCell + ", header at " + headCell );

        return rule;
    }

    // A cell counts as empty when it is null or only whitespace.
    private boolean isCellValueEmpty(final String value) {
        return value == null || "".equals( value.trim() );
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.simplesystemsmanagement.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result of a <code>DescribePatchBaselines</code> call: one page of patch baseline summaries plus the pagination token
 * for requesting the following page.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-2014-11-06/DescribePatchBaselines" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribePatchBaselinesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Page of <code>PatchBaselineIdentity</code> summaries; lazily created on first read. */
    private com.amazonaws.internal.SdkInternalList<PatchBaselineIdentity> baselineIdentities;

    /** Token for the next page of results; empty when no additional items remain. */
    private String nextToken;

    /**
     * Returns the baseline summaries, materializing an empty internal list on first access so callers never observe
     * <code>null</code>.
     *
     * @return An array of <code>PatchBaselineIdentity</code> elements.
     */
    public java.util.List<PatchBaselineIdentity> getBaselineIdentities() {
        if (baselineIdentities == null) {
            baselineIdentities = new com.amazonaws.internal.SdkInternalList<PatchBaselineIdentity>();
        }
        return baselineIdentities;
    }

    /**
     * Replaces the stored baseline summaries with a defensive copy of the supplied collection.
     *
     * @param baselineIdentities
     *        An array of <code>PatchBaselineIdentity</code> elements; <code>null</code> clears the field.
     */
    public void setBaselineIdentities(java.util.Collection<PatchBaselineIdentity> baselineIdentities) {
        this.baselineIdentities = (baselineIdentities == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<PatchBaselineIdentity>(baselineIdentities);
    }

    /**
     * Appends the given elements to any already present.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setBaselineIdentities(java.util.Collection)} or {@link #withBaselineIdentities(java.util.Collection)} if
     * you want to override the existing values.
     * </p>
     *
     * @param baselineIdentities
     *        An array of <code>PatchBaselineIdentity</code> elements.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribePatchBaselinesResult withBaselineIdentities(PatchBaselineIdentity... baselineIdentities) {
        if (this.baselineIdentities == null) {
            setBaselineIdentities(new com.amazonaws.internal.SdkInternalList<PatchBaselineIdentity>(baselineIdentities.length));
        }
        for (PatchBaselineIdentity identity : baselineIdentities) {
            this.baselineIdentities.add(identity);
        }
        return this;
    }

    /**
     * Replaces the stored baseline summaries (collection form of the fluent setter).
     *
     * @param baselineIdentities
     *        An array of <code>PatchBaselineIdentity</code> elements.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribePatchBaselinesResult withBaselineIdentities(java.util.Collection<PatchBaselineIdentity> baselineIdentities) {
        setBaselineIdentities(baselineIdentities);
        return this;
    }

    /**
     * Sets the token to use when requesting the next set of items.
     *
     * @param nextToken
     *        The token to use when requesting the next set of items. If there are no additional items to return, the
     *        string is empty.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the token to use when requesting the next set of items.
     *
     * @return The token to use when requesting the next set of items. If there are no additional items to return, the
     *         string is empty.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent form of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        The token to use when requesting the next set of items.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribePatchBaselinesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getBaselineIdentities() != null) {
            buf.append("BaselineIdentities: ").append(getBaselineIdentities()).append(",");
        }
        if (getNextToken() != null) {
            buf.append("NextToken: ").append(getNextToken());
        }
        return buf.append("}").toString();
    }

    /** Value equality over both attributes; null-safe via {@code Objects.equals}. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribePatchBaselinesResult)) {
            return false;
        }
        DescribePatchBaselinesResult that = (DescribePatchBaselinesResult) obj;
        return java.util.Objects.equals(getBaselineIdentities(), that.getBaselineIdentities())
                && java.util.Objects.equals(getNextToken(), that.getNextToken());
    }

    /** Same 31-prime accumulation over both attributes as {@code Objects.hash} produces. */
    @Override
    public int hashCode() {
        return java.util.Objects.hash(getBaselineIdentities(), getNextToken());
    }

    /** Shallow copy via {@code Object.clone()}. */
    @Override
    public DescribePatchBaselinesResult clone() {
        try {
            return (DescribePatchBaselinesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable: the class declares Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.maven.project;

import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.maven.model.*;
import org.jetbrains.idea.maven.server.MavenEmbedderWrapper;
import org.jetbrains.idea.maven.server.MavenServerExecutionResult;
import org.jetbrains.idea.maven.server.MavenServerManager;
import org.jetbrains.idea.maven.server.ProfileApplicationResult;
import org.jetbrains.idea.maven.utils.MavenJDOMUtil;
import org.jetbrains.idea.maven.utils.MavenLog;
import org.jetbrains.idea.maven.utils.MavenProcessCanceledException;
import org.jetbrains.idea.maven.utils.MavenUtil;

import java.io.File;
import java.io.IOException;
import java.util.*;

import static com.intellij.openapi.util.text.StringUtil.isEmptyOrSpaces;
import static org.jetbrains.idea.maven.utils.MavenUtil.getBaseDir;

/**
 * Reads Maven project models (pom.xml and non-XML model files) into {@link MavenModel}s:
 * parses the raw XML, resolves parent inheritance, applies settings.xml/profiles.xml
 * profiles, and repairs missing defaults (directories, resources, final name).
 * <p>
 * Not thread-safe: it keeps per-instance caches ({@link #myRawModelsCache},
 * {@link #mySettingsProfilesCache}) that are mutated without synchronization.
 */
public class MavenProjectReader {
  private static final String UNKNOWN = MavenId.UNKNOWN_VALUE;

  // Caches the raw (un-interpolated, inheritance-unresolved) model per pom file.
  private final Map<VirtualFile, RawModelReadResult> myRawModelsCache = new THashMap<>();
  private final Project myProject;
  // Lazily filled on first use; profiles declared in settings.xml files.
  private SettingsProfilesCache mySettingsProfilesCache;

  public MavenProjectReader(Project project) {
    myProject = project;
  }

  /**
   * Reads, resolves and interpolates a single project file.
   *
   * @param generalSettings  global Maven settings (settings.xml locations etc.)
   * @param file             the pom (or model) file to read
   * @param explicitProfiles profiles explicitly enabled/disabled by the user
   * @param locator          resolves a MavenId to a project file for parent lookup
   * @return the read result; problems are collected rather than thrown
   */
  public MavenProjectReaderResult readProject(MavenGeneralSettings generalSettings,
                                              VirtualFile file,
                                              MavenExplicitProfiles explicitProfiles,
                                              MavenProjectReaderProjectLocator locator) {
    Pair<RawModelReadResult, MavenExplicitProfiles> readResult =
      doReadProjectModel(generalSettings, file, explicitProfiles, new THashSet<>(), locator);

    File basedir = getBaseDir(file);
    // Delegate property interpolation and path alignment to the Maven server process.
    MavenModel model = MavenServerManager.getInstance().interpolateAndAlignModel(readResult.first.model, basedir);

    // Flat key/value view of the model, used by consumers for property expansion.
    Map<String, String> modelMap = new HashMap<>();
    modelMap.put("groupId", model.getMavenId().getGroupId());
    modelMap.put("artifactId", model.getMavenId().getArtifactId());
    modelMap.put("version", model.getMavenId().getVersion());
    modelMap.put("build.outputDirectory", model.getBuild().getOutputDirectory());
    modelMap.put("build.testOutputDirectory", model.getBuild().getTestOutputDirectory());
    modelMap.put("build.finalName", model.getBuild().getFinalName());
    modelMap.put("build.directory", model.getBuild().getDirectory());

    return new MavenProjectReaderResult(model, modelMap, readResult.second, null, readResult.first.problems, new THashSet<>());
  }

  /**
   * Reads the raw model (via cache), then resolves inheritance, adds settings
   * profiles and applies the active profiles.
   *
   * @param recursionGuard files currently being processed, to break parent cycles
   * @return the processed model plus the set of profiles that were activated
   */
  private Pair<RawModelReadResult, MavenExplicitProfiles> doReadProjectModel(MavenGeneralSettings generalSettings,
                                                                             VirtualFile file,
                                                                             MavenExplicitProfiles explicitProfiles,
                                                                             Set<VirtualFile> recursionGuard,
                                                                             MavenProjectReaderProjectLocator locator) {
    RawModelReadResult cachedModel = myRawModelsCache.get(file);
    if (cachedModel == null) {
      cachedModel = doReadProjectModel(file, false);
      myRawModelsCache.put(file, cachedModel);
    }

    // todo modifying cached model and problems here??????
    // NOTE(review): the model/problems below come straight from the cache and are
    // mutated by the subsequent steps — looks like a shared-state hazard; confirm.
    MavenModel model = cachedModel.model;
    Set<String> alwaysOnProfiles = cachedModel.alwaysOnProfiles;
    Collection<MavenProjectProblem> problems = cachedModel.problems;

    model = resolveInheritance(generalSettings, model, file, explicitProfiles, recursionGuard, locator, problems);
    addSettingsProfiles(generalSettings, model, alwaysOnProfiles, problems);

    ProfileApplicationResult applied = applyProfiles(model, getBaseDir(file), explicitProfiles, alwaysOnProfiles);
    model = applied.getModel();

    repairModelBody(model);

    return Pair.create(new RawModelReadResult(model, problems, alwaysOnProfiles), applied.getActivatedProfiles());
  }

  /**
   * Reads the raw model of a single file without resolving inheritance.
   * Non pom/xml files are delegated to a Maven embedder; XML files are parsed
   * directly with JDOM.
   *
   * @param headerOnly if true, stops after reading groupId/artifactId/version/parent
   */
  private RawModelReadResult doReadProjectModel(VirtualFile file, boolean headerOnly) {
    MavenModel result = null;
    Collection<MavenProjectProblem> problems = MavenProjectProblem.createProblemsList();
    Set<String> alwaysOnProfiles = new THashSet<>();

    String fileExtension = file.getExtension();
    if (!"pom".equalsIgnoreCase(fileExtension) && !"xml".equalsIgnoreCase(fileExtension)) {
      // Not a plain XML pom — let a real Maven embedder read the model.
      File basedir = getBaseDir(file);
      MavenEmbeddersManager manager = MavenProjectsManager.getInstance(myProject).getEmbeddersManager();
      MavenEmbedderWrapper embedder = manager.getEmbedder(MavenEmbeddersManager.FOR_MODEL_READ, basedir.getPath(), basedir.getPath());
      try {
        result = embedder.readModel(VfsUtilCore.virtualToIoFile(file));
      }
      // Cancellation intentionally falls through to the null-result fallback below.
      catch (MavenProcessCanceledException ignore) {
      }
      finally {
        manager.release(embedder);
      }

      // NOTE(review): both branches return the same expression; only the fallback
      // model construction differs. Kept as-is to preserve the original structure.
      if (result == null) {
        result = new MavenModel();
        result.setPackaging(MavenConstants.TYPE_JAR);
        return new RawModelReadResult(result, problems, alwaysOnProfiles);
      }
      else {
        return new RawModelReadResult(result, problems, alwaysOnProfiles);
      }
    }

    result = new MavenModel();
    Element xmlProject = readXml(file, problems, MavenProjectProblem.ProblemType.SYNTAX);
    if (xmlProject == null || !"project".equals(xmlProject.getName())) {
      // Unreadable or not a <project> document: fall back to an empty jar model.
      result.setPackaging(MavenConstants.TYPE_JAR);
      return new RawModelReadResult(result, problems, alwaysOnProfiles);
    }

    MavenParent parent;
    if (MavenJDOMUtil.hasChildByPath(xmlProject, "parent")) {
      parent = new MavenParent(new MavenId(MavenJDOMUtil.findChildValueByPath(xmlProject, "parent.groupId", UNKNOWN),
                                           MavenJDOMUtil.findChildValueByPath(xmlProject, "parent.artifactId", UNKNOWN),
                                           MavenJDOMUtil.findChildValueByPath(xmlProject, "parent.version", UNKNOWN)),
                               MavenJDOMUtil.findChildValueByPath(xmlProject, "parent.relativePath", "../pom.xml"));
      result.setParent(parent);
    }
    else {
      // Dummy parent so groupId/version defaulting below still works.
      parent = new MavenParent(new MavenId(UNKNOWN, UNKNOWN, UNKNOWN), "../pom.xml");
    }

    // groupId and version default to the parent's values, per Maven semantics.
    result.setMavenId(new MavenId(MavenJDOMUtil.findChildValueByPath(xmlProject, "groupId", parent.getMavenId().getGroupId()),
                                  MavenJDOMUtil.findChildValueByPath(xmlProject, "artifactId", UNKNOWN),
                                  MavenJDOMUtil.findChildValueByPath(xmlProject, "version", parent.getMavenId().getVersion())));

    if (headerOnly) return new RawModelReadResult(result, problems, alwaysOnProfiles);

    result.setPackaging(MavenJDOMUtil.findChildValueByPath(xmlProject, "packaging", MavenConstants.TYPE_JAR));
    result.setName(MavenJDOMUtil.findChildValueByPath(xmlProject, "name"));

    readModelBody(result, result.getBuild(), xmlProject);

    result.setProfiles(collectProfiles(file, xmlProject, problems, alwaysOnProfiles));
    return new RawModelReadResult(result, problems, alwaysOnProfiles);
  }

  /** Copies modules, properties and build settings from the XML element into the model. */
  private static void readModelBody(MavenModelBase mavenModelBase, MavenBuildBase mavenBuildBase, Element xmlModel) {
    mavenModelBase.setModules(MavenJDOMUtil.findChildrenValuesByPath(xmlModel, "modules", "module"));
    collectProperties(MavenJDOMUtil.findChildByPath(xmlModel, "properties"), mavenModelBase);

    Element xmlBuild = MavenJDOMUtil.findChildByPath(xmlModel, "build");

    mavenBuildBase.setFinalName(MavenJDOMUtil.findChildValueByPath(xmlBuild, "finalName"));
    mavenBuildBase.setDefaultGoal(MavenJDOMUtil.findChildValueByPath(xmlBuild, "defaultGoal"));
    mavenBuildBase.setDirectory(MavenJDOMUtil.findChildValueByPath(xmlBuild, "directory"));
    mavenBuildBase.setResources(collectResources(MavenJDOMUtil.findChildrenByPath(xmlBuild, "resources", "resource")));
    mavenBuildBase.setTestResources(collectResources(MavenJDOMUtil.findChildrenByPath(xmlBuild, "testResources", "testResource")));
    mavenBuildBase.setFilters(MavenJDOMUtil.findChildrenValuesByPath(xmlBuild, "filters", "filter"));

    // Source directories only exist on a full MavenBuild, not on profile builds.
    if (mavenBuildBase instanceof MavenBuild) {
      MavenBuild mavenBuild = (MavenBuild)mavenBuildBase;

      String source = MavenJDOMUtil.findChildValueByPath(xmlBuild, "sourceDirectory");
      if (!isEmptyOrSpaces(source)) mavenBuild.addSource(source);
      String testSource = MavenJDOMUtil.findChildValueByPath(xmlBuild, "testSourceDirectory");
      if (!isEmptyOrSpaces(testSource)) mavenBuild.addTestSource(testSource);

      mavenBuild.setOutputDirectory(MavenJDOMUtil.findChildValueByPath(xmlBuild, "outputDirectory"));
      mavenBuild.setTestOutputDirectory(MavenJDOMUtil.findChildValueByPath(xmlBuild, "testOutputDirectory"));
    }
  }

  /** Converts <resource>/<testResource> elements into MavenResource objects. */
  private static List<MavenResource> collectResources(List<Element> xmlResources) {
    List<MavenResource> result = new ArrayList<>();
    for (Element each : xmlResources) {
      result.add(new MavenResource(MavenJDOMUtil.findChildValueByPath(each, "directory"),
                                   "true".equals(MavenJDOMUtil.findChildValueByPath(each, "filtering")),
                                   MavenJDOMUtil.findChildValueByPath(each, "targetPath"),
                                   MavenJDOMUtil.findChildrenValuesByPath(each, "includes", "include"),
                                   MavenJDOMUtil.findChildrenValuesByPath(each, "excludes", "exclude")));
    }
    return result;
  }

  /** Fills in Maven's standard defaults for any build settings the pom left empty. */
  private void repairModelBody(MavenModel model) {
    MavenBuild build = model.getBuild();

    if (isEmptyOrSpaces(build.getFinalName())) {
      build.setFinalName("${project.artifactId}-${project.version}");
    }

    if (build.getSources().isEmpty()) build.addSource("src/main/java");
    if (build.getTestSources().isEmpty()) build.addTestSource("src/test/java");

    build.setResources(repairResources(build.getResources(), "src/main/resources"));
    build.setTestResources(repairResources(build.getTestResources(), "src/test/resources"));

    build.setDirectory(isEmptyOrSpaces(build.getDirectory()) ? "target" : build.getDirectory());
    build.setOutputDirectory(isEmptyOrSpaces(build.getOutputDirectory())
                             ? "${project.build.directory}/classes" : build.getOutputDirectory());
    build.setTestOutputDirectory(isEmptyOrSpaces(build.getTestOutputDirectory())
                                 ? "${project.build.directory}/test-classes" : build.getTestOutputDirectory());
  }

  /**
   * Returns the resources with empty directories dropped; if none are declared,
   * returns a single default resource for {@code defaultDir}.
   */
  private List<MavenResource> repairResources(List<MavenResource> resources, String defaultDir) {
    List<MavenResource> result = new ArrayList<>();
    if (resources.isEmpty()) {
      result.add(createResource(defaultDir));
      return result;
    }

    for (MavenResource each : resources) {
      if (isEmptyOrSpaces(each.getDirectory())) continue;
      result.add(each);
    }
    return result;
  }

  /** Builds a plain, non-filtered resource with no includes/excludes. */
  private MavenResource createResource(String directory) {
    return new MavenResource(directory, false, null, Collections.emptyList(), Collections.emptyList());
  }

  /** Collects profiles declared in the pom itself plus any sibling profiles.xml file. */
  private List<MavenProfile> collectProfiles(VirtualFile projectFile,
                                             Element xmlProject,
                                             Collection<MavenProjectProblem> problems,
                                             Set<String> alwaysOnProfiles) {
    List<MavenProfile> result = new ArrayList<>();
    collectProfiles(MavenJDOMUtil.findChildrenByPath(xmlProject, "profiles", "profile"), result, MavenConstants.PROFILE_FROM_POM);

    VirtualFile profilesFile = MavenUtil.findProfilesXmlFile(projectFile);
    if (profilesFile != null) {
      collectProfilesFromSettingsXmlOrProfilesXml(profilesFile,
                                                  "profilesXml",
                                                  true,
                                                  MavenConstants.PROFILE_FROM_PROFILES_XML,
                                                  result,
                                                  alwaysOnProfiles,
                                                  problems);
    }

    return result;
  }

  /**
   * Merges profiles declared in the user's settings.xml files into the model.
   * The settings files are read once and cached in {@link #mySettingsProfilesCache}.
   */
  private void addSettingsProfiles(MavenGeneralSettings generalSettings,
                                   MavenModel model,
                                   Set<String> alwaysOnProfiles,
                                   Collection<MavenProjectProblem> problems) {
    if (mySettingsProfilesCache == null) {
      List<MavenProfile> settingsProfiles = new ArrayList<>();
      Collection<MavenProjectProblem> settingsProblems = MavenProjectProblem.createProblemsList();
      Set<String> settingsAlwaysOnProfiles = new THashSet<>();

      for (VirtualFile each : generalSettings.getEffectiveSettingsFiles()) {
        collectProfilesFromSettingsXmlOrProfilesXml(each,
                                                    "settings",
                                                    false,
                                                    MavenConstants.PROFILE_FROM_SETTINGS_XML,
                                                    settingsProfiles,
                                                    settingsAlwaysOnProfiles,
                                                    settingsProblems);
      }
      mySettingsProfilesCache = new SettingsProfilesCache(settingsProfiles, settingsAlwaysOnProfiles, settingsProblems);
    }

    // Model profiles win over settings profiles with the same id.
    List<MavenProfile> modelProfiles = new ArrayList<>(model.getProfiles());
    for (MavenProfile each : mySettingsProfilesCache.profiles) {
      addProfileIfDoesNotExist(each, modelProfiles);
    }
    model.setProfiles(modelProfiles);

    problems.addAll(mySettingsProfilesCache.problems);
    alwaysOnProfiles.addAll(mySettingsProfilesCache.alwaysOnProfiles);
  }

  /**
   * Reads profiles and activeProfiles from a settings.xml or profiles.xml document.
   *
   * @param wrapRootIfNecessary profiles.xml may contain a bare <profiles> root;
   *                            wrap it so the path lookups below still work
   */
  private void collectProfilesFromSettingsXmlOrProfilesXml(VirtualFile profilesFile,
                                                           String rootElementName,
                                                           boolean wrapRootIfNecessary,
                                                           String profilesSource,
                                                           List<MavenProfile> result,
                                                           Set<String> alwaysOnProfiles,
                                                           Collection<MavenProjectProblem> problems) {
    Element rootElement = readXml(profilesFile, problems, MavenProjectProblem.ProblemType.SETTINGS_OR_PROFILES);
    if (rootElement == null) return;

    if (wrapRootIfNecessary && !rootElementName.equals(rootElement.getName())) {
      Element wrapper = new Element(rootElementName);
      wrapper.addContent(rootElement);
      rootElement = wrapper;
    }

    List<Element> xmlProfiles = MavenJDOMUtil.findChildrenByPath(rootElement, "profiles", "profile");
    collectProfiles(xmlProfiles, result, profilesSource);

    alwaysOnProfiles.addAll(MavenJDOMUtil.findChildrenValuesByPath(rootElement, "activeProfiles", "activeProfile"));
  }

  /** Parses <profile> elements (id, activation, build body) into MavenProfiles. */
  private void collectProfiles(List<Element> xmlProfiles, List<MavenProfile> result, String source) {
    for (Element each : xmlProfiles) {
      String id = MavenJDOMUtil.findChildValueByPath(each, "id");
      if (isEmptyOrSpaces(id)) continue;

      MavenProfile profile = new MavenProfile(id, source);
      // First declaration of an id wins; skip duplicates entirely.
      if (!addProfileIfDoesNotExist(profile, result)) continue;

      Element xmlActivation = MavenJDOMUtil.findChildByPath(each, "activation");
      if (xmlActivation != null) {
        MavenProfileActivation activation = new MavenProfileActivation();
        activation.setActiveByDefault("true".equals(MavenJDOMUtil.findChildValueByPath(xmlActivation, "activeByDefault")));

        Element xmlOS = MavenJDOMUtil.findChildByPath(xmlActivation, "os");
        if (xmlOS != null) {
          activation.setOs(new MavenProfileActivationOS(
            MavenJDOMUtil.findChildValueByPath(xmlOS, "name"),
            MavenJDOMUtil.findChildValueByPath(xmlOS, "family"),
            MavenJDOMUtil.findChildValueByPath(xmlOS, "arch"),
            MavenJDOMUtil.findChildValueByPath(xmlOS, "version")));
        }

        activation.setJdk(MavenJDOMUtil.findChildValueByPath(xmlActivation, "jdk"));

        Element xmlProperty = MavenJDOMUtil.findChildByPath(xmlActivation, "property");
        if (xmlProperty != null) {
          activation.setProperty(new MavenProfileActivationProperty(
            MavenJDOMUtil.findChildValueByPath(xmlProperty, "name"),
            MavenJDOMUtil.findChildValueByPath(xmlProperty, "value")));
        }

        Element xmlFile = MavenJDOMUtil.findChildByPath(xmlActivation, "file");
        if (xmlFile != null) {
          activation.setFile(new MavenProfileActivationFile(
            MavenJDOMUtil.findChildValueByPath(xmlFile, "exists"),
            MavenJDOMUtil.findChildValueByPath(xmlFile, "missing")));
        }

        profile.setActivation(activation);
      }

      readModelBody(profile, profile.getBuild(), each);
    }
  }

  /** Adds the profile unless one with the same id is already present. Returns true if added. */
  private boolean addProfileIfDoesNotExist(MavenProfile profile, List<MavenProfile> result) {
    for (MavenProfile each : result) {
      if (Comparing.equal(each.getId(), profile.getId())) return false;
    }
    result.add(profile);
    return true;
  }

  /** Copies <properties> children into the model, without overriding existing keys. */
  private static void collectProperties(Element xmlProperties, MavenModelBase mavenModelBase) {
    if (xmlProperties == null) return;

    Properties props = mavenModelBase.getProperties();

    for (Element each : xmlProperties.getChildren()) {
      String name = each.getName();
      String value = each.getTextTrim();
      if (!props.containsKey(name) && !isEmptyOrSpaces(name)) {
        props.setProperty(name, value);
      }
    }
  }

  /** Delegates profile activation to the Maven server process. */
  private static ProfileApplicationResult applyProfiles(MavenModel model,
                                                        File basedir,
                                                        MavenExplicitProfiles explicitProfiles,
                                                        Collection<String> alwaysOnProfiles) {
    return MavenServerManager.getInstance().applyProfiles(model, basedir, explicitProfiles, alwaysOnProfiles);
  }

  /**
   * Resolves the model's parent chain and merges inherited values into it.
   * Cycles and self-inheritance are reported as problems and resolution stops.
   */
  private MavenModel resolveInheritance(final MavenGeneralSettings generalSettings,
                                        MavenModel model,
                                        final VirtualFile file,
                                        final MavenExplicitProfiles explicitProfiles,
                                        final Set<VirtualFile> recursionGuard,
                                        final MavenProjectReaderProjectLocator locator,
                                        Collection<MavenProjectProblem> problems) {
    if (recursionGuard.contains(file)) {
      problems.add(MavenProjectProblem.createProblem(file.getPath(),
                                                     ProjectBundle.message("maven.project.problem.recursiveInheritance"),
                                                     MavenProjectProblem.ProblemType.PARENT));
      return model;
    }
    recursionGuard.add(file);

    try {
      // Single-element array so the anonymous processor below can read it (effectively final).
      final MavenParentDesc[] parentDesc = new MavenParentDesc[1];
      MavenParent parent = model.getParent();
      if (parent != null) {
        if (model.getMavenId().equals(parent.getMavenId())) {
          problems.add(MavenProjectProblem.createProblem(file.getPath(),
                                                         ProjectBundle.message("maven.project.problem.selfInheritance"),
                                                         MavenProjectProblem.ProblemType.PARENT));
          return model;
        }
        parentDesc[0] = new MavenParentDesc(parent.getMavenId(), parent.getRelativePath());
      }

      Pair<VirtualFile, RawModelReadResult> parentModelWithProblems =
        new MavenParentProjectFileProcessor<Pair<VirtualFile, RawModelReadResult>>() {
          @Nullable
          protected VirtualFile findManagedFile(@NotNull MavenId id) {
            return locator.findProjectFile(id);
          }

          @Override
          @Nullable
          protected Pair<VirtualFile, RawModelReadResult> processRelativeParent(VirtualFile parentFile) {
            // Only accept a relative parent whose header coordinates actually match.
            MavenModel parentModel = doReadProjectModel(parentFile, true).model;
            MavenId parentId = parentDesc[0].getParentId();
            if (!parentId.equals(parentModel.getMavenId())) return null;

            return super.processRelativeParent(parentFile);
          }

          @Override
          protected Pair<VirtualFile, RawModelReadResult> processSuperParent(VirtualFile parentFile) {
            return null; // do not process superPom
          }

          @Override
          protected Pair<VirtualFile, RawModelReadResult> doProcessParent(VirtualFile parentFile) {
            // Recursively read the parent with the full pipeline (guarded against cycles).
            RawModelReadResult result =
              doReadProjectModel(generalSettings, parentFile, explicitProfiles, recursionGuard, locator).first;
            return Pair.create(parentFile, result);
          }
        }.process(generalSettings, file, parentDesc[0]);

      if (parentModelWithProblems == null) return model; // no parent or parent not found;

      MavenModel parentModel = parentModelWithProblems.second.model;
      if (!parentModelWithProblems.second.problems.isEmpty()) {
        problems.add(MavenProjectProblem.createProblem(parentModelWithProblems.first.getPath(),
                                                       ProjectBundle.message("maven.project.problem.parentHasProblems",
                                                                             parentModel.getMavenId()),
                                                       MavenProjectProblem.ProblemType.PARENT));
      }

      model = MavenServerManager.getInstance().assembleInheritance(model, parentModel);

      // todo: it is a quick-hack here - we add inherited dummy profiles to correctly collect activated profiles in 'applyProfiles'.
      List<MavenProfile> profiles = model.getProfiles();
      for (MavenProfile each : parentModel.getProfiles()) {
        MavenProfile copyProfile = new MavenProfile(each.getId(), each.getSource());
        if (each.getActivation() != null) {
          copyProfile.setActivation(each.getActivation().clone());
        }
        addProfileIfDoesNotExist(copyProfile, profiles);
      }
      return model;
    }
    finally {
      recursionGuard.remove(file);
    }
  }

  /**
   * Resolves the given project files through a Maven embedder. On embedder
   * failure the files are re-read locally and the error is attached as a problem.
   *
   * @throws MavenProcessCanceledException if the user cancels the operation
   */
  public Collection<MavenProjectReaderResult> resolveProject(final MavenGeneralSettings generalSettings,
                                                             MavenEmbedderWrapper embedder,
                                                             Collection<VirtualFile> files,
                                                             final MavenExplicitProfiles explicitProfiles,
                                                             final MavenProjectReaderProjectLocator locator)
    throws MavenProcessCanceledException {
    try {
      Collection<MavenServerExecutionResult> executionResults =
        embedder.resolveProject(files, explicitProfiles.getEnabledProfiles(), explicitProfiles.getDisabledProfiles());

      Collection<MavenProjectReaderResult> readerResults = ContainerUtil.newArrayList();
      for (MavenServerExecutionResult result : executionResults) {
        MavenServerExecutionResult.ProjectData projectData = result.projectData;
        if (projectData == null) {
          // Embedder produced no model; for a single file fall back to a local read
          // and carry over the embedder's problems/unresolved artifacts.
          if (files.size() == 1) {
            final VirtualFile file = files.iterator().next();
            MavenProjectReaderResult temp = readProject(generalSettings, file, explicitProfiles, locator);
            temp.readingProblems.addAll(result.problems);
            temp.unresolvedArtifactIds.addAll(result.unresolvedArtifacts);
            readerResults.add(temp);
          }
        }
        else {
          readerResults.add(new MavenProjectReaderResult(projectData.mavenModel,
                                                         projectData.mavenModelMap,
                                                         new MavenExplicitProfiles(projectData.activatedProfiles,
                                                                                   explicitProfiles.getDisabledProfiles()),
                                                         projectData.nativeMavenProject,
                                                         result.problems,
                                                         result.unresolvedArtifacts));
        }
      }
      return readerResults;
    }
    catch (MavenProcessCanceledException e) {
      throw e;
    }
    catch (final Throwable e) {
      MavenLog.LOG.info(e);
      MavenLog.printInTests(e); // print exception since we need to know if something wrong with our logic

      // Best-effort fallback: read each file locally and attach the failure as a problem.
      return ContainerUtil.mapNotNull(files, file -> {
        MavenProjectReaderResult result = readProject(generalSettings, file, explicitProfiles, locator);
        String message = e.getMessage();
        if (message != null) {
          result.readingProblems.add(MavenProjectProblem.createStructureProblem(file.getPath(), message));
        }
        else {
          result.readingProblems.add(MavenProjectProblem.createSyntaxProblem(file.getPath(), MavenProjectProblem.ProblemType.SYNTAX));
        }
        return result;
      });
    }
  }

  /**
   * Runs the configured "update folders" goals for a project file to generate
   * sources, returning the resulting project data or null on failure.
   */
  @Nullable
  public static MavenProjectReaderResult generateSources(MavenEmbedderWrapper embedder,
                                                         MavenImportingSettings importingSettings,
                                                         VirtualFile file,
                                                         MavenExplicitProfiles profiles,
                                                         MavenConsole console)
    throws MavenProcessCanceledException {
    try {
      List<String> goals = Collections.singletonList(importingSettings.getUpdateFoldersOnImportPhase());
      MavenServerExecutionResult result =
        embedder.execute(file, profiles.getEnabledProfiles(), profiles.getDisabledProfiles(), goals);
      MavenServerExecutionResult.ProjectData projectData = result.projectData;
      if (projectData == null) return null;
      return new MavenProjectReaderResult(projectData.mavenModel,
                                          projectData.mavenModelMap,
                                          new MavenExplicitProfiles(projectData.activatedProfiles,
                                                                    profiles.getDisabledProfiles()),
                                          projectData.nativeMavenProject,
                                          result.problems,
                                          result.unresolvedArtifacts);
    }
    catch (Throwable e) {
      console.printException(e);
      MavenLog.LOG.warn(e);
      return null;
    }
  }

  /** Parses an XML file with JDOM, routing read/syntax errors into {@code problems}. */
  private static Element readXml(final VirtualFile file,
                                 final Collection<MavenProjectProblem> problems,
                                 final MavenProjectProblem.ProblemType type) {
    return MavenJDOMUtil.read(file, new MavenJDOMUtil.ErrorHandler() {
      public void onReadError(IOException e) {
        MavenLog.LOG.warn("Cannot read the pom file: " + e);
        problems.add(MavenProjectProblem.createProblem(file.getPath(), e.getMessage(), type));
      }

      public void onSyntaxError() {
        problems.add(MavenProjectProblem.createSyntaxProblem(file.getPath(), type));
      }
    });
  }

  /** Immutable snapshot of profiles/problems read from the settings.xml files. */
  private static class SettingsProfilesCache {
    final List<MavenProfile> profiles;
    final Set<String> alwaysOnProfiles;
    final Collection<MavenProjectProblem> problems;

    private SettingsProfilesCache(List<MavenProfile> profiles,
                                  Set<String> alwaysOnProfiles,
                                  Collection<MavenProjectProblem> problems) {
      this.profiles = profiles;
      this.alwaysOnProfiles = alwaysOnProfiles;
      this.problems = problems;
    }
  }

  /** Raw model as read from disk, before inheritance/interpolation, plus its problems. */
  private static class RawModelReadResult {
    public MavenModel model;
    public Collection<MavenProjectProblem> problems;
    public Set<String> alwaysOnProfiles;

    private RawModelReadResult(MavenModel model,
                               Collection<MavenProjectProblem> problems,
                               Set<String> alwaysOnProfiles) {
      this.model = model;
      this.problems = problems;
      this.alwaysOnProfiles = alwaysOnProfiles;
    }
  }
}
/* * Copyright (C) 2007 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.globaltime; import java.io.ByteArrayInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import java.util.Locale; import java.util.TimeZone; import android.view.Menu; import android.view.MenuItem; import android.view.inputmethod.InputMethodManager; import javax.microedition.khronos.egl.*; import javax.microedition.khronos.opengles.*; import android.app.Activity; import android.content.Context; import android.content.res.AssetManager; import android.graphics.Canvas; import android.opengl.Object3D; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.os.MessageQueue; import android.util.Log; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.view.animation.AccelerateDecelerateInterpolator; import android.view.animation.DecelerateInterpolator; import android.view.animation.Interpolator; /** * The main View of the GlobalTime Activity. */ class GTView extends SurfaceView implements SurfaceHolder.Callback { /** * A TimeZone object used to compute the current UTC time. 
     */
    // NOTE(review): TimeZone IDs are case-sensitive; "utc" is not a standard ID and
    // resolves to GMT (same zero offset, so behavior is unaffected) — confirm intent.
    private static final TimeZone UTC_TIME_ZONE = TimeZone.getTimeZone("utc");

    /**
     * The Sun's color is close to that of a 5780K blackbody.
     */
    private static final float[] SUNLIGHT_COLOR = {
        1.0f, 0.9375f, 0.91015625f, 1.0f
    };

    /**
     * The inclination of the earth relative to the plane of the ecliptic
     * is 23.45 degrees.
     */
    private static final float EARTH_INCLINATION = 23.45f * Shape.PI / 180.0f;

    /** Seconds in a day */
    private static final int SECONDS_PER_DAY = 24 * 60 * 60;

    /** Flag for the depth test */
    private static final boolean PERFORM_DEPTH_TEST = false;

    /** Use raw time zone offsets, disregarding "summer time."  If false,
     * current offsets will be used, which requires a much longer startup time
     * in order to sort the city database.
     */
    private static final boolean USE_RAW_OFFSETS = true;

    /**
     * The earth's atmosphere.
     */
    private static final Annulus ATMOSPHERE =
        new Annulus(0.0f, 0.0f, 1.75f,
                    0.9f, 1.08f,
                    0.4f, 0.4f, 0.8f, 0.0f,
                    0.0f, 0.0f, 0.0f, 0.0f,
                    1.0f,
                    50);

    /**
     * The tesselation of the earth by latitude.
     */
    private static final int SPHERE_LATITUDES = 25;

    /**
     * The tesselation of the earth by longitude.
     */
    // NOTE(review): unlike SPHERE_LATITUDES this is not final — looks unintentional; confirm.
    private static int SPHERE_LONGITUDES = 25;

    /**
     * A flattened version of the earth.  The normals are computed identically
     * to those of the round earth, allowing the day/night lighting to be
     * applied to the flattened surface.
     */
    private static Sphere worldFlat =
        new LatLongSphere(0.0f, 0.0f, 0.0f, 1.0f,
                          SPHERE_LATITUDES, SPHERE_LONGITUDES,
                          0.0f, 360.0f,
                          true, true, false, true);

    /**
     * The earth.
     */
    private Object3D mWorld;

    /**
     * Geometry of the city lights
     */
    private PointCloud mLights;

    /**
     * True if the activity has been initialized.
     */
    boolean mInitialized = false;

    /**
     * True if we're in alphabetic entry mode.
     */
    private boolean mAlphaKeySet = false;

    // EGL rendering state, created in the constructor / surfaceCreated().
    private EGLContext mEGLContext;
    private EGLSurface mEGLSurface;
    private EGLDisplay mEGLDisplay;
    private EGLConfig  mEGLConfig;
    GLView mGLView;

    // Rotation and tilt of the Earth
    private float mRotAngle = 0.0f;
    private float mTiltAngle = 0.0f;

    // Rotational velocity of the orbiting viewer
    private float mRotVelocity = 1.0f;

    // Rotation of the flat view
    private float mWrapX = 0.0f;
    private float mWrapVelocity = 0.0f;
    private float mWrapVelocityFactor = 0.01f;

    // Toggle switches
    private boolean mDisplayAtmosphere = true;
    private boolean mDisplayClock = false;
    private boolean mClockShowing = false;
    private boolean mDisplayLights = false;
    private boolean mDisplayWorld = true;
    private boolean mDisplayWorldFlat = false;
    private boolean mSmoothShading = true;

    // City search string
    private String mCityName = "";

    // List of all cities
    private List<City> mClockCities;

    // List of cities matching a user-supplied prefix
    private List<City> mCityNameMatches = new ArrayList<City>();

    // The city list currently being navigated (all cities or the matches).
    private List<City> mCities;

    // Start time for clock fade animation
    private long mClockFadeTime;

    // Interpolator for clock fade animation
    private Interpolator mClockSizeInterpolator = new DecelerateInterpolator(1.0f);

    // Index of current clock
    private int mCityIndex;

    // Current clock
    private Clock mClock;

    // City-to-city flight animation parameters
    private boolean mFlyToCity = false;
    private long mCityFlyStartTime;
    private float mCityFlightTime;
    private float mRotAngleStart, mRotAngleDest;
    private float mTiltAngleStart, mTiltAngleDest;

    // Interpolator for flight motion animation
    private Interpolator mFlyToCityInterpolator = new AccelerateDecelerateInterpolator();

    // City-lights point data read from lights.dat.
    private static int sNumLights;
    private static int[] sLightCoords;

    // static Map<Float,int[]> cityCoords = new HashMap<Float,int[]>();

    // Arrays for GL calls
    private float[] mClipPlaneEquation = new float[4];
    private float[] mLightDir = new float[4];

    // Calendar for computing the Sun's position
    Calendar mSunCal = Calendar.getInstance(UTC_TIME_ZONE);

    // Triangles drawn per frame
    private int mNumTriangles;
    private long startTime;

    // Touch-drag gesture classification constants.
    private static final int MOTION_NONE = 0;
    private static final int MOTION_X = 1;
    private static final int MOTION_Y = 2;
    private static final int MIN_MANHATTAN_DISTANCE = 20;
    private static final float ROTATION_FACTOR = 1.0f / 30.0f;
    private static final float TILT_FACTOR = 0.35f;

    // Touchscreen support
    private float mMotionStartX;
    private float mMotionStartY;
    private float mMotionStartRotVelocity;
    private float mMotionStartTiltAngle;
    private int mMotionDirection;

    /**
     * Creates the EGL window surface for the new native surface and binds it
     * to the context created in the constructor.
     */
    public void surfaceCreated(SurfaceHolder holder) {
        EGL10 egl = (EGL10)EGLContext.getEGL();
        mEGLSurface = egl.eglCreateWindowSurface(mEGLDisplay, mEGLConfig,
                this, null);
        egl.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // nothing to do
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // nothing to do
    }

    /**
     * Set up the view.
     *
     * @param context the Context; the AssetManager used to load the city
     *                database is obtained from it
     */
    public GTView(Context context) {
        super(context);

        // Receive surfaceCreated/Destroyed/Changed callbacks and request a GPU surface.
        getHolder().addCallback(this);
        getHolder().setType(SurfaceHolder.SURFACE_TYPE_GPU);

        AssetManager am = context.getAssets();
        startTime = System.currentTimeMillis();

        // EGL bootstrap: display -> config (16-bit depth) -> context.
        // The window surface itself is created later, in surfaceCreated().
        EGL10 egl = (EGL10)EGLContext.getEGL();
        EGLDisplay dpy = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
        int[] version = new int[2];
        egl.eglInitialize(dpy, version);
        int[] configSpec = {
                EGL10.EGL_DEPTH_SIZE,   16,
                EGL10.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] num_config = new int[1];
        egl.eglChooseConfig(dpy, configSpec, configs, 1, num_config);
        mEGLConfig = configs[0];

        mEGLContext = egl.eglCreateContext(dpy, mEGLConfig,
                EGL10.EGL_NO_CONTEXT, null);
        mEGLDisplay = dpy;

        mClock = new Clock();

        setFocusable(true);
        setFocusableInTouchMode(true);
        requestFocus();

        try {
            loadAssets(am);
        } catch (IOException ioe) {
            // Asset databases are required; treat failure to load them as fatal.
            ioe.printStackTrace();
            throw new RuntimeException(ioe);
        } catch (ArrayIndexOutOfBoundsException aioobe) {
            // Defends against a malformed lights/cities database.
            aioobe.printStackTrace();
            throw new RuntimeException(aioobe);
        }
    }

    /**
     * Destroy the view, releasing all EGL resources created by the
     * constructor and surfaceCreated().
     */
    public void destroy() {
        EGL10 egl = (EGL10)EGLContext.getEGL();
        // Unbind before destroying the context and surface.
        egl.eglMakeCurrent(mEGLDisplay,
                egl.EGL_NO_SURFACE, egl.EGL_NO_SURFACE, egl.EGL_NO_CONTEXT);
        egl.eglDestroyContext(mEGLDisplay, mEGLContext);
        egl.eglDestroySurface(mEGLDisplay, mEGLSurface);
        egl.eglTerminate(mEGLDisplay);
        mEGLContext = null;
    }

    /**
     * Begin animation.
     */
    public void startAnimating() {
        mHandler.sendEmptyMessage(INVALIDATE);
    }

    /**
     * Quit animation.
     */
    public void stopAnimating() {
        mHandler.removeMessages(INVALIDATE);
    }

    /**
     * Read a two-byte little-endian integer from the input stream.
     */
    private int readInt16(InputStream is) throws IOException {
        int lo = is.read();
        int hi = is.read();
        return (hi << 8) | lo;
    }

    /**
     * Returns the offset from UTC for the given city. If USE_RAW_OFFSETS
     * is true, summer/daylight savings is ignored.
     */
    private static float getOffset(City c) {
        return USE_RAW_OFFSETS ? c.getRawOffset() : c.getOffset();
    }

    /**
     * Reads the entire stream into memory and returns an in-memory copy,
     * which is much faster to parse than the raw asset stream.
     */
    private InputStream cache(InputStream is) throws IOException {
        int nbytes = is.available();
        byte[] data = new byte[nbytes];
        int nread = 0;
        // read() may return fewer bytes than requested; loop until complete.
        while (nread < nbytes) {
            nread += is.read(data, nread, nbytes - nread);
        }
        return new ByteArrayInputStream(data);
    }

    /**
     * Load the city and lights databases.
     *
     * @param am the AssetManager to load from.
     */
    private void loadAssets(final AssetManager am) throws IOException {
        Locale locale = Locale.getDefault();
        String language = locale.getLanguage();
        String country = locale.getCountry();

        InputStream cis = null;

        try {
            // Look for (e.g.) cities_fr_FR.dat or cities_fr_CA.dat
            cis = am.open("cities_" + language + "_" + country + ".dat");
        } catch (FileNotFoundException e1) {
            try {
                // Look for (e.g.) cities_fr.dat
                cis = am.open("cities_" + language + ".dat");
            } catch (FileNotFoundException e2) {
                try {
                    // Use English city names by default
                    cis = am.open("cities_en.dat");
                } catch (FileNotFoundException e3) {
                    // NOTE(review): catch-and-rethrow is a no-op; kept for structure.
                    throw e3;
                }
            }
        }

        cis = cache(cis);
        City.loadCities(cis);
        City[] cities;
        if (USE_RAW_OFFSETS) {
            cities = City.getCitiesByRawOffset();
        } else {
            cities = City.getCitiesByOffset();
        }

        mClockCities = new ArrayList<City>(cities.length);
        for (int i = 0; i < cities.length; i++) {
            mClockCities.add(cities[i]);
        }
        mCities = mClockCities;
        mCityIndex = 0;

        // Route the world model's file reads through the asset manager (cached).
        this.mWorld = new Object3D() {
            @Override
            public InputStream readFile(String filename) throws IOException {
                return cache(am.open(filename));
            }
        };
        mWorld.load("world.gles");

        // lights.dat has the following format.  All integers
        // are 16 bits, low byte first.
        //
        // width
        // height
        // N [# of lights]
        // light 0 X [in the range 0 to (width - 1)]
        // light 0 Y [in the range 0 to (height - 1)]
        // light 1 X [in the range 0 to (width - 1)]
        // light 1 Y [in the range 0 to (height - 1)]
        // ...
        // light (N - 1) X [in the range 0 to (width - 1)]
        // light (N - 1) Y [in the range 0 to (height - 1)]
        //
        // For a larger number of lights, it could make more
        // sense to store the light positions in a bitmap
        // and extract them manually
        InputStream lis = am.open("lights.dat");
        lis = cache(lis);

        int lightWidth = readInt16(lis);
        int lightHeight = readInt16(lis);
        sNumLights = readInt16(lis);
        sLightCoords = new int[3 * sNumLights];

        int lidx = 0;

        // Project the lights slightly above the unit sphere (16.16 fixed point).
        float lightRadius = 1.009f;
        float lightScale = 65536.0f * lightRadius;

        // Precompute the sin/cos of every possible longitude (theta) ...
        float[] cosTheta = new float[lightWidth];
        float[] sinTheta = new float[lightWidth];
        float twoPi = (float) (2.0 * Math.PI);
        float scaleW = twoPi / lightWidth;

        for (int i = 0; i < lightWidth; i++) {
            float theta = twoPi - i * scaleW;
            cosTheta[i] = (float)Math.cos(theta);
            sinTheta[i] = (float)Math.sin(theta);
        }

        // ... and latitude (phi) grid position.
        float[] cosPhi = new float[lightHeight];
        float[] sinPhi = new float[lightHeight];
        float scaleH = (float) (Math.PI / lightHeight);

        for (int j = 0; j < lightHeight; j++) {
            float phi = j * scaleH;
            cosPhi[j] = (float)Math.cos(phi);
            sinPhi[j] = (float)Math.sin(phi);
        }

        // Bulk-read all (x, y) light positions: 2 little-endian shorts per light.
        int nbytes = 4 * sNumLights;
        byte[] ilights = new byte[nbytes];
        int nread = 0;
        while (nread < nbytes) {
            nread += lis.read(ilights, nread, nbytes - nread);
        }

        // Convert each grid position to fixed-point 3D coordinates on the sphere.
        int idx = 0;
        for (int i = 0; i < sNumLights; i++) {
            int lx = (((ilights[idx + 1] & 0xff) << 8) |
                       (ilights[idx    ] & 0xff));
            int ly = (((ilights[idx + 3] & 0xff) << 8) |
                       (ilights[idx + 2] & 0xff));
            idx += 4;

            float sin = sinPhi[ly];
            float x = cosTheta[lx]*sin;
            float y = cosPhi[ly];
            float z = sinTheta[lx]*sin;

            sLightCoords[lidx++] = (int) (x * lightScale);
            sLightCoords[lidx++] = (int) (y * lightScale);
            sLightCoords[lidx++] = (int) (z * lightScale);
        }
        mLights = new PointCloud(sLightCoords);
    }

    /**
     * Returns true if two time zone offsets are equal.  We assume distinct
     * time zone offsets will differ by at least a few minutes.
     */
    private boolean tzEqual(float o1, float o2) {
        return Math.abs(o1 - o2) < 0.001;
    }

    /**
     * Move to a different time zone.
* * @param incr The increment between the current and future time zones. */ private void shiftTimeZone(int incr) { // If only 1 city in the current set, there's nowhere to go if (mCities.size() <= 1) { return; } float offset = getOffset(mCities.get(mCityIndex)); do { mCityIndex = (mCityIndex + mCities.size() + incr) % mCities.size(); } while (tzEqual(getOffset(mCities.get(mCityIndex)), offset)); offset = getOffset(mCities.get(mCityIndex)); locateCity(true, offset); goToCity(); } /** * Returns true if there is another city within the current time zone * that is the given increment away from the current city. * * @param incr the increment, +1 or -1 * @return */ private boolean atEndOfTimeZone(int incr) { if (mCities.size() <= 1) { return true; } float offset = getOffset(mCities.get(mCityIndex)); int nindex = (mCityIndex + mCities.size() + incr) % mCities.size(); if (tzEqual(getOffset(mCities.get(nindex)), offset)) { return false; } return true; } /** * Shifts cities within the current time zone. * * @param incr the increment, +1 or -1 */ private void shiftWithinTimeZone(int incr) { float offset = getOffset(mCities.get(mCityIndex)); int nindex = (mCityIndex + mCities.size() + incr) % mCities.size(); if (tzEqual(getOffset(mCities.get(nindex)), offset)) { mCityIndex = nindex; goToCity(); } } /** * Returns true if the city name matches the given prefix, ignoring spaces. */ private boolean nameMatches(City city, String prefix) { String cityName = city.getName().replaceAll("[ ]", ""); return prefix.regionMatches(true, 0, cityName, 0, prefix.length()); } /** * Returns true if there are cities matching the given name prefix. */ private boolean hasMatches(String prefix) { for (int i = 0; i < mClockCities.size(); i++) { City city = mClockCities.get(i); if (nameMatches(city, prefix)) { return true; } } return false; } /** * Shifts to the nearest city that matches the new prefix. 
*/ private void shiftByName() { // Attempt to keep current city if it matches City finalCity = null; City currCity = mCities.get(mCityIndex); if (nameMatches(currCity, mCityName)) { finalCity = currCity; } mCityNameMatches.clear(); for (int i = 0; i < mClockCities.size(); i++) { City city = mClockCities.get(i); if (nameMatches(city, mCityName)) { mCityNameMatches.add(city); } } mCities = mCityNameMatches; if (finalCity != null) { for (int i = 0; i < mCityNameMatches.size(); i++) { if (mCityNameMatches.get(i) == finalCity) { mCityIndex = i; break; } } } else { // Find the closest matching city locateCity(false, 0.0f); } goToCity(); } /** * Increases or decreases the rotational speed of the earth. */ private void incrementRotationalVelocity(float incr) { if (mDisplayWorldFlat) { mWrapVelocity -= incr; } else { mRotVelocity -= incr; } } /** * Clears the current matching prefix, while keeping the focus on * the current city. */ private void clearCityMatches() { // Determine the global city index that matches the current city if (mCityNameMatches.size() > 0) { City city = mCityNameMatches.get(mCityIndex); for (int i = 0; i < mClockCities.size(); i++) { City ncity = mClockCities.get(i); if (city.equals(ncity)) { mCityIndex = i; break; } } } mCityName = ""; mCityNameMatches.clear(); mCities = mClockCities; goToCity(); } /** * Fade the clock in or out. */ private void enableClock(boolean enabled) { mClockFadeTime = System.currentTimeMillis(); mDisplayClock = enabled; mClockShowing = true; mAlphaKeySet = enabled; if (enabled) { // Find the closest matching city locateCity(false, 0.0f); } clearCityMatches(); } /** * Use the touchscreen to alter the rotational velocity or the * tilt of the earth. */ @Override public boolean onTouchEvent(MotionEvent event) { switch (event.getAction()) { case MotionEvent.ACTION_DOWN: mMotionStartX = event.getX(); mMotionStartY = event.getY(); mMotionStartRotVelocity = mDisplayWorldFlat ? 
mWrapVelocity : mRotVelocity; mMotionStartTiltAngle = mTiltAngle; // Stop the rotation if (mDisplayWorldFlat) { mWrapVelocity = 0.0f; } else { mRotVelocity = 0.0f; } mMotionDirection = MOTION_NONE; break; case MotionEvent.ACTION_MOVE: // Disregard motion events when the clock is displayed float dx = event.getX() - mMotionStartX; float dy = event.getY() - mMotionStartY; float delx = Math.abs(dx); float dely = Math.abs(dy); // Determine the direction of motion (major axis) // Once if has been determined, it's locked in until // we receive ACTION_UP or ACTION_CANCEL if ((mMotionDirection == MOTION_NONE) && (delx + dely > MIN_MANHATTAN_DISTANCE)) { if (delx > dely) { mMotionDirection = MOTION_X; } else { mMotionDirection = MOTION_Y; } } // If the clock is displayed, don't actually rotate or tilt; // just use mMotionDirection to record whether motion occurred if (!mDisplayClock) { if (mMotionDirection == MOTION_X) { if (mDisplayWorldFlat) { mWrapVelocity = mMotionStartRotVelocity + dx * ROTATION_FACTOR; } else { mRotVelocity = mMotionStartRotVelocity + dx * ROTATION_FACTOR; } mClock.setCity(null); } else if (mMotionDirection == MOTION_Y && !mDisplayWorldFlat) { mTiltAngle = mMotionStartTiltAngle + dy * TILT_FACTOR; if (mTiltAngle < -90.0f) { mTiltAngle = -90.0f; } if (mTiltAngle > 90.0f) { mTiltAngle = 90.0f; } mClock.setCity(null); } } break; case MotionEvent.ACTION_UP: mMotionDirection = MOTION_NONE; break; case MotionEvent.ACTION_CANCEL: mTiltAngle = mMotionStartTiltAngle; if (mDisplayWorldFlat) { mWrapVelocity = mMotionStartRotVelocity; } else { mRotVelocity = mMotionStartRotVelocity; } mMotionDirection = MOTION_NONE; break; } return true; } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { if (mInitialized && mGLView.processKey(keyCode)) { boolean drawing = (mClockShowing || mGLView.hasMessages()); this.setWillNotDraw(!drawing); return true; } boolean handled = false; // If we're not in alphabetical entry mode, convert letters // to their digit 
equivalents if (!mAlphaKeySet) { char numChar = event.getNumber(); if (numChar >= '0' && numChar <= '9') { keyCode = KeyEvent.KEYCODE_0 + (numChar - '0'); } } switch (keyCode) { // The 'space' key toggles the clock case KeyEvent.KEYCODE_SPACE: mAlphaKeySet = !mAlphaKeySet; enableClock(mAlphaKeySet); handled = true; break; // The 'left' and 'right' buttons shift time zones if the clock is // displayed, otherwise they alters the rotational speed of the earthh case KeyEvent.KEYCODE_DPAD_LEFT: if (mDisplayClock) { shiftTimeZone(-1); } else { mClock.setCity(null); incrementRotationalVelocity(1.0f); } handled = true; break; case KeyEvent.KEYCODE_DPAD_RIGHT: if (mDisplayClock) { shiftTimeZone(1); } else { mClock.setCity(null); incrementRotationalVelocity(-1.0f); } handled = true; break; // The 'up' and 'down' buttons shift cities within a time zone if the // clock is displayed, otherwise they tilt the earth case KeyEvent.KEYCODE_DPAD_UP: if (mDisplayClock) { shiftWithinTimeZone(-1); } else { mClock.setCity(null); if (!mDisplayWorldFlat) { mTiltAngle += 360.0f / 48.0f; } } handled = true; break; case KeyEvent.KEYCODE_DPAD_DOWN: if (mDisplayClock) { shiftWithinTimeZone(1); } else { mClock.setCity(null); if (!mDisplayWorldFlat) { mTiltAngle -= 360.0f / 48.0f; } } handled = true; break; // The center key stops the earth's rotation, then toggles between the // round and flat views of the earth case KeyEvent.KEYCODE_DPAD_CENTER: if ((!mDisplayWorldFlat && mRotVelocity == 0.0f) || (mDisplayWorldFlat && mWrapVelocity == 0.0f)) { mDisplayWorldFlat = !mDisplayWorldFlat; } else { if (mDisplayWorldFlat) { mWrapVelocity = 0.0f; } else { mRotVelocity = 0.0f; } } handled = true; break; // The 'L' key toggles the city lights case KeyEvent.KEYCODE_L: if (!mAlphaKeySet && !mDisplayWorldFlat) { mDisplayLights = !mDisplayLights; handled = true; } break; // The 'W' key toggles the earth (just for fun) case KeyEvent.KEYCODE_W: if (!mAlphaKeySet && !mDisplayWorldFlat) { mDisplayWorld = 
!mDisplayWorld; handled = true; } break; // The 'A' key toggles the atmosphere case KeyEvent.KEYCODE_A: if (!mAlphaKeySet && !mDisplayWorldFlat) { mDisplayAtmosphere = !mDisplayAtmosphere; handled = true; } break; // The '2' key zooms out case KeyEvent.KEYCODE_2: if (!mAlphaKeySet && !mDisplayWorldFlat) { mGLView.zoom(-2); handled = true; } break; // The '8' key zooms in case KeyEvent.KEYCODE_8: if (!mAlphaKeySet && !mDisplayWorldFlat) { mGLView.zoom(2); handled = true; } break; } // Handle letters in city names if (!handled && mAlphaKeySet) { switch (keyCode) { // Add a letter to the city name prefix case KeyEvent.KEYCODE_A: case KeyEvent.KEYCODE_B: case KeyEvent.KEYCODE_C: case KeyEvent.KEYCODE_D: case KeyEvent.KEYCODE_E: case KeyEvent.KEYCODE_F: case KeyEvent.KEYCODE_G: case KeyEvent.KEYCODE_H: case KeyEvent.KEYCODE_I: case KeyEvent.KEYCODE_J: case KeyEvent.KEYCODE_K: case KeyEvent.KEYCODE_L: case KeyEvent.KEYCODE_M: case KeyEvent.KEYCODE_N: case KeyEvent.KEYCODE_O: case KeyEvent.KEYCODE_P: case KeyEvent.KEYCODE_Q: case KeyEvent.KEYCODE_R: case KeyEvent.KEYCODE_S: case KeyEvent.KEYCODE_T: case KeyEvent.KEYCODE_U: case KeyEvent.KEYCODE_V: case KeyEvent.KEYCODE_W: case KeyEvent.KEYCODE_X: case KeyEvent.KEYCODE_Y: case KeyEvent.KEYCODE_Z: char c = (char)(keyCode - KeyEvent.KEYCODE_A + 'A'); if (hasMatches(mCityName + c)) { mCityName += c; shiftByName(); } handled = true; break; // Remove a letter from the city name prefix case KeyEvent.KEYCODE_DEL: if (mCityName.length() > 0) { mCityName = mCityName.substring(0, mCityName.length() - 1); shiftByName(); } else { clearCityMatches(); } handled = true; break; // Clear the city name prefix case KeyEvent.KEYCODE_ENTER: clearCityMatches(); handled = true; break; } } boolean drawing = (mClockShowing || ((mGLView != null) && (mGLView.hasMessages()))); this.setWillNotDraw(!drawing); // Let the system handle other keypresses if (!handled) { return super.onKeyDown(keyCode, event); } return true; } /** * Initialize OpenGL ES 
drawing. */ private synchronized void init(GL10 gl) { mGLView = new GLView(); mGLView.setNearFrustum(5.0f); mGLView.setFarFrustum(50.0f); mGLView.setLightModelAmbientIntensity(0.225f); mGLView.setAmbientIntensity(0.0f); mGLView.setDiffuseIntensity(1.5f); mGLView.setDiffuseColor(SUNLIGHT_COLOR); mGLView.setSpecularIntensity(0.0f); mGLView.setSpecularColor(SUNLIGHT_COLOR); if (PERFORM_DEPTH_TEST) { gl.glEnable(GL10.GL_DEPTH_TEST); } gl.glDisable(GL10.GL_SCISSOR_TEST); gl.glClearColor(0, 0, 0, 1); gl.glHint(GL10.GL_POINT_SMOOTH_HINT, GL10.GL_NICEST); mInitialized = true; } /** * Computes the vector from the center of the earth to the sun for a * particular moment in time. */ private void computeSunDirection() { mSunCal.setTimeInMillis(System.currentTimeMillis()); int day = mSunCal.get(Calendar.DAY_OF_YEAR); int seconds = 3600 * mSunCal.get(Calendar.HOUR_OF_DAY) + 60 * mSunCal.get(Calendar.MINUTE) + mSunCal.get(Calendar.SECOND); day += (float) seconds / SECONDS_PER_DAY; // Approximate declination of the sun, changes sinusoidally // during the year. The winter solstice occurs 10 days before // the start of the year. float decl = (float) (EARTH_INCLINATION * Math.cos(Shape.TWO_PI * (day + 10) / 365.0)); // Subsolar latitude, convert from (-PI/2, PI/2) -> (0, PI) form float phi = decl + Shape.PI_OVER_TWO; // Subsolar longitude float theta = Shape.TWO_PI * seconds / SECONDS_PER_DAY; float sinPhi = (float) Math.sin(phi); float cosPhi = (float) Math.cos(phi); float sinTheta = (float) Math.sin(theta); float cosTheta = (float) Math.cos(theta); // Convert from polar to rectangular coordinates float x = cosTheta * sinPhi; float y = cosPhi; float z = sinTheta * sinPhi; // Directional light -> w == 0 mLightDir[0] = x; mLightDir[1] = y; mLightDir[2] = z; mLightDir[3] = 0.0f; } /** * Computes the approximate spherical distance between two * (latitude, longitude) coordinates. 
*/ private float distance(float lat1, float lon1, float lat2, float lon2) { lat1 *= Shape.DEGREES_TO_RADIANS; lat2 *= Shape.DEGREES_TO_RADIANS; lon1 *= Shape.DEGREES_TO_RADIANS; lon2 *= Shape.DEGREES_TO_RADIANS; float r = 6371.0f; // Earth's radius in km float dlat = lat2 - lat1; float dlon = lon2 - lon1; double sinlat2 = Math.sin(dlat / 2.0f); sinlat2 *= sinlat2; double sinlon2 = Math.sin(dlon / 2.0f); sinlon2 *= sinlon2; double a = sinlat2 + Math.cos(lat1) * Math.cos(lat2) * sinlon2; double c = 2.0 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a)); return (float) (r * c); } /** * Locates the closest city to the currently displayed center point, * optionally restricting the search to cities within a given time zone. */ private void locateCity(boolean useOffset, float offset) { float mindist = Float.MAX_VALUE; int minidx = -1; for (int i = 0; i < mCities.size(); i++) { City city = mCities.get(i); if (useOffset && !tzEqual(getOffset(city), offset)) { continue; } float dist = distance(city.getLatitude(), city.getLongitude(), mTiltAngle, mRotAngle - 90.0f); if (dist < mindist) { mindist = dist; minidx = i; } } mCityIndex = minidx; } /** * Animates the earth to be centered at the current city. */ private void goToCity() { City city = mCities.get(mCityIndex); float dist = distance(city.getLatitude(), city.getLongitude(), mTiltAngle, mRotAngle - 90.0f); mFlyToCity = true; mCityFlyStartTime = System.currentTimeMillis(); mCityFlightTime = dist / 5.0f; // 5000 km/sec mRotAngleStart = mRotAngle; mRotAngleDest = city.getLongitude() + 90; if (mRotAngleDest - mRotAngleStart > 180.0f) { mRotAngleDest -= 360.0f; } else if (mRotAngleStart - mRotAngleDest > 180.0f) { mRotAngleDest += 360.0f; } mTiltAngleStart = mTiltAngle; mTiltAngleDest = city.getLatitude(); mRotVelocity = 0.0f; } /** * Returns a linearly interpolated value between two values. 
*/ private float lerp(float a, float b, float lerp) { return a + (b - a)*lerp; } /** * Draws the city lights, using a clip plane to restrict the lights * to the night side of the earth. */ private void drawCityLights(GL10 gl, float brightness) { gl.glEnable(GL10.GL_POINT_SMOOTH); gl.glDisable(GL10.GL_DEPTH_TEST); gl.glDisable(GL10.GL_LIGHTING); gl.glDisable(GL10.GL_DITHER); gl.glShadeModel(GL10.GL_FLAT); gl.glEnable(GL10.GL_BLEND); gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA); gl.glPointSize(1.0f); float ls = lerp(0.8f, 0.3f, brightness); gl.glColor4f(ls * 1.0f, ls * 1.0f, ls * 0.8f, 1.0f); if (mDisplayWorld) { mClipPlaneEquation[0] = -mLightDir[0]; mClipPlaneEquation[1] = -mLightDir[1]; mClipPlaneEquation[2] = -mLightDir[2]; mClipPlaneEquation[3] = 0.0f; // Assume we have glClipPlanef() from OpenGL ES 1.1 ((GL11) gl).glClipPlanef(GL11.GL_CLIP_PLANE0, mClipPlaneEquation, 0); gl.glEnable(GL11.GL_CLIP_PLANE0); } mLights.draw(gl); if (mDisplayWorld) { gl.glDisable(GL11.GL_CLIP_PLANE0); } mNumTriangles += mLights.getNumTriangles()*2; } /** * Draws the atmosphere. */ private void drawAtmosphere(GL10 gl) { gl.glDisable(GL10.GL_LIGHTING); gl.glDisable(GL10.GL_CULL_FACE); gl.glDisable(GL10.GL_DITHER); gl.glDisable(GL10.GL_DEPTH_TEST); gl.glShadeModel(mSmoothShading ? GL10.GL_SMOOTH : GL10.GL_FLAT); // Draw the atmospheric layer float tx = mGLView.getTranslateX(); float ty = mGLView.getTranslateY(); float tz = mGLView.getTranslateZ(); gl.glMatrixMode(GL10.GL_MODELVIEW); gl.glLoadIdentity(); gl.glTranslatef(tx, ty, tz); // Blend in the atmosphere a bit gl.glEnable(GL10.GL_BLEND); gl.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA); ATMOSPHERE.draw(gl); mNumTriangles += ATMOSPHERE.getNumTriangles(); } /** * Draws the world in a 2D map view. */ private void drawWorldFlat(GL10 gl) { gl.glDisable(GL10.GL_BLEND); gl.glEnable(GL10.GL_DITHER); gl.glShadeModel(mSmoothShading ? 
GL10.GL_SMOOTH : GL10.GL_FLAT); gl.glTranslatef(mWrapX - 2, 0.0f, 0.0f); worldFlat.draw(gl); gl.glTranslatef(2.0f, 0.0f, 0.0f); worldFlat.draw(gl); mNumTriangles += worldFlat.getNumTriangles() * 2; mWrapX += mWrapVelocity * mWrapVelocityFactor; while (mWrapX < 0.0f) { mWrapX += 2.0f; } while (mWrapX > 2.0f) { mWrapX -= 2.0f; } } /** * Draws the world in a 2D round view. */ private void drawWorldRound(GL10 gl) { gl.glDisable(GL10.GL_BLEND); gl.glEnable(GL10.GL_DITHER); gl.glShadeModel(mSmoothShading ? GL10.GL_SMOOTH : GL10.GL_FLAT); mWorld.draw(gl); mNumTriangles += mWorld.getNumTriangles(); } /** * Draws the clock. * * @param canvas the Canvas to draw to * @param now the current time * @param w the width of the screen * @param h the height of the screen * @param lerp controls the animation, between 0.0 and 1.0 */ private void drawClock(Canvas canvas, long now, int w, int h, float lerp) { float clockAlpha = lerp(0.0f, 0.8f, lerp); mClockShowing = clockAlpha > 0.0f; if (clockAlpha > 0.0f) { City city = mCities.get(mCityIndex); mClock.setCity(city); mClock.setTime(now); float cx = w / 2.0f; float cy = h / 2.0f; float smallRadius = 18.0f; float bigRadius = 0.75f * 0.5f * Math.min(w, h); float radius = lerp(smallRadius, bigRadius, lerp); // Only display left/right arrows if we are in a name search boolean scrollingByName = (mCityName.length() > 0) && (mCities.size() > 1); mClock.drawClock(canvas, cx, cy, radius, clockAlpha, 1.0f, lerp == 1.0f, lerp == 1.0f, !atEndOfTimeZone(-1), !atEndOfTimeZone(1), scrollingByName, mCityName.length()); } } /** * Draws the 2D layer. 
*/ @Override protected void onDraw(Canvas canvas) { long now = System.currentTimeMillis(); if (startTime != -1) { startTime = -1; } int w = getWidth(); int h = getHeight(); // Interpolator for clock size, clock alpha, night lights intensity float lerp = Math.min((now - mClockFadeTime)/1000.0f, 1.0f); if (!mDisplayClock) { // Clock is receding lerp = 1.0f - lerp; } lerp = mClockSizeInterpolator.getInterpolation(lerp); // we don't need to make sure OpenGL rendering is done because // we're drawing in to a different surface drawClock(canvas, now, w, h, lerp); mGLView.showMessages(canvas); mGLView.showStatistics(canvas, w); } /** * Draws the 3D layer. */ protected void drawOpenGLScene() { long now = System.currentTimeMillis(); mNumTriangles = 0; EGL10 egl = (EGL10)EGLContext.getEGL(); /* Make out context current in case 2D drawing used difference GL * context. */ egl.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext); GL10 gl = (GL10)mEGLContext.getGL(); if (!mInitialized) { init(gl); } int w = getWidth(); int h = getHeight(); gl.glViewport(0, 0, w, h); gl.glEnable(GL10.GL_LIGHTING); gl.glEnable(GL10.GL_LIGHT0); gl.glEnable(GL10.GL_CULL_FACE); gl.glFrontFace(GL10.GL_CCW); float ratio = (float) w / h; mGLView.setAspectRatio(ratio); mGLView.setTextureParameters(gl); if (PERFORM_DEPTH_TEST) { gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT); } else { gl.glClear(GL10.GL_COLOR_BUFFER_BIT); } if (mDisplayWorldFlat) { gl.glMatrixMode(GL10.GL_PROJECTION); gl.glLoadIdentity(); gl.glFrustumf(-1.0f, 1.0f, -1.0f / ratio, 1.0f / ratio, 1.0f, 2.0f); gl.glMatrixMode(GL10.GL_MODELVIEW); gl.glLoadIdentity(); gl.glTranslatef(0.0f, 0.0f, -1.0f); } else { mGLView.setProjection(gl); mGLView.setView(gl); } if (!mDisplayWorldFlat) { if (mFlyToCity) { float lerp = (now - mCityFlyStartTime)/mCityFlightTime; if (lerp >= 1.0f) { mFlyToCity = false; } lerp = Math.min(lerp, 1.0f); lerp = mFlyToCityInterpolator.getInterpolation(lerp); mRotAngle = lerp(mRotAngleStart, 
mRotAngleDest, lerp); mTiltAngle = lerp(mTiltAngleStart, mTiltAngleDest, lerp); } // Rotate the viewpoint around the earth gl.glMatrixMode(GL10.GL_MODELVIEW); gl.glRotatef(mTiltAngle, 1, 0, 0); gl.glRotatef(mRotAngle, 0, 1, 0); // Increment the rotation angle mRotAngle += mRotVelocity; if (mRotAngle < 0.0f) { mRotAngle += 360.0f; } if (mRotAngle > 360.0f) { mRotAngle -= 360.0f; } } // Draw the world with lighting gl.glLightfv(GL10.GL_LIGHT0, GL10.GL_POSITION, mLightDir, 0); mGLView.setLights(gl, GL10.GL_LIGHT0); if (mDisplayWorldFlat) { drawWorldFlat(gl); } else if (mDisplayWorld) { drawWorldRound(gl); } if (mDisplayLights && !mDisplayWorldFlat) { // Interpolator for clock size, clock alpha, night lights intensity float lerp = Math.min((now - mClockFadeTime)/1000.0f, 1.0f); if (!mDisplayClock) { // Clock is receding lerp = 1.0f - lerp; } lerp = mClockSizeInterpolator.getInterpolation(lerp); drawCityLights(gl, lerp); } if (mDisplayAtmosphere && !mDisplayWorldFlat) { drawAtmosphere(gl); } mGLView.setNumTriangles(mNumTriangles); egl.eglSwapBuffers(mEGLDisplay, mEGLSurface); if (egl.eglGetError() == EGL11.EGL_CONTEXT_LOST) { // we lost the gpu, quit immediately Context c = getContext(); if (c instanceof Activity) { ((Activity)c).finish(); } } } private static final int INVALIDATE = 1; private static final int ONE_MINUTE = 60000; /** * Controls the animation using the message queue. Every time we receive * an INVALIDATE message, we redraw and place another message in the queue. */ private final Handler mHandler = new Handler() { private long mLastSunPositionTime = 0; @Override public void handleMessage(Message msg) { if (msg.what == INVALIDATE) { // Use the message's time, it's good enough and // allows us to avoid a system call. 
if ((msg.getWhen() - mLastSunPositionTime) >= ONE_MINUTE) { // Recompute the sun's position once per minute // Place the light at the Sun's direction computeSunDirection(); mLastSunPositionTime = msg.getWhen(); } // Draw the GL scene drawOpenGLScene(); // Send an update for the 2D overlay if needed if (mInitialized && (mClockShowing || mGLView.hasMessages())) { invalidate(); } // Just send another message immediately. This works because // drawOpenGLScene() does the timing for us -- it will // block until the last frame has been processed. // The invalidate message we're posting here will be // interleaved properly with motion/key events which // guarantee a prompt reaction to the user input. sendEmptyMessage(INVALIDATE); } } }; } /** * The main activity class for GlobalTime. */ public class GlobalTime extends Activity { private static final int MENU_KBD = Menu.FIRST + 1; GTView gtView = null; private void setGTView() { if (gtView == null) { gtView = new GTView(this); setContentView(gtView); } } @Override protected void onCreate(Bundle icicle) { super.onCreate(icicle); setGTView(); } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); menu.add(0, MENU_KBD, 0, R.string.menu_keyboard); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case MENU_KBD: { InputMethodManager inputManager = (InputMethodManager) getSystemService(Context.INPUT_METHOD_SERVICE); inputManager.showSoftInput(gtView, 0); return true; } } return super.onOptionsItemSelected(item); } @Override protected void onResume() { super.onResume(); setGTView(); Looper.myQueue().addIdleHandler(new Idler()); } @Override protected void onPause() { super.onPause(); gtView.stopAnimating(); } @Override protected void onStop() { super.onStop(); gtView.stopAnimating(); gtView.destroy(); gtView = null; } // Allow the activity to go idle before its animation starts class Idler implements MessageQueue.IdleHandler { public 
Idler() { super(); } public final boolean queueIdle() { if (gtView != null) { gtView.startAnimating(); } return false; } } }
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.autoscaling.model; import java.io.Serializable; import com.amazonaws.AmazonWebServiceRequest; /** * Container for the parameters to the {@link com.amazonaws.services.autoscaling.AmazonAutoScaling#describeTags(DescribeTagsRequest) DescribeTags operation}. * <p> * Describes the specified tags. * </p> * <p> * You can use filters to limit the results. For example, you can query * for the tags for a specific Auto Scaling group. You can specify * multiple values for a filter. A tag must match at least one of the * specified values for it to be included in the results. * </p> * <p> * You can also specify multiple filters. The result includes information * for a particular tag only if it matches all the filters. If there's no * match, no special message is returned. * </p> * * @see com.amazonaws.services.autoscaling.AmazonAutoScaling#describeTags(DescribeTagsRequest) */ public class DescribeTagsRequest extends AmazonWebServiceRequest implements Serializable, Cloneable { /** * A filter used to scope the tags to return. */ private com.amazonaws.internal.ListWithAutoConstructFlag<Filter> filters; /** * The token for the next set of items to return. (You received this * token from a previous call.) 
* <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> */ private String nextToken; /** * The maximum number of items to return with this call. */ private Integer maxRecords; /** * A filter used to scope the tags to return. * * @return A filter used to scope the tags to return. */ public java.util.List<Filter> getFilters() { if (filters == null) { filters = new com.amazonaws.internal.ListWithAutoConstructFlag<Filter>(); filters.setAutoConstruct(true); } return filters; } /** * A filter used to scope the tags to return. * * @param filters A filter used to scope the tags to return. */ public void setFilters(java.util.Collection<Filter> filters) { if (filters == null) { this.filters = null; return; } com.amazonaws.internal.ListWithAutoConstructFlag<Filter> filtersCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Filter>(filters.size()); filtersCopy.addAll(filters); this.filters = filtersCopy; } /** * A filter used to scope the tags to return. * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setFilters(java.util.Collection)} or {@link * #withFilters(java.util.Collection)} if you want to override the * existing values. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param filters A filter used to scope the tags to return. * * @return A reference to this updated object so that method calls can be chained * together. */ public DescribeTagsRequest withFilters(Filter... filters) { if (getFilters() == null) setFilters(new java.util.ArrayList<Filter>(filters.length)); for (Filter value : filters) { getFilters().add(value); } return this; } /** * A filter used to scope the tags to return. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param filters A filter used to scope the tags to return. 
* * @return A reference to this updated object so that method calls can be chained * together. */ public DescribeTagsRequest withFilters(java.util.Collection<Filter> filters) { if (filters == null) { this.filters = null; } else { com.amazonaws.internal.ListWithAutoConstructFlag<Filter> filtersCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Filter>(filters.size()); filtersCopy.addAll(filters); this.filters = filtersCopy; } return this; } /** * The token for the next set of items to return. (You received this * token from a previous call.) * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> * * @return The token for the next set of items to return. (You received this * token from a previous call.) */ public String getNextToken() { return nextToken; } /** * The token for the next set of items to return. (You received this * token from a previous call.) * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> * * @param nextToken The token for the next set of items to return. (You received this * token from a previous call.) */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * The token for the next set of items to return. (You received this * token from a previous call.) * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[&#92;u0020-&#92;uD7FF&#92;uE000-&#92;uFFFD&#92;uD800&#92;uDC00-&#92;uDBFF&#92;uDFFF\r\n\t]*<br/> * * @param nextToken The token for the next set of items to return. (You received this * token from a previous call.) * * @return A reference to this updated object so that method calls can be chained * together. 
*/ public DescribeTagsRequest withNextToken(String nextToken) { this.nextToken = nextToken; return this; } /** * The maximum number of items to return with this call. * * @return The maximum number of items to return with this call. */ public Integer getMaxRecords() { return maxRecords; } /** * The maximum number of items to return with this call. * * @param maxRecords The maximum number of items to return with this call. */ public void setMaxRecords(Integer maxRecords) { this.maxRecords = maxRecords; } /** * The maximum number of items to return with this call. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param maxRecords The maximum number of items to return with this call. * * @return A reference to this updated object so that method calls can be chained * together. */ public DescribeTagsRequest withMaxRecords(Integer maxRecords) { this.maxRecords = maxRecords; return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getFilters() != null) sb.append("Filters: " + getFilters() + ","); if (getNextToken() != null) sb.append("NextToken: " + getNextToken() + ","); if (getMaxRecords() != null) sb.append("MaxRecords: " + getMaxRecords() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getFilters() == null) ? 0 : getFilters().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); hashCode = prime * hashCode + ((getMaxRecords() == null) ? 
0 : getMaxRecords().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DescribeTagsRequest == false) return false; DescribeTagsRequest other = (DescribeTagsRequest)obj; if (other.getFilters() == null ^ this.getFilters() == null) return false; if (other.getFilters() != null && other.getFilters().equals(this.getFilters()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; if (other.getMaxRecords() == null ^ this.getMaxRecords() == null) return false; if (other.getMaxRecords() != null && other.getMaxRecords().equals(this.getMaxRecords()) == false) return false; return true; } @Override public DescribeTagsRequest clone() { return (DescribeTagsRequest) super.clone(); } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.engine; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.codecs.Codec; import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.TranslogRecoveryPerformer; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.threadpool.ThreadPool; /* * Holds all the configuration that 
is used to create an {@link Engine}.
 * Once {@link Engine} has been created with this object, changes to this
 * object will affect the {@link Engine} instance.
 */
public final class EngineConfig {
    private final ShardId shardId;
    private final TranslogRecoveryPerformer translogRecoveryPerformer;
    private final IndexSettings indexSettings;
    private final ByteSizeValue indexingBufferSize;
    // Volatile: toggled at runtime via setEnableGcDeletes() and read directly by the live engine.
    private volatile boolean enableGcDeletes = true;
    private final TimeValue flushMergesAfter;
    private final String codecName;
    private final ThreadPool threadPool;
    private final Engine.Warmer warmer;
    private final Store store;
    private final SnapshotDeletionPolicy deletionPolicy;
    private final MergePolicy mergePolicy;
    private final Analyzer analyzer;
    private final Similarity similarity;
    private final CodecService codecService;
    private final Engine.EventListener eventListener;
    private final QueryCache queryCache;
    private final QueryCachingPolicy queryCachingPolicy;
    private final long maxUnsafeAutoIdTimestamp;
    @Nullable
    private final ReferenceManager.RefreshListener refreshListeners;

    /**
     * Index setting to change the low level lucene codec used for writing new segments.
     * This setting is <b>not</b> realtime updateable.
     * This setting is also settable on the node and the index level, it's commonly used in hot/cold node archs where index is likely
     * allocated on both `kind` of nodes.
     */
    public static final Setting<String> INDEX_CODEC_SETTING = new Setting<>("index.codec", "default", s -> {
        switch (s) {
            case "default":
            case "best_compression":
            case "lucene_default":
                return s;
            default:
                if (Codec.availableCodecs().contains(s) == false) { // we don't error message the not officially supported ones
                    throw new IllegalArgumentException(
                            "unknown value for [index.codec] must be one of [default, best_compression] but was: " + s);
                }
                return s;
        }
    }, Property.IndexScope, Property.NodeScope);

    /**
     * Configures an index to optimize documents with auto generated ids for append only. If this setting is updated from
     * <code>false</code> to <code>true</code> it might not take effect immediately. In other words, disabling the optimization will be
     * immediately applied while re-enabling it might not be applied until the engine is in a safe state to do so. Depending on the
     * engine implementation, a change re-enabling this setting won't be reflected until the engine is restarted or the index is closed
     * and reopened.
     * The default is <code>true</code>
     */
    public static final Setting<Boolean> INDEX_OPTIMIZE_AUTO_GENERATED_IDS =
            Setting.boolSetting("index.optimize_auto_generated_id", true, Property.IndexScope, Property.Dynamic);

    private final TranslogConfig translogConfig;
    private final OpenMode openMode;

    /**
     * Creates a new {@link org.elasticsearch.index.engine.EngineConfig}
     */
    public EngineConfig(OpenMode openMode, ShardId shardId, ThreadPool threadPool,
                        IndexSettings indexSettings, Engine.Warmer warmer, Store store, SnapshotDeletionPolicy deletionPolicy,
                        MergePolicy mergePolicy, Analyzer analyzer,
                        Similarity similarity, CodecService codecService, Engine.EventListener eventListener,
                        TranslogRecoveryPerformer translogRecoveryPerformer, QueryCache queryCache,
                        QueryCachingPolicy queryCachingPolicy, TranslogConfig translogConfig, TimeValue flushMergesAfter,
                        ReferenceManager.RefreshListener refreshListeners, long maxUnsafeAutoIdTimestamp) {
        if (openMode == null) {
            throw new IllegalArgumentException("openMode must not be null");
        }
        this.shardId = shardId;
        this.indexSettings = indexSettings;
        this.threadPool = threadPool;
        // A null warmer is replaced with a no-op so consumers never need a null check.
        this.warmer = warmer == null ? (a) -> {} : warmer;
        this.store = store;
        this.deletionPolicy = deletionPolicy;
        this.mergePolicy = mergePolicy;
        this.analyzer = analyzer;
        this.similarity = similarity;
        this.codecService = codecService;
        this.eventListener = eventListener;
        codecName = indexSettings.getValue(INDEX_CODEC_SETTING);
        // We give IndexWriter a "huge" (256 MB) buffer, so it won't flush on its own unless the ES indexing buffer is also huge and/or
        // there are not too many shards allocated to this node. Instead, IndexingMemoryController periodically checks
        // and refreshes the most heap-consuming shards when total indexing heap usage across all shards is too high:
        indexingBufferSize = new ByteSizeValue(256, ByteSizeUnit.MB);
        this.translogRecoveryPerformer = translogRecoveryPerformer;
        this.queryCache = queryCache;
        this.queryCachingPolicy = queryCachingPolicy;
        this.translogConfig = translogConfig;
        this.flushMergesAfter = flushMergesAfter;
        this.openMode = openMode;
        this.refreshListeners = refreshListeners;
        assert maxUnsafeAutoIdTimestamp >= IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP :
                "maxUnsafeAutoIdTimestamp must be >= -1 but was " + maxUnsafeAutoIdTimestamp;
        this.maxUnsafeAutoIdTimestamp = maxUnsafeAutoIdTimestamp;
    }

    /**
     * Enables / disables gc deletes
     *
     * @see #isEnableGcDeletes()
     */
    public void setEnableGcDeletes(boolean enableGcDeletes) {
        this.enableGcDeletes = enableGcDeletes;
    }

    /**
     * Returns the initial index buffer size. This setting is only read on startup and otherwise controlled
     * by {@link IndexingMemoryController}
     */
    public ByteSizeValue getIndexingBufferSize() {
        return indexingBufferSize;
    }

    /**
     * Returns <code>true</code> iff delete garbage collection in the engine should be enabled. This setting is updateable
     * in realtime and forces a volatile read. Consumers can safely read this value directly to fetch its latest value.
     * The default is <code>true</code>
     * <p>
     * Engine GC deletion if enabled collects deleted documents from in-memory realtime data structures after a certain amount of
     * time ({@link IndexSettings#getGcDeletesInMillis()}) if enabled. Before deletes are GCed they will cause re-adding the document
     * that was deleted to fail.
     * </p>
     */
    public boolean isEnableGcDeletes() {
        return enableGcDeletes;
    }

    /**
     * Returns the {@link Codec} used in the engines {@link org.apache.lucene.index.IndexWriter}
     * <p>
     * Note: this setting is only read on startup.
     * </p>
     */
    public Codec getCodec() {
        return codecService.codec(codecName);
    }

    /**
     * Returns a thread-pool mainly used to get estimated time stamps from
     * {@link org.elasticsearch.threadpool.ThreadPool#relativeTimeInMillis()} and to schedule
     * async force merge calls on the {@link org.elasticsearch.threadpool.ThreadPool.Names#FORCE_MERGE} thread-pool
     */
    public ThreadPool getThreadPool() {
        return threadPool;
    }

    /**
     * Returns an {@link org.elasticsearch.index.engine.Engine.Warmer} used to warm new searchers before they are used for searching.
     * Never <code>null</code>; a no-op warmer is substituted at construction time.
     */
    public Engine.Warmer getWarmer() {
        return warmer;
    }

    /**
     * Returns the {@link org.elasticsearch.index.store.Store} instance that provides access to the
     * {@link org.apache.lucene.store.Directory} used for the engines {@link org.apache.lucene.index.IndexWriter} to write its index
     * files to.
     * <p>
     * Note: In order to use this instance the consumer needs to increment the stores reference before it's used the first time and hold
     * its reference until it's not needed anymore.
     * </p>
     */
    public Store getStore() {
        return store;
    }

    /**
     * Returns a {@link SnapshotDeletionPolicy} used in the engines
     * {@link org.apache.lucene.index.IndexWriter}.
     */
    public SnapshotDeletionPolicy getDeletionPolicy() {
        return deletionPolicy;
    }

    /**
     * Returns the {@link org.apache.lucene.index.MergePolicy} for the engines {@link org.apache.lucene.index.IndexWriter}
     */
    public MergePolicy getMergePolicy() {
        return mergePolicy;
    }

    /**
     * Returns a listener that should be called on engine failure
     */
    public Engine.EventListener getEventListener() {
        return eventListener;
    }

    /**
     * Returns the index settings for this index.
     */
    public IndexSettings getIndexSettings() {
        return indexSettings;
    }

    /**
     * Returns the engines shard ID
     */
    public ShardId getShardId() {
        return shardId;
    }

    /**
     * Returns the analyzer as the default analyzer in the engines {@link org.apache.lucene.index.IndexWriter}
     */
    public Analyzer getAnalyzer() {
        return analyzer;
    }

    /**
     * Returns the {@link org.apache.lucene.search.similarities.Similarity} used for indexing and searching.
     */
    public Similarity getSimilarity() {
        return similarity;
    }

    /**
     * Returns the {@link org.elasticsearch.index.shard.TranslogRecoveryPerformer} for this engine. This class is used
     * to apply transaction log operations to the engine. It encapsulates all the logic to transfer the translog entry into
     * an indexing operation.
     */
    public TranslogRecoveryPerformer getTranslogRecoveryPerformer() {
        return translogRecoveryPerformer;
    }

    /**
     * Return the cache to use for queries.
     */
    public QueryCache getQueryCache() {
        return queryCache;
    }

    /**
     * Return the policy to use when caching queries.
     */
    public QueryCachingPolicy getQueryCachingPolicy() {
        return queryCachingPolicy;
    }

    /**
     * Returns the translog config for this engine
     */
    public TranslogConfig getTranslogConfig() {
        return translogConfig;
    }

    /**
     * Returns a {@link TimeValue} at what time interval after the last write modification to the engine finished merges
     * should be automatically flushed. This is used to free up transient disk usage of potentially large segments that
     * are written after the engine became inactive from an indexing perspective.
     */
    public TimeValue getFlushMergesAfter() {
        return flushMergesAfter;
    }

    /**
     * Returns the {@link OpenMode} for this engine config.
     */
    public OpenMode getOpenMode() {
        return openMode;
    }

    /**
     * Engine open mode defines how the engine should be opened or in other words what the engine should expect
     * to recover from. We either create a brand new engine with a new index and translog or we recover from an existing index.
     * If the index exists we also have the ability open only the index and create a new transaction log which happens
     * during remote recovery since we have already transferred the index files but the translog is replayed from remote. The last
     * and safest option opens the lucene index as well as its referenced transaction log for a translog recovery.
     * See also {@link Engine#recoverFromTranslog()}
     */
    public enum OpenMode {
        CREATE_INDEX_AND_TRANSLOG,
        OPEN_INDEX_CREATE_TRANSLOG,
        OPEN_INDEX_AND_TRANSLOG;
    }

    /**
     * {@linkplain ReferenceManager.RefreshListener} instance to configure.
     */
    public ReferenceManager.RefreshListener getRefreshListeners() {
        return refreshListeners;
    }

    /**
     * Returns the max timestamp that is used to de-optimize documents with auto-generated IDs in the engine.
     * This is used to ensure we don't add duplicate documents when we assume an append only case based on auto-generated IDs.
     * When the optimization is disabled via {@link #INDEX_OPTIMIZE_AUTO_GENERATED_IDS}, {@link Long#MAX_VALUE} is returned so every
     * auto-generated ID is treated as potentially unsafe.
     */
    public long getMaxUnsafeAutoIdTimestamp() {
        return indexSettings.getValue(INDEX_OPTIMIZE_AUTO_GENERATED_IDS) ? maxUnsafeAutoIdTimestamp : Long.MAX_VALUE;
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.analytics.purview.scanning.implementation;

import com.azure.core.annotation.BodyParam;
import com.azure.core.annotation.Delete;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.Put;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.exception.HttpResponseException;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.RequestOptions;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.util.BinaryData;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in KeyVaultConnections. */
public final class KeyVaultConnectionsImpl {
    /** The proxy service used to perform REST calls. */
    private final KeyVaultConnectionsService service;

    /** The service client containing this operation class. */
    private final PurviewScanningClientImpl client;

    /**
     * Initializes an instance of KeyVaultConnectionsImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    KeyVaultConnectionsImpl(PurviewScanningClientImpl client) {
        // RestProxy generates the HTTP implementation of the annotated service interface below.
        this.service =
                RestProxy.create(
                        KeyVaultConnectionsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for PurviewScanningClientKeyVaultConnections to be used by the proxy
     * service to perform REST calls.
     */
    @Host("{Endpoint}")
    @ServiceInterface(name = "PurviewScanningClien")
    private interface KeyVaultConnectionsService {
        @Get("/azureKeyVaults/{keyVaultName}")
        @ExpectedResponses({200})
        Mono<Response<BinaryData>> get(
                @HostParam("Endpoint") String endpoint,
                @PathParam("keyVaultName") String keyVaultName,
                @QueryParam("api-version") String apiVersion,
                RequestOptions requestOptions,
                Context context);

        @Put("/azureKeyVaults/{keyVaultName}")
        @ExpectedResponses({200})
        Mono<Response<BinaryData>> create(
                @HostParam("Endpoint") String endpoint,
                @PathParam("keyVaultName") String keyVaultName,
                @QueryParam("api-version") String apiVersion,
                @BodyParam("application/json") BinaryData body,
                RequestOptions requestOptions,
                Context context);

        @Delete("/azureKeyVaults/{keyVaultName}")
        @ExpectedResponses({200, 204})
        Mono<Response<BinaryData>> delete(
                @HostParam("Endpoint") String endpoint,
                @PathParam("keyVaultName") String keyVaultName,
                @QueryParam("api-version") String apiVersion,
                RequestOptions requestOptions,
                Context context);

        @Get("/azureKeyVaults")
        @ExpectedResponses({200})
        Mono<Response<BinaryData>> listAll(
                @HostParam("Endpoint") String endpoint,
                @QueryParam("api-version") String apiVersion,
                RequestOptions requestOptions,
                Context context);

        // nextLink is a full relative URL returned by the service, hence encoded = true.
        @Get("{nextLink}")
        @ExpectedResponses({200})
        Mono<Response<BinaryData>> listAllNext(
                @PathParam(value = "nextLink", encoded = true) String nextLink,
                @HostParam("Endpoint") String endpoint,
                RequestOptions requestOptions,
                Context context);
    }

    /**
     * Gets key vault information.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * @param keyVaultName The keyVaultName parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return key vault information along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<BinaryData>> getWithResponseAsync(String keyVaultName, RequestOptions requestOptions) {
        // FluxUtil.withContext threads the reactor subscriber context into the HTTP call.
        return FluxUtil.withContext(
                context ->
                        service.get(
                                this.client.getEndpoint(),
                                keyVaultName,
                                this.client.getServiceVersion().getVersion(),
                                requestOptions,
                                context));
    }

    /**
     * Gets key vault information.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * @param keyVaultName The keyVaultName parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @param context The context to associate with this operation.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return key vault information along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<BinaryData>> getWithResponseAsync(
            String keyVaultName, RequestOptions requestOptions, Context context) {
        return service.get(
                this.client.getEndpoint(),
                keyVaultName,
                this.client.getServiceVersion().getVersion(),
                requestOptions,
                context);
    }

    /**
     * Gets key vault information.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * @param keyVaultName The keyVaultName parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return key vault information along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BinaryData> getWithResponse(String keyVaultName, RequestOptions requestOptions) {
        // Synchronous convenience wrapper: blocks on the async variant.
        return getWithResponseAsync(keyVaultName, requestOptions).block();
    }

    /**
     * Creates an instance of a key vault connection.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Request Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * @param keyVaultName The keyVaultName parameter.
     * @param body The body parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<BinaryData>> createWithResponseAsync(
            String keyVaultName, BinaryData body, RequestOptions requestOptions) {
        return FluxUtil.withContext(
                context ->
                        service.create(
                                this.client.getEndpoint(),
                                keyVaultName,
                                this.client.getServiceVersion().getVersion(),
                                body,
                                requestOptions,
                                context));
    }

    /**
     * Creates an instance of a key vault connection.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Request Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * @param keyVaultName The keyVaultName parameter.
     * @param body The body parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @param context The context to associate with this operation.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<BinaryData>> createWithResponseAsync(
            String keyVaultName, BinaryData body, RequestOptions requestOptions, Context context) {
        return service.create(
                this.client.getEndpoint(),
                keyVaultName,
                this.client.getServiceVersion().getVersion(),
                body,
                requestOptions,
                context);
    }

    /**
     * Creates an instance of a key vault connection.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Request Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * @param keyVaultName The keyVaultName parameter.
     * @param body The body parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BinaryData> createWithResponse(
            String keyVaultName, BinaryData body, RequestOptions requestOptions) {
        return createWithResponseAsync(keyVaultName, body, requestOptions).block();
    }

    /**
     * Deletes the key vault connection associated with the account.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * @param keyVaultName The keyVaultName parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<BinaryData>> deleteWithResponseAsync(String keyVaultName, RequestOptions requestOptions) {
        return FluxUtil.withContext(
                context ->
                        service.delete(
                                this.client.getEndpoint(),
                                keyVaultName,
                                this.client.getServiceVersion().getVersion(),
                                requestOptions,
                                context));
    }

    /**
     * Deletes the key vault connection associated with the account.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * @param keyVaultName The keyVaultName parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @param context The context to associate with this operation.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<BinaryData>> deleteWithResponseAsync(
            String keyVaultName, RequestOptions requestOptions, Context context) {
        return service.delete(
                this.client.getEndpoint(),
                keyVaultName,
                this.client.getServiceVersion().getVersion(),
                requestOptions,
                context);
    }

    /**
     * Deletes the key vault connection associated with the account.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     id: String
     *     name: String
     *     properties: {
     *         baseUrl: String
     *         description: String
     *     }
     * }
     * }</pre>
     *
     * @param keyVaultName The keyVaultName parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<BinaryData> deleteWithResponse(String keyVaultName, RequestOptions requestOptions) {
        return deleteWithResponseAsync(keyVaultName, requestOptions).block();
    }

    /**
     * List key vault connections in account.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             id: String
     *             name: String
     *             properties: {
     *                 baseUrl: String
     *                 description: String
     *             }
     *         }
     *     ]
     *     nextLink: String
     *     count: Long
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<BinaryData>> listAllSinglePageAsync(RequestOptions requestOptions) {
        // Fetch the first page and adapt the raw response into a PagedResponse by
        // extracting the item list ("value") and continuation token ("nextLink").
        return FluxUtil.withContext(
                        context ->
                                service.listAll(
                                        this.client.getEndpoint(),
                                        this.client.getServiceVersion().getVersion(),
                                        requestOptions,
                                        context))
                .map(
                        res ->
                                new PagedResponseBase<>(
                                        res.getRequest(),
                                        res.getStatusCode(),
                                        res.getHeaders(),
                                        getValues(res.getValue(), "value"),
                                        getNextLink(res.getValue(), "nextLink"),
                                        null));
    }

    /**
     * List key vault connections in account.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             id: String
     *             name: String
     *             properties: {
     *                 baseUrl: String
     *                 description: String
     *             }
     *         }
     *     ]
     *     nextLink: String
     *     count: Long
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @param context The context to associate with this operation.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<BinaryData>> listAllSinglePageAsync(RequestOptions requestOptions, Context context) {
        return service.listAll(
                        this.client.getEndpoint(), this.client.getServiceVersion().getVersion(), requestOptions, context)
                .map(
                        res ->
                                new PagedResponseBase<>(
                                        res.getRequest(),
                                        res.getStatusCode(),
                                        res.getHeaders(),
                                        getValues(res.getValue(), "value"),
                                        getNextLink(res.getValue(), "nextLink"),
                                        null));
    }

    /**
     * List key vault connections in account.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             id: String
     *             name: String
     *             properties: {
     *                 baseUrl: String
     *                 description: String
     *             }
     *         }
     *     ]
     *     nextLink: String
     *     count: Long
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<BinaryData> listAllAsync(RequestOptions requestOptions) {
        // First page via listAll, subsequent pages by following nextLink.
        return new PagedFlux<>(
                () -> listAllSinglePageAsync(requestOptions), nextLink -> listAllNextSinglePageAsync(nextLink, null));
    }

    /**
     * List key vault connections in account.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             id: String
     *             name: String
     *             properties: {
     *                 baseUrl: String
     *                 description: String
     *             }
     *         }
     *     ]
     *     nextLink: String
     *     count: Long
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @param context The context to associate with this operation.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<BinaryData> listAllAsync(RequestOptions requestOptions, Context context) {
        return new PagedFlux<>(
                () -> listAllSinglePageAsync(requestOptions, context),
                nextLink -> listAllNextSinglePageAsync(nextLink, null, context));
    }

    /**
     * List key vault connections in account.
     *
     * <p><strong>Query Parameters</strong>
     *
     * <table border="1">
     *     <caption>Query Parameters</caption>
     *     <tr><th>Name</th><th>Type</th><th>Required</th><th>Description</th></tr>
     *     <tr><td>apiVersion</td><td>String</td><td>Yes</td><td>Api Version</td></tr>
     * </table>
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             id: String
     *             name: String
     *             properties: {
     *                 baseUrl: String
     *                 description: String
     *             }
     *         }
     *     ]
     *     nextLink: String
     *     count: Long
     * }
     * }</pre>
     *
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BinaryData> listAll(RequestOptions requestOptions) {
        return new PagedIterable<>(listAllAsync(requestOptions));
    }

    /**
     * Get the next page of items.
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             id: String
     *             name: String
     *             properties: {
     *                 baseUrl: String
     *                 description: String
     *             }
     *         }
     *     ]
     *     nextLink: String
     *     count: Long
     * }
     * }</pre>
     *
     * @param nextLink The nextLink parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<PagedResponse<BinaryData>> listAllNextSinglePageAsync(String nextLink, RequestOptions requestOptions) {
        return FluxUtil.withContext(
                        context -> service.listAllNext(nextLink, this.client.getEndpoint(), requestOptions, context))
                .map(
                        res ->
                                new PagedResponseBase<>(
                                        res.getRequest(),
                                        res.getStatusCode(),
                                        res.getHeaders(),
                                        getValues(res.getValue(), "value"),
                                        getNextLink(res.getValue(), "nextLink"),
                                        null));
    }

    /**
     * Get the next page of items.
     *
     * <p><strong>Response Body Schema</strong>
     *
     * <pre>{@code
     * {
     *     value: [
     *         {
     *             id: String
     *             name: String
     *             properties: {
     *                 baseUrl: String
     *                 description: String
     *             }
     *         }
     *     ]
     *     nextLink: String
     *     count: Long
     * }
     * }</pre>
     *
     * @param nextLink The nextLink parameter.
     * @param requestOptions The options to configure the HTTP request before HTTP client sends it.
     * @param context The context to associate with this operation.
     * @throws HttpResponseException thrown if the request is rejected by server.
     * @return the response body along with {@link PagedResponse} on successful completion of {@link Mono}.
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PagedResponse<BinaryData>> listAllNextSinglePageAsync( String nextLink, RequestOptions requestOptions, Context context) { return service.listAllNext(nextLink, this.client.getEndpoint(), requestOptions, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), getValues(res.getValue(), "value"), getNextLink(res.getValue(), "nextLink"), null)); } private List<BinaryData> getValues(BinaryData binaryData, String path) { try { Map<?, ?> obj = binaryData.toObject(Map.class); List<?> values = (List<?>) obj.get(path); return values.stream().map(BinaryData::fromObject).collect(Collectors.toList()); } catch (RuntimeException e) { return null; } } private String getNextLink(BinaryData binaryData, String path) { try { Map<?, ?> obj = binaryData.toObject(Map.class); return (String) obj.get(path); } catch (RuntimeException e) { return null; } } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.rds.model;

import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request to start a database activity stream on an RDS resource.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/StartActivityStream" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StartActivityStreamRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The Amazon Resource Name (ARN) of the DB cluster, for example
     * <code>arn:aws:rds:us-east-1:12345667890:cluster:das-cluster</code>.
     * </p>
     */
    private String resourceArn;
    /**
     * <p>
     * Specifies the mode of the database activity stream. Database events such as a change or access generate an
     * activity stream event. The database session can handle these events either synchronously or asynchronously.
     * </p>
     */
    private String mode;
    /**
     * <p>
     * The AWS KMS key identifier for encrypting messages in the database activity stream. The key identifier can be
     * either a key ID, a key ARN, or a key alias.
     * </p>
     */
    private String kmsKeyId;
    /**
     * <p>
     * Specifies whether or not the database activity stream is to start as soon as possible, regardless of the
     * maintenance window for the database.
     * </p>
     */
    private Boolean applyImmediately;

    /**
     * Sets the Amazon Resource Name (ARN) of the DB cluster.
     *
     * @param resourceArn
     *        The Amazon Resource Name (ARN) of the DB cluster, for example
     *        <code>arn:aws:rds:us-east-1:12345667890:cluster:das-cluster</code>.
     */
    public void setResourceArn(String resourceArn) {
        this.resourceArn = resourceArn;
    }

    /**
     * Returns the Amazon Resource Name (ARN) of the DB cluster.
     *
     * @return The Amazon Resource Name (ARN) of the DB cluster, for example
     *         <code>arn:aws:rds:us-east-1:12345667890:cluster:das-cluster</code>.
     */
    public String getResourceArn() {
        return this.resourceArn;
    }

    /**
     * Fluent setter for the Amazon Resource Name (ARN) of the DB cluster.
     *
     * @param resourceArn
     *        The Amazon Resource Name (ARN) of the DB cluster, for example
     *        <code>arn:aws:rds:us-east-1:12345667890:cluster:das-cluster</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public StartActivityStreamRequest withResourceArn(String resourceArn) {
        setResourceArn(resourceArn);
        return this;
    }

    /**
     * Sets the mode of the database activity stream (synchronous or asynchronous event handling).
     *
     * @param mode
     *        Specifies the mode of the database activity stream. Database events such as a change or access generate
     *        an activity stream event. The database session can handle these events either synchronously or
     *        asynchronously.
     * @see ActivityStreamMode
     */
    public void setMode(String mode) {
        this.mode = mode;
    }

    /**
     * Returns the mode of the database activity stream.
     *
     * @return Specifies the mode of the database activity stream. Database events such as a change or access generate
     *         an activity stream event. The database session can handle these events either synchronously or
     *         asynchronously.
     * @see ActivityStreamMode
     */
    public String getMode() {
        return this.mode;
    }

    /**
     * Fluent setter for the mode of the database activity stream.
     *
     * @param mode
     *        Specifies the mode of the database activity stream. Database events such as a change or access generate
     *        an activity stream event. The database session can handle these events either synchronously or
     *        asynchronously.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ActivityStreamMode
     */
    public StartActivityStreamRequest withMode(String mode) {
        setMode(mode);
        return this;
    }

    /**
     * Fluent setter accepting the typed {@link ActivityStreamMode} enum; stored as its string form.
     *
     * @param mode
     *        Specifies the mode of the database activity stream. Database events such as a change or access generate
     *        an activity stream event. The database session can handle these events either synchronously or
     *        asynchronously.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ActivityStreamMode
     */
    public StartActivityStreamRequest withMode(ActivityStreamMode mode) {
        this.mode = mode.toString();
        return this;
    }

    /**
     * Sets the AWS KMS key identifier for encrypting messages in the activity stream.
     *
     * @param kmsKeyId
     *        The AWS KMS key identifier for encrypting messages in the database activity stream. The key identifier
     *        can be either a key ID, a key ARN, or a key alias.
     */
    public void setKmsKeyId(String kmsKeyId) {
        this.kmsKeyId = kmsKeyId;
    }

    /**
     * Returns the AWS KMS key identifier for encrypting messages in the activity stream.
     *
     * @return The AWS KMS key identifier for encrypting messages in the database activity stream. The key identifier
     *         can be either a key ID, a key ARN, or a key alias.
     */
    public String getKmsKeyId() {
        return this.kmsKeyId;
    }

    /**
     * Fluent setter for the AWS KMS key identifier.
     *
     * @param kmsKeyId
     *        The AWS KMS key identifier for encrypting messages in the database activity stream. The key identifier
     *        can be either a key ID, a key ARN, or a key alias.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public StartActivityStreamRequest withKmsKeyId(String kmsKeyId) {
        setKmsKeyId(kmsKeyId);
        return this;
    }

    /**
     * Sets whether the activity stream starts immediately, ignoring the maintenance window.
     *
     * @param applyImmediately
     *        Specifies whether or not the database activity stream is to start as soon as possible, regardless of the
     *        maintenance window for the database.
     */
    public void setApplyImmediately(Boolean applyImmediately) {
        this.applyImmediately = applyImmediately;
    }

    /**
     * Returns whether the activity stream starts immediately, ignoring the maintenance window.
     *
     * @return Specifies whether or not the database activity stream is to start as soon as possible, regardless of
     *         the maintenance window for the database.
     */
    public Boolean getApplyImmediately() {
        return this.applyImmediately;
    }

    /**
     * Fluent setter for the apply-immediately flag.
     *
     * @param applyImmediately
     *        Specifies whether or not the database activity stream is to start as soon as possible, regardless of the
     *        maintenance window for the database.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public StartActivityStreamRequest withApplyImmediately(Boolean applyImmediately) {
        setApplyImmediately(applyImmediately);
        return this;
    }

    /**
     * Boolean-style accessor for the apply-immediately flag (same value as {@link #getApplyImmediately()}).
     *
     * @return Specifies whether or not the database activity stream is to start as soon as possible, regardless of
     *         the maintenance window for the database.
     */
    public Boolean isApplyImmediately() {
        return this.applyImmediately;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format is kept byte-compatible with the generated original:
        // "{Field: value," per non-null field, no comma after ApplyImmediately.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getResourceArn() != null)
            sb.append("ResourceArn: ").append(getResourceArn()).append(",");
        if (getMode() != null)
            sb.append("Mode: ").append(getMode()).append(",");
        if (getKmsKeyId() != null)
            sb.append("KmsKeyId: ").append(getKmsKeyId()).append(",");
        if (getApplyImmediately() != null)
            sb.append("ApplyImmediately: ").append(getApplyImmediately());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so this also covers the null check.
        if (!(obj instanceof StartActivityStreamRequest))
            return false;
        StartActivityStreamRequest other = (StartActivityStreamRequest) obj;
        // Objects.equals reproduces the original xor-null + equals pattern per field.
        return Objects.equals(getResourceArn(), other.getResourceArn())
                && Objects.equals(getMode(), other.getMode())
                && Objects.equals(getKmsKeyId(), other.getKmsKeyId())
                && Objects.equals(getApplyImmediately(), other.getApplyImmediately());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same seed-1, multiplier-31, null-as-0 accumulation as the
        // original hand-rolled loop, so the hash value is unchanged.
        return Objects.hash(getResourceArn(), getMode(), getKmsKeyId(), getApplyImmediately());
    }

    @Override
    public StartActivityStreamRequest clone() {
        return (StartActivityStreamRequest) super.clone();
    }

}
package konoha.script; import nez.ast.Symbol; public interface CommonSymbols { // public final static Symbol _anno = Symbol.tag("anno"); public final static Symbol _name = Symbol.tag("name"); public final static Symbol _super = Symbol.tag("super"); public final static Symbol _impl = Symbol.tag("impl"); public final static Symbol _body = Symbol.tag("body"); public final static Symbol _type = Symbol.tag("type"); public final static Symbol _expr = Symbol.tag("expr"); public final static Symbol _list = Symbol.tag("list"); public final static Symbol _param = Symbol.tag("param"); // public final static Symbol _throws = Symbol.tag("throws"); public final static Symbol _base = Symbol.tag("base"); public final static Symbol _extends = Symbol.tag("extends"); public final static Symbol _cond = Symbol.tag("cond"); public final static Symbol _msg = Symbol.tag("msg"); public final static Symbol _then = Symbol.tag("then"); public final static Symbol _else = Symbol.tag("else"); public final static Symbol _init = Symbol.tag("init"); public final static Symbol _iter = Symbol.tag("iter"); public final static Symbol _label = Symbol.tag("label"); public final static Symbol _try = Symbol.tag("try"); public final static Symbol _catch = Symbol.tag("catch"); public final static Symbol _finally = Symbol.tag("finally"); public final static Symbol _left = Symbol.tag("left"); public final static Symbol _right = Symbol.tag("right"); public final static Symbol _recv = Symbol.tag("recv"); public final static Symbol _size = Symbol.tag("size"); public final static Symbol _prefix = Symbol.tag("prefix"); public final static Symbol _Name = Symbol.tag("Name"); public final static Symbol _ArrayName = Symbol.tag("ArrayName"); public final static Symbol _ArrayType = Symbol.tag("ArrayType"); public final static Symbol _GenericType = Symbol.tag("GenericType"); public final static Symbol _TypeOf = Symbol.tag("TypeOf"); public final static Symbol _NewArray2 = Symbol.tag("NewArray2"); public final static 
Symbol _Functor = Symbol.tag("Functor"); public final static Symbol _GetFreeVar = Symbol.tag("GetFreeVar"); public final static Symbol _SetFreeVar = Symbol.tag("SetFreeVar"); public final static Symbol _Const = Symbol.tag("Const"); public final static Symbol _NullCheck = Symbol.tag("NullCheck"); public final static Symbol _NonNullCheck = Symbol.tag("NonNullCheck"); public final static Symbol _Cast = Symbol.tag("Cast"); public final static Symbol _UpCast = Symbol.tag("UpCast"); public final static Symbol _DownCast = Symbol.tag("DownCast"); public final static Symbol _Error = Symbol.tag("Error"); // type // error public final static Symbol _Source = Symbol.tag("Source"); public final static Symbol _Modifiers = Symbol.tag("Modifiers"); public final static Symbol _Annotation = Symbol.tag("Annotation"); public final static Symbol _CommaList = Symbol.tag("CommaList"); public final static Symbol _KeyValue = Symbol.tag("KeyValue"); public final static Symbol _List = Symbol.tag("List"); public final static Symbol _Import = Symbol.tag("Import"); public final static Symbol _WildCardName = Symbol.tag("WildCardName"); public final static Symbol _ClassDecl = Symbol.tag("ClassDecl"); public final static Symbol _Block = Symbol.tag("Block"); public final static Symbol _InstanceInisializer = Symbol.tag("InstanceInisializer"); public final static Symbol _StaticInitializer = Symbol.tag("StaticInitializer"); public final static Symbol _Empty = Symbol.tag("Empty"); public final static Symbol _InterfaceDecl = Symbol.tag("InterfaceDecl"); public final static Symbol _EnumDecl = Symbol.tag("EnumDecl"); public final static Symbol _Enum = Symbol.tag("Enum"); public final static Symbol _VarDecl = Symbol.tag("VarDecl"); public final static Symbol _MultiVarDecl = Symbol.tag("MultiVarDecl"); public final static Symbol _ArrayDecl = Symbol.tag("ArrayDecl"); public final static Symbol _Array = Symbol.tag("Array"); public final static Symbol _FieldDecl = Symbol.tag("FieldDecl"); public final static 
Symbol _MethodDecl = Symbol.tag("MethodDecl"); public final static Symbol _Param = Symbol.tag("Param"); public final static Symbol _VarParam = Symbol.tag("VarParam"); public final static Symbol _Throws = Symbol.tag("Throws"); public final static Symbol _Constructor = Symbol.tag("Constructor"); public final static Symbol _ExplicitConstructorInvocation = Symbol.tag("ExplicitConstructorInvocation"); public final static Symbol _FuncDecl = Symbol.tag("FuncDecl"); public final static Symbol _Annotated = Symbol.tag("Annotated"); public final static Symbol _TypeBound = Symbol.tag("TypeBound"); public final static Symbol _TypeLists = Symbol.tag("TypeLists"); public final static Symbol _TypeArguments = Symbol.tag("TypeArguments"); public final static Symbol _TWildCard = Symbol.tag("TWildCard"); public final static Symbol _UpperBound = Symbol.tag("UpperBound"); public final static Symbol _LowerBound = Symbol.tag("LowerBound"); public final static Symbol _VoidType = Symbol.tag("VoidType"); public final static Symbol _Assert = Symbol.tag("Assert"); public final static Symbol _If = Symbol.tag("If"); public final static Symbol _While = Symbol.tag("While"); public final static Symbol _DoWhile = Symbol.tag("DoWhile"); public final static Symbol _For = Symbol.tag("For"); public final static Symbol _ForEach = Symbol.tag("ForEach"); public final static Symbol _Continue = Symbol.tag("Continue"); public final static Symbol _Break = Symbol.tag("Break"); public final static Symbol _Return = Symbol.tag("Return"); public final static Symbol _Throw = Symbol.tag("Throw"); public final static Symbol _Synchronized = Symbol.tag("Synchronized"); public final static Symbol _Label = Symbol.tag("Label"); public final static Symbol _Expression = Symbol.tag("Expression"); public final static Symbol _Try = Symbol.tag("Try"); public final static Symbol _Catch = Symbol.tag("Catch"); public final static Symbol _Switch = Symbol.tag("Switch"); public final static Symbol _SwitchCase = 
Symbol.tag("SwitchCase"); public final static Symbol _SwitchDefault = Symbol.tag("SwitchDefault"); public final static Symbol _Assign = Symbol.tag("Assign"); public final static Symbol _AssignMul = Symbol.tag("AssignMul"); public final static Symbol _AssignDiv = Symbol.tag("AssignDiv"); public final static Symbol _AssignMod = Symbol.tag("AssignMod"); public final static Symbol _AssignAdd = Symbol.tag("AssignAdd"); public final static Symbol _AssignSub = Symbol.tag("AssignSub"); public final static Symbol _AssignLeftShift = Symbol.tag("AssignLeftShift"); public final static Symbol _AssignRightShift = Symbol.tag("AssignRightShift"); public final static Symbol _AssignArithmeticRightShift = Symbol.tag("AssignArithmeticRightShift"); public final static Symbol _AssignLogicalRightShift = Symbol.tag("AssignLogicalRightShift"); public final static Symbol _AssignBitwiseAnd = Symbol.tag("AssignBitwiseAnd"); public final static Symbol _AssignBitwiseXOr = Symbol.tag("AssignBitwiseXOr"); public final static Symbol _AssignBitwiseOr = Symbol.tag("AssignBitwiseOr"); public final static Symbol _Conditional = Symbol.tag("Conditional"); public final static Symbol _Or = Symbol.tag("Or"); public final static Symbol _And = Symbol.tag("And"); public final static Symbol _BitwiseNot = Symbol.tag("BitwiseNot"); public final static Symbol _BitwiseOr = Symbol.tag("BitwiseOr"); public final static Symbol _BitwiseXor = Symbol.tag("BitwiseXor"); public final static Symbol _BitwiseAnd = Symbol.tag("BitwiseAnd"); public final static Symbol _LogicalNot = Symbol.tag("LogicalNot"); public final static Symbol _LogicalAnd = Symbol.tag("LogicalAnd"); public final static Symbol _LogicalOr = Symbol.tag("LogicalOr"); public final static Symbol _Equals = Symbol.tag("Equals"); public final static Symbol _NotEquals = Symbol.tag("NotEquals"); public final static Symbol _LessThanEquals = Symbol.tag("LessThanEquals"); public final static Symbol _GreaterThanEquals = Symbol.tag("GreaterThanEquals"); public final 
static Symbol _LessThan = Symbol.tag("LessThan"); public final static Symbol _GreaterThan = Symbol.tag("GreaterThan"); public final static Symbol _InstanceOf = Symbol.tag("InstanceOf"); public final static Symbol _LeftShift = Symbol.tag("LeftShift"); public final static Symbol _RightShift = Symbol.tag("RightShift"); public final static Symbol _ArithmeticRightShift = Symbol.tag("ArithmeticRightShift"); public final static Symbol _LogicalRightShift = Symbol.tag("LogicalRightShift"); public final static Symbol _Add = Symbol.tag("Add"); public final static Symbol _Sub = Symbol.tag("Sub"); public final static Symbol _Mul = Symbol.tag("Mul"); public final static Symbol _Div = Symbol.tag("Div"); public final static Symbol _Mod = Symbol.tag("Mod"); public final static Symbol _Inc = Symbol.tag("Inc"); public final static Symbol _Dec = Symbol.tag("Dec"); public final static Symbol _SuffixInc = Symbol.tag("SuffixInc"); public final static Symbol _SuffixDec = Symbol.tag("SuffixDec"); public final static Symbol _PreInc = Symbol.tag("PreInc"); public final static Symbol _PreDec = Symbol.tag("PreDec"); public final static Symbol _PrefixInc = Symbol.tag("PrefixInc"); public final static Symbol _PrefixDec = Symbol.tag("PrefixDec"); public final static Symbol _Plus = Symbol.tag("Plus"); public final static Symbol _Minus = Symbol.tag("Minus"); public final static Symbol _Compl = Symbol.tag("Compl"); public final static Symbol _Not = Symbol.tag("Not"); public final static Symbol _MethodApply = Symbol.tag("MethodApply"); public final static Symbol _Indexer = Symbol.tag("Indexer"); public final static Symbol _Field = Symbol.tag("Field"); public final static Symbol _This = Symbol.tag("This"); public final static Symbol _Super = Symbol.tag("Super"); public final static Symbol _Final = Symbol.tag("Final"); public final static Symbol _Apply = Symbol.tag("Apply"); public final static Symbol _New = Symbol.tag("New"); public final static Symbol _NewArray = Symbol.tag("NewArray"); public final 
static Symbol _Lambda = Symbol.tag("Lambda"); public final static Symbol _AddArgumentExpressionList = Symbol.tag("AddArgumentExpressionList"); public final static Symbol _LambdaParameters = Symbol.tag("LambdaParameters"); public final static Symbol _InferredFormalParameterList = Symbol.tag("InferredFormalParameterList"); public final static Symbol _QualifiedName = Symbol.tag("QualifiedName"); public final static Symbol _Null = Symbol.tag("Null"); public final static Symbol _True = Symbol.tag("True"); public final static Symbol _False = Symbol.tag("False"); public final static Symbol _Long = Symbol.tag("Long"); public final static Symbol _Float = Symbol.tag("Float"); public final static Symbol _Integer = Symbol.tag("Integer"); public final static Symbol _Double = Symbol.tag("Double"); public final static Symbol _String = Symbol.tag("String"); public final static Symbol _Character = Symbol.tag("Character"); public final static Symbol _PackageDeclaration = Symbol.tag("PackageDeclaration"); public final static Symbol _ImportDeclaration = Symbol.tag("ImportDeclaration"); public final static Symbol _StaticImportDeclaration = Symbol.tag("StaticImportDeclaration"); public final static Symbol _VarName = Symbol.tag("VarName"); public final static Symbol _Tvoid = Symbol.tag("Tvoid"); public final static Symbol _Tbyte = Symbol.tag("Tbyte"); public final static Symbol _Tshort = Symbol.tag("Tshort"); public final static Symbol _Tint = Symbol.tag("Tint"); public final static Symbol _Tlong = Symbol.tag("Tlong"); public final static Symbol _Tfloat = Symbol.tag("Tfloat"); public final static Symbol _Tdouble = Symbol.tag("Tdouble"); public final static Symbol _Tarray = Symbol.tag("Tarray"); public final static Symbol _Comma = Symbol.tag("Comma"); public final static Symbol _HashIn = Symbol.tag("HashIn"); public final static Symbol _Object = Symbol.tag("Object"); public final static Symbol _Property = Symbol.tag("Property"); }
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2022 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.model.impl.jdbc.exec; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.DBConstants; import org.jkiss.dbeaver.model.DBUtils; import org.jkiss.dbeaver.model.DBValueFormatting; import org.jkiss.dbeaver.model.data.DBDDataFormatter; import org.jkiss.dbeaver.model.data.DBDDataFormatterProfile; import org.jkiss.dbeaver.model.data.DBDDisplayFormat; import org.jkiss.dbeaver.model.exec.DBCException; import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement; import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet; import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession; import org.jkiss.dbeaver.model.qm.QMUtils; import org.jkiss.dbeaver.model.sql.DBSQLException; import org.jkiss.dbeaver.model.sql.SQLUtils; import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; import java.net.URL; import java.sql.*; import java.util.Calendar; import java.util.LinkedHashMap; import java.util.Map; /** * Manageable prepared statement. * Stores information about execution in query manager and operated progress monitor. 
*/ public class JDBCPreparedStatementImpl extends JDBCStatementImpl<PreparedStatement> implements JDBCPreparedStatement { private static final Log log = Log.getLog(JDBCPreparedStatementImpl.class); private static final Object NULL_VALUE = new Object(); private Map<Object, Object> paramMap; protected static class ContentParameter { String displayString; ContentParameter(JDBCSession session, Object value) { if (value instanceof RowId) { displayString = SQLUtils.quoteString(session.getDataSource(), new String(((RowId) value).getBytes())); } else if (value instanceof byte[]) { byte[] bytes = (byte[])value; displayString = DBValueFormatting.formatBinaryString(session.getDataSource(), bytes, DBDDisplayFormat.NATIVE, true); } else { displayString = "DATA(" + (value == null ? DBConstants.NULL_VALUE_LABEL : value.getClass().getSimpleName()) + ")"; } } @Override public String toString() { return displayString; } } JDBCPreparedStatementImpl( @NotNull JDBCSession connection, @NotNull PreparedStatement original, String query, boolean disableLogging) { super(connection, original, disableLogging); setQueryString(query); } @Override public PreparedStatement getOriginal() { return original; } @Override public void close() { if (paramMap != null) { paramMap.clear(); paramMap = null; } super.close(); } public String getFormattedQuery() { if (paramMap == null) { return getQueryString(); } else { String q = getQueryString(); if (q == null) { return ""; } int length = q.length(); StringBuilder formatted = new StringBuilder(length * 2); int paramIndex = 0; for (int i = 0; i < length; i++) { char c = q.charAt(i); switch (c) { case '?': { paramIndex++; Object paramValue = paramMap.get(paramIndex); if (paramValue != null) { formatted.append(formatParameterValue(paramValue)); continue; } break; } case ':': { // FIXME: process named parameters break; } case '\'': case '"': { formatted.append(c); for (int k = i + 1; k < length; k++) { char c2 = q.charAt(k); if (c2 == c && q.charAt(k - 1) != 
// NOTE(review): the fragment below, up to and including the first run of closing
// braces, is the tail of a query-text formatting routine whose beginning lies
// before this chunk; it is reproduced verbatim. It appends remaining characters
// and returns the accumulated string.
'\\') { i = k; c = c2; break; } else { formatted.append(c2); } } break; } } formatted.append(c); } return formatted.toString(); } }

/**
 * Renders a bound parameter value as SQL-literal text for logging/display.
 * CharSequences are quoted via the data source's quoting rules, numbers are
 * converted to their native textual form, and {@code java.util.Date} values are
 * formatted through the session's data formatter profile (date, time, or
 * timestamp-with-TZ formatter depending on the concrete subclass). On a
 * formatting error the method falls through to {@code value.toString()}.
 * The {@code NULL_VALUE} sentinel renders as the literal {@code NULL}.
 */
@NotNull
private String formatParameterValue(Object value) {
    if (value instanceof CharSequence) {
        return SQLUtils.quoteString(connection.getDataSource(), value.toString());
    } else if (value instanceof Number) {
        return DBValueFormatting.convertNumberToNativeString((Number) value, false);
    } else if (value instanceof java.util.Date) {
        try {
            DBDDataFormatterProfile formatterProfile = getSession().getDataFormatterProfile();
            if (value instanceof Date) {
                // java.sql.Date -> date-only formatter
                return SQLUtils.quoteString(connection.getDataSource(), formatterProfile.createFormatter(DBDDataFormatter.TYPE_NAME_DATE, null).formatValue(value));
            } else if (value instanceof Time) {
                // java.sql.Time -> time-only formatter
                return SQLUtils.quoteString(connection.getDataSource(), formatterProfile.createFormatter(DBDDataFormatter.TYPE_NAME_TIME, null).formatValue(value));
            } else {
                // java.sql.Timestamp and any other java.util.Date subclass
                return SQLUtils.quoteString(connection.getDataSource(), formatterProfile.createFormatter(DBDDataFormatter.TYPE_NAME_TIMESTAMP_TZ, null).formatValue(value));
            }
        } catch (Exception e) {
            log.debug("Error formatting date [" + value + "]", e);
        }
    } else if (value == NULL_VALUE) {
        return "NULL";
    }
    return value.toString();
}

/**
 * Records a parameter binding for query-manager (QM) logging.
 * Null values are replaced by the {@code NULL_VALUE} sentinel and non-atomic
 * values are wrapped in a {@code ContentParameter}; the binding is stored in
 * the lazily created, insertion-ordered {@code paramMap} and, when session
 * logging is on, forwarded to the default QM handler.
 * NOTE(review): {@code paramMap}, {@code NULL_VALUE}, {@code connection} and
 * {@code query} are fields declared outside this chunk.
 */
protected void handleStatementBind(Object parameter, @Nullable Object o) {
    if (isQMLoggingEnabled()) {
        // Save parameters
        if (o == null) {
            o = NULL_VALUE;
        } else if (!DBUtils.isAtomicParameter(o)) {
            // Wrap complex things
            o = new ContentParameter(connection, o);
        }
        if (paramMap == null) {
            paramMap = new LinkedHashMap<>();
        }
        paramMap.put(parameter, o);
        if (getSession().isLoggingEnabled()) {
            QMUtils.getDefaultHandler().handleStatementBind(this, parameter, o);
        }
    }
}

////////////////////////////////////////////////////////////////////
// DBC Statement overrides
////////////////////////////////////////////////////////////////////

/**
 * DBC-level execute: delegates to {@link #execute()} and translates
 * {@link SQLException} into the framework's {@code DBSQLException},
 * preserving the failing query text and execution context.
 */
@Override
public boolean executeStatement() throws DBCException {
    try {
        return execute();
    } catch (SQLException e) {
        throw new DBSQLException(query, e, connection.getExecutionContext());
    }
}

/**
 * DBC-level batch add: delegates to {@link #addBatch()} and translates
 * {@link SQLException} into {@code DBCException}.
 */
@Override
public void addToBatch() throws DBCException {
    try {
        addBatch();
    } catch (SQLException e) {
        throw new DBCException(e, connection.getExecutionContext());
    }
}

////////////////////////////////////////////////////////////////////
// Statement overrides
////////////////////////////////////////////////////////////////////

// The three execute* overrides share one shape: notify beforeExecute(),
// delegate to the wrapped (original) statement, route any Throwable through
// handleExecuteError(), and always fire afterExecute() in a finally block.

@Override
public JDBCResultSet executeQuery() throws SQLException {
    this.beforeExecute();
    try {
        // Wrap the driver's result set in the framework's JDBCResultSet
        return createResultSetImpl(getOriginal().executeQuery());
    } catch (Throwable e) {
        throw super.handleExecuteError(e);
    } finally {
        super.afterExecute();
    }
}

@Override
public int executeUpdate() throws SQLException {
    this.beforeExecute();
    try {
        return getOriginal().executeUpdate();
    } catch (Throwable e) {
        throw super.handleExecuteError(e);
    } finally {
        super.afterExecute();
    }
}

@Override
public boolean execute() throws SQLException {
    this.beforeExecute();
    try {
        return getOriginal().execute();
    } catch (Throwable e) {
        throw super.handleExecuteError(e);
    } finally {
        super.afterExecute();
    }
}

// Every JDBC setter below delegates to the wrapped statement and then records
// the bound value through handleStatementBind() so QM logging sees it.

@Override
public void setNull(int parameterIndex, int sqlType) throws SQLException {
    getOriginal().setNull(parameterIndex, sqlType);
    handleStatementBind(parameterIndex, null);
}

@Override
public void setBoolean(int parameterIndex, boolean x) throws SQLException {
    getOriginal().setBoolean(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setByte(int parameterIndex, byte x) throws SQLException {
    getOriginal().setByte(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setShort(int parameterIndex, short x) throws SQLException {
    getOriginal().setShort(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setInt(int parameterIndex, int x) throws SQLException {
    getOriginal().setInt(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setLong(int parameterIndex, long x) throws SQLException {
    getOriginal().setLong(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setFloat(int parameterIndex, float x) throws SQLException {
    getOriginal().setFloat(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setDouble(int parameterIndex, double x) throws SQLException {
    getOriginal().setDouble(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException {
    getOriginal().setBigDecimal(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setString(int parameterIndex, String x) throws SQLException {
    getOriginal().setString(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setBytes(int parameterIndex, byte[] x) throws SQLException {
    getOriginal().setBytes(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setDate(int parameterIndex, Date x) throws SQLException {
    getOriginal().setDate(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setTime(int parameterIndex, Time x) throws SQLException {
    getOriginal().setTime(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setTimestamp( int parameterIndex, Timestamp x) throws SQLException {
    getOriginal().setTimestamp(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException {
    getOriginal().setAsciiStream(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

@Override
@Deprecated
public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException {
    getOriginal().setUnicodeStream(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException {
    getOriginal().setBinaryStream(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

// Only clears the delegate; the recorded paramMap is intentionally untouched here.
@Override
public void clearParameters() throws SQLException {
    getOriginal().clearParameters();
}

@Override
public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException {
    getOriginal().setObject(parameterIndex, x, targetSqlType);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setObject(int parameterIndex, Object x) throws SQLException {
    getOriginal().setObject(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void addBatch() throws SQLException {
    getOriginal().addBatch();
}

@Override
public void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException {
    getOriginal().setCharacterStream(parameterIndex, reader, length);
    handleStatementBind(parameterIndex, reader);
}

@Override
public void setRef(int parameterIndex, Ref x) throws SQLException {
    getOriginal().setRef(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setBlob(int parameterIndex, Blob x) throws SQLException {
    getOriginal().setBlob(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setClob(int parameterIndex, Clob x) throws SQLException {
    getOriginal().setClob(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setArray(int parameterIndex, Array x) throws SQLException {
    getOriginal().setArray(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public ResultSetMetaData getMetaData() throws SQLException {
    return getOriginal().getMetaData();
}

@Override
public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException {
    getOriginal().setDate(parameterIndex, x, cal);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException {
    getOriginal().setTime(parameterIndex, x, cal);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException {
    getOriginal().setTimestamp(parameterIndex, x, cal);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException {
    getOriginal().setNull(parameterIndex, sqlType, typeName);
    handleStatementBind(parameterIndex, null);
}

@Override
public void setURL(int parameterIndex, URL x) throws SQLException {
    getOriginal().setURL(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public ParameterMetaData getParameterMetaData() throws SQLException {
    return getOriginal().getParameterMetaData();
}

@Override
public void setRowId(int parameterIndex, RowId x) throws SQLException {
    getOriginal().setRowId(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setNString(int parameterIndex, String x) throws SQLException {
    getOriginal().setNString(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setNCharacterStream(int parameterIndex, Reader x, long length) throws SQLException {
    getOriginal().setNCharacterStream(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setNClob(int parameterIndex, NClob x) throws SQLException {
    getOriginal().setNClob(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setClob(int parameterIndex, Reader x, long length) throws SQLException {
    getOriginal().setClob(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setBlob(int parameterIndex, InputStream x, long length) throws SQLException {
    getOriginal().setBlob(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setNClob(int parameterIndex, Reader x, long length) throws SQLException {
    getOriginal().setNClob(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
    getOriginal().setSQLXML(parameterIndex, xmlObject);
    handleStatementBind(parameterIndex, xmlObject);
}

@Override
public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException {
    getOriginal().setObject(parameterIndex, x, targetSqlType, scaleOrLength);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException {
    getOriginal().setAsciiStream(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException {
    getOriginal().setBinaryStream(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setCharacterStream(int parameterIndex, Reader x, long length) throws SQLException {
    getOriginal().setCharacterStream(parameterIndex, x, length);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException {
    getOriginal().setAsciiStream(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException {
    getOriginal().setBinaryStream(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setCharacterStream(int parameterIndex, Reader x) throws SQLException {
    getOriginal().setCharacterStream(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setNCharacterStream(int parameterIndex, Reader x) throws SQLException {
    getOriginal().setNCharacterStream(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setClob(int parameterIndex, Reader x) throws SQLException {
    getOriginal().setClob(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setBlob(int parameterIndex, InputStream x) throws SQLException {
    getOriginal().setBlob(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}

@Override
public void setNClob(int parameterIndex, Reader x) throws SQLException {
    getOriginal().setNClob(parameterIndex, x);
    handleStatementBind(parameterIndex, x);
}
}
package org.ethereum.db;

import org.ethereum.config.SystemProperties;
import org.ethereum.core.Genesis;
import org.ethereum.crypto.HashUtil;
import org.ethereum.datasource.HashMapDB;
import org.ethereum.datasource.LevelDbDataSource;
import org.ethereum.facade.Repository;
import org.ethereum.vm.DataWord;
import org.junit.Assert;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.spongycastle.util.encoders.Hex;

import java.math.BigInteger;

import static org.junit.Assert.*;

/**
 * Tests of {@link RepositoryImpl} account state: nonces, balances, code and
 * storage, plus the commit/rollback semantics of nested tracking repositories
 * obtained via {@code startTracking()}. Tests run in name order.
 *
 * @author Roman Mandeleil
 * @since 17.11.2014
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class RepositoryTest {

    // Nonce increments are visible directly on the repository.
    @Test
    public void test1() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        repository.increaseNonce(cow);
        repository.increaseNonce(horse);

        assertEquals(BigInteger.ONE, repository.getNonce(cow));
        assertEquals(BigInteger.ONE, repository.getNonce(horse));

        repository.close();
    }

    // Balance additions are visible directly on the repository.
    @Test
    public void test2() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        repository.addBalance(cow, BigInteger.TEN);
        repository.addBalance(horse, BigInteger.ONE);

        assertEquals(BigInteger.TEN, repository.getBalance(cow));
        assertEquals(BigInteger.ONE, repository.getBalance(horse));

        repository.close();
    }

    // Saved contract code is read back unchanged.
    @Test
    public void test3() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        byte[] cowCode = Hex.decode("A1A2A3");
        byte[] horseCode = Hex.decode("B1B2B3");

        repository.saveCode(cow, cowCode);
        repository.saveCode(horse, horseCode);

        assertArrayEquals(cowCode, repository.getCode(cow));
        assertArrayEquals(horseCode, repository.getCode(horse));

        repository.close();
    }

    // Storage rows are read back unchanged.
    @Test
    public void test4() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        byte[] cowKey = Hex.decode("A1A2A3");
        byte[] cowValue = Hex.decode("A4A5A6");

        byte[] horseKey = Hex.decode("B1B2B3");
        byte[] horseValue = Hex.decode("B4B5B6");

        repository.addStorageRow(cow, new DataWord(cowKey), new DataWord(cowValue));
        repository.addStorageRow(horse, new DataWord(horseKey), new DataWord(horseValue));

        assertEquals(new DataWord(cowValue), repository.getStorageValue(cow, new DataWord(cowKey)));
        assertEquals(new DataWord(horseValue), repository.getStorageValue(horse, new DataWord(horseKey)));

        repository.close();
    }

    // Nonce changes made on a tracking repo reach the parent after commit().
    @Test
    public void test5() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        // 10 increments for cow, 1 for horse
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);

        track.increaseNonce(horse);

        track.commit();

        assertEquals(BigInteger.TEN, repository.getNonce(cow));
        assertEquals(BigInteger.ONE, repository.getNonce(horse));

        repository.close();
    }

    // Nonce changes on a tracking repo are discarded by rollback().
    @Test
    public void test6() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        // 10 increments for cow, 1 for horse
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);
        track.increaseNonce(cow);

        track.increaseNonce(horse);

        // visible inside the track before rollback...
        assertEquals(BigInteger.TEN, track.getNonce(cow));
        assertEquals(BigInteger.ONE, track.getNonce(horse));

        track.rollback();

        // ...but never reach the parent
        assertEquals(BigInteger.ZERO, repository.getNonce(cow));
        assertEquals(BigInteger.ZERO, repository.getNonce(horse));

        repository.close();
    }

    // Balance changes on a tracking repo reach the parent after commit().
    @Test
    public void test7() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        track.addBalance(cow, BigInteger.TEN);
        track.addBalance(horse, BigInteger.ONE);

        assertEquals(BigInteger.TEN, track.getBalance(cow));
        assertEquals(BigInteger.ONE, track.getBalance(horse));

        track.commit();

        assertEquals(BigInteger.TEN, repository.getBalance(cow));
        assertEquals(BigInteger.ONE, repository.getBalance(horse));

        repository.close();
    }

    // Balance changes on a tracking repo are discarded by rollback().
    @Test
    public void test8() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        track.addBalance(cow, BigInteger.TEN);
        track.addBalance(horse, BigInteger.ONE);

        assertEquals(BigInteger.TEN, track.getBalance(cow));
        assertEquals(BigInteger.ONE, track.getBalance(horse));

        track.rollback();

        assertEquals(BigInteger.ZERO, repository.getBalance(cow));
        assertEquals(BigInteger.ZERO, repository.getBalance(horse));

        repository.close();
    }

    // Two nested tracking levels, both committed: all changes accumulate.
    @Test
    public void test7_1() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track1 = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        track1.addBalance(cow, BigInteger.TEN);
        track1.addBalance(horse, BigInteger.ONE);

        assertEquals(BigInteger.TEN, track1.getBalance(cow));
        assertEquals(BigInteger.ONE, track1.getBalance(horse));

        Repository track2 = track1.startTracking();

        // the child sees the parent track's uncommitted state
        assertEquals(BigInteger.TEN, track2.getBalance(cow));
        assertEquals(BigInteger.ONE, track2.getBalance(horse));

        track2.addBalance(cow, BigInteger.TEN);
        track2.addBalance(cow, BigInteger.TEN);
        track2.addBalance(cow, BigInteger.TEN);

        track2.commit();
        track1.commit();

        // 10 + 3*10 = 40
        assertEquals(new BigInteger("40"), repository.getBalance(cow));
        assertEquals(BigInteger.ONE, repository.getBalance(horse));

        repository.close();
    }

    // Rolling back the outer track discards the committed inner track too.
    @Test
    public void test7_2() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track1 = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        track1.addBalance(cow, BigInteger.TEN);
        track1.addBalance(horse, BigInteger.ONE);

        assertEquals(BigInteger.TEN, track1.getBalance(cow));
        assertEquals(BigInteger.ONE, track1.getBalance(horse));

        Repository track2 = track1.startTracking();

        assertEquals(BigInteger.TEN, track2.getBalance(cow));
        assertEquals(BigInteger.ONE, track2.getBalance(horse));

        track2.addBalance(cow, BigInteger.TEN);
        track2.addBalance(cow, BigInteger.TEN);
        track2.addBalance(cow, BigInteger.TEN);

        track2.commit();
        track1.rollback();

        assertEquals(BigInteger.ZERO, repository.getBalance(cow));
        assertEquals(BigInteger.ZERO, repository.getBalance(horse));

        repository.close();
    }

    // Storage rows added on a track reach the parent after commit().
    @Test
    public void test9() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        DataWord cowKey = new DataWord(Hex.decode("A1A2A3"));
        DataWord cowValue = new DataWord(Hex.decode("A4A5A6"));

        DataWord horseKey = new DataWord(Hex.decode("B1B2B3"));
        DataWord horseValue = new DataWord(Hex.decode("B4B5B6"));

        track.addStorageRow(cow, cowKey, cowValue);
        track.addStorageRow(horse, horseKey, horseValue);

        assertEquals(cowValue, track.getStorageValue(cow, cowKey));
        assertEquals(horseValue, track.getStorageValue(horse, horseKey));

        track.commit();

        assertEquals(cowValue, repository.getStorageValue(cow, cowKey));
        assertEquals(horseValue, repository.getStorageValue(horse, horseKey));

        repository.close();
    }

    // Storage rows added on a track are discarded by rollback().
    @Test
    public void test10() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        DataWord cowKey = new DataWord(Hex.decode("A1A2A3"));
        DataWord cowValue = new DataWord(Hex.decode("A4A5A6"));

        DataWord horseKey = new DataWord(Hex.decode("B1B2B3"));
        DataWord horseValue = new DataWord(Hex.decode("B4B5B6"));

        track.addStorageRow(cow, cowKey, cowValue);
        track.addStorageRow(horse, horseKey, horseValue);

        assertEquals(cowValue, track.getStorageValue(cow, cowKey));
        assertEquals(horseValue, track.getStorageValue(horse, horseKey));

        track.rollback();

        assertEquals(null, repository.getStorageValue(cow, cowKey));
        assertEquals(null, repository.getStorageValue(horse, horseKey));

        repository.close();
    }

    // Contract code saved on a track reaches the parent after commit().
    @Test
    public void test11() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        byte[] cowCode = Hex.decode("A1A2A3");
        byte[] horseCode = Hex.decode("B1B2B3");

        track.saveCode(cow, cowCode);
        track.saveCode(horse, horseCode);

        assertArrayEquals(cowCode, track.getCode(cow));
        assertArrayEquals(horseCode, track.getCode(horse));

        track.commit();

        assertArrayEquals(cowCode, repository.getCode(cow));
        assertArrayEquals(horseCode, repository.getCode(horse));

        repository.close();
    }

    // Contract code saved on a track is discarded by rollback().
    @Test
    public void test12() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track = repository.startTracking();

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        byte[] cowCode = Hex.decode("A1A2A3");
        byte[] horseCode = Hex.decode("B1B2B3");

        track.saveCode(cow, cowCode);
        track.saveCode(horse, horseCode);

        assertArrayEquals(cowCode, track.getCode(cow));
        assertArrayEquals(horseCode, track.getCode(horse));

        track.rollback();

        assertArrayEquals(null, repository.getCode(cow));
        assertArrayEquals(null, repository.getCode(horse));

        repository.close();
    }

    @Test // Let's upload genesis pre-mine just like in the real world
    public void test13() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository track = repository.startTracking();

        Genesis genesis = (Genesis)Genesis.getInstance();
        // NOTE(review): the loop writes to 'repository' directly, so
        // track.commit() commits nothing — TODO confirm this is intended.
        for (ByteArrayWrapper key : genesis.getPremine().keySet()) {
            repository.createAccount(key.getData());
            repository.addBalance(key.getData(), genesis.getPremine().get(key).getBalance());
        }

        track.commit();

        // resulting state root must match the canonical genesis state root
        assertArrayEquals(Genesis.getInstance().getStateRoot(), repository.getRoot());

        repository.close();
    }

    // Nested balance changes: child increments on top of parent's, both commit.
    @Test
    public void test14() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
        final BigInteger ELEVEN = BigInteger.TEN.add(BigInteger.ONE);

        // changes level_1
        Repository track1 = repository.startTracking();
        track1.addBalance(cow, BigInteger.TEN);
        track1.addBalance(horse, BigInteger.ONE);

        assertEquals(BigInteger.TEN, track1.getBalance(cow));
        assertEquals(BigInteger.ONE, track1.getBalance(horse));

        // changes level_2
        Repository track2 = track1.startTracking();
        track2.addBalance(cow, BigInteger.ONE);
        track2.addBalance(horse, BigInteger.TEN);

        assertEquals(ELEVEN, track2.getBalance(cow));
        assertEquals(ELEVEN, track2.getBalance(horse));

        track2.commit();
        track1.commit();

        assertEquals(ELEVEN, repository.getBalance(cow));
        assertEquals(ELEVEN, repository.getBalance(horse));

        repository.close();
    }

    // As test14, but the inner track rolls back: only level_1 changes survive.
    @Test
    public void test15() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
        final BigInteger ELEVEN = BigInteger.TEN.add(BigInteger.ONE);

        // changes level_1
        Repository track1 = repository.startTracking();
        track1.addBalance(cow, BigInteger.TEN);
        track1.addBalance(horse, BigInteger.ONE);

        assertEquals(BigInteger.TEN, track1.getBalance(cow));
        assertEquals(BigInteger.ONE, track1.getBalance(horse));

        // changes level_2
        Repository track2 = track1.startTracking();
        track2.addBalance(cow, BigInteger.ONE);
        track2.addBalance(horse, BigInteger.TEN);

        assertEquals(ELEVEN, track2.getBalance(cow));
        assertEquals(ELEVEN, track2.getBalance(horse));

        track2.rollback();
        track1.commit();

        assertEquals(BigInteger.TEN, repository.getBalance(cow));
        assertEquals(BigInteger.ONE, repository.getBalance(horse));

        repository.close();
    }

    // Nested storage changes: rows from both levels survive both commits.
    @Test
    public void test16() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        byte[] cowKey1 = "key-c-1".getBytes();
        byte[] cowValue1 = "val-c-1".getBytes();
        byte[] horseKey1 = "key-h-1".getBytes();
        byte[] horseValue1 = "val-h-1".getBytes();

        byte[] cowKey2 = "key-c-2".getBytes();
        byte[] cowValue2 = "val-c-2".getBytes();
        byte[] horseKey2 = "key-h-2".getBytes();
        byte[] horseValue2 = "val-h-2".getBytes();

        // changes level_1
        Repository track1 = repository.startTracking();
        track1.addStorageRow(cow, new DataWord(cowKey1), new DataWord(cowValue1));
        track1.addStorageRow(horse, new DataWord(horseKey1), new DataWord(horseValue1));

        assertEquals(new DataWord(cowValue1), track1.getStorageValue(cow, new DataWord(cowKey1)));
        assertEquals(new DataWord(horseValue1), track1.getStorageValue(horse, new DataWord(horseKey1)));

        // changes level_2
        Repository track2 = track1.startTracking();
        track2.addStorageRow(cow, new DataWord(cowKey2), new DataWord(cowValue2));
        track2.addStorageRow(horse, new DataWord(horseKey2), new DataWord(horseValue2));

        assertEquals(new DataWord(cowValue1), track2.getStorageValue(cow, new DataWord(cowKey1)));
        assertEquals(new DataWord(horseValue1), track2.getStorageValue(horse, new DataWord(horseKey1)));
        assertEquals(new DataWord(cowValue2), track2.getStorageValue(cow, new DataWord(cowKey2)));
        assertEquals(new DataWord(horseValue2), track2.getStorageValue(horse, new DataWord(horseKey2)));

        track2.commit();
        // leaving level_2

        assertEquals(new DataWord(cowValue1), track1.getStorageValue(cow, new DataWord(cowKey1)));
        assertEquals(new DataWord(horseValue1), track1.getStorageValue(horse, new DataWord(horseKey1)));
        assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
        assertEquals(new DataWord(horseValue2), track1.getStorageValue(horse, new DataWord(horseKey2)));

        track1.commit();
        // leaving level_1

        assertEquals(new DataWord(cowValue1), repository.getStorageValue(cow, new DataWord(cowKey1)));
        assertEquals(new DataWord(horseValue1), repository.getStorageValue(horse, new DataWord(horseKey1)));
        assertEquals(new DataWord(cowValue2), repository.getStorageValue(cow, new DataWord(cowKey2)));
        assertEquals(new DataWord(horseValue2), repository.getStorageValue(horse, new DataWord(horseKey2)));

        repository.close();
    }

    // Only level_2 writes rows; keys never written stay absent everywhere.
    @Test
    public void test16_2() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        byte[] cowKey1 = "key-c-1".getBytes();
        byte[] cowValue1 = "val-c-1".getBytes();
        byte[] horseKey1 = "key-h-1".getBytes();
        byte[] horseValue1 = "val-h-1".getBytes();

        byte[] cowKey2 = "key-c-2".getBytes();
        byte[] cowValue2 = "val-c-2".getBytes();
        byte[] horseKey2 = "key-h-2".getBytes();
        byte[] horseValue2 = "val-h-2".getBytes();

        // changes level_1
        Repository track1 = repository.startTracking();

        // changes level_2
        Repository track2 = track1.startTracking();
        track2.addStorageRow(cow, new DataWord(cowKey2), new DataWord(cowValue2));
        track2.addStorageRow(horse, new DataWord(horseKey2), new DataWord(horseValue2));

        assertNull(track2.getStorageValue(cow, new DataWord(cowKey1)));
        assertNull(track2.getStorageValue(horse, new DataWord(horseKey1)));
        assertEquals(new DataWord(cowValue2), track2.getStorageValue(cow, new DataWord(cowKey2)));
        assertEquals(new DataWord(horseValue2), track2.getStorageValue(horse, new DataWord(horseKey2)));

        track2.commit();
        // leaving level_2

        assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));
        assertNull(track1.getStorageValue(horse, new DataWord(horseKey1)));
        assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
        assertEquals(new DataWord(horseValue2), track1.getStorageValue(horse, new DataWord(horseKey2)));

        track1.commit();
        // leaving level_1

        assertEquals(null, repository.getStorageValue(cow, new DataWord(cowKey1)));
        assertEquals(null, repository.getStorageValue(horse, new DataWord(horseKey1)));
        assertEquals(new DataWord(cowValue2), repository.getStorageValue(cow, new DataWord(cowKey2)));
        assertEquals(new DataWord(horseValue2), repository.getStorageValue(horse, new DataWord(horseKey2)));

        repository.close();
    }

    // As test16_2, but level_1 rolls back, wiping level_2's committed rows.
    @Test
    public void test16_3() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        byte[] cowKey1 = "key-c-1".getBytes();
        byte[] cowValue1 = "val-c-1".getBytes();
        byte[] horseKey1 = "key-h-1".getBytes();
        byte[] horseValue1 = "val-h-1".getBytes();

        byte[] cowKey2 = "key-c-2".getBytes();
        byte[] cowValue2 = "val-c-2".getBytes();
        byte[] horseKey2 = "key-h-2".getBytes();
        byte[] horseValue2 = "val-h-2".getBytes();

        // changes level_1
        Repository track1 = repository.startTracking();

        // changes level_2
        Repository track2 = track1.startTracking();
        track2.addStorageRow(cow, new DataWord(cowKey2), new DataWord(cowValue2));
        track2.addStorageRow(horse, new DataWord(horseKey2), new DataWord(horseValue2));

        assertNull(track2.getStorageValue(cow, new DataWord(cowKey1)));
        assertNull(track2.getStorageValue(horse, new DataWord(horseKey1)));
        assertEquals(new DataWord(cowValue2), track2.getStorageValue(cow, new DataWord(cowKey2)));
        assertEquals(new DataWord(horseValue2), track2.getStorageValue(horse, new DataWord(horseKey2)));

        track2.commit();
        // leaving level_2

        assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));
        assertNull(track1.getStorageValue(horse, new DataWord(horseKey1)));
        assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
        assertEquals(new DataWord(horseValue2), track1.getStorageValue(horse, new DataWord(horseKey2)));

        track1.rollback();
        // leaving level_1

        // NOTE(review): these post-rollback checks query track1, not
        // 'repository' — presumably equivalent after rollback; verify.
        assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));
        assertNull(track1.getStorageValue(horse, new DataWord(horseKey1)));
        assertNull(track1.getStorageValue(cow, new DataWord(cowKey2)));
        assertNull(track1.getStorageValue(horse, new DataWord(horseKey2)));

        repository.close();
    }

    // A pre-existing repository row stays visible through nested commits.
    @Test
    public void test16_4() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        byte[] cowKey1 = "key-c-1".getBytes();
        byte[] cowValue1 = "val-c-1".getBytes();
        byte[] horseKey1 = "key-h-1".getBytes();
        byte[] horseValue1 = "val-h-1".getBytes();

        byte[] cowKey2 = "key-c-2".getBytes();
        byte[] cowValue2 = "val-c-2".getBytes();
        byte[] horseKey2 = "key-h-2".getBytes();
        byte[] horseValue2 = "val-h-2".getBytes();

        repository.addStorageRow(cow, new DataWord(cowKey1), new DataWord(cowValue1));

        // changes level_1
        Repository track1 = repository.startTracking();

        // changes level_2
        Repository track2 = track1.startTracking();
        track2.addStorageRow(cow, new DataWord(cowKey2), new DataWord(cowValue2));

        track2.commit();
        // leaving level_2

        track1.commit();
        // leaving level_1

        assertEquals(new DataWord(cowValue1), track1.getStorageValue(cow, new DataWord(cowKey1)));
        assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));

        repository.close();
    }

    // A row written by level_1 is visible on level_1 before and after commits.
    @Test
    public void test16_5() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        byte[] cowKey1 = "key-c-1".getBytes();
        byte[] cowValue1 = "val-c-1".getBytes();
        byte[] horseKey1 = "key-h-1".getBytes();
        byte[] horseValue1 = "val-h-1".getBytes();

        byte[] cowKey2 = "key-c-2".getBytes();
        byte[] cowValue2 = "val-c-2".getBytes();
        byte[] horseKey2 = "key-h-2".getBytes();
        byte[] horseValue2 = "val-h-2".getBytes();

        // changes level_1
        Repository track1 = repository.startTracking();
        track1.addStorageRow(cow, new DataWord(cowKey2), new DataWord(cowValue2));

        // changes level_2
        Repository track2 = track1.startTracking();

        assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
        assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));

        track2.commit();
        // leaving level_2

        track1.commit();
        // leaving level_1

        assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
        assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));

        repository.close();
    }

    // A rolled-back inner write leaves the state trie empty after outer commit.
    @Test
    public void test17() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");

        byte[] cowKey1 = "key-c-1".getBytes();
        byte[] cowValue1 = "val-c-1".getBytes();

        // changes level_1
        Repository track1 = repository.startTracking();

        // changes level_2
        Repository track2 = track1.startTracking();
        track2.addStorageRow(cow, new DataWord(cowKey1), new DataWord(cowValue1));

        assertEquals(new DataWord(cowValue1), track2.getStorageValue(cow, new DataWord(cowKey1)));

        track2.rollback();
        // leaving level_2

        track1.commit();
        // leaving level_1

        Assert.assertEquals(Hex.toHexString(HashUtil.EMPTY_TRIE_HASH), Hex.toHexString(repository.getRoot()));

        repository.close();
    }

    // isExist() through a tracking repo reflects deletes and unknown accounts.
    @Test
    public void test18() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
        Repository repoTrack2 = repository.startTracking(); //track

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
        byte[] pig = Hex.decode("F0B8C9D84DD2B877E0B952130B73E218106FEC04");
        byte[] precompiled = Hex.decode("0000000000000000000000000000000000000002");

        byte[] cowCode = Hex.decode("A1A2A3");
        byte[] horseCode = Hex.decode("B1B2B3");

        repository.saveCode(cow, cowCode);
        repository.saveCode(horse, horseCode);

        repository.delete(horse);

        assertEquals(true, repoTrack2.isExist(cow));
        assertEquals(false, repoTrack2.isExist(horse));
        assertEquals(false, repoTrack2.isExist(pig));
        assertEquals(false, repoTrack2.isExist(precompiled));
        // NOTE(review): unlike the other tests, the repository is not closed here.
    }

    // ContractDetails edits on an inner track are undone by the outer rollback.
    @Test
    public void test19() {
        Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());

        byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
        byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");

        DataWord cowKey1 = new DataWord("c1");
        DataWord cowVal1 = new DataWord("c0a1");
        DataWord cowVal0 = new DataWord("c0a0");

        DataWord horseKey1 = new DataWord("e1");
        DataWord horseVal1 = new DataWord("c0a1");
        DataWord horseVal0 = new DataWord("c0a0");

        repository.addStorageRow(cow, cowKey1, cowVal0);
        repository.addStorageRow(horse, horseKey1, horseVal0);

        Repository track2 = repository.startTracking(); //track
        track2.addStorageRow(horse, horseKey1, horseVal0);

        Repository track3 = track2.startTracking();

        ContractDetails cowDetails = track3.getContractDetails(cow);
        cowDetails.put(cowKey1, cowVal1);

        ContractDetails horseDetails = track3.getContractDetails(horse);
        horseDetails.put(horseKey1, horseVal1);

        track3.commit();
        track2.rollback();

        // originals must still hold the pre-track values
        ContractDetails cowDetailsOrigin = repository.getContractDetails(cow);
        DataWord cowValOrin = cowDetailsOrigin.get(cowKey1);

        ContractDetails horseDetailsOrigin = repository.getContractDetails(horse);
        DataWord horseValOrin = horseDetailsOrigin.get(horseKey1);

        assertEquals(cowVal0, cowValOrin);
        assertEquals(horseVal0, horseValOrin);
    }
}
package chiptools.jprobe.function.mutationprofiler;

import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import jprobe.services.ErrorHandler;
import jprobe.services.Log;
import jprobe.services.data.Data;
import jprobe.services.function.Argument;
import util.genome.kmer.Kmer;
import util.genome.kmer.Kmers;
import util.progress.ProgressEvent;
import util.progress.ProgressListener;
import util.progress.ProgressEvent.Type;
import chiptools.jprobe.ChiptoolsActivator;
import chiptools.jprobe.data.MutationProfile;
import chiptools.jprobe.function.AbstractChiptoolsFunction;

/**
 * Function that compares two sequences position-by-position and, for every mutation
 * (mismatched character), scores the surrounding binding sites against each kmer
 * library file found under the configured library directory. Results are collected
 * into a {@link MutationProfile} table with one row per kmer file and columns for
 * the per-position scores of each sequence and their absolute difference.
 */
public class MutationProfiler extends AbstractChiptoolsFunction<MutationProfilerParams>{

	public MutationProfiler() {
		super(MutationProfilerParams.class);
	}

	/**
	 * Declares the arguments this function accepts: the two sequences to compare,
	 * the kmer library location, optional score thresholds, an optional binding-site
	 * length, and a flag for recursing into library subdirectories.
	 */
	@Override
	public Collection<Argument<? super MutationProfilerParams>> getArguments() {
		Collection<Argument<? super MutationProfilerParams>> args = new ArrayList<Argument<? super MutationProfilerParams>>();
		args.add(new CompareSeqsArg(this, 2, false));
		args.add(new KmerLibraryArg(this, false));
		args.add(new MinEscoreArg(this, true));
		args.add(new MinDifferenceArg(this, true));
		args.add(new BindingSiteArg(this, true));
		args.add(new RecursiveArg(this));
		return args;
	}

	/**
	 * Collects readable files under {@code dir} into {@code fill}. Descends into
	 * subdirectories only when {@code recursive} is true. If {@code dir} is itself
	 * a regular file it is added directly (without a canRead() check — note this
	 * asymmetry with the directory branch).
	 */
	protected void extractChildrenFiles(File dir, List<File> fill, boolean recursive){
		if(dir.isDirectory()){
			for(File child : dir.listFiles()){
				if(recursive && child.isDirectory()){
					extractChildrenFiles(child, fill, recursive);
				}else if(!child.isDirectory() && child.canRead()){
					fill.add(child);
				}
			}
		}else{
			fill.add(dir);
		}
	}

	/**
	 * Runs the profiler: reads every kmer file in the library, reports progress via
	 * {@code l}, and profiles the two parameter sequences against each non-empty kmer.
	 * Unreadable/empty/failing kmer files are skipped (failures are also routed to the
	 * global ErrorHandler); the function never aborts mid-library.
	 *
	 * @return the accumulated {@link MutationProfile} table
	 */
	@Override
	public Data execute(ProgressListener l, MutationProfilerParams params) throws Exception {
		MutationProfile mutProfiles = new MutationProfile();
		List<File> kmers = new ArrayList<File>();
		this.extractChildrenFiles(params.kmerLibrary, kmers, params.recursive);
		for(File f : kmers){
			if(f.exists() && f.canRead()){
				try{
					// NOTE(review): the FileInputStream is handed to Kmers.readKmer and
					// never closed here — presumably the reader closes it; confirm.
					Kmer kmer = Kmers.readKmer(new FileInputStream(f));
					if(kmer.size() > 0){
						l.update(new ProgressEvent(this, Type.UPDATE, "Processing Kmer "+f.getName(), true));
						profile(
								kmer,
								f.getName(),
								params.seq1,
								params.seq1Name,
								params.seq2,
								params.seq2Name,
								params.bindingSite,
								params.minEscore,
								params.minDifference,
								mutProfiles
								);
					}else{
						l.update(new ProgressEvent(this, Type.UPDATE, "Skipping Kmer "+f.getName(), true));
						Log.getInstance().write(ChiptoolsActivator.getBundle(), "MutationProfilter: skipping "+f.getName());
					}
				}catch(Exception e){
					// skip this kmer file but keep processing the rest of the library
					l.update(new ProgressEvent(this, Type.UPDATE, "Skipping Kmer "+f.getName(), true));
					Log.getInstance().write(ChiptoolsActivator.getBundle(), "MutationProfilter: skipping "+f.getName());
					ErrorHandler.getInstance().handleException(e, ChiptoolsActivator.getBundle());
				}
			}
		}
		return mutProfiles;
	}

	/**
	 * Profiles one kmer against the two sequences. For every mutation index, finds all
	 * qualifying subsequence pairs covering that index, picks the pair with the largest
	 * score difference (at least {@code minDif}), and writes three columns into
	 * {@code data}: seq1 score, seq2 score, and |difference|. A value of -1 is recorded
	 * when no pair meets the thresholds.
	 */
	protected static void profile(
			Kmer kmer,
			String kmerName,
			String seq1,
			String seq1Name,
			String seq2,
			String seq2Name,
			int bindingSite,
			double minEscore,
			double minDif,
			MutationProfile data
			){
		data.appendRow(kmerName);
		List<Integer> mutations = findMutationIndexes(seq1, seq2);
		for(int mut : mutations){
			List<Subsequences> subseqs = getSubseqsOverIndex(kmer, bindingSite, minEscore, seq1, seq2, mut);
			Scores scores = getLargestScoreDif(kmer, minDif, subseqs);
			//put seq1 score into table
			// columns are named by 1-based mutation position
			String col = "pos"+(mut+1)+"_"+seq1Name+"_score";
			if(!data.containsCol(col)){
				data.appendCol(col);
			}
			Object val;
			if(scores != null){
				val = scores.seq1Score;
			}else{
				// sentinel for "no qualifying binding site at this position"
				val = new Double(-1);
			}
			data.put(col, kmerName, val);
			//put seq2 score into table
			col = "pos"+(mut+1)+"_"+seq2Name+"_score";
			if(!data.containsCol(col)){
				data.appendCol(col);
			}
			if(scores != null){
				val = scores.seq2Score;
			}else{
				val = new Double(-1);
			}
			data.put(col, kmerName, val);
			//put score difference into table
			col = "pos"+(mut+1)+"_dif";
			if(!data.containsCol(col)){
				data.appendCol(col);
			}
			if(scores != null){
				val = Math.abs(scores.seq1Score - scores.seq2Score);
			}else{
				val = new Double(-1);
			}
			data.put(col, kmerName, val);
		}
	}

	/**
	 * Scans the candidate subsequence pairs and returns the escore pair whose absolute
	 * difference is the largest value >= {@code minDif}, or null when no pair qualifies.
	 * Ties are resolved in favor of the later pair (the comparison uses >=).
	 */
	protected static Scores getLargestScoreDif(Kmer kmer, double minDif, List<Subsequences> subseqsList){
		Scores best = null;
		double bestDif = minDif;
		for(Subsequences subseqs : subseqsList){
			String seq1 = subseqs.seq1;
			String seq2 = subseqs.seq2;
			double score1 = kmer.escore(seq1);
			double score2 = kmer.escore(seq2);
			double dif = Math.abs(score1 - score2);
			if(dif >= bestDif){
				best = new Scores(score1, score2);
				bestDif = dif;
			}
		}
		return best;
	}

	/** Immutable pair of escores, one per sequence. */
	protected static class Scores{
		public final double seq1Score;
		public final double seq2Score;
		public Scores(double seq1, double seq2){
			seq1Score = seq1;
			seq2Score = seq2;
		}
	}

	/** Immutable pair of aligned subsequences, one from each input sequence. */
	protected static class Subsequences{
		public final String seq1;
		public final String seq2;
		public Subsequences(String seq1, String seq2){
			this.seq1 = seq1;
			this.seq2 = seq2;
		}
	}

	/**
	 * Enumerates subsequence pairs of each kmer word length taken from windows
	 * ("binding sites") that cover {@code index}. A window contributes its word-length
	 * tiles only if EVERY tile has an escore >= {@code minEscore} in at least one of
	 * the two sequences; otherwise the whole window is discarded.
	 *
	 * @param bindingSite window length; values <= 0 mean "use the word length itself"
	 */
	protected static List<Subsequences> getSubseqsOverIndex(Kmer kmer, int bindingSite, double minEscore, String seq1, String seq2, int index){
		List<Subsequences> list = new ArrayList<Subsequences>();
		int seqLen = Math.min(seq1.length(), seq2.length());
		for(int wordLen : kmer.getWordLengths()){
			int siteLen = bindingSite > 0 ? bindingSite : wordLen;
			if(siteLen < wordLen){
				// a window shorter than the word cannot be tiled by it
				continue;
			}
			int firstIndex = Math.max(0, index - siteLen + 1);
			int lastIndex = Math.min(seqLen - siteLen, index);
			for(int start=firstIndex; start<=lastIndex; start++){
				int end = start + siteLen;
				if(end > seqLen){
					break;
				}
				//tile from start to end with words of length wordLen and check that each word exceeds the minEscore
				List<Subsequences> subseqs = new ArrayList<Subsequences>();
				for(int i=start; i<=end-wordLen; i++){
					String seq1subseq = seq1.substring(i, i+wordLen);
					String seq2subseq = seq2.substring(i, i+wordLen);
					//add the subseqs if they meet the escore criteria
					if(kmer.escore(seq1subseq) >= minEscore || kmer.escore(seq2subseq) >= minEscore){
						subseqs.add(new Subsequences(seq1subseq, seq2subseq));
					}else{
						//escore criteria not met, so clear the subseqs and stop looking at this binding site
						subseqs.clear();
						break;
					}
				}
				list.addAll(subseqs);
			}
		}
		return list;
	}

	/** Convenience overload: no escore threshold. */
	protected static List<Subsequences> getSubseqsOverIndex(Kmer kmer, int bindingSite, String seq1, String seq2, int index){
		return getSubseqsOverIndex(kmer, bindingSite, Double.NEGATIVE_INFINITY, seq1, seq2, index);
	}

	/** Convenience overload: binding-site length defaults to the kmer word length. */
	protected static List<Subsequences> getSubseqsOverIndex(Kmer kmer, double minEscore, String seq1, String seq2, int index){
		return getSubseqsOverIndex(kmer, -1, minEscore, seq1, seq2, index);
	}

	/** Convenience overload: no threshold and default binding-site length. */
	protected static List<Subsequences> getSubseqsOverIndex(Kmer kmer, String seq1, String seq2, int index){
		return getSubseqsOverIndex(kmer, Double.NEGATIVE_INFINITY, seq1, seq2, index);
	}

	/**
	 * Returns the 0-based indexes at which the two sequences differ, scanning only the
	 * overlap of the two lengths (trailing characters of the longer sequence are ignored).
	 */
	protected static List<Integer> findMutationIndexes(String seq1, String seq2){
		List<Integer> muts = new ArrayList<Integer>();
		for(int i=0; i<seq1.length() && i<seq2.length(); i++){
			if(seq1.charAt(i) != seq2.charAt(i)){
				muts.add(i);
			}
		}
		return muts;
	}
}
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: http://lwjgl.org/license.php
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.system.jawt;

import java.nio.*;

import org.lwjgl.*;
import org.lwjgl.system.*;

import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;

/**
 * <h3>Member documentation</h3>
 *
 * <ul>
 * <li>{@code platformInfo} &ndash;
 * Pointer to the platform-specific information. This can be safely cast to a {@code JAWT_Win32DrawingSurfaceInfo} on Windows or a
 * {@code JAWT_X11DrawingSurfaceInfo} on Solaris. On Mac OS X this is a pointer to a {@code NSObject} that conforms to the {@code JAWT_SurfaceLayers}
 * protocol.</li>
 * <li>{@code ds} &ndash; Cached pointer to the underlying drawing surface.</li>
 * <li>{@code bounds} &ndash; Bounding rectangle of the drawing surface.</li>
 * <li>{@code clipSize} &ndash; Number of rectangles in the clip.</li>
 * <li>{@code clip} &ndash; Clip rectangle array.</li>
 * </ul>
 *
 * <h3>Layout</h3>
 *
 * <pre><code>struct JAWT_DrawingSurfaceInfo {
    void * platformInfo;
    JAWT_DrawingSurface * ds;
    {@link JAWTRectangle JAWT_Rectangle} bounds;
    jint clipSize;
    JAWT_Rectangle * clip;
}</code></pre>
 */
public class JAWTDrawingSurfaceInfo extends Struct {

	/** The struct size in bytes. */
	public static final int SIZEOF;

	public static final int ALIGNOF;

	/** The struct member offsets. */
	public static final int
		PLATFORMINFO,
		DS,
		BOUNDS,
		CLIPSIZE,
		CLIP;

	static {
		// Native struct layout: two pointers, an embedded JAWT_Rectangle, a jint,
		// and a pointer to the clip rectangle array. Offsets/size/alignment are
		// computed at class-load time for the current platform's pointer size.
		Layout layout = __struct(
			__member(POINTER_SIZE),
			__member(POINTER_SIZE),
			__member(JAWTRectangle.SIZEOF, JAWTRectangle.ALIGNOF),
			__member(4),
			__member(POINTER_SIZE)
		);

		SIZEOF = layout.getSize();
		ALIGNOF = layout.getAlignment();

		PLATFORMINFO = layout.offsetof(0);
		DS = layout.offsetof(1);
		BOUNDS = layout.offsetof(2);
		CLIPSIZE = layout.offsetof(3);
		CLIP = layout.offsetof(4);
	}

	JAWTDrawingSurfaceInfo(long address, ByteBuffer container) {
		super(address, container);
	}

	/**
	 * Creates a {@link JAWTDrawingSurfaceInfo} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
	 * visible to the struct instance and vice versa.
	 *
	 * <p>The created instance holds a strong reference to the container object.</p>
	 */
	public JAWTDrawingSurfaceInfo(ByteBuffer container) {
		this(memAddress(container), checkContainer(container, SIZEOF));
	}

	@Override
	public int sizeof() { return SIZEOF; }

	/** Returns the value of the {@code platformInfo} field. */
	public long platformInfo() { return nplatformInfo(address()); }
	/** Returns a {@link JAWTDrawingSurface} view of the struct pointed to by the {@code ds} field. */
	public JAWTDrawingSurface ds() { return nds(address()); }
	/** Returns a {@link JAWTRectangle} view of the {@code bounds} field. */
	public JAWTRectangle bounds() { return nbounds(address()); }
	/** Returns the value of the {@code clipSize} field. */
	public int clipSize() { return nclipSize(address()); }
	/** Returns a {@link JAWTRectangle.Buffer} view of the struct array pointed to by the {@code clip} field. */
	public JAWTRectangle.Buffer clip() { return nclip(address()); }

	// -----------------------------------

	/** Returns a new {@link JAWTDrawingSurfaceInfo} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
	public static JAWTDrawingSurfaceInfo malloc() {
		return create(nmemAlloc(SIZEOF));
	}

	/** Returns a new {@link JAWTDrawingSurfaceInfo} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
	public static JAWTDrawingSurfaceInfo calloc() {
		return create(nmemCalloc(1, SIZEOF));
	}

	/** Returns a new {@link JAWTDrawingSurfaceInfo} instance allocated with {@link BufferUtils}. */
	public static JAWTDrawingSurfaceInfo create() {
		return new JAWTDrawingSurfaceInfo(BufferUtils.createByteBuffer(SIZEOF));
	}

	/** Returns a new {@link JAWTDrawingSurfaceInfo} instance for the specified memory address or {@code null} if the address is {@code NULL}. */
	public static JAWTDrawingSurfaceInfo create(long address) {
		return address == NULL ? null : new JAWTDrawingSurfaceInfo(address, null);
	}

	/**
	 * Returns a new {@link JAWTDrawingSurfaceInfo.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer malloc(int capacity) {
		return create(nmemAlloc(capacity * SIZEOF), capacity);
	}

	/**
	 * Returns a new {@link JAWTDrawingSurfaceInfo.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer calloc(int capacity) {
		return create(nmemCalloc(capacity, SIZEOF), capacity);
	}

	/**
	 * Returns a new {@link JAWTDrawingSurfaceInfo.Buffer} instance allocated with {@link BufferUtils}.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer create(int capacity) {
		return new Buffer(BufferUtils.createByteBuffer(capacity * SIZEOF));
	}

	/**
	 * Create a {@link JAWTDrawingSurfaceInfo.Buffer} instance at the specified memory.
	 *
	 * @param address  the memory address
	 * @param capacity the buffer capacity
	 */
	public static Buffer create(long address, int capacity) {
		return address == NULL ? null : new Buffer(address, null, -1, 0, capacity, capacity);
	}

	// -----------------------------------

	/** Returns a new {@link JAWTDrawingSurfaceInfo} instance allocated on the thread-local {@link MemoryStack}. */
	public static JAWTDrawingSurfaceInfo mallocStack() {
		return mallocStack(stackGet());
	}

	/** Returns a new {@link JAWTDrawingSurfaceInfo} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero. */
	public static JAWTDrawingSurfaceInfo callocStack() {
		return callocStack(stackGet());
	}

	/**
	 * Returns a new {@link JAWTDrawingSurfaceInfo} instance allocated on the specified {@link MemoryStack}.
	 *
	 * @param stack the stack from which to allocate
	 */
	public static JAWTDrawingSurfaceInfo mallocStack(MemoryStack stack) {
		return create(stack.nmalloc(ALIGNOF, SIZEOF));
	}

	/**
	 * Returns a new {@link JAWTDrawingSurfaceInfo} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param stack the stack from which to allocate
	 */
	public static JAWTDrawingSurfaceInfo callocStack(MemoryStack stack) {
		return create(stack.ncalloc(ALIGNOF, 1, SIZEOF));
	}

	/**
	 * Returns a new {@link JAWTDrawingSurfaceInfo.Buffer} instance allocated on the thread-local {@link MemoryStack}.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer mallocStack(int capacity) {
		return mallocStack(capacity, stackGet());
	}

	/**
	 * Returns a new {@link JAWTDrawingSurfaceInfo.Buffer} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param capacity the buffer capacity
	 */
	public static Buffer callocStack(int capacity) {
		return callocStack(capacity, stackGet());
	}

	/**
	 * Returns a new {@link JAWTDrawingSurfaceInfo.Buffer} instance allocated on the specified {@link MemoryStack}.
	 *
	 * @param stack    the stack from which to allocate
	 * @param capacity the buffer capacity
	 */
	public static Buffer mallocStack(int capacity, MemoryStack stack) {
		return create(stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
	}

	/**
	 * Returns a new {@link JAWTDrawingSurfaceInfo.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
	 *
	 * @param stack    the stack from which to allocate
	 * @param capacity the buffer capacity
	 */
	public static Buffer callocStack(int capacity, MemoryStack stack) {
		return create(stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
	}

	// -----------------------------------

	/** Unsafe version of {@link #platformInfo}. */
	public static long nplatformInfo(long struct) { return memGetAddress(struct + JAWTDrawingSurfaceInfo.PLATFORMINFO); }
	/** Unsafe version of {@link #ds}. */
	public static JAWTDrawingSurface nds(long struct) { return JAWTDrawingSurface.create(memGetAddress(struct + JAWTDrawingSurfaceInfo.DS)); }
	/** Unsafe version of {@link #bounds}. */
	public static JAWTRectangle nbounds(long struct) { return JAWTRectangle.create(struct + JAWTDrawingSurfaceInfo.BOUNDS); }
	/** Unsafe version of {@link #clipSize}. */
	public static int nclipSize(long struct) { return memGetInt(struct + JAWTDrawingSurfaceInfo.CLIPSIZE); }
	/** Unsafe version of {@link #clip}. */
	public static JAWTRectangle.Buffer nclip(long struct) { return JAWTRectangle.create(memGetAddress(struct + JAWTDrawingSurfaceInfo.CLIP), nclipSize(struct)); }

	// -----------------------------------

	/** An array of {@link JAWTDrawingSurfaceInfo} structs. */
	public static final class Buffer extends StructBuffer<JAWTDrawingSurfaceInfo, Buffer> {

		/**
		 * Creates a new {@link JAWTDrawingSurfaceInfo.Buffer} instance backed by the specified container.
		 *
		 * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
		 * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
		 * by {@link JAWTDrawingSurfaceInfo#SIZEOF}, and its mark will be undefined.
		 *
		 * <p>The created buffer instance holds a strong reference to the container object.</p>
		 */
		public Buffer(ByteBuffer container) {
			super(container, container.remaining() / SIZEOF);
		}

		Buffer(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
			super(address, container, mark, pos, lim, cap);
		}

		@Override
		protected Buffer self() {
			return this;
		}

		@Override
		protected Buffer newBufferInstance(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
			return new Buffer(address, container, mark, pos, lim, cap);
		}

		@Override
		protected JAWTDrawingSurfaceInfo newInstance(long address) {
			return new JAWTDrawingSurfaceInfo(address, getContainer());
		}

		@Override
		protected int sizeof() {
			return SIZEOF;
		}

		/** Returns the value of the {@code platformInfo} field. */
		public long platformInfo() { return JAWTDrawingSurfaceInfo.nplatformInfo(address()); }
		/** Returns a {@link JAWTDrawingSurface} view of the struct pointed to by the {@code ds} field. */
		public JAWTDrawingSurface ds() { return JAWTDrawingSurfaceInfo.nds(address()); }
		/** Returns a {@link JAWTRectangle} view of the {@code bounds} field. */
		public JAWTRectangle bounds() { return JAWTDrawingSurfaceInfo.nbounds(address()); }
		/** Returns the value of the {@code clipSize} field. */
		public int clipSize() { return JAWTDrawingSurfaceInfo.nclipSize(address()); }
		/** Returns a {@link JAWTRectangle.Buffer} view of the struct array pointed to by the {@code clip} field. */
		public JAWTRectangle.Buffer clip() { return JAWTDrawingSurfaceInfo.nclip(address()); }

	}

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.impl.ha;

import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.stream.IntStream;

import org.apache.camel.ServiceStatus;
import org.apache.camel.ha.CamelClusterEventListener;
import org.apache.camel.ha.CamelClusterMember;
import org.apache.camel.ha.CamelClusterService;
import org.junit.Assert;
import org.junit.Test;

/**
 * Tests for the view caching/reference-counting and listener behavior of
 * {@link AbstractCamelClusterService}, using minimal in-memory implementations
 * of the cluster service and cluster view.
 */
public class ClusterServiceViewTest {

    /** Views are cached per namespace: same namespace yields the same instance. */
    @Test
    public void testViewEquality() throws Exception {
        TestClusterService service = new TestClusterService(UUID.randomUUID().toString());

        TestClusterView view1 = service.getView("ns1").unwrap(TestClusterView.class);
        TestClusterView view2 = service.getView("ns1").unwrap(TestClusterView.class);
        TestClusterView view3 = service.getView("ns2").unwrap(TestClusterView.class);

        Assert.assertEquals(view1, view2);
        Assert.assertNotEquals(view1, view3);
    }

    /**
     * Views are reference counted: a view only stops once every handle to it has
     * been released, and re-acquiring a released namespace restarts the same
     * cached instance.
     */
    @Test
    public void testViewReferences() throws Exception {
        TestClusterService service = new TestClusterService(UUID.randomUUID().toString());
        service.start();

        TestClusterView view1 = service.getView("ns1").unwrap(TestClusterView.class);
        TestClusterView view2 = service.getView("ns1").unwrap(TestClusterView.class);
        TestClusterView view3 = service.getView("ns2").unwrap(TestClusterView.class);

        Assert.assertEquals(ServiceStatus.Started, view1.getStatus());
        Assert.assertEquals(ServiceStatus.Started, view2.getStatus());
        Assert.assertEquals(ServiceStatus.Started, view3.getStatus());

        // ns1 still has one outstanding reference (view2), so it stays started
        service.releaseView(view1);

        Assert.assertEquals(ServiceStatus.Started, view1.getStatus());
        Assert.assertEquals(ServiceStatus.Started, view2.getStatus());
        Assert.assertEquals(ServiceStatus.Started, view3.getStatus());

        // last ns1 reference released -> ns1 view stops; ns2 unaffected
        service.releaseView(view2);

        Assert.assertEquals(ServiceStatus.Stopped, view1.getStatus());
        Assert.assertEquals(ServiceStatus.Stopped, view2.getStatus());
        Assert.assertEquals(ServiceStatus.Started, view3.getStatus());

        service.releaseView(view3);

        // re-acquiring ns1 returns the same cached instance, restarted
        TestClusterView newView1 = service.getView("ns1").unwrap(TestClusterView.class);
        TestClusterView newView2 = service.getView("ns1").unwrap(TestClusterView.class);

        Assert.assertEquals(newView1, newView2);
        Assert.assertEquals(view1, newView1);
        Assert.assertEquals(view1, newView2);

        Assert.assertEquals(ServiceStatus.Started, newView1.getStatus());
        Assert.assertEquals(ServiceStatus.Started, newView2.getStatus());
        Assert.assertEquals(ServiceStatus.Stopped, view3.getStatus());

        // stopping the service stops every view regardless of references
        service.stop();

        Assert.assertEquals(ServiceStatus.Stopped, view1.getStatus());
        Assert.assertEquals(ServiceStatus.Stopped, view2.getStatus());
        Assert.assertEquals(ServiceStatus.Stopped, view3.getStatus());
        Assert.assertEquals(ServiceStatus.Stopped, newView1.getStatus());
        Assert.assertEquals(ServiceStatus.Stopped, newView2.getStatus());
    }

    /**
     * startView/stopView by namespace only take effect while the owning service
     * is started; releasing the view stops it.
     */
    @Test
    public void testViewForceOperations() throws Exception {
        TestClusterService service = new TestClusterService(UUID.randomUUID().toString());

        TestClusterView view = service.getView("ns1").unwrap(TestClusterView.class);

        Assert.assertEquals(ServiceStatus.Stopped, view.getStatus());

        // This should not start the view as the service has not yet started.
        service.startView(view.getNamespace());

        Assert.assertEquals(ServiceStatus.Stopped, view.getStatus());

        // This should start the view.
        service.start();

        Assert.assertEquals(ServiceStatus.Started, view.getStatus());

        service.stopView(view.getNamespace());
        Assert.assertEquals(ServiceStatus.Stopped, view.getStatus());

        service.startView(view.getNamespace());
        Assert.assertEquals(ServiceStatus.Started, view.getStatus());

        service.releaseView(view);
        Assert.assertEquals(ServiceStatus.Stopped, view.getStatus());
    }

    /** Every registered leadership listener is notified of a leadership change. */
    @Test
    public void testMultipleViewListeners() throws Exception {
        final TestClusterService service = new TestClusterService(UUID.randomUUID().toString());
        final TestClusterView view = service.getView("ns1").unwrap(TestClusterView.class);
        // random listener count (1..10) to exercise the fan-out
        final int events = 1 + new Random().nextInt(10);
        final Set<Integer> results = new HashSet<>();
        final CountDownLatch latch = new CountDownLatch(events);

        IntStream.range(0, events).forEach(
            i -> view.addEventListener((CamelClusterEventListener.Leadership) (v, l) -> {
                results.add(i);
                latch.countDown();
            })
        );

        service.start();
        view.setLeader(true);

        latch.await(10, TimeUnit.SECONDS);

        IntStream.range(0, events).forEach(
            i -> Assert.assertTrue(results.contains(i))
        );
    }

    /**
     * Listeners registered AFTER a leadership change still get notified
     * (late registration replays the current state).
     */
    @Test
    public void testLateViewListeners() throws Exception {
        final TestClusterService service = new TestClusterService(UUID.randomUUID().toString());
        final TestClusterView view = service.getView("ns1").unwrap(TestClusterView.class);
        final int events = 1 + new Random().nextInt(10);
        final Set<Integer> results = new HashSet<>();
        final CountDownLatch latch = new CountDownLatch(events * 2);

        // first batch registered before the leadership change
        IntStream.range(0, events).forEach(
            i -> view.addEventListener((CamelClusterEventListener.Leadership) (v, l) -> {
                results.add(i);
                latch.countDown();
            })
        );

        service.start();
        view.setLeader(true);

        // second batch registered after the change; must still fire
        IntStream.range(events, events * 2).forEach(
            i -> view.addEventListener((CamelClusterEventListener.Leadership) (v, l) -> {
                results.add(i);
                latch.countDown();
            })
        );

        latch.await(10, TimeUnit.SECONDS);

        IntStream.range(0, events * 2).forEach(
            i -> Assert.assertTrue(results.contains(i))
        );
    }

    // *********************************
    // Helpers
    // *********************************

    /** Minimal in-memory cluster view whose leadership is toggled by the test. */
    private static class TestClusterView extends AbstractCamelClusterView {
        private boolean leader;

        public TestClusterView(CamelClusterService cluster, String namespace) {
            super(cluster, namespace);
        }

        @Override
        public Optional<CamelClusterMember> getLeader() {
            return leader ? Optional.of(getLocalMember()) : Optional.empty();
        }

        @Override
        public CamelClusterMember getLocalMember() {
            // ad-hoc member that simply mirrors this view's leader flag
            return new CamelClusterMember() {
                @Override
                public boolean isLeader() {
                    return leader;
                }

                @Override
                public boolean isLocal() {
                    return true;
                }

                @Override
                public String getId() {
                    return getClusterService().getId();
                }
            };
        }

        @Override
        public List<CamelClusterMember> getMembers() {
            return Collections.emptyList();
        }

        @Override
        protected void doStart() throws Exception {
        }

        @Override
        protected void doStop() throws Exception {
        }

        public boolean isLeader() {
            return leader;
        }

        public void setLeader(boolean leader) {
            this.leader = leader;

            // fire only while the view is running, mirroring real implementations
            if (isRunAllowed()) {
                fireLeadershipChangedEvent(getLeader());
            }
        }
    }

    /** Minimal cluster service that hands out {@link TestClusterView} instances. */
    private static class TestClusterService extends AbstractCamelClusterService<TestClusterView> {
        public TestClusterService(String id) {
            super(id);
        }

        @Override
        protected TestClusterView createView(String namespace) throws Exception {
            return new TestClusterView(this, namespace);
        }
    }
}
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.repository;

import org.apache.commons.io.IOUtils;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogChannel;

import java.io.File;
import java.io.InputStream;

import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests for {@link RepositoriesMeta}: reading/writing the repositories.xml
 * metadata, repository/database lookup helpers, and error handling.
 *
 * <p>Fixes applied in review: the expected-exception tests now call
 * {@code fail(...)} when no exception is thrown (previously they passed
 * vacuously), and test resource streams are closed via try-with-resources.</p>
 */
public class RepositoriesMetaTest {

  @BeforeClass
  public static void setUpClass() throws Exception {
    if ( !KettleEnvironment.isInitialized() ) {
      KettleEnvironment.init();
    }
  }

  @Test
  public void testToString() throws Exception {
    RepositoriesMeta repositoriesMeta = new RepositoriesMeta();
    assertEquals( "RepositoriesMeta", repositoriesMeta.toString() );
  }

  /**
   * Round-trips the bundled repositories.xml and exercises the accessor,
   * search, add/remove and index methods for both repositories and databases.
   */
  @Test
  public void testReadData() throws Exception {
    RepositoriesMeta meta = new RepositoriesMeta();
    RepositoriesMeta spy = Mockito.spy( meta );
    LogChannel log = mock( LogChannel.class );
    // redirect file lookup and logging so the test reads the bundled resource
    when( spy.getKettleUserRepositoriesFile() ).thenReturn( getClass().getResource( "repositories.xml" ).getPath() );
    when( spy.newLogChannel() ).thenReturn( log );
    spy.readData();

    String repositoriesXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + Const.CR
      + "<repositories>" + Const.CR
      + " <connection>" + Const.CR
      + " <name>local postgres</name>" + Const.CR
      + " <server>localhost</server>" + Const.CR
      + " <type>POSTGRESQL</type>" + Const.CR
      + " <access>Native</access>" + Const.CR
      + " <database>hibernate</database>" + Const.CR
      + " <port>5432</port>" + Const.CR
      + " <username>auser</username>" + Const.CR
      + " <password>Encrypted 2be98afc86aa7f285bb18bd63c99dbdde</password>" + Const.CR
      + " <servername/>" + Const.CR
      + " <data_tablespace/>" + Const.CR
      + " <index_tablespace/>" + Const.CR
      + " <attributes>" + Const.CR
      + " <attribute><code>FORCE_IDENTIFIERS_TO_LOWERCASE</code><attribute>N</attribute></attribute>" + Const.CR
      + " <attribute><code>FORCE_IDENTIFIERS_TO_UPPERCASE</code><attribute>N</attribute></attribute>" + Const.CR
      + " <attribute><code>IS_CLUSTERED</code><attribute>N</attribute></attribute>" + Const.CR
      + " <attribute><code>PORT_NUMBER</code><attribute>5432</attribute></attribute>" + Const.CR
      + " <attribute><code>PRESERVE_RESERVED_WORD_CASE</code><attribute>N</attribute></attribute>" + Const.CR
      + " <attribute><code>QUOTE_ALL_FIELDS</code><attribute>N</attribute></attribute>" + Const.CR
      + " <attribute><code>SUPPORTS_BOOLEAN_DATA_TYPE</code><attribute>Y</attribute></attribute>" + Const.CR
      + " <attribute><code>SUPPORTS_TIMESTAMP_DATA_TYPE</code><attribute>Y</attribute></attribute>" + Const.CR
      + " <attribute><code>USE_POOLING</code><attribute>N</attribute></attribute>" + Const.CR
      + " </attributes>" + Const.CR
      + " </connection>" + Const.CR
      + " <repository> <id>KettleFileRepository</id>" + Const.CR
      + " <name>Test Repository</name>" + Const.CR
      + " <description>Test Repository Description</description>" + Const.CR
      + " <base_directory>test-repository</base_directory>" + Const.CR
      + " <read_only>N</read_only>" + Const.CR
      + " <hides_hidden_files>N</hides_hidden_files>" + Const.CR
      + " </repository> </repositories>" + Const.CR;
    assertEquals( repositoriesXml, spy.getXML() );

    // cloning must not disturb the original's serialized form
    RepositoriesMeta clone = spy.clone();
    assertEquals( repositoriesXml, spy.getXML() );
    assertNotSame( clone, spy );

    assertEquals( 1, spy.nrRepositories() );
    RepositoryMeta repository = spy.getRepository( 0 );
    assertEquals( "Test Repository", repository.getName() );
    assertEquals( "Test Repository Description", repository.getDescription() );
    assertEquals( " <repository> <id>KettleFileRepository</id>" + Const.CR
      + " <name>Test Repository</name>" + Const.CR
      + " <description>Test Repository Description</description>" + Const.CR
      + " <base_directory>test-repository</base_directory>" + Const.CR
      + " <read_only>N</read_only>" + Const.CR
      + " <hides_hidden_files>N</hides_hidden_files>" + Const.CR
      + " </repository>", repository.getXML() );

    // lookup helpers must all resolve to the same instance
    assertSame( repository, spy.searchRepository( "Test Repository" ) );
    assertSame( repository, spy.findRepositoryById( "KettleFileRepository" ) );
    assertSame( repository, spy.findRepository( "Test Repository" ) );
    assertNull( spy.findRepository( "not found" ) );
    assertNull( spy.findRepositoryById( "not found" ) );
    assertEquals( 0, spy.indexOfRepository( repository ) );

    spy.removeRepository( 0 );
    assertEquals( 0, spy.nrRepositories() );
    assertNull( spy.searchRepository( "Test Repository" ) );
    spy.addRepository( 0, repository );
    assertEquals( 1, spy.nrRepositories() );
    // removing an out-of-range index is a no-op
    spy.removeRepository( 1 );
    assertEquals( 1, spy.nrRepositories() );

    assertEquals( 1, spy.nrDatabases() );
    assertEquals( "local postgres", spy.getDatabase( 0 ).getName() );
    DatabaseMeta searchDatabase = spy.searchDatabase( "local postgres" );
    assertSame( searchDatabase, spy.getDatabase( 0 ) );
    assertEquals( 0, spy.indexOfDatabase( searchDatabase ) );
    spy.removeDatabase( 0 );
    assertEquals( 0, spy.nrDatabases() );
    assertNull( spy.searchDatabase( "local postgres" ) );
    spy.addDatabase( 0, searchDatabase );
    assertEquals( 1, spy.nrDatabases() );
    spy.removeDatabase( 1 );
    assertEquals( 1, spy.nrDatabases() );

    // the bundled file references an unknown repository id, which is reported
    assertEquals( "Unable to read repository with id [junk]. RepositoryMeta is not available.", spy.getErrorMessage() );
  }

  /** A missing repositories file yields an empty-but-successful read. */
  @Test
  public void testNothingToRead() throws Exception {
    RepositoriesMeta meta = new RepositoriesMeta();
    RepositoriesMeta spy = Mockito.spy( meta );
    LogChannel log = mock( LogChannel.class );
    when( spy.getKettleUserRepositoriesFile() ).thenReturn( "filedoesnotexist.xml" );
    when( spy.newLogChannel() ).thenReturn( log );

    assertTrue( spy.readData() );
    assertEquals( 0, spy.nrDatabases() );
    assertEquals( 0, spy.nrRepositories() );
  }

  /** Reading directly from an input stream populates databases and repositories. */
  @Test
  public void testReadDataFromInputStream() throws Exception {
    RepositoriesMeta meta = new RepositoriesMeta();
    RepositoriesMeta spy = Mockito.spy( meta );
    LogChannel log = mock( LogChannel.class );
    when( spy.newLogChannel() ).thenReturn( log );
    // try-with-resources so the test does not leak the classpath stream
    try ( InputStream inputStream = getClass().getResourceAsStream( "repositories.xml" ) ) {
      spy.readDataFromInputStream( inputStream );
    }
    assertEquals( 1, spy.nrDatabases() );
    assertEquals( 1, spy.nrRepositories() );
  }

  /** A null input stream (missing resource) must raise a KettleException. */
  @Test
  public void testErrorReadingInputStream() throws Exception {
    RepositoriesMeta meta = new RepositoriesMeta();
    RepositoriesMeta spy = Mockito.spy( meta );
    LogChannel log = mock( LogChannel.class );
    when( spy.newLogChannel() ).thenReturn( log );
    try {
      spy.readDataFromInputStream( getClass().getResourceAsStream( "filedoesnotexist.xml" ) );
      // without this, the test would pass silently if no exception were thrown
      fail( "Expected a KettleException for a null input stream" );
    } catch ( KettleException e ) {
      assertEquals( Const.CR + "Error reading information from file:" + Const.CR
        + "InputStream cannot be null" + Const.CR, e.getMessage() );
    }
  }

  /** Malformed XML in the repositories file must raise a KettleException. */
  @Test
  public void testErrorReadingFile() throws Exception {
    RepositoriesMeta meta = new RepositoriesMeta();
    RepositoriesMeta spy = Mockito.spy( meta );
    LogChannel log = mock( LogChannel.class );
    when( spy.newLogChannel() ).thenReturn( log );
    when( spy.getKettleUserRepositoriesFile() ).thenReturn( getClass().getResource( "bad-repositories.xml" ).getPath() );
    try {
      spy.readData();
      fail( "Expected a KettleException for malformed repositories XML" );
    } catch ( KettleException e ) {
      assertEquals( Const.CR + "Error reading information from file:" + Const.CR
        + "The element type \"repositories\" must be terminated by the matching end-tag \"</repositories>\"." + Const.CR, e.getMessage() );
    }
  }

  /** Writing an (empty) metadata set produces a minimal repositories.xml. */
  @Test
  public void testWriteFile() throws Exception {
    RepositoriesMeta meta = new RepositoriesMeta();
    RepositoriesMeta spy = Mockito.spy( meta );
    LogChannel log = mock( LogChannel.class );
    when( spy.newLogChannel() ).thenReturn( log );
    String path = getClass().getResource( "repositories.xml" ).getPath().replace( "repositories.xml", "new-repositories.xml" );
    when( spy.getKettleUserRepositoriesFile() ).thenReturn( path );
    spy.writeData();
    try ( InputStream resourceAsStream = getClass().getResourceAsStream( "new-repositories.xml" ) ) {
      assertEquals( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + Const.CR
        + "<repositories>" + Const.CR
        + " </repositories>" + Const.CR, IOUtils.toString( resourceAsStream ) );
    } finally {
      // clean up the file created next to the test resources
      new File( path ).delete();
    }
  }

  /** A null target path must raise a KettleException when writing. */
  @Test
  public void testErrorWritingFile() throws Exception {
    RepositoriesMeta meta = new RepositoriesMeta();
    RepositoriesMeta spy = Mockito.spy( meta );
    LogChannel log = mock( LogChannel.class );
    when( spy.newLogChannel() ).thenReturn( log );
    when( spy.getKettleUserRepositoriesFile() ).thenReturn( null );
    try {
      spy.writeData();
      fail( "Expected a KettleException when the repositories file path is null" );
    } catch ( KettleException e ) {
      assertTrue( e.getMessage().startsWith( Const.CR + "Error writing repositories metadata" ) );
    }
  }
}
package swarm.client.view.cell;

import java.util.logging.Logger;

import swarm.client.app.AppContext;
import swarm.client.entities.Camera;
import swarm.client.entities.BufferCell;
import swarm.client.input.I_ClickHandler;
import swarm.client.managers.CellBuffer;
import swarm.client.managers.CellBufferManager;
import swarm.client.states.camera.Action_Camera_SnapToCoordinate;
import swarm.client.states.camera.StateMachine_Camera;
import swarm.client.states.camera.State_CameraSnapping;
import swarm.client.states.camera.State_ViewingCell;
import swarm.client.view.E_ZIndex;
import swarm.client.view.I_UIElement;
import swarm.client.view.S_UI;
import swarm.client.view.ViewContext;
import swarm.shared.app.S_CommonApp;
import swarm.shared.debugging.U_Debug;
import swarm.shared.statemachine.A_Action;
import swarm.shared.statemachine.A_State;
import swarm.shared.statemachine.StateContext;
import swarm.shared.statemachine.A_BaseStateEvent;
import swarm.shared.structs.GridCoordinate;
import swarm.shared.structs.Point;

import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Panel;

/**
 * Full-viewport overlay panel that dims the grid while the camera snaps to or views a
 * single cell, and that "pops up" the targeted {@link VisualCell} above the dimmed layer.
 * Fade-in progress is driven by camera distance to its target; fade-out is time-based.
 * Driven entirely by state-machine events via {@link #onStateEvent(A_BaseStateEvent)}.
 */
public class VisualCellFocuser extends FlowPanel implements I_UIElement
{
    // Internal fade lifecycle of the focuser overlay.
    private enum AnimationState
    {
        NONE, FADING_IN, FOCUSED, FADING_OUT;
    };

    private static final Logger s_logger = Logger.getLogger(VisualCellFocuser.class.getName());

    private AnimationState m_animationState = AnimationState.NONE;

    // Current overlay opacity (0..m_maxAlpha) and the opacity/camera-distance snapshots
    // taken at the start of the current fade so progress can be interpolated.
    private double m_alpha = 0;
    private double m_startAlpha = 0;
    private double m_fadeStartTime = 0;
    private double m_startCameraDistance = 0;

    // The cell currently raised above the overlay, if any, and its grid coordinate
    // (kept separately so the coordinate survives cell recycling).
    private VisualCell m_poppedCell = null;
    private final GridCoordinate m_poppedCellCoord = new GridCoordinate();

    private final AppContext m_appContext;
    private final StateContext m_stateContext;
    private final ViewContext m_viewContext;

    // Fade-out duration and the overlay's maximum opacity, both taken from view config.
    private final double m_fadeOutTime_seconds;
    private final double m_maxAlpha;

    /**
     * Builds the overlay, styles it, hides it, and assigns its z-index layer.
     *
     * @param viewContext source of app/state contexts and focuser configuration.
     */
    public VisualCellFocuser(ViewContext viewContext)
    {
        m_viewContext = viewContext;
        m_appContext = viewContext.appContext;
        m_stateContext = viewContext.stateContext;
        m_fadeOutTime_seconds = viewContext.config.focuserFadeOutTime_seconds;
        m_maxAlpha = viewContext.config.focuserMaxAlpha;

        this.addStyleName("cell_focuser");
        this.getElement().setAttribute("ondragstart", "return false;");
        this.setVisible(false);
        // this.getElement().getStyle().setOpacity(.01);

        E_ZIndex.CELL_FOCUSER.assignTo(this);
    }

    // Records the new opacity and applies it as an rgba background color.
    private void setAlpha(double alpha)
    {
        m_alpha = alpha;
        this.getElement().getStyle().setBackgroundColor("rgba(0,0,0," + alpha + ")");
    }

    /**
     * Attempts to pop up the cell targeted by the given camera state.
     * Only acts when the lowest display buffer is at full resolution (sub-cell count 1),
     * the target coordinate is in bounds, and the visual cell is loaded enough to pop.
     *
     * @return true if a cell was popped up and recorded in {@code m_poppedCell}.
     */
    private boolean popUpTargetCell(A_State state)
    {
        CellBuffer buffer = m_appContext.cellBufferMngr.getLowestDisplayBuffer();

        if( buffer.getSubCellCount() == 1 )
        {
            // The target coordinate comes from whichever camera state is active.
            GridCoordinate targetCoord = null;
            if( state instanceof State_CameraSnapping )
            {
                targetCoord = ((State_CameraSnapping)state).getTargetCoord();
            }
            else if( state instanceof State_ViewingCell )
            {
                targetCoord = ((State_ViewingCell)state).getCell().getCoordinate();
            }

            if( targetCoord == null )
            {
                U_Debug.ASSERT(false, "popUpTargetCell1");
                return false;
            }

            if( buffer.isInBoundsAbsolute(targetCoord) )
            {
                BufferCell cell = buffer.getCellAtAbsoluteCoord(targetCoord);
                if( cell != null )
                {
                    VisualCell visualCell = (VisualCell) cell.getVisualization();

                    if( m_appContext.cellBufferMngr.getNonOverriddenSubCellCount() == 1 || visualCell.isLoadedEnoughForPop() )
                    {
                        popUp(visualCell);

                        m_poppedCell = visualCell;
                        m_poppedCellCoord.copy(cell.getCoordinate());

                        return true;
                    }
                }
            }
        }

        return false;
    }

    // Raises the cell visually and points the scroll navigator's layout at it.
    private void popUp(VisualCell visualCell)
    {
        visualCell.popUp();
        m_viewContext.scrollNavigator.setTargetLayout(visualCell);
    }

    /**
     * Transitions the fade state machine, performing the entry work for the new state
     * (showing/hiding the panel, popping cells up or pushing them down, snapshotting
     * alpha/time baselines). FADING_IN handles re-entry explicitly via the old state.
     */
    private void setAnimationState(AnimationState state)
    {
        AnimationState oldState = m_animationState;
        m_animationState = state;

        switch(m_animationState )
        {
            case NONE:
            {
                // Fully reset: push down any popped cell and hide the overlay.
                if( this.m_poppedCell != null )
                {
                    this.m_poppedCell.pushDown();
                    this.m_poppedCell = null;
                }

                this.setVisible(false);
                this.setAlpha(0); // just to be sure.

                break;
            }

            case FADING_IN:
            {
                switch( oldState )
                {
                    case FADING_IN: // This is the only animation state that can be entered more than once in a row.
                    {
                        if( this.m_poppedCell != null )
                        {
                            // Re-snap while fading in: keep the popped cell only if the new
                            // snap target is the same coordinate; otherwise push it down.
                            State_CameraSnapping snappingState = m_stateContext.getEntered(State_CameraSnapping.class);

                            boolean pushDown = false;

                            if( snappingState == null )
                            {
                                pushDown = true;
                            }
                            else
                            {
                                if( !snappingState.getTargetCoord().isEqualTo(m_poppedCellCoord))
                                {
                                    pushDown = true;
                                }
                                else
                                {
                                    popUp(this.m_poppedCell);
                                }
                            }

                            if( pushDown )
                            {
                                this.m_poppedCell.cancelPopUp();
                                this.m_poppedCell.pushDown();
                                this.m_poppedCell = null;

                                m_startAlpha = m_alpha;
                            }
                        }

                        break;
                    }

                    case FADING_OUT:
                    {
                        // Reversing a fade-out: start the fade-in from the current alpha.
                        m_startAlpha = m_alpha;

                        if( this.m_poppedCell != null )
                        {
                            StateMachine_Camera cameraController = m_stateContext.getEntered(StateMachine_Camera.class);
                            State_CameraSnapping snappingState = (State_CameraSnapping) cameraController.getCurrentState();
                            GridCoordinate targetSnapCoord = snappingState.getTargetCoord();

                            if( !targetSnapCoord.isEqualTo(m_poppedCellCoord) )
                            {
                                this.m_poppedCell.pushDown();
                                this.m_poppedCell = null;
                            }
                            else
                            {
                                m_startCameraDistance = this.calcCameraDistanceToTarget();

                                popUp(this.m_poppedCell);// re-poppingUp after cancelPop in FADE_OUT case.
                            }
                        }

                        break;
                    }

                    case NONE:
                    {
                        // Fresh fade-in: show the overlay and baseline the alpha.
                        m_startAlpha = m_alpha;

                        this.setVisible(true);

                        break;
                    }
                }

                break;
            }

            case FOCUSED:
            {
                this.setAlpha(m_maxAlpha); // just to be sure.

                if( m_poppedCell == null )
                {
                    //--- DRK > This can happen when directly transitioning from snap to viewing....fringe case,
                    //--- and in the future (maybe as you're reading this!) possibly not allowed to begin with.
                    //---
                    //--- DRK > Note to past doug, this is future doug...it should now be an impossible case, with a
                    //--- forced update in between snapping and viewing, but you never know.
                    StateMachine_Camera cameraController = m_stateContext.getEntered(StateMachine_Camera.class);
                    this.popUpTargetCell(cameraController.getCurrentState());
                }

                break;
            }

            case FADING_OUT:
            {
                // Time-based fade: record the start time and alpha baseline.
                StateMachine_Camera cameraController = m_stateContext.getEntered(StateMachine_Camera.class);
                m_fadeStartTime = cameraController.getTotalTimeInState();
                m_startAlpha = m_alpha;

                if( oldState == AnimationState.FADING_IN )
                {
                    if( m_poppedCell != null )
                    {
                        m_poppedCell.cancelPopUp();
                    }
                }

                break;
            }
        }
    }

    // Distance between the camera's current position and its target position.
    // NOTE(review): the local cameraController below is unused — candidate for removal.
    private double calcCameraDistanceToTarget()
    {
        StateMachine_Camera cameraController = m_stateContext.getEntered(StateMachine_Camera.class);
        Point cameraPoint = m_appContext.cameraMngr.getCamera().getPosition();
        Point cameraTarget = m_appContext.cameraMngr.getTargetPosition();

        return cameraTarget.calcDistanceTo(cameraPoint);
    }

    /**
     * Per-frame driver: while FADING_IN, interpolates alpha from camera progress toward
     * the target (popping the cell as soon as possible); while FADING_OUT, interpolates
     * alpha from elapsed time and transitions to NONE when the fade completes.
     */
    private void updateAnimationState(A_State currentState)
    {
        switch( m_animationState)
        {
            case FADING_IN:
            {
                if( m_alpha == m_maxAlpha )
                {
                    if( m_poppedCell == null )
                    {
                        this.popUpTargetCell(currentState);
                    }
                }
                else
                {
                    double cameraDistance = calcCameraDistanceToTarget();

                    if( m_poppedCell == null )
                    {
                        if( this.popUpTargetCell(currentState) )
                        {
                            // Baseline the distance at the moment the cell pops so the
                            // remaining travel maps onto the remaining alpha range.
                            m_startCameraDistance = cameraDistance;
                        }
                    }

                    if( m_poppedCell != null )
                    {
                        double alphaRatio = (1 - (cameraDistance / m_startCameraDistance));
                        double alpha = m_startAlpha + alphaRatio * (m_maxAlpha - m_startAlpha);

                        this.setAlpha(alpha);
                    }
                }

                break;
            }

            case FADING_OUT:
            {
                if( m_poppedCell != null )
                {
                    // Drop our reference if the buffer no longer shows the popped cell.
                    CellBuffer buffer = m_appContext.cellBufferMngr.getLowestDisplayBuffer();

                    if( buffer.getSubCellCount() != 1 || !buffer.isInBoundsAbsolute(m_poppedCellCoord) || buffer.getCellAtAbsoluteCoord(m_poppedCellCoord) == null )
                    {
                        //--- DRK > Used to push down cell here, but strictly speaking it shouldn't be necessary,
                        //--- and might actually be dangerous, like if the cell is recycled, popped up, then
                        //--- here we push it down right after that.
                        //m_poppedCell.pushDown();
                        m_poppedCell = null;
                    }
                }

                double elapsed = currentState.getParent().getTotalTimeInState() - m_fadeStartTime;
                double ratio = elapsed / m_fadeOutTime_seconds;

                if( ratio >= 1 )
                {
                    this.setAnimationState(AnimationState.NONE);

                    return;
                }

                double totalAlphaToFade = m_startAlpha;
                this.setAlpha(m_startAlpha - totalAlphaToFade*ratio);

                break;
            }
        }
    }

    /**
     * State-machine event hook: maps camera-state enter/update/exit events and the
     * snap-to-coordinate action onto fade-state transitions and per-frame updates.
     */
    public void onStateEvent(A_BaseStateEvent event)
    {
        switch( event.getType() )
        {
            case DID_ENTER:
            {
                if( event.getState() instanceof State_ViewingCell )
                {
                    this.setAnimationState(AnimationState.FOCUSED);
                }
                else if( event.getState() instanceof State_CameraSnapping )
                {
                    this.setAnimationState(AnimationState.FADING_IN);
                    this.updateAnimationState(event.getState()); // update once just to pop cell if it's visible
                }

                break;
            }

            case DID_UPDATE:
            {
                if( event.getState().getParent() instanceof StateMachine_Camera )
                {
                    this.updateAnimationState(event.getState());
                }

                break;
            }

            case DID_EXIT:
            {
                if( event.getState() instanceof State_ViewingCell )
                {
                    this.setAnimationState(AnimationState.FADING_OUT);
                }
                else if( event.getState() instanceof State_CameraSnapping )
                {
                    //--- DRK > If we're exiting the snapping state because we're entering the viewing state, then
                    //--- this call to set the animation state is ignored.
                    //TODO: Minor thing, but perhaps check here if the viewing state is entered, and if so, don't
                    //      set the animation state.
                    this.setAnimationState(AnimationState.FADING_OUT);
                }

                break;
            }

            case DID_PERFORM_ACTION:
            {
                if( event.getTargetClass() == Action_Camera_SnapToCoordinate.class )
                {
                    State_CameraSnapping state = event.getContext().getEntered(State_CameraSnapping.class);

                    if( state != null )
                    {
                        if( state.getUpdateCount() > 0 )
                        {
                            //--- DRK > This covers the case of snapping to a new cell while already snapping.
                            this.setAnimationState(AnimationState.FADING_IN);
                        }
                    }
                    else
                    {
                        //--- DRK > Snapping to the same coordinate you're viewing is now allowed, which
                        //--- doesn't trigger a state change, so this assert is now invalid.
                        //smU_Debug.ASSERT(false, "smVisucellfoc1");
                    }
                }

                break;
            }
        }
    }
}
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.hadoop.resourceestimator.skylinestore.impl;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.resourceestimator.common.api.RecurrenceId;
import org.apache.hadoop.resourceestimator.common.api.ResourceSkyline;
import org.apache.hadoop.resourceestimator.skylinestore.api.SkylineStore;
import org.apache.hadoop.resourceestimator.skylinestore.exceptions.DuplicateRecurrenceIdException;
import org.apache.hadoop.resourceestimator.skylinestore.exceptions.EmptyResourceSkylineException;
import org.apache.hadoop.resourceestimator.skylinestore.exceptions.NullPipelineIdException;
import org.apache.hadoop.resourceestimator.skylinestore.exceptions.NullRLESparseResourceAllocationException;
import org.apache.hadoop.resourceestimator.skylinestore.exceptions.NullRecurrenceIdException;
import org.apache.hadoop.resourceestimator.skylinestore.exceptions.NullResourceSkylineException;
import org.apache.hadoop.resourceestimator.skylinestore.exceptions.RecurrenceIdNotFoundException;
import org.apache.hadoop.resourceestimator.skylinestore.exceptions.SkylineStoreException;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.RLESparseResourceAllocation;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationInterval;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * Test {@link SkylineStore} class.
 * <p>
 * Review fixes applied: JUnit {@code assertEquals} calls now pass the expected
 * value first (several calls had the arguments swapped, which produces
 * misleading failure messages); {@code assertEquals(true, x.equals(y))}
 * replaced with {@code assertTrue}; diamond operator used for local
 * collections, consistent with the rest of the class.
 */
public abstract class TestSkylineStore {
  /**
   * Testing variables.
   */
  private SkylineStore skylineStore;
  private TreeMap<Long, Resource> resourceOverTime;
  private RLESparseResourceAllocation skylineList;
  private ReservationInterval riAdd;
  private Resource resource;

  /** @return the concrete {@link SkylineStore} implementation under test. */
  protected abstract SkylineStore createSkylineStore();

  @Before public final void setup() {
    skylineStore = createSkylineStore();
    resourceOverTime = new TreeMap<>();
    resource = Resource.newInstance(1024 * 100, 100);
  }

  /** Asserts field-by-field equality of two {@link ResourceSkyline}s. */
  private void compare(final ResourceSkyline skyline1,
      final ResourceSkyline skyline2) {
    Assert.assertEquals(skyline1.getJobId(), skyline2.getJobId());
    Assert.assertEquals(skyline1.getJobInputDataSize(),
        skyline2.getJobInputDataSize(), 0);
    Assert.assertEquals(skyline1.getJobSubmissionTime(),
        skyline2.getJobSubmissionTime());
    Assert
        .assertEquals(skyline1.getJobFinishTime(), skyline2.getJobFinishTime());
    Assert.assertEquals(skyline1.getContainerSpec().getMemorySize(),
        skyline2.getContainerSpec().getMemorySize());
    Assert.assertEquals(skyline1.getContainerSpec().getVirtualCores(),
        skyline2.getContainerSpec().getVirtualCores());
    // assertTrue instead of assertEquals(true, ...) — clearer intent/failure.
    Assert.assertTrue(
        skyline2.getSkylineList().equals(skyline1.getSkylineList()));
  }

  /** Adds one skyline under the given id and verifies it is retrievable. */
  private void addToStore(final RecurrenceId recurrenceId,
      final ResourceSkyline resourceSkyline) throws SkylineStoreException {
    final List<ResourceSkyline> resourceSkylines = new ArrayList<>();
    resourceSkylines.add(resourceSkyline);
    skylineStore.addHistory(recurrenceId, resourceSkylines);
    final List<ResourceSkyline> resourceSkylinesGet =
        skylineStore.getHistory(recurrenceId).get(recurrenceId);
    Assert.assertTrue(resourceSkylinesGet.contains(resourceSkyline));
  }

  /**
   * Builds a {@link ResourceSkyline} with {@code n} back-to-back intervals of
   * the shared {@code resource}. Note: mutates the {@code skylineList} and
   * {@code riAdd} fields as a side effect.
   */
  private ResourceSkyline getSkyline(final int n) {
    skylineList = new RLESparseResourceAllocation(resourceOverTime,
        new DefaultResourceCalculator());
    for (int i = 0; i < n; i++) {
      riAdd = new ReservationInterval(i * 10, (i + 1) * 10);
      skylineList.addInterval(riAdd, resource);
    }
    final ResourceSkyline resourceSkyline =
        new ResourceSkyline(Integer.toString(n), 1024.5, 0, 20, resource,
            skylineList);
    return resourceSkyline;
  }

  /** Exercises getHistory with exact, wildcard-runId, wildcard-pipelineId and unknown ids. */
  @Test public final void testGetHistory() throws SkylineStoreException {
    // addHistory first recurring pipeline
    final RecurrenceId recurrenceId1 =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    addToStore(recurrenceId1, resourceSkyline1);
    final ResourceSkyline resourceSkyline2 = getSkyline(2);
    addToStore(recurrenceId1, resourceSkyline2);
    final RecurrenceId recurrenceId2 =
        new RecurrenceId("FraudDetection", "17/06/21 00:00:00");
    final ResourceSkyline resourceSkyline3 = getSkyline(3);
    addToStore(recurrenceId2, resourceSkyline3);
    final ResourceSkyline resourceSkyline4 = getSkyline(4);
    addToStore(recurrenceId2, resourceSkyline4);
    // addHistory second recurring pipeline
    final RecurrenceId recurrenceId3 =
        new RecurrenceId("Random", "17/06/20 00:00:00");
    addToStore(recurrenceId3, resourceSkyline1);
    addToStore(recurrenceId3, resourceSkyline2);
    // test getHistory {pipelineId, runId}
    Map<RecurrenceId, List<ResourceSkyline>> jobHistory =
        skylineStore.getHistory(recurrenceId1);
    Assert.assertEquals(1, jobHistory.size());
    for (final Map.Entry<RecurrenceId, List<ResourceSkyline>> entry : jobHistory
        .entrySet()) {
      Assert.assertEquals(recurrenceId1, entry.getKey());
      final List<ResourceSkyline> getSkylines = entry.getValue();
      Assert.assertEquals(2, getSkylines.size());
      compare(resourceSkyline1, getSkylines.get(0));
      compare(resourceSkyline2, getSkylines.get(1));
    }
    // test getHistory {pipelineId, *}
    RecurrenceId recurrenceIdTest = new RecurrenceId("FraudDetection", "*");
    jobHistory = skylineStore.getHistory(recurrenceIdTest);
    Assert.assertEquals(2, jobHistory.size());
    for (final Map.Entry<RecurrenceId, List<ResourceSkyline>> entry : jobHistory
        .entrySet()) {
      Assert.assertEquals(recurrenceId1.getPipelineId(),
          entry.getKey().getPipelineId());
      final List<ResourceSkyline> getSkylines = entry.getValue();
      if (entry.getKey().getRunId().equals("17/06/20 00:00:00")) {
        Assert.assertEquals(2, getSkylines.size());
        compare(resourceSkyline1, getSkylines.get(0));
        compare(resourceSkyline2, getSkylines.get(1));
      } else {
        // expected value first (was swapped in the original)
        Assert.assertEquals("17/06/21 00:00:00", entry.getKey().getRunId());
        Assert.assertEquals(2, getSkylines.size());
        compare(resourceSkyline3, getSkylines.get(0));
        compare(resourceSkyline4, getSkylines.get(1));
      }
    }
    // test getHistory {*, runId}
    recurrenceIdTest = new RecurrenceId("*", "some random runId");
    jobHistory = skylineStore.getHistory(recurrenceIdTest);
    Assert.assertEquals(3, jobHistory.size());
    for (final Map.Entry<RecurrenceId, List<ResourceSkyline>> entry : jobHistory
        .entrySet()) {
      if (entry.getKey().getPipelineId().equals("FraudDetection")) {
        final List<ResourceSkyline> getSkylines = entry.getValue();
        if (entry.getKey().getRunId().equals("17/06/20 00:00:00")) {
          Assert.assertEquals(2, getSkylines.size());
          compare(resourceSkyline1, getSkylines.get(0));
          compare(resourceSkyline2, getSkylines.get(1));
        } else {
          // expected value first (was swapped in the original)
          Assert.assertEquals("17/06/21 00:00:00", entry.getKey().getRunId());
          Assert.assertEquals(2, getSkylines.size());
          compare(resourceSkyline3, getSkylines.get(0));
          compare(resourceSkyline4, getSkylines.get(1));
        }
      } else {
        Assert.assertEquals("Random", entry.getKey().getPipelineId());
        // expected value first (was swapped in the original)
        Assert.assertEquals("17/06/20 00:00:00", entry.getKey().getRunId());
        final List<ResourceSkyline> getSkylines = entry.getValue();
        Assert.assertEquals(2, getSkylines.size());
        compare(resourceSkyline1, getSkylines.get(0));
        compare(resourceSkyline2, getSkylines.get(1));
      }
    }
    // test getHistory with wrong RecurrenceId
    recurrenceIdTest =
        new RecurrenceId("some random pipelineId", "some random runId");
    Assert.assertNull(skylineStore.getHistory(recurrenceIdTest));
  }

  /** Round-trips an estimation through the store and compares capacities. */
  @Test public final void testGetEstimation() throws SkylineStoreException {
    // first, add estimation to the skyline store
    final RLESparseResourceAllocation skylineList2 =
        new RLESparseResourceAllocation(resourceOverTime,
            new DefaultResourceCalculator());
    for (int i = 0; i < 5; i++) {
      riAdd = new ReservationInterval(i * 10, (i + 1) * 10);
      skylineList2.addInterval(riAdd, resource);
    }
    skylineStore.addEstimation("FraudDetection", skylineList2);
    // then, try to get the estimation
    final RLESparseResourceAllocation estimation =
        skylineStore.getEstimation("FraudDetection");
    for (int i = 0; i < 50; i++) {
      Assert.assertEquals(skylineList2.getCapacityAtTime(i),
          estimation.getCapacityAtTime(i));
    }
  }

  @Test(expected = NullRecurrenceIdException.class)
  public final void testGetNullRecurrenceId() throws SkylineStoreException {
    // addHistory first recurring pipeline
    final RecurrenceId recurrenceId1 =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    addToStore(recurrenceId1, resourceSkyline1);
    final ResourceSkyline resourceSkyline2 = getSkyline(2);
    addToStore(recurrenceId1, resourceSkyline2);
    final RecurrenceId recurrenceId2 =
        new RecurrenceId("FraudDetection", "17/06/21 00:00:00");
    final ResourceSkyline resourceSkyline3 = getSkyline(3);
    addToStore(recurrenceId2, resourceSkyline3);
    final ResourceSkyline resourceSkyline4 = getSkyline(4);
    addToStore(recurrenceId2, resourceSkyline4);
    // addHistory second recurring pipeline
    final RecurrenceId recurrenceId3 =
        new RecurrenceId("Random", "17/06/20 00:00:00");
    addToStore(recurrenceId3, resourceSkyline1);
    addToStore(recurrenceId3, resourceSkyline2);
    // try to getHistory with null recurringId
    skylineStore.getHistory(null);
  }

  @Test(expected = NullPipelineIdException.class)
  public final void testGetNullPipelineIdException()
      throws SkylineStoreException {
    skylineStore.getEstimation(null);
  }

  /** addHistory tolerates null entries inside the skyline list. */
  @Test public final void testAddNormal() throws SkylineStoreException {
    // addHistory resource skylines to the in-memory store
    final RecurrenceId recurrenceId =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    addToStore(recurrenceId, resourceSkyline1);
    final ArrayList<ResourceSkyline> resourceSkylines = new ArrayList<>();
    // the resource skylines to be added contain null
    resourceSkylines.add(null);
    final ResourceSkyline resourceSkyline2 = getSkyline(2);
    resourceSkylines.add(resourceSkyline2);
    skylineStore.addHistory(recurrenceId, resourceSkylines);
    // query the in-memory store
    final Map<RecurrenceId, List<ResourceSkyline>> jobHistory =
        skylineStore.getHistory(recurrenceId);
    Assert.assertEquals(1, jobHistory.size());
    for (final Map.Entry<RecurrenceId, List<ResourceSkyline>> entry : jobHistory
        .entrySet()) {
      Assert.assertEquals(recurrenceId, entry.getKey());
      final List<ResourceSkyline> getSkylines = entry.getValue();
      Assert.assertEquals(2, getSkylines.size());
      compare(resourceSkyline1, getSkylines.get(0));
      compare(resourceSkyline2, getSkylines.get(1));
    }
  }

  @Test(expected = NullRecurrenceIdException.class)
  public final void testAddNullRecurrenceId() throws SkylineStoreException {
    // recurrenceId is null
    final RecurrenceId recurrenceIdNull = null;
    final ArrayList<ResourceSkyline> resourceSkylines = new ArrayList<>();
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    resourceSkylines.add(resourceSkyline1);
    skylineStore.addHistory(recurrenceIdNull, resourceSkylines);
  }

  @Test(expected = NullResourceSkylineException.class)
  public final void testAddNullResourceSkyline() throws SkylineStoreException {
    final RecurrenceId recurrenceId =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ArrayList<ResourceSkyline> resourceSkylines = new ArrayList<>();
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    resourceSkylines.add(resourceSkyline1);
    // resourceSkylines is null
    skylineStore.addHistory(recurrenceId, null);
  }

  @Test(expected = DuplicateRecurrenceIdException.class)
  public final void testAddDuplicateRecurrenceId()
      throws SkylineStoreException {
    final RecurrenceId recurrenceId =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ArrayList<ResourceSkyline> resourceSkylines = new ArrayList<>();
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    resourceSkylines.add(resourceSkyline1);
    // trying to addHistory duplicate resource skylines
    skylineStore.addHistory(recurrenceId, resourceSkylines);
    skylineStore.addHistory(recurrenceId, resourceSkylines);
  }

  @Test(expected = NullPipelineIdException.class)
  public final void testAddNullPipelineIdException()
      throws SkylineStoreException {
    final RLESparseResourceAllocation skylineList2 =
        new RLESparseResourceAllocation(resourceOverTime,
            new DefaultResourceCalculator());
    for (int i = 0; i < 5; i++) {
      riAdd = new ReservationInterval(i * 10, (i + 1) * 10);
      skylineList2.addInterval(riAdd, resource);
    }
    skylineStore.addEstimation(null, skylineList2);
  }

  @Test(expected = NullRLESparseResourceAllocationException.class)
  public final void testAddNullRLESparseResourceAllocationExceptionException()
      throws SkylineStoreException {
    skylineStore.addEstimation("FraudDetection", null);
  }

  @Test public final void testDeleteNormal() throws SkylineStoreException {
    // addHistory first recurring pipeline
    final RecurrenceId recurrenceId1 =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    addToStore(recurrenceId1, resourceSkyline1);
    final ResourceSkyline resourceSkyline2 = getSkyline(2);
    addToStore(recurrenceId1, resourceSkyline2);
    // test deleteHistory function of the in-memory store
    skylineStore.deleteHistory(recurrenceId1);
  }

  @Test(expected = NullRecurrenceIdException.class)
  public final void testDeleteNullRecurrenceId() throws SkylineStoreException {
    final RecurrenceId recurrenceId1 =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    addToStore(recurrenceId1, resourceSkyline1);
    // try to deleteHistory with null recurringId
    skylineStore.deleteHistory(null);
  }

  @Test(expected = RecurrenceIdNotFoundException.class)
  public final void testDeleteRecurrenceIdNotFound()
      throws SkylineStoreException {
    final RecurrenceId recurrenceId1 =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    addToStore(recurrenceId1, resourceSkyline1);
    final RecurrenceId recurrenceIdInvalid =
        new RecurrenceId("Some random pipelineId", "Some random runId");
    // try to deleteHistory non-existing recurringId
    skylineStore.deleteHistory(recurrenceIdInvalid);
  }

  @Test public final void testUpdateNormal() throws SkylineStoreException {
    // addHistory first recurring pipeline
    final RecurrenceId recurrenceId1 =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    addToStore(recurrenceId1, resourceSkyline1);
    final ArrayList<ResourceSkyline> resourceSkylines = new ArrayList<>();
    final ResourceSkyline resourceSkyline2 = getSkyline(2);
    resourceSkylines.add(resourceSkyline1);
    resourceSkylines.add(resourceSkyline2);
    skylineStore.updateHistory(recurrenceId1, resourceSkylines);
    // query the in-memory store
    final Map<RecurrenceId, List<ResourceSkyline>> jobHistory =
        skylineStore.getHistory(recurrenceId1);
    Assert.assertEquals(1, jobHistory.size());
    for (final Map.Entry<RecurrenceId, List<ResourceSkyline>> entry : jobHistory
        .entrySet()) {
      Assert.assertEquals(recurrenceId1, entry.getKey());
      final List<ResourceSkyline> getSkylines = entry.getValue();
      Assert.assertEquals(2, getSkylines.size());
      compare(resourceSkyline1, getSkylines.get(0));
      compare(resourceSkyline2, getSkylines.get(1));
    }
  }

  @Test(expected = NullRecurrenceIdException.class)
  public final void testUpdateNullRecurrenceId() throws SkylineStoreException {
    final ArrayList<ResourceSkyline> resourceSkylines = new ArrayList<>();
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    resourceSkylines.add(resourceSkyline1);
    final ArrayList<ResourceSkyline> resourceSkylinesInvalid =
        new ArrayList<>();
    resourceSkylinesInvalid.add(null);
    // try to updateHistory with null recurringId
    skylineStore.updateHistory(null, resourceSkylines);
  }

  @Test(expected = NullResourceSkylineException.class)
  public final void testUpdateNullResourceSkyline()
      throws SkylineStoreException {
    final RecurrenceId recurrenceId =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ArrayList<ResourceSkyline> resourceSkylines = new ArrayList<>();
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    resourceSkylines.add(resourceSkyline1);
    final ArrayList<ResourceSkyline> resourceSkylinesInvalid =
        new ArrayList<>();
    resourceSkylinesInvalid.add(null);
    // try to updateHistory with null resourceSkylines
    skylineStore.addHistory(recurrenceId, resourceSkylines);
    skylineStore.updateHistory(recurrenceId, null);
  }

  @Test(expected = EmptyResourceSkylineException.class)
  public final void testUpdateEmptyRecurrenceId()
      throws SkylineStoreException {
    final RecurrenceId recurrenceId =
        new RecurrenceId("FraudDetection", "17/06/20 00:00:00");
    final ArrayList<ResourceSkyline> resourceSkylines = new ArrayList<>();
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    resourceSkylines.add(resourceSkyline1);
    final ArrayList<ResourceSkyline> resourceSkylinesInvalid =
        new ArrayList<>();
    resourceSkylinesInvalid.add(null);
    skylineStore.addHistory(recurrenceId, resourceSkylines);
    // try to updateHistory with empty resourceSkyline
    skylineStore.updateHistory(recurrenceId, resourceSkylinesInvalid);
  }

  @Test(expected = RecurrenceIdNotFoundException.class)
  public final void testUpdateRecurrenceIdNotFound()
      throws SkylineStoreException {
    final ArrayList<ResourceSkyline> resourceSkylines = new ArrayList<>();
    final ResourceSkyline resourceSkyline1 = getSkyline(1);
    resourceSkylines.add(resourceSkyline1);
    final RecurrenceId recurrenceIdInvalid =
        new RecurrenceId("Some random pipelineId", "Some random runId");
    final ArrayList<ResourceSkyline> resourceSkylinesInvalid =
        new ArrayList<>();
    resourceSkylinesInvalid.add(null);
    // try to updateHistory with non-existing recurringId
    skylineStore.updateHistory(recurrenceIdInvalid, resourceSkylines);
  }

  @After public final void cleanUp() {
    skylineStore = null;
    resourceOverTime.clear();
    resourceOverTime = null;
    skylineList = null;
    riAdd = null;
    resource = null;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ package org.apache.logging.log4j.core.jmx; import java.lang.management.ManagementFactory; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import javax.management.InstanceAlreadyExistsException; import javax.management.InstanceNotFoundException; import javax.management.MBeanRegistrationException; import javax.management.MBeanServer; import javax.management.NotCompliantMBeanException; import javax.management.ObjectName; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.Appender; import org.apache.logging.log4j.core.LoggerContext; import org.apache.logging.log4j.core.appender.AsyncAppender; import org.apache.logging.log4j.core.async.AsyncLoggerConfig; import org.apache.logging.log4j.core.async.AsyncLoggerContext; import org.apache.logging.log4j.core.config.LoggerConfig; import org.apache.logging.log4j.core.impl.Log4jContextFactory; import org.apache.logging.log4j.core.selector.ContextSelector; import org.apache.logging.log4j.core.util.Constants; import org.apache.logging.log4j.core.util.Log4jThreadFactory; import 
org.apache.logging.log4j.spi.LoggerContextFactory;
import org.apache.logging.log4j.status.StatusLogger;
import org.apache.logging.log4j.util.PropertiesUtil;

/**
 * Creates MBeans to instrument various classes in the log4j class hierarchy.
 * <p>
 * All instrumentation for Log4j 2 classes can be disabled by setting system property {@code -Dlog4j2.disable.jmx=true}.
 * </p>
 */
public final class Server {

    // Wildcard used in ObjectName patterns to match MBeans of every logger context.
    private static final String CONTEXT_NAME_ALL = "*";
    /**
     * The domain part, or prefix ({@value}) of the {@code ObjectName} of all MBeans that instrument Log4J2 components.
     */
    public static final String DOMAIN = "org.apache.logging.log4j2";
    private static final String PROPERTY_DISABLE_JMX = "log4j2.disable.jmx";
    private static final String PROPERTY_ASYNC_NOTIF = "log4j2.jmx.notify.async";
    private static final String THREAD_NAME_PREFIX = "jmx.notif";
    private static final StatusLogger LOGGER = StatusLogger.getLogger();
    // Executor used to deliver JMX notifications; null means notifications are
    // sent on the caller thread (and also when JMX is disabled entirely).
    static final Executor executor = isJmxDisabled() ? null : createExecutor();

    // Utility class: no instances.
    private Server() {
    }

    /**
     * Returns either a {@code null} Executor (causing JMX notifications to be sent from the caller thread) or a daemon
     * background thread Executor, depending on the value of system property "log4j2.jmx.notify.async". If this
     * property is not set, use a {@code null} Executor for web apps to avoid memory leaks and other issues when the
     * web app is restarted.
     * @see <a href="https://issues.apache.org/jira/browse/LOG4J2-938">LOG4J2-938</a>
     */
    private static ExecutorService createExecutor() {
        final boolean defaultAsync = !Constants.IS_WEB_APP;
        final boolean async = PropertiesUtil.getProperties().getBooleanProperty(PROPERTY_ASYNC_NOTIF, defaultAsync);
        return async ? Executors.newFixedThreadPool(1, Log4jThreadFactory.createDaemonThreadFactory(THREAD_NAME_PREFIX))
                : null;
    }

    /**
     * Either returns the specified name as is, or returns a quoted value containing the specified name with the special
     * characters (comma, equals, colon, quote, asterisk, or question mark) preceded with a backslash.
     *
     * @param name the name to escape so it can be used as a value in an {@link ObjectName}.
     * @return the escaped name
     */
    public static String escape(final String name) {
        final StringBuilder sb = new StringBuilder(name.length() * 2);
        boolean needsQuotes = false;
        for (int i = 0; i < name.length(); i++) {
            final char c = name.charAt(i);
            switch (c) {
            case '\\':
            case '*':
            case '?':
            case '\"':
                // quote, star, question & backslash must be escaped
                sb.append('\\');
                needsQuotes = true; // ... and can only appear in quoted value
                break;
            case ',':
            case '=':
            case ':':
                // no need to escape these, but value must be quoted
                needsQuotes = true;
                break;
            case '\r':
                // drop \r characters: \\r gives "invalid escape sequence"
                continue;
            case '\n':
                // replace \n characters with \\n sequence
                sb.append("\\n");
                needsQuotes = true;
                continue;
            }
            sb.append(c);
        }
        if (needsQuotes) {
            sb.insert(0, '\"');
            sb.append('\"');
        }
        return sb.toString();
    }

    // True when the "log4j2.disable.jmx" system property disables all instrumentation.
    private static boolean isJmxDisabled() {
        return PropertiesUtil.getProperties().getBooleanProperty(PROPERTY_DISABLE_JMX);
    }

    // Convenience overload targeting the platform MBean server; no-op when JMX is disabled
    // (checked before touching ManagementFactory to avoid creating the platform server).
    public static void reregisterMBeansAfterReconfigure() {
        // avoid creating Platform MBean Server if JMX disabled
        if (isJmxDisabled()) {
            LOGGER.debug("JMX disabled for Log4j2. Not registering MBeans.");
            return;
        }
        final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        reregisterMBeansAfterReconfigure(mbs);
    }

    // Unregisters and re-registers MBeans for every logger context known to the current
    // ContextSelector (context admin, ring buffers, status logger, selector, configs, appenders).
    public static void reregisterMBeansAfterReconfigure(final MBeanServer mbs) {
        if (isJmxDisabled()) {
            LOGGER.debug("JMX disabled for Log4j2. Not registering MBeans.");
            return;
        }

        // now provide instrumentation for the newly configured
        // LoggerConfigs and Appenders
        try {
            final ContextSelector selector = getContextSelector();
            if (selector == null) {
                LOGGER.debug("Could not register MBeans: no ContextSelector found.");
                return;
            }
            LOGGER.trace("Reregistering MBeans after reconfigure. Selector={}", selector);
            final List<LoggerContext> contexts = selector.getLoggerContexts();
            int i = 0;
            for (final LoggerContext ctx : contexts) {
                LOGGER.trace("Reregistering context ({}/{}): '{}' {}", ++i, contexts.size(), ctx.getName(), ctx);
                // first unregister the context and all nested loggers,
                // appenders, statusLogger, contextSelector, ringbuffers...
                unregisterLoggerContext(ctx.getName(), mbs);

                final LoggerContextAdmin mbean = new LoggerContextAdmin(ctx, executor);
                register(mbs, mbean, mbean.getObjectName());

                if (ctx instanceof AsyncLoggerContext) {
                    final RingBufferAdmin rbmbean = ((AsyncLoggerContext) ctx).createRingBufferAdmin();
                    if (rbmbean.getBufferSize() > 0) {
                        // don't register if Disruptor not started (DefaultConfiguration: config not found)
                        register(mbs, rbmbean, rbmbean.getObjectName());
                    }
                }

                // register the status logger and the context selector
                // repeatedly
                // for each known context: if one context is unregistered,
                // these MBeans should still be available for the other
                // contexts.
                registerStatusLogger(ctx.getName(), mbs, executor);
                registerContextSelector(ctx.getName(), selector, mbs, executor);

                registerLoggerConfigs(ctx, mbs, executor);
                registerAppenders(ctx, mbs, executor);
            }
        } catch (final Exception ex) {
            LOGGER.error("Could not register mbeans", ex);
        }
    }

    /**
     * Unregister all log4j MBeans from the platform MBean server.
     */
    public static void unregisterMBeans() {
        if (isJmxDisabled()) {
            LOGGER.debug("JMX disabled for Log4j2. Not unregistering MBeans.");
            return;
        }
        unregisterMBeans(ManagementFactory.getPlatformMBeanServer());
    }

    /**
     * Unregister all log4j MBeans from the specified MBean server.
     *
     * @param mbs the MBean server to unregister from.
     */
    public static void unregisterMBeans(final MBeanServer mbs) {
        if (mbs != null) {
            // Sweep every MBean category for all contexts via the "*" wildcard.
            unregisterStatusLogger(CONTEXT_NAME_ALL, mbs);
            unregisterContextSelector(CONTEXT_NAME_ALL, mbs);
            unregisterContexts(mbs);
            unregisterLoggerConfigs(CONTEXT_NAME_ALL, mbs);
            unregisterAsyncLoggerRingBufferAdmins(CONTEXT_NAME_ALL, mbs);
            unregisterAsyncLoggerConfigRingBufferAdmins(CONTEXT_NAME_ALL, mbs);
            unregisterAppenders(CONTEXT_NAME_ALL, mbs);
            unregisterAsyncAppenders(CONTEXT_NAME_ALL, mbs);
        }
    }

    /**
     * Returns the {@code ContextSelector} of the current {@code Log4jContextFactory}.
     *
     * @return the {@code ContextSelector} of the current {@code Log4jContextFactory}
     */
    private static ContextSelector getContextSelector() {
        final LoggerContextFactory factory = LogManager.getFactory();
        if (factory instanceof Log4jContextFactory) {
            final ContextSelector selector = ((Log4jContextFactory) factory).getSelector();
            return selector;
        }
        // Non-core factory in play: no selector available.
        return null;
    }

    /**
     * Unregisters all MBeans associated with the specified logger context (including MBeans for {@code LoggerConfig}s
     * and {@code Appender}s from the platform MBean server.
     *
     * @param loggerContextName name of the logger context to unregister
     */
    public static void unregisterLoggerContext(final String loggerContextName) {
        if (isJmxDisabled()) {
            LOGGER.debug("JMX disabled for Log4j2. Not unregistering MBeans.");
            return;
        }
        final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        unregisterLoggerContext(loggerContextName, mbs);
    }

    /**
     * Unregisters all MBeans associated with the specified logger context (including MBeans for {@code LoggerConfig}s
     * and {@code Appender}s from the platform MBean server.
* * @param contextName name of the logger context to unregister * @param mbs the MBean Server to unregister the instrumented objects from */ public static void unregisterLoggerContext(final String contextName, final MBeanServer mbs) { final String search = String.format(LoggerContextAdminMBean.PATTERN, escape(contextName), "*"); unregisterAllMatching(search, mbs); // unregister context mbean // now unregister all MBeans associated with this logger context unregisterStatusLogger(contextName, mbs); unregisterContextSelector(contextName, mbs); unregisterLoggerConfigs(contextName, mbs); unregisterAppenders(contextName, mbs); unregisterAsyncAppenders(contextName, mbs); unregisterAsyncLoggerRingBufferAdmins(contextName, mbs); unregisterAsyncLoggerConfigRingBufferAdmins(contextName, mbs); } private static void registerStatusLogger(final String contextName, final MBeanServer mbs, final Executor executor) throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException { final StatusLoggerAdmin mbean = new StatusLoggerAdmin(contextName, executor); register(mbs, mbean, mbean.getObjectName()); } private static void registerContextSelector(final String contextName, final ContextSelector selector, final MBeanServer mbs, final Executor executor) throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException { final ContextSelectorAdmin mbean = new ContextSelectorAdmin(contextName, selector); register(mbs, mbean, mbean.getObjectName()); } private static void unregisterStatusLogger(final String contextName, final MBeanServer mbs) { final String search = String.format(StatusLoggerAdminMBean.PATTERN, escape(contextName), "*"); unregisterAllMatching(search, mbs); } private static void unregisterContextSelector(final String contextName, final MBeanServer mbs) { final String search = String.format(ContextSelectorAdminMBean.PATTERN, escape(contextName), "*"); unregisterAllMatching(search, mbs); } private static void 
unregisterLoggerConfigs(final String contextName, final MBeanServer mbs) { final String pattern = LoggerConfigAdminMBean.PATTERN; final String search = String.format(pattern, escape(contextName), "*"); unregisterAllMatching(search, mbs); } private static void unregisterContexts(final MBeanServer mbs) { final String pattern = LoggerContextAdminMBean.PATTERN; final String search = String.format(pattern, "*"); unregisterAllMatching(search, mbs); } private static void unregisterAppenders(final String contextName, final MBeanServer mbs) { final String pattern = AppenderAdminMBean.PATTERN; final String search = String.format(pattern, escape(contextName), "*"); unregisterAllMatching(search, mbs); } private static void unregisterAsyncAppenders(final String contextName, final MBeanServer mbs) { final String pattern = AsyncAppenderAdminMBean.PATTERN; final String search = String.format(pattern, escape(contextName), "*"); unregisterAllMatching(search, mbs); } private static void unregisterAsyncLoggerRingBufferAdmins(final String contextName, final MBeanServer mbs) { final String pattern1 = RingBufferAdminMBean.PATTERN_ASYNC_LOGGER; final String search1 = String.format(pattern1, escape(contextName)); unregisterAllMatching(search1, mbs); } private static void unregisterAsyncLoggerConfigRingBufferAdmins(final String contextName, final MBeanServer mbs) { final String pattern2 = RingBufferAdminMBean.PATTERN_ASYNC_LOGGER_CONFIG; final String search2 = String.format(pattern2, escape(contextName), "*"); unregisterAllMatching(search2, mbs); } private static void unregisterAllMatching(final String search, final MBeanServer mbs) { try { final ObjectName pattern = new ObjectName(search); final Set<ObjectName> found = mbs.queryNames(pattern, null); if (found == null || found.isEmpty()) { LOGGER.trace("Unregistering but no MBeans found matching '{}'", search); } else { LOGGER.trace("Unregistering {} MBeans: {}", found.size(), found); } if (found != null) { for (final ObjectName objectName 
: found) { mbs.unregisterMBean(objectName); } } } catch (final InstanceNotFoundException ex) { LOGGER.debug("Could not unregister MBeans for " + search + ". Ignoring " + ex); } catch (final Exception ex) { LOGGER.error("Could not unregister MBeans for " + search, ex); } } private static void registerLoggerConfigs(final LoggerContext ctx, final MBeanServer mbs, final Executor executor) throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException { final Map<String, LoggerConfig> map = ctx.getConfiguration().getLoggers(); for (final String name : map.keySet()) { final LoggerConfig cfg = map.get(name); final LoggerConfigAdmin mbean = new LoggerConfigAdmin(ctx, cfg); register(mbs, mbean, mbean.getObjectName()); if (cfg instanceof AsyncLoggerConfig) { final AsyncLoggerConfig async = (AsyncLoggerConfig) cfg; final RingBufferAdmin rbmbean = async.createRingBufferAdmin(ctx.getName()); register(mbs, rbmbean, rbmbean.getObjectName()); } } } private static void registerAppenders(final LoggerContext ctx, final MBeanServer mbs, final Executor executor) throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException { final Map<String, Appender> map = ctx.getConfiguration().getAppenders(); for (final String name : map.keySet()) { final Appender appender = map.get(name); if (appender instanceof AsyncAppender) { final AsyncAppender async = ((AsyncAppender) appender); final AsyncAppenderAdmin mbean = new AsyncAppenderAdmin(ctx.getName(), async); register(mbs, mbean, mbean.getObjectName()); } else { final AppenderAdmin mbean = new AppenderAdmin(ctx.getName(), appender); register(mbs, mbean, mbean.getObjectName()); } } } private static void register(final MBeanServer mbs, final Object mbean, final ObjectName objectName) throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException { if (mbs.isRegistered(objectName)) { try { mbs.unregisterMBean(objectName); } catch 
(MBeanRegistrationException | InstanceNotFoundException ex) { LOGGER.trace("Failed to unregister MBean {}", objectName); } } LOGGER.debug("Registering MBean {}", objectName); mbs.registerMBean(mbean, objectName); } }
/**
 * @author UCSD MOOC development team and YOU
 *
 * A class which represents a graph of geographic locations.
 * Nodes in the graph are intersections between roads.
 */
package roadgraph;

import geography.GeographicPoint;
import roadgraph.graphsenities.MapEdge;
import roadgraph.graphsenities.MapNode;
import roadgraph.searchalgorithms.api.SearchAlgo;
import roadgraph.searchalgorithms.impl.AStarAlgo;
import roadgraph.searchalgorithms.impl.BFSAlgo;
import roadgraph.searchalgorithms.impl.DijkstraAlgo;
import util.GraphLoader;

import java.util.*;
import java.util.function.Consumer;

/**
 * @author UCSD MOOC development team and YOU
 *
 * A class which represents a graph of geographic locations.
 * Nodes in the graph are intersections between roads; directed edges
 * are road segments between intersections.
 */
public class MapGraph {

    // Adjacency-list representation: each intersection maps to its outgoing road segments.
    private Map<MapNode, List<MapEdge>> graph;
    // Lookup from a geographic coordinate to the node that represents it.
    private Map<GeographicPoint, MapNode> pointNodeMap;
    // Count of distinct directed edges added (duplicates are not counted).
    private int numOfEdges;

    /**
     * Create a new empty MapGraph
     */
    public MapGraph() {
        graph = new HashMap<>();
        pointNodeMap = new HashMap<>();
        numOfEdges = 0;
    }

    /**
     * Get the number of vertices (road intersections) in the graph
     * @return The number of vertices in the graph.
     */
    public int getNumVertices() {
        return graph.size();
    }

    /**
     * Return the intersections, which are the vertices in this graph.
     * @return The vertices in this graph as GeographicPoints
     */
    public Set<GeographicPoint> getVertices() {
        return pointNodeMap.keySet();
    }

    /**
     * Get the number of road segments in the graph
     * @return The number of edges in the graph.
     */
    public int getNumOfEdges() {
        return numOfEdges;
    }

    /** Add a node corresponding to an intersection at a Geographic Point
     * If the location is already in the graph or null, this method does
     * not change the graph.
     * @param location The location of the intersection
     * @return true if a node was added, false if it was not (the node
     * was already in the graph, or the parameter is null).
     */
    public boolean addVertex(GeographicPoint location) {
        if (location == null || pointNodeMap.containsKey(location)) {
            return false;
        }
        MapNode node = new MapNode(location);
        pointNodeMap.put(location, node);
        graph.put(node, new ArrayList<>());
        return true;
    }

    /**
     * Adds a directed edge to the graph from pt1 to pt2.
     * Precondition: Both GeographicPoints have already been added to the graph
     * @param from The starting point of the edge
     * @param to The ending point of the edge
     * @param roadName The name of the road
     * @param roadType The type of the road
     * @param length The length of the road, in km
     * @throws IllegalArgumentException If the points have not already been
     *   added as nodes to the graph, if any of the arguments is null,
     *   or if the length is less than 0.
     */
    public void addEdge(GeographicPoint from, GeographicPoint to, String roadName,
                        String roadType, double length) throws IllegalArgumentException {
        // FIX: validate nulls before the map lookups, and validate roadName too.
        // The original checked containsKey() first and never checked roadName,
        // so a null roadName slipped through even though the contract above
        // promises an IllegalArgumentException for any null argument.
        if (from == null || to == null || roadName == null || roadType == null || length < 0
                || !pointNodeMap.containsKey(from) || !pointNodeMap.containsKey(to)) {
            throw new IllegalArgumentException();
        }

        MapNode fromNode = pointNodeMap.get(from);
        MapNode toNode = pointNodeMap.get(to);
        MapEdge edge = new MapEdge(fromNode, toNode, roadName, roadType, length);

        // Only count and store an edge once; re-adding an identical edge is a no-op.
        if (!graph.get(fromNode).contains(edge)) {
            numOfEdges++;
            graph.get(fromNode).add(edge);
        }
    }

    /**
     * Common driver for all search strategies: resolves the endpoint coordinates
     * to graph nodes and delegates to the supplied algorithm.
     * NOTE(review): if start/goal are not in the graph the resolved nodes are
     * null and behavior depends on the SearchAlgo implementation — confirm.
     */
    private List<GeographicPoint> executeAlgorithm(GeographicPoint start, GeographicPoint goal,
                                                   Consumer<GeographicPoint> nodeSearched, SearchAlgo algorithm) {
        MapNode startNode = pointNodeMap.get(start);
        MapNode goalNode = pointNodeMap.get(goal);
        return algorithm.search(startNode, goalNode, nodeSearched);
    }

    /** Find the path from start to goal using breadth first search
     *
     * @param start The starting location
     * @param goal The goal location
     * @return The list of intersections that form the shortest (unweighted)
     * path from start to goal (including both start and goal).
     */
    public List<GeographicPoint> bfs(GeographicPoint start, GeographicPoint goal) {
        // Dummy variable for calling the search algorithms
        Consumer<GeographicPoint> temp = (x) -> {};
        return bfs(start, goal, temp);
    }

    /** Find the path from start to goal using breadth first search
     *
     * @param start The starting location
     * @param goal The goal location
     * @param nodeSearched A hook for visualization. See assignment instructions for how to use it.
     * @return The list of intersections that form the shortest (unweighted)
     * path from start to goal (including both start and goal).
     */
    public List<GeographicPoint> bfs(GeographicPoint start, GeographicPoint goal,
                                     Consumer<GeographicPoint> nodeSearched) {
        return executeAlgorithm(start, goal, nodeSearched, new BFSAlgo(new HashMap<>(graph)));
    }

    /** Find the path from start to goal using Dijkstra's algorithm
     *
     * @param start The starting location
     * @param goal The goal location
     * @return The list of intersections that form the shortest path from
     * start to goal (including both start and goal).
     */
    public List<GeographicPoint> dijkstra(GeographicPoint start, GeographicPoint goal) {
        // Dummy variable for calling the search algorithms
        // You do not need to change this method.
        Consumer<GeographicPoint> temp = (x) -> {};
        return dijkstra(start, goal, temp);
    }

    /** Find the path from start to goal using Dijkstra's algorithm
     *
     * @param start The starting location
     * @param goal The goal location
     * @param nodeSearched A hook for visualization. See assignment instructions for how to use it.
     * @return The list of intersections that form the shortest path from
     * start to goal (including both start and goal).
     */
    public List<GeographicPoint> dijkstra(GeographicPoint start, GeographicPoint goal,
                                          Consumer<GeographicPoint> nodeSearched) {
        return executeAlgorithm(start, goal, nodeSearched, new DijkstraAlgo(new HashMap<>(graph)));
    }

    /** Find the path from start to goal using A-Star search
     *
     * @param start The starting location
     * @param goal The goal location
     * @return The list of intersections that form the shortest path from
     * start to goal (including both start and goal).
     */
    public List<GeographicPoint> aStarSearch(GeographicPoint start, GeographicPoint goal) {
        // Dummy variable for calling the search algorithms
        Consumer<GeographicPoint> temp = (x) -> {};
        return aStarSearch(start, goal, temp);
    }

    /** Find the path from start to goal using A-Star search
     *
     * @param start The starting location
     * @param goal The goal location
     * @param nodeSearched A hook for visualization. See assignment instructions for how to use it.
     * @return The list of intersections that form the shortest path from
     * start to goal (including both start and goal).
     */
    public List<GeographicPoint> aStarSearch(GeographicPoint start, GeographicPoint goal,
                                             Consumer<GeographicPoint> nodeSearched) {
        return executeAlgorithm(start, goal, nodeSearched, new AStarAlgo(new HashMap<>(graph)));
    }

    // Manual smoke test: loads map data from disk and runs the three search
    // algorithms against the course's expected visit counts.
    public static void main(String[] args) {
        System.out.print("Making a new map...");
        MapGraph firstMap = new MapGraph();
        System.out.print("DONE. \nLoading the map...");
        GraphLoader.loadRoadMap("data/testdata/simpletest.map", firstMap);
        System.out.println("DONE.");

        // You can use this method for testing.

        /* Here are some test cases you should try before you attempt
         * the Week 3 End of Week Quiz, EVEN IF you score 100% on the
         * programming assignment.
         */
        MapGraph simpleTestMap = new MapGraph();
        GraphLoader.loadRoadMap("data/testdata/simpletest.map", simpleTestMap);

        GeographicPoint testStart = new GeographicPoint(1.0, 1.0);
        GeographicPoint testEnd = new GeographicPoint(8.0, -1.0);

        System.out.println("Test 1 using simpletest: DijkstraAlgo should be 9 and AStarAlgo should be 5");
        List<GeographicPoint> testroute = simpleTestMap.dijkstra(testStart, testEnd);
        List<GeographicPoint> testroute2 = simpleTestMap.aStarSearch(testStart, testEnd);

        MapGraph testMap = new MapGraph();
        GraphLoader.loadRoadMap("data/maps/utc.map", testMap);

        // A very simple test using real data
        testStart = new GeographicPoint(32.869423, -117.220917);
        testEnd = new GeographicPoint(32.869255, -117.216927);
        System.out.println("Test 2 using utc: DijkstraAlgo should be 13 and AStarAlgo should be 5");
        testroute = testMap.dijkstra(testStart, testEnd);
        testroute2 = testMap.aStarSearch(testStart, testEnd);

        // A slightly more complex test using real data
        testStart = new GeographicPoint(32.8674388, -117.2190213);
        testEnd = new GeographicPoint(32.8697828, -117.2244506);
        System.out.println("Test 3 using utc: DijkstraAlgo should be 37 and AStarAlgo should be 10");
        testroute = testMap.dijkstra(testStart, testEnd);
        testroute2 = testMap.aStarSearch(testStart, testEnd);

        /* Use this code in Week 3 End of Week Quiz */
        MapGraph theMap = new MapGraph();
        System.out.print("DONE. \nLoading the map...");
        GraphLoader.loadRoadMap("data/maps/utc.map", theMap);
        System.out.println("DONE.");

        GeographicPoint start = new GeographicPoint(32.8648772, -117.2254046);
        GeographicPoint end = new GeographicPoint(32.8660691, -117.217393);

        List<GeographicPoint> route = theMap.dijkstra(start, end);
        List<GeographicPoint> route2 = theMap.aStarSearch(start, end);
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.history.core.tree;

import com.intellij.history.core.Content;
import com.intellij.history.core.Paths;
import com.intellij.history.core.StreamUtil;
import com.intellij.history.core.revisions.Difference;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.newvfs.impl.FileNameCache;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import static java.lang.String.format;

/**
 * Base node of the local-history tree. A node has an interned name (via
 * {@link FileNameCache}), a cached case-insensitive name hash used to speed up
 * child lookup, and a parent link. File/directory-specific operations default
 * to {@link UnsupportedOperationException} here and are overridden by subclasses.
 */
public abstract class Entry {
  // Interned name id; NULL_NAME_ID / EMPTY_NAME_ID are sentinels, everything
  // else is a FileNameCache id.
  private int myNameId;
  private int myNameHash; // case insensitive
  private DirectoryEntry myParent;

  public Entry(String name) {
    this(toNameId(name), calcNameHash(name));
  }

  public Entry(int nameId) {
    this(nameId, calcNameHash(fromNameId(nameId)));
  }

  private Entry(int nameId, int nameHash) {
    myNameId = nameId;
    myNameHash = nameHash;
  }

  // Sentinel ids: a null name and an empty name are not stored in FileNameCache.
  private static final int NULL_NAME_ID = -1;
  private static final int EMPTY_NAME_ID = 0;

  // Interns the name, returning a sentinel for null/empty names.
  protected static int toNameId(String name) {
    if (name == null) return NULL_NAME_ID;
    if (name.isEmpty()) return EMPTY_NAME_ID;
    return FileNameCache.storeName(name);
  }

  // Inverse of toNameId: maps sentinels back to null/"" and resolves real ids.
  private static CharSequence fromNameId(int nameId) {
    if (nameId == NULL_NAME_ID) return null;
    if (nameId == EMPTY_NAME_ID) return "";
    return FileNameCache.getVFileName(nameId);
  }

  // Deserialization constructor: reads the name written by write().
  public Entry(DataInput in) throws IOException {
    String name = StreamUtil.readString(in);
    myNameId = toNameId(name);
    myNameHash = calcNameHash(name);
  }

  public void write(DataOutput out) throws IOException {
    StreamUtil.writeString(out, getName());
  }

  // Returns the name as a String (may be null for the NULL_NAME_ID sentinel);
  // avoids a toString() copy when the cached sequence already is a String.
  public String getName() {
    CharSequence sequence = fromNameId(myNameId);
    if (sequence != null && !(sequence instanceof String)) {
      return sequence.toString();
    }
    return (String)sequence;
  }

  public CharSequence getNameSequence() {
    return fromNameId(myNameId);
  }

  public int getNameId() {
    return myNameId;
  }

  public int getNameHash() {
    return myNameHash;
  }

  // Builds the full path by walking up through the parents.
  public String getPath() {
    StringBuilder builder = new StringBuilder();
    buildPath(this, builder);
    return builder.toString();
  }

  // Recursively appends ancestor names root-first, inserting a delimiter
  // between components (but not after one that already ends with it).
  private static void buildPath(Entry e, StringBuilder builder) {
    if (e == null) return;
    buildPath(e.getParent(), builder);
    if (builder.length() > 0 && builder.charAt(builder.length() - 1) != Paths.DELIM) builder.append(Paths.DELIM);
    builder.append(e.getNameSequence());
  }

  public boolean nameEquals(String name) {
    return Paths.equals(getName(), name);
  }

  public boolean pathEquals(String path) {
    return Paths.equals(getPath(), path);
  }

  public abstract long getTimestamp();

  // File-only operations: subclasses representing files override these.
  public boolean isReadOnly() {
    throw new UnsupportedOperationException(formatPath());
  }

  public void setReadOnly(boolean isReadOnly) {
    throw new UnsupportedOperationException(formatPath());
  }

  public boolean isOutdated(long timestamp) {
    return getTimestamp() != timestamp;
  }

  public Content getContent() {
    throw new UnsupportedOperationException(formatPath());
  }

  public boolean hasUnavailableContent() {
    return hasUnavailableContent(new ArrayList<>());
  }

  // Default: plain entries never have unavailable content; overriders collect
  // offending entries into the supplied list.
  public boolean hasUnavailableContent(List<? super Entry> entriesWithUnavailableContent) {
    return false;
  }

  public Entry getParent() {
    return myParent;
  }

  protected void setParent(DirectoryEntry parent) {
    myParent = parent;
  }

  public boolean isDirectory() {
    return false;
  }

  // Directory-only operations: DirectoryEntry overrides these.
  public void addChild(Entry child) {
    throw new UnsupportedOperationException(formatAddRemove(child));
  }

  public void addChildren(Collection<? extends Entry> children) {
    throw new UnsupportedOperationException();
  }

  public void removeChild(Entry child) {
    throw new UnsupportedOperationException(formatAddRemove(child));
  }

  private String formatAddRemove(Entry child) {
    return "add/remove " + child.formatPath() + " to " + formatPath();
  }

  public List<Entry> getChildren() {
    return Collections.emptyList();
  }

  // Cheap hash comparison first, then the (more expensive) name comparison.
  public Entry findChild(String name) {
    int nameHash = calcNameHash(name);
    for (Entry e : getChildren()) {
      if (nameHash == e.getNameHash() && e.nameEquals(name)) return e;
    }
    return null;
  }

  // Case-insensitive hash; -1 is reserved for a null name.
  protected static int calcNameHash(@Nullable CharSequence name) {
    return name == null ? -1 : StringUtil.stringHashCodeInsensitive(name);
  }

  public boolean hasEntry(String path) {
    return findEntry(path) != null;
  }

  // Like findEntry, but failing loudly when the path does not resolve.
  public Entry getEntry(String path) {
    Entry result = findEntry(path);
    if (result == null) {
      throw new RuntimeException(format("entry '%s' not found", path));
    }
    return result;
  }

  // Resolves a relative path component-by-component; null if any step is missing.
  public Entry findEntry(String relativePath) {
    Iterable<String> parts = Paths.split(relativePath);
    Entry result = this;
    for (String each : parts) {
      result = result.findChild(each);
      if (result == null) return null;
    }
    return result;
  }

  @NotNull
  public abstract Entry copy();

  // Renames this entry, first letting the parent reject a duplicate name.
  public void setName(String newName) {
    if (myParent != null) myParent.checkDoesNotExist(this, newName);
    myNameId = toNameId(newName);
    myNameHash = calcNameHash(newName);
  }

  public void setContent(Content newContent, long timestamp) {
    throw new UnsupportedOperationException(formatPath());
  }

  public static List<Difference> getDifferencesBetween(Entry left, Entry right) {
    return getDifferencesBetween(left, right, false);
  }

  /**
   * Computes the differences between two snapshots of an entry.
   * A null {@code left} means everything on the right was created;
   * a null {@code right} means everything on the left was deleted.
   */
  public static List<Difference> getDifferencesBetween(Entry left, Entry right, boolean isRightContentCurrent) {
    List<Difference> result = new SmartList<>();

    if (left == null) right.collectCreatedDifferences(result, isRightContentCurrent);
    else if (right == null) left.collectDeletedDifferences(result, isRightContentCurrent);
    else left.collectDifferencesWith(right, result, isRightContentCurrent);

    return result;
  }

  protected abstract void collectDifferencesWith(@NotNull Entry e, @NotNull List<? super Difference> result, boolean isRightContentCurrent);

  protected abstract void collectCreatedDifferences(@NotNull List<? super Difference> result, boolean isRightContentCurrent);

  protected abstract void collectDeletedDifferences(@NotNull List<? super Difference> result, boolean isRightContentCurrent);

  @Override
  public String toString() {
    return getName();
  }

  private String formatPath() {
    String type = isDirectory() ? "dir: " : "file: ";
    return type + getPath();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.server.coordinator; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.ListeningExecutorService; import it.unimi.dsi.fastutil.objects.Object2IntMap; import it.unimi.dsi.fastutil.objects.Object2LongMap; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.cache.PathChildrenCache; import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; import org.apache.druid.client.BatchServerInventoryView; import org.apache.druid.client.DataSourcesSnapshot; import org.apache.druid.client.DruidDataSource; import org.apache.druid.client.DruidServer; import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.client.ImmutableDruidServer; import org.apache.druid.common.config.JacksonConfigManager; import org.apache.druid.curator.CuratorTestBase; import org.apache.druid.curator.CuratorUtils; import org.apache.druid.curator.ZkEnablementConfig; import org.apache.druid.curator.discovery.NoopServiceAnnouncer; import 
org.apache.druid.discovery.DruidLeaderSelector; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.common.concurrent.ScheduledExecutorFactory; import org.apache.druid.java.util.emitter.core.Event; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.metadata.MetadataRuleManager; import org.apache.druid.metadata.SegmentsMetadataManager; import org.apache.druid.server.DruidNode; import org.apache.druid.server.coordination.DruidServerMetadata; import org.apache.druid.server.coordination.ServerType; import org.apache.druid.server.coordinator.duty.CompactSegments; import org.apache.druid.server.coordinator.duty.CoordinatorCustomDuty; import org.apache.druid.server.coordinator.duty.CoordinatorCustomDutyGroup; import org.apache.druid.server.coordinator.duty.CoordinatorCustomDutyGroups; import org.apache.druid.server.coordinator.duty.CoordinatorDuty; import org.apache.druid.server.coordinator.duty.KillSupervisorsCustomDuty; import org.apache.druid.server.coordinator.rules.ForeverBroadcastDistributionRule; import org.apache.druid.server.coordinator.rules.ForeverLoadRule; import org.apache.druid.server.coordinator.rules.IntervalLoadRule; import org.apache.druid.server.coordinator.rules.Rule; import org.apache.druid.server.initialization.ZkPathsConfig; import org.apache.druid.server.lookup.cache.LookupCoordinatorManager; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.SegmentId; import org.easymock.Capture; import org.easymock.EasyMock; import org.joda.time.Duration; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import javax.annotation.Nullable; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import 
java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Unit tests for {@link DruidCoordinator}: leader election/handoff, segment
 * assignment across tiers, under-replication accounting (with and without the
 * cluster-view computation), balancer-executor lifecycle, custom duty groups,
 * and the wiring of the compact-segments duty.
 */
public class DruidCoordinatorTest extends CuratorTestBase
{
  private static final String LOADPATH = "/druid/loadqueue/localhost:1234";
  private static final long COORDINATOR_START_DELAY = 1;
  private static final long COORDINATOR_PERIOD = 100;

  private DruidCoordinator coordinator;
  private SegmentsMetadataManager segmentsMetadataManager;
  private DataSourcesSnapshot dataSourcesSnapshot;
  private DruidCoordinatorRuntimeParams coordinatorRuntimeParams;
  private BatchServerInventoryView serverInventoryView;
  private ScheduledExecutorFactory scheduledExecutorFactory;
  private DruidServer druidServer;
  private ConcurrentMap<String, LoadQueuePeon> loadManagementPeons;
  private LoadQueuePeon loadQueuePeon;
  private MetadataRuleManager metadataRuleManager;
  private CountDownLatch leaderAnnouncerLatch;
  private CountDownLatch leaderUnannouncerLatch;
  private PathChildrenCache pathChildrenCache;
  private DruidCoordinatorConfig druidCoordinatorConfig;
  private ObjectMapper objectMapper;
  private DruidNode druidNode;
  private LatchableServiceEmitter serviceEmitter = new LatchableServiceEmitter();

  @Before
  public void setUp() throws Exception
  {
    druidServer = EasyMock.createMock(DruidServer.class);
    serverInventoryView = EasyMock.createMock(BatchServerInventoryView.class);
    segmentsMetadataManager = EasyMock.createNiceMock(SegmentsMetadataManager.class);
    dataSourcesSnapshot = EasyMock.createNiceMock(DataSourcesSnapshot.class);
    coordinatorRuntimeParams = EasyMock.createNiceMock(DruidCoordinatorRuntimeParams.class);
    metadataRuleManager = EasyMock.createNiceMock(MetadataRuleManager.class);
    JacksonConfigManager configManager = EasyMock.createNiceMock(JacksonConfigManager.class);
    EasyMock.expect(
        configManager.watch(
            EasyMock.eq(CoordinatorDynamicConfig.CONFIG_KEY),
            EasyMock.anyObject(Class.class),
            EasyMock.anyObject()
        )
    ).andReturn(new AtomicReference(CoordinatorDynamicConfig.builder().build())).anyTimes();
    EasyMock.expect(
        configManager.watch(
            EasyMock.eq(CoordinatorCompactionConfig.CONFIG_KEY),
            EasyMock.anyObject(Class.class),
            EasyMock.anyObject()
        )
    ).andReturn(new AtomicReference(CoordinatorCompactionConfig.empty())).anyTimes();
    EasyMock.replay(configManager);

    setupServerAndCurator();
    curator.start();
    curator.blockUntilConnected();
    curator.create().creatingParentsIfNeeded().forPath(LOADPATH);
    objectMapper = new DefaultObjectMapper();
    druidCoordinatorConfig = new TestDruidCoordinatorConfig(
        new Duration(COORDINATOR_START_DELAY),
        new Duration(COORDINATOR_PERIOD),
        null,
        null,
        null,
        new Duration(COORDINATOR_PERIOD),
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        10,
        new Duration("PT0s")
    );
    pathChildrenCache = new PathChildrenCache(
        curator,
        LOADPATH,
        true,
        true,
        Execs.singleThreaded("coordinator_test_path_children_cache-%d")
    );
    loadQueuePeon = new CuratorLoadQueuePeon(
        curator,
        LOADPATH,
        objectMapper,
        Execs.scheduledSingleThreaded("coordinator_test_load_queue_peon_scheduled-%d"),
        Execs.singleThreaded("coordinator_test_load_queue_peon-%d"),
        druidCoordinatorConfig
    );
    loadQueuePeon.start();
    druidNode = new DruidNode("hey", "what", false, 1234, null, true, false);
    loadManagementPeons = new ConcurrentHashMap<>();
    scheduledExecutorFactory = new ScheduledExecutorFactory()
    {
      @Override
      public ScheduledExecutorService create(int corePoolSize, final String nameFormat)
      {
        return Executors.newSingleThreadScheduledExecutor();
      }
    };
    leaderAnnouncerLatch = new CountDownLatch(1);
    leaderUnannouncerLatch = new CountDownLatch(1);
    coordinator = new DruidCoordinator(
        druidCoordinatorConfig,
        new ZkPathsConfig()
        {
          @Override
          public String getBase()
          {
            return "druid";
          }
        },
        configManager,
        segmentsMetadataManager,
        serverInventoryView,
        metadataRuleManager,
        () -> curator,
        serviceEmitter,
        scheduledExecutorFactory,
        null,
        null,
        new NoopServiceAnnouncer()
        {
          @Override
          public void announce(DruidNode node)
          {
            // count down when this coordinator becomes the leader
            leaderAnnouncerLatch.countDown();
          }

          @Override
          public void unannounce(DruidNode node)
          {
            leaderUnannouncerLatch.countDown();
          }
        },
        druidNode,
        loadManagementPeons,
        null,
        new HashSet<>(),
        new CoordinatorCustomDutyGroups(ImmutableSet.of()),
        new CostBalancerStrategyFactory(),
        EasyMock.createNiceMock(LookupCoordinatorManager.class),
        new TestDruidLeaderSelector(),
        null,
        ZkEnablementConfig.ENABLED
    );
  }

  @After
  public void tearDown() throws Exception
  {
    loadQueuePeon.stop();
    pathChildrenCache.close();
    tearDownServerAndCurator();
  }

  /**
   * Moving a segment between two servers should mark/unmark the segment for
   * drop on the peon, enqueue a load on the destination followed by a drop on
   * the source, and consult the inventory view before dropping.
   */
  @Test
  public void testMoveSegment()
  {
    final DataSegment segment = EasyMock.createNiceMock(DataSegment.class);
    EasyMock.expect(segment.getId()).andReturn(SegmentId.dummy("dummySegment"));
    EasyMock.expect(segment.getDataSource()).andReturn("dummyDataSource");
    EasyMock.replay(segment);

    loadQueuePeon = EasyMock.createNiceMock(LoadQueuePeon.class);
    // 1L instead of the deprecated new Long(1) boxing constructor
    EasyMock.expect(loadQueuePeon.getLoadQueueSize()).andReturn(1L);
    loadQueuePeon.markSegmentToDrop(segment);
    EasyMock.expectLastCall().once();
    Capture<LoadPeonCallback> loadCallbackCapture = Capture.newInstance();
    Capture<LoadPeonCallback> dropCallbackCapture = Capture.newInstance();
    loadQueuePeon.loadSegment(EasyMock.anyObject(DataSegment.class), EasyMock.capture(loadCallbackCapture));
    EasyMock.expectLastCall().once();
    loadQueuePeon.dropSegment(EasyMock.anyObject(DataSegment.class), EasyMock.capture(dropCallbackCapture));
    EasyMock.expectLastCall().once();
    loadQueuePeon.unmarkSegmentToDrop(segment);
    EasyMock.expectLastCall().once();
    EasyMock.expect(loadQueuePeon.getSegmentsToDrop()).andReturn(new HashSet<>()).once();
    EasyMock.replay(loadQueuePeon);

    ImmutableDruidDataSource druidDataSource = EasyMock.createNiceMock(ImmutableDruidDataSource.class);
    EasyMock.expect(druidDataSource.getSegment(EasyMock.anyObject(SegmentId.class))).andReturn(segment);
    EasyMock.replay(druidDataSource);
    EasyMock
        .expect(segmentsMetadataManager.getImmutableDataSourceWithUsedSegments(EasyMock.anyString()))
        .andReturn(druidDataSource);
    EasyMock.replay(segmentsMetadataManager);
    EasyMock.expect(dataSourcesSnapshot.getDataSource(EasyMock.anyString())).andReturn(druidDataSource).anyTimes();
    EasyMock.replay(dataSourcesSnapshot);

    scheduledExecutorFactory = EasyMock.createNiceMock(ScheduledExecutorFactory.class);
    EasyMock.replay(scheduledExecutorFactory);
    EasyMock.replay(metadataRuleManager);

    ImmutableDruidDataSource dataSource = EasyMock.createMock(ImmutableDruidDataSource.class);
    EasyMock.expect(dataSource.getSegments()).andReturn(Collections.singletonList(segment)).anyTimes();
    EasyMock.replay(dataSource);
    EasyMock.expect(druidServer.toImmutableDruidServer()).andReturn(
        new ImmutableDruidServer(
            new DruidServerMetadata("from", null, null, 5L, ServerType.HISTORICAL, null, 0),
            1L,
            ImmutableMap.of("dummyDataSource", dataSource),
            1
        )
    ).atLeastOnce();
    EasyMock.replay(druidServer);

    DruidServer druidServer2 = EasyMock.createMock(DruidServer.class);
    EasyMock.expect(druidServer2.toImmutableDruidServer()).andReturn(
        new ImmutableDruidServer(
            new DruidServerMetadata("to", null, null, 5L, ServerType.HISTORICAL, null, 0),
            1L,
            ImmutableMap.of("dummyDataSource", dataSource),
            1
        )
    ).atLeastOnce();
    EasyMock.replay(druidServer2);

    // Both servers share the same mocked peon so one set of expectations covers both sides.
    loadManagementPeons.put("from", loadQueuePeon);
    loadManagementPeons.put("to", loadQueuePeon);

    EasyMock.expect(serverInventoryView.isSegmentLoadedByServer("to", segment)).andReturn(true).once();
    EasyMock.replay(serverInventoryView);

    mockCoordinatorRuntimeParams();

    coordinator.moveSegment(
        coordinatorRuntimeParams,
        druidServer.toImmutableDruidServer(),
        druidServer2.toImmutableDruidServer(),
        segment,
        null
    );

    // Fire the captured callbacks as the peon would on completion.
    LoadPeonCallback loadCallback = loadCallbackCapture.getValue();
    loadCallback.execute();

    LoadPeonCallback dropCallback = dropCallbackCapture.getValue();
    dropCallback.execute();

    EasyMock.verify(druidServer, druidServer2, loadQueuePeon, serverInventoryView, metadataRuleManager);
    EasyMock.verify(coordinatorRuntimeParams);
  }

  /** Stubs the runtime params to hand out the shared data-sources snapshot mock. */
  private void mockCoordinatorRuntimeParams()
  {
    EasyMock.expect(coordinatorRuntimeParams.getDataSourcesSnapshot()).andReturn(this.dataSourcesSnapshot).anyTimes();
    EasyMock.replay(coordinatorRuntimeParams);
  }

  /**
   * End-to-end coordinator run on a single-server "hot" tier with a replication
   * factor of 2: the segment loads onto the one server, leaving one pending
   * replica in the plain under-replication count but zero in the cluster-view
   * count (which caps replication at what the cluster can actually hold).
   */
  @Test(timeout = 60_000L)
  public void testCoordinatorRun() throws Exception
  {
    String dataSource = "dataSource1";
    String tier = "hot";

    // Setup MetadataRuleManager
    Rule foreverLoadRule = new ForeverLoadRule(ImmutableMap.of(tier, 2));
    EasyMock.expect(metadataRuleManager.getRulesWithDefault(EasyMock.anyString()))
            .andReturn(ImmutableList.of(foreverLoadRule)).atLeastOnce();
    EasyMock.expect(metadataRuleManager.getAllRules())
            .andReturn(ImmutableMap.of(dataSource, ImmutableList.of(foreverLoadRule))).atLeastOnce();

    metadataRuleManager.stop();
    EasyMock.expectLastCall().once();

    EasyMock.replay(metadataRuleManager);

    // Setup SegmentsMetadataManager
    DruidDataSource[] dataSources = {
        new DruidDataSource(dataSource, Collections.emptyMap())
    };
    final DataSegment dataSegment = new DataSegment(
        dataSource,
        Intervals.of("2010-01-01/P1D"),
        "v1",
        null,
        null,
        null,
        null,
        0x9,
        0
    );
    dataSources[0].addSegment(dataSegment);
    setupSegmentsMetadataMock(dataSources[0]);
    ImmutableDruidDataSource immutableDruidDataSource = EasyMock.createNiceMock(ImmutableDruidDataSource.class);
    EasyMock.expect(immutableDruidDataSource.getSegments())
            .andReturn(ImmutableSet.of(dataSegment)).atLeastOnce();
    EasyMock.replay(immutableDruidDataSource);

    // Setup ServerInventoryView
    druidServer = new DruidServer("server1", "localhost", null, 5L, ServerType.HISTORICAL, tier, 0);
    loadManagementPeons.put("server1", loadQueuePeon);
    EasyMock.expect(serverInventoryView.getInventory()).andReturn(
        ImmutableList.of(druidServer)
    ).atLeastOnce();
    EasyMock.expect(serverInventoryView.isStarted()).andReturn(true).anyTimes();
    EasyMock.replay(serverInventoryView);

    coordinator.start();
    // Wait for this coordinator to become leader
    leaderAnnouncerLatch.await();

    // This coordinator should be leader by now
    Assert.assertTrue(coordinator.isLeader());
    Assert.assertEquals(druidNode.getHostAndPort(), coordinator.getCurrentLeader());

    pathChildrenCache.start();

    final CountDownLatch assignSegmentLatch = createCountDownLatchAndSetPathChildrenCacheListenerWithLatch(
        1,
        pathChildrenCache,
        ImmutableMap.of("2010-01-01T00:00:00.000Z_2010-01-02T00:00:00.000Z", dataSegment),
        druidServer
    );
    assignSegmentLatch.await();

    final CountDownLatch coordinatorRunLatch = new CountDownLatch(2);
    serviceEmitter.latch = coordinatorRunLatch;
    coordinatorRunLatch.await();

    Assert.assertEquals(ImmutableMap.of(dataSource, 100.0), coordinator.getLoadStatus());

    Object2IntMap<String> numsUnavailableUsedSegmentsPerDataSource =
        coordinator.computeNumsUnavailableUsedSegmentsPerDataSource();
    Assert.assertEquals(1, numsUnavailableUsedSegmentsPerDataSource.size());
    Assert.assertEquals(0, numsUnavailableUsedSegmentsPerDataSource.getInt(dataSource));

    Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTier =
        coordinator.computeUnderReplicationCountsPerDataSourcePerTier();
    Assert.assertNotNull(underReplicationCountsPerDataSourcePerTier);
    Assert.assertEquals(1, underReplicationCountsPerDataSourcePerTier.size());

    Object2LongMap<String> underReplicationCountsPerDataSource = underReplicationCountsPerDataSourcePerTier.get(tier);
    Assert.assertNotNull(underReplicationCountsPerDataSource);
    Assert.assertEquals(1, underReplicationCountsPerDataSource.size());
    //noinspection deprecation
    Assert.assertNotNull(underReplicationCountsPerDataSource.get(dataSource));

    // Simulated the adding of segment to druidServer during SegmentChangeRequestLoad event
    // The load rules asks for 2 replicas, therefore 1 replica should still be pending
    Assert.assertEquals(1L, underReplicationCountsPerDataSource.getLong(dataSource));

    Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTierUsingClusterView =
        coordinator.computeUnderReplicationCountsPerDataSourcePerTierUsingClusterView();
    // BUGFIX: these two assertions previously re-checked the non-cluster-view map,
    // leaving the cluster-view result's presence and size unverified.
    Assert.assertNotNull(underReplicationCountsPerDataSourcePerTierUsingClusterView);
    Assert.assertEquals(1, underReplicationCountsPerDataSourcePerTierUsingClusterView.size());

    Object2LongMap<String> underReplicationCountsPerDataSourceUsingClusterView =
        underReplicationCountsPerDataSourcePerTierUsingClusterView.get(tier);
    Assert.assertNotNull(underReplicationCountsPerDataSourceUsingClusterView);
    Assert.assertEquals(1, underReplicationCountsPerDataSourceUsingClusterView.size());
    //noinspection deprecation
    Assert.assertNotNull(underReplicationCountsPerDataSourceUsingClusterView.get(dataSource));

    // Simulated the adding of segment to druidServer during SegmentChangeRequestLoad event
    // The load rules asks for 2 replicas, but only 1 historical server in cluster. Since computing using cluster view
    // the segments are replicated as many times as they can be given state of cluster, therefore should not be
    // under-replicated.
    Assert.assertEquals(0L, underReplicationCountsPerDataSourceUsingClusterView.getLong(dataSource));

    coordinator.stop();
    leaderUnannouncerLatch.await();

    Assert.assertFalse(coordinator.isLeader());
    Assert.assertNull(coordinator.getCurrentLeader());

    EasyMock.verify(serverInventoryView);
    EasyMock.verify(metadataRuleManager);
  }

  /**
   * Two tiers ("hot" with an interval rule, "cold" with a forever rule) each
   * with one server: all segments should land on the right tier and nothing
   * should be under-replicated by either accounting method.
   */
  @Test(timeout = 60_000L)
  public void testCoordinatorTieredRun() throws Exception
  {
    final String dataSource = "dataSource", hotTierName = "hot", coldTierName = "cold";
    final Rule hotTier = new IntervalLoadRule(Intervals.of("2018-01-01/P1M"), ImmutableMap.of(hotTierName, 1));
    final Rule coldTier = new ForeverLoadRule(ImmutableMap.of(coldTierName, 1));
    final String loadPathCold = "/druid/loadqueue/cold:1234";
    final DruidServer hotServer = new DruidServer("hot", "hot", null, 5L, ServerType.HISTORICAL, hotTierName, 0);
    final DruidServer coldServer = new DruidServer("cold", "cold", null, 5L, ServerType.HISTORICAL, coldTierName, 0);

    final Map<String, DataSegment> dataSegments = ImmutableMap.of(
        "2018-01-02T00:00:00.000Z_2018-01-03T00:00:00.000Z",
        new DataSegment(dataSource, Intervals.of("2018-01-02/P1D"), "v1", null, null, null, null, 0x9, 0),
        "2018-01-03T00:00:00.000Z_2018-01-04T00:00:00.000Z",
        new DataSegment(dataSource, Intervals.of("2018-01-03/P1D"), "v1", null, null, null, null, 0x9, 0),
        "2017-01-01T00:00:00.000Z_2017-01-02T00:00:00.000Z",
        new DataSegment(dataSource, Intervals.of("2017-01-01/P1D"), "v1", null, null, null, null, 0x9, 0)
    );

    final LoadQueuePeon loadQueuePeonCold = new CuratorLoadQueuePeon(
        curator,
        loadPathCold,
        objectMapper,
        Execs.scheduledSingleThreaded("coordinator_test_load_queue_peon_cold_scheduled-%d"),
        Execs.singleThreaded("coordinator_test_load_queue_peon_cold-%d"),
        druidCoordinatorConfig
    );
    final PathChildrenCache pathChildrenCacheCold = new PathChildrenCache(
        curator,
        loadPathCold,
        true,
        true,
        Execs.singleThreaded("coordinator_test_path_children_cache_cold-%d")
    );
    loadManagementPeons.putAll(ImmutableMap.of("hot", loadQueuePeon, "cold", loadQueuePeonCold));

    loadQueuePeonCold.start();
    pathChildrenCache.start();
    pathChildrenCacheCold.start();

    DruidDataSource[] druidDataSources = {new DruidDataSource(dataSource, Collections.emptyMap())};
    dataSegments.values().forEach(druidDataSources[0]::addSegment);

    setupSegmentsMetadataMock(druidDataSources[0]);

    EasyMock.expect(metadataRuleManager.getRulesWithDefault(EasyMock.anyString()))
            .andReturn(ImmutableList.of(hotTier, coldTier)).atLeastOnce();
    EasyMock.expect(metadataRuleManager.getAllRules())
            .andReturn(ImmutableMap.of(dataSource, ImmutableList.of(hotTier, coldTier))).atLeastOnce();

    EasyMock.expect(serverInventoryView.getInventory())
            .andReturn(ImmutableList.of(hotServer, coldServer))
            .atLeastOnce();
    EasyMock.expect(serverInventoryView.isStarted()).andReturn(true).anyTimes();

    EasyMock.replay(metadataRuleManager, serverInventoryView);

    coordinator.start();
    leaderAnnouncerLatch.await(); // Wait for this coordinator to become leader

    // Two 2018 segments match the hot interval rule; the 2017 segment falls through to cold.
    final CountDownLatch assignSegmentLatchHot =
        createCountDownLatchAndSetPathChildrenCacheListenerWithLatch(2, pathChildrenCache, dataSegments, hotServer);
    final CountDownLatch assignSegmentLatchCold =
        createCountDownLatchAndSetPathChildrenCacheListenerWithLatch(1, pathChildrenCacheCold, dataSegments, coldServer);
    assignSegmentLatchHot.await();
    assignSegmentLatchCold.await();

    final CountDownLatch coordinatorRunLatch = new CountDownLatch(2);
    serviceEmitter.latch = coordinatorRunLatch;
    coordinatorRunLatch.await();

    Assert.assertEquals(ImmutableMap.of(dataSource, 100.0), coordinator.getLoadStatus());

    Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTier =
        coordinator.computeUnderReplicationCountsPerDataSourcePerTier();
    Assert.assertEquals(2, underReplicationCountsPerDataSourcePerTier.size());
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTier.get(hotTierName).getLong(dataSource));
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTier.get(coldTierName).getLong(dataSource));

    Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTierUsingClusterView =
        coordinator.computeUnderReplicationCountsPerDataSourcePerTierUsingClusterView();
    Assert.assertEquals(2, underReplicationCountsPerDataSourcePerTierUsingClusterView.size());
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTierUsingClusterView.get(hotTierName).getLong(dataSource));
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTierUsingClusterView.get(coldTierName).getLong(dataSource));

    coordinator.stop();
    leaderUnannouncerLatch.await();

    EasyMock.verify(serverInventoryView);
    EasyMock.verify(segmentsMetadataManager);
    EasyMock.verify(metadataRuleManager);
  }

  /**
   * A forever-broadcast rule should replicate every segment to every server
   * type (historicals, brokers and indexer-executors alike), yielding zero
   * under-replication on all four tiers.
   */
  @Test(timeout = 60_000L)
  public void testComputeUnderReplicationCountsPerDataSourcePerTierForSegmentsWithBroadcastRule() throws Exception
  {
    final String dataSource = "dataSource";
    final String hotTierName = "hot";
    final String coldTierName = "cold";
    final String tierName1 = "tier1";
    final String tierName2 = "tier2";
    final Rule broadcastDistributionRule = new ForeverBroadcastDistributionRule();
    final String loadPathCold = "/druid/loadqueue/cold:1234";
    final String loadPathBroker1 = "/druid/loadqueue/broker1:1234";
    final String loadPathBroker2 = "/druid/loadqueue/broker2:1234";
    final String loadPathPeon = "/druid/loadqueue/peon:1234";
    final DruidServer hotServer = new DruidServer("hot", "hot", null, 5L, ServerType.HISTORICAL, hotTierName, 0);
    final DruidServer coldServer = new DruidServer("cold", "cold", null, 5L, ServerType.HISTORICAL, coldTierName, 0);
    final DruidServer brokerServer1 = new DruidServer("broker1", "broker1", null, 5L, ServerType.BROKER, tierName1, 0);
    final DruidServer brokerServer2 = new DruidServer("broker2", "broker2", null, 5L, ServerType.BROKER, tierName2, 0);
    final DruidServer peonServer = new DruidServer("peon", "peon", null, 5L, ServerType.INDEXER_EXECUTOR, tierName2, 0);

    final Map<String, DataSegment> dataSegments = ImmutableMap.of(
        "2018-01-02T00:00:00.000Z_2018-01-03T00:00:00.000Z",
        new DataSegment(dataSource, Intervals.of("2018-01-02/P1D"), "v1", null, null, null, null, 0x9, 0),
        "2018-01-03T00:00:00.000Z_2018-01-04T00:00:00.000Z",
        new DataSegment(dataSource, Intervals.of("2018-01-03/P1D"), "v1", null, null, null, null, 0x9, 0),
        "2017-01-01T00:00:00.000Z_2017-01-02T00:00:00.000Z",
        new DataSegment(dataSource, Intervals.of("2017-01-01/P1D"), "v1", null, null, null, null, 0x9, 0)
    );

    final LoadQueuePeon loadQueuePeonCold = new CuratorLoadQueuePeon(
        curator,
        loadPathCold,
        objectMapper,
        Execs.scheduledSingleThreaded("coordinator_test_load_queue_peon_cold_scheduled-%d"),
        Execs.singleThreaded("coordinator_test_load_queue_peon_cold-%d"),
        druidCoordinatorConfig
    );

    final LoadQueuePeon loadQueuePeonBroker1 = new CuratorLoadQueuePeon(
        curator,
        loadPathBroker1,
        objectMapper,
        Execs.scheduledSingleThreaded("coordinator_test_load_queue_peon_broker1_scheduled-%d"),
        Execs.singleThreaded("coordinator_test_load_queue_peon_broker1-%d"),
        druidCoordinatorConfig
    );

    final LoadQueuePeon loadQueuePeonBroker2 = new CuratorLoadQueuePeon(
        curator,
        loadPathBroker2,
        objectMapper,
        Execs.scheduledSingleThreaded("coordinator_test_load_queue_peon_broker2_scheduled-%d"),
        Execs.singleThreaded("coordinator_test_load_queue_peon_broker2-%d"),
        druidCoordinatorConfig
    );

    final LoadQueuePeon loadQueuePeonPeonServer = new CuratorLoadQueuePeon(
        curator,
        loadPathPeon,
        objectMapper,
        Execs.scheduledSingleThreaded("coordinator_test_load_queue_peon_peon_scheduled-%d"),
        Execs.singleThreaded("coordinator_test_load_queue_peon_peon-%d"),
        druidCoordinatorConfig
    );

    final PathChildrenCache pathChildrenCacheCold = new PathChildrenCache(
        curator,
        loadPathCold,
        true,
        true,
        Execs.singleThreaded("coordinator_test_path_children_cache_cold-%d")
    );
    final PathChildrenCache pathChildrenCacheBroker1 = new PathChildrenCache(
        curator,
        loadPathBroker1,
        true,
        true,
        Execs.singleThreaded("coordinator_test_path_children_cache_broker1-%d")
    );
    final PathChildrenCache pathChildrenCacheBroker2 = new PathChildrenCache(
        curator,
        loadPathBroker2,
        true,
        true,
        Execs.singleThreaded("coordinator_test_path_children_cache_broker2-%d")
    );
    final PathChildrenCache pathChildrenCachePeon = new PathChildrenCache(
        curator,
        loadPathPeon,
        true,
        true,
        Execs.singleThreaded("coordinator_test_path_children_cache_peon-%d")
    );

    loadManagementPeons.putAll(ImmutableMap.of(
        "hot", loadQueuePeon,
        "cold", loadQueuePeonCold,
        "broker1", loadQueuePeonBroker1,
        "broker2", loadQueuePeonBroker2,
        "peon", loadQueuePeonPeonServer
    ));

    loadQueuePeonCold.start();
    loadQueuePeonBroker1.start();
    loadQueuePeonBroker2.start();
    loadQueuePeonPeonServer.start();
    pathChildrenCache.start();
    pathChildrenCacheCold.start();
    pathChildrenCacheBroker1.start();
    pathChildrenCacheBroker2.start();
    pathChildrenCachePeon.start();

    DruidDataSource[] druidDataSources = {new DruidDataSource(dataSource, Collections.emptyMap())};
    dataSegments.values().forEach(druidDataSources[0]::addSegment);

    setupSegmentsMetadataMock(druidDataSources[0]);

    EasyMock.expect(metadataRuleManager.getRulesWithDefault(EasyMock.anyString()))
            .andReturn(ImmutableList.of(broadcastDistributionRule)).atLeastOnce();
    EasyMock.expect(metadataRuleManager.getAllRules())
            .andReturn(ImmutableMap.of(dataSource, ImmutableList.of(broadcastDistributionRule))).atLeastOnce();

    EasyMock.expect(serverInventoryView.getInventory())
            .andReturn(ImmutableList.of(hotServer, coldServer, brokerServer1, brokerServer2, peonServer))
            .atLeastOnce();
    EasyMock.expect(serverInventoryView.isStarted()).andReturn(true).anyTimes();

    EasyMock.replay(metadataRuleManager, serverInventoryView);

    coordinator.start();
    leaderAnnouncerLatch.await(); // Wait for this coordinator to become leader

    // Broadcast: every server gets all three segments.
    final CountDownLatch assignSegmentLatchHot =
        createCountDownLatchAndSetPathChildrenCacheListenerWithLatch(3, pathChildrenCache, dataSegments, hotServer);
    final CountDownLatch assignSegmentLatchCold =
        createCountDownLatchAndSetPathChildrenCacheListenerWithLatch(3, pathChildrenCacheCold, dataSegments, coldServer);
    final CountDownLatch assignSegmentLatchBroker1 =
        createCountDownLatchAndSetPathChildrenCacheListenerWithLatch(3, pathChildrenCacheBroker1, dataSegments, brokerServer1);
    final CountDownLatch assignSegmentLatchBroker2 =
        createCountDownLatchAndSetPathChildrenCacheListenerWithLatch(3, pathChildrenCacheBroker2, dataSegments, brokerServer2);
    final CountDownLatch assignSegmentLatchPeon =
        createCountDownLatchAndSetPathChildrenCacheListenerWithLatch(3, pathChildrenCachePeon, dataSegments, peonServer);
    assignSegmentLatchHot.await();
    assignSegmentLatchCold.await();
    assignSegmentLatchBroker1.await();
    assignSegmentLatchBroker2.await();
    assignSegmentLatchPeon.await();

    final CountDownLatch coordinatorRunLatch = new CountDownLatch(2);
    serviceEmitter.latch = coordinatorRunLatch;
    coordinatorRunLatch.await();

    Assert.assertEquals(ImmutableMap.of(dataSource, 100.0), coordinator.getLoadStatus());

    Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTier =
        coordinator.computeUnderReplicationCountsPerDataSourcePerTier();
    Assert.assertEquals(4, underReplicationCountsPerDataSourcePerTier.size());
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTier.get(hotTierName).getLong(dataSource));
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTier.get(coldTierName).getLong(dataSource));
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTier.get(tierName1).getLong(dataSource));
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTier.get(tierName2).getLong(dataSource));

    Map<String, Object2LongMap<String>> underReplicationCountsPerDataSourcePerTierUsingClusterView =
        coordinator.computeUnderReplicationCountsPerDataSourcePerTierUsingClusterView();
    Assert.assertEquals(4, underReplicationCountsPerDataSourcePerTierUsingClusterView.size());
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTierUsingClusterView.get(hotTierName).getLong(dataSource));
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTierUsingClusterView.get(coldTierName).getLong(dataSource));
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTierUsingClusterView.get(tierName1).getLong(dataSource));
    Assert.assertEquals(0L, underReplicationCountsPerDataSourcePerTierUsingClusterView.get(tierName2).getLong(dataSource));

    coordinator.stop();
    leaderUnannouncerLatch.await();

    EasyMock.verify(serverInventoryView);
    EasyMock.verify(segmentsMetadataManager);
    EasyMock.verify(metadataRuleManager);
  }

  /**
   * The balancer executor must be created lazily, reused while the configured
   * thread count is unchanged, and recreated when the dynamic config changes it.
   */
  @Test
  public void testBalancerThreadNumber()
  {
    CoordinatorDynamicConfig dynamicConfig = EasyMock.createNiceMock(CoordinatorDynamicConfig.class);
    // First two reads see 5 threads, the third sees 10 (triggers executor recreation).
    EasyMock.expect(dynamicConfig.getBalancerComputeThreads()).andReturn(5).times(2);
    EasyMock.expect(dynamicConfig.getBalancerComputeThreads()).andReturn(10).once();

    JacksonConfigManager configManager = EasyMock.createNiceMock(JacksonConfigManager.class);
    EasyMock.expect(
        configManager.watch(
            EasyMock.eq(CoordinatorDynamicConfig.CONFIG_KEY),
            EasyMock.anyObject(Class.class),
            EasyMock.anyObject()
        )
    ).andReturn(new AtomicReference(dynamicConfig)).anyTimes();

    ScheduledExecutorFactory scheduledExecutorFactory = EasyMock.createNiceMock(ScheduledExecutorFactory.class);
    EasyMock.replay(configManager, dynamicConfig, scheduledExecutorFactory);

    DruidCoordinator c = new DruidCoordinator(
        druidCoordinatorConfig,
        null,
        configManager,
        null,
        null,
        null,
        () -> null,
        null,
        scheduledExecutorFactory,
        null,
        null,
        null,
        null,
        null,
        null,
        new CoordinatorCustomDutyGroups(ImmutableSet.of()),
        null,
        null,
        null,
        null,
        ZkEnablementConfig.ENABLED
    );

    DruidCoordinator.DutiesRunnable duty = c.new DutiesRunnable(Collections.emptyList(), 0, "TEST");
    // before initialization
    Assert.assertEquals(0, c.getCachedBalancerThreadNumber());
    Assert.assertNull(c.getBalancerExec());

    // first initialization
    duty.initBalancerExecutor();
    Assert.assertEquals(5, c.getCachedBalancerThreadNumber());
    ListeningExecutorService firstExec = c.getBalancerExec();
    Assert.assertNotNull(firstExec);

    // second initialization, expect no changes as cachedBalancerThreadNumber is not changed
    duty.initBalancerExecutor();
    Assert.assertEquals(5, c.getCachedBalancerThreadNumber());
    ListeningExecutorService secondExec = c.getBalancerExec();
    Assert.assertNotNull(secondExec);
    Assert.assertSame(firstExec, secondExec);

    // third initialization, expect executor recreated as cachedBalancerThreadNumber is changed to 10
    duty.initBalancerExecutor();
    Assert.assertEquals(10, c.getCachedBalancerThreadNumber());
    ListeningExecutorService thirdExec = c.getBalancerExec();
    Assert.assertNotNull(thirdExec);
    Assert.assertNotSame(secondExec, thirdExec);
    Assert.assertNotSame(firstExec, thirdExec);
  }

  /**
   * With no custom duty groups, CompactSegments must be created inside the
   * indexing-service duties using the coordinator's own config.
   */
  @Test
  public void testCompactSegmentsDutyWhenCustomDutyGroupEmpty()
  {
    CoordinatorCustomDutyGroups emptyCustomDutyGroups = new CoordinatorCustomDutyGroups(ImmutableSet.of());
    coordinator = new DruidCoordinator(
        druidCoordinatorConfig,
        new ZkPathsConfig()
        {
          @Override
          public String getBase()
          {
            return "druid";
          }
        },
        null,
        segmentsMetadataManager,
        serverInventoryView,
        metadataRuleManager,
        () -> curator,
        serviceEmitter,
        scheduledExecutorFactory,
        null,
        null,
        new NoopServiceAnnouncer()
        {
          @Override
          public void announce(DruidNode node)
          {
            // count down when this coordinator becomes the leader
            leaderAnnouncerLatch.countDown();
          }

          @Override
          public void unannounce(DruidNode node)
          {
            leaderUnannouncerLatch.countDown();
          }
        },
        druidNode,
        loadManagementPeons,
        ImmutableSet.of(),
        new HashSet<>(),
        emptyCustomDutyGroups,
        new CostBalancerStrategyFactory(),
        EasyMock.createNiceMock(LookupCoordinatorManager.class),
        new TestDruidLeaderSelector(),
        null,
        ZkEnablementConfig.ENABLED
    );
    // Since CompactSegments is not enabled in Custom Duty Group, then CompactSegments must be created in IndexingServiceDuties
    List<CoordinatorDuty> indexingDuties = coordinator.makeIndexingServiceDuties();
    Assert.assertTrue(indexingDuties.stream().anyMatch(coordinatorDuty -> coordinatorDuty instanceof CompactSegments));

    // CompactSegments should not exist in Custom Duty Group
    List<CompactSegments> compactSegmentsDutyFromCustomGroups = coordinator.getCompactSegmentsDutyFromCustomGroups();
    Assert.assertTrue(compactSegmentsDutyFromCustomGroups.isEmpty());

    // CompactSegments returned by this method should be created using the DruidCoordinatorConfig in the DruidCoordinator
    CompactSegments duty = coordinator.initializeCompactSegmentsDuty();
    Assert.assertNotNull(duty);
    Assert.assertEquals(druidCoordinatorConfig.getCompactionSkipLockedIntervals(), duty.isSkipLockedIntervals());
  }

  /**
   * A custom duty group without CompactSegments must not suppress the default
   * CompactSegments duty in the indexing-service duties.
   */
  @Test
  public void testInitializeCompactSegmentsDutyWhenCustomDutyGroupDoesNotContainsCompactSegments()
  {
    CoordinatorCustomDutyGroup group = new CoordinatorCustomDutyGroup(
        "group1",
        Duration.standardSeconds(1),
        ImmutableList.of(new KillSupervisorsCustomDuty(new Duration("PT1S"), null))
    );
    CoordinatorCustomDutyGroups customDutyGroups = new CoordinatorCustomDutyGroups(ImmutableSet.of(group));
    coordinator = new DruidCoordinator(
        druidCoordinatorConfig,
        new ZkPathsConfig()
        {
          @Override
          public String getBase()
          {
            return "druid";
          }
        },
        null,
        segmentsMetadataManager,
        serverInventoryView,
        metadataRuleManager,
        () -> curator,
        serviceEmitter,
        scheduledExecutorFactory,
        null,
        null,
        new NoopServiceAnnouncer()
        {
          @Override
          public void announce(DruidNode node)
          {
            // count down when this coordinator becomes the leader
            leaderAnnouncerLatch.countDown();
          }

          @Override
          public void unannounce(DruidNode node)
          {
            leaderUnannouncerLatch.countDown();
          }
        },
        druidNode,
        loadManagementPeons,
        ImmutableSet.of(),
        new HashSet<>(),
        customDutyGroups,
        new CostBalancerStrategyFactory(),
        EasyMock.createNiceMock(LookupCoordinatorManager.class),
        new TestDruidLeaderSelector(),
        null,
        ZkEnablementConfig.ENABLED
    );
    // Since CompactSegments is not enabled in Custom Duty Group, then CompactSegments must be created in IndexingServiceDuties
    List<CoordinatorDuty> indexingDuties = coordinator.makeIndexingServiceDuties();
    Assert.assertTrue(indexingDuties.stream().anyMatch(coordinatorDuty -> coordinatorDuty instanceof CompactSegments));

    // CompactSegments should not exist in Custom Duty Group
    List<CompactSegments> compactSegmentsDutyFromCustomGroups = coordinator.getCompactSegmentsDutyFromCustomGroups();
    Assert.assertTrue(compactSegmentsDutyFromCustomGroups.isEmpty());

    // CompactSegments returned by this method should be created using the DruidCoordinatorConfig in the DruidCoordinator
    CompactSegments duty = coordinator.initializeCompactSegmentsDuty();
    Assert.assertNotNull(duty);
    Assert.assertEquals(druidCoordinatorConfig.getCompactionSkipLockedIntervals(), duty.isSkipLockedIntervals());
  }

  /**
   * When a custom duty group supplies its own CompactSegments (built with a
   * different config), that instance must win over the default one.
   */
  @Test
  public void testInitializeCompactSegmentsDutyWhenCustomDutyGroupContainsCompactSegments()
  {
    DruidCoordinatorConfig differentConfigUsedInCustomGroup = new TestDruidCoordinatorConfig(
        new Duration(COORDINATOR_START_DELAY),
        new Duration(COORDINATOR_PERIOD),
        null,
        null,
        null,
        new Duration(COORDINATOR_PERIOD),
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        10,
        new Duration("PT0s"),
        false
    );
    CoordinatorCustomDutyGroup compactSegmentCustomGroup = new CoordinatorCustomDutyGroup(
        "group1",
        Duration.standardSeconds(1),
        ImmutableList.of(new CompactSegments(differentConfigUsedInCustomGroup, null, null))
    );
    CoordinatorCustomDutyGroups customDutyGroups = new CoordinatorCustomDutyGroups(ImmutableSet.of(compactSegmentCustomGroup));
    coordinator = new DruidCoordinator(
        druidCoordinatorConfig,
        new ZkPathsConfig()
        {
          @Override
          public String getBase()
          {
            return "druid";
          }
        },
        null,
        segmentsMetadataManager,
        serverInventoryView,
        metadataRuleManager,
        () -> curator,
        serviceEmitter,
        scheduledExecutorFactory,
        null,
        null,
        new NoopServiceAnnouncer()
        {
          @Override
          public void announce(DruidNode node)
          {
            // count down when this coordinator becomes the leader
            leaderAnnouncerLatch.countDown();
          }

          @Override
          public void unannounce(DruidNode node)
          {
            leaderUnannouncerLatch.countDown();
          }
        },
        druidNode,
        loadManagementPeons,
        ImmutableSet.of(),
        new HashSet<>(),
        customDutyGroups,
        new CostBalancerStrategyFactory(),
        EasyMock.createNiceMock(LookupCoordinatorManager.class),
        new TestDruidLeaderSelector(),
        null,
        ZkEnablementConfig.ENABLED
    );
    // Since CompactSegments is enabled in Custom Duty Group, then CompactSegments must not be created in IndexingServiceDuties
    List<CoordinatorDuty> indexingDuties = coordinator.makeIndexingServiceDuties();
    Assert.assertTrue(indexingDuties.stream().noneMatch(coordinatorDuty -> coordinatorDuty instanceof CompactSegments));

    // CompactSegments should exist in Custom Duty Group
    List<CompactSegments> compactSegmentsDutyFromCustomGroups = coordinator.getCompactSegmentsDutyFromCustomGroups();
    Assert.assertFalse(compactSegmentsDutyFromCustomGroups.isEmpty());
    Assert.assertEquals(1, compactSegmentsDutyFromCustomGroups.size());
    Assert.assertNotNull(compactSegmentsDutyFromCustomGroups.get(0));
    Assert.assertTrue(compactSegmentsDutyFromCustomGroups.get(0) instanceof CompactSegments);

    // CompactSegments returned by this method should be from the Custom Duty Group
    CompactSegments duty = coordinator.initializeCompactSegmentsDuty();
    Assert.assertNotNull(duty);
    Assert.assertNotEquals(druidCoordinatorConfig.getCompactionSkipLockedIntervals(), duty.isSkipLockedIntervals());
    // We should get the CompactSegment from the custom duty group which was created with a different config than the config in DruidCoordinator
    Assert.assertEquals(differentConfigUsedInCustomGroup.getCompactionSkipLockedIntervals(), duty.isSkipLockedIntervals());
  }

  /** Both custom duty groups must actually run once the coordinator is leader. */
  @Test(timeout = 3000)
  public void testCoordinatorCustomDutyGroupsRunAsExpected() throws Exception
  {
    // Some necessary setup to start the Coordinator
    JacksonConfigManager configManager = EasyMock.createNiceMock(JacksonConfigManager.class);
    EasyMock.expect(
        configManager.watch(
            EasyMock.eq(CoordinatorDynamicConfig.CONFIG_KEY),
            EasyMock.anyObject(Class.class),
            EasyMock.anyObject()
        )
    ).andReturn(new AtomicReference(CoordinatorDynamicConfig.builder().build())).anyTimes();
    EasyMock.expect(
        configManager.watch(
            EasyMock.eq(CoordinatorCompactionConfig.CONFIG_KEY),
            EasyMock.anyObject(Class.class),
            EasyMock.anyObject()
        )
    ).andReturn(new AtomicReference(CoordinatorCompactionConfig.empty())).anyTimes();
    EasyMock.replay(configManager);
    EasyMock.expect(segmentsMetadataManager.isPollingDatabasePeriodically()).andReturn(true).anyTimes();
    DruidDataSource dataSource = new DruidDataSource("dataSource1", Collections.emptyMap());
    DataSegment dataSegment = new DataSegment(
        "dataSource1",
        Intervals.of("2010-01-01/P1D"),
        "v1",
        null,
        null,
        null,
        null,
        0x9,
        0
    );
    dataSource.addSegment(dataSegment);
    DataSourcesSnapshot dataSourcesSnapshot =
        new DataSourcesSnapshot(ImmutableMap.of(dataSource.getName(), dataSource.toImmutableDruidDataSource()));
    EasyMock
        .expect(segmentsMetadataManager.getSnapshotOfDataSourcesWithAllUsedSegments())
        .andReturn(dataSourcesSnapshot)
        .anyTimes();
    EasyMock.replay(segmentsMetadataManager);
    EasyMock.expect(serverInventoryView.isStarted()).andReturn(true).anyTimes();
    EasyMock.replay(serverInventoryView);

    // Create CoordinatorCustomDutyGroups
    // We will have two groups and each group has one duty
    CountDownLatch latch1 = new CountDownLatch(1);
    CoordinatorCustomDuty duty1 = new CoordinatorCustomDuty()
    {
      @Override
      public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params)
      {
        latch1.countDown();
        return params;
      }
    };
    CoordinatorCustomDutyGroup group1 =
        new CoordinatorCustomDutyGroup("group1", Duration.standardSeconds(1), ImmutableList.of(duty1));

    CountDownLatch latch2 = new CountDownLatch(1);
    CoordinatorCustomDuty duty2 = new CoordinatorCustomDuty()
    {
      @Override
      public DruidCoordinatorRuntimeParams run(DruidCoordinatorRuntimeParams params)
      {
        latch2.countDown();
        return params;
      }
    };
    CoordinatorCustomDutyGroup group2 =
        new CoordinatorCustomDutyGroup("group2", Duration.standardSeconds(1), ImmutableList.of(duty2));
    CoordinatorCustomDutyGroups groups = new CoordinatorCustomDutyGroups(ImmutableSet.of(group1, group2));

    coordinator = new DruidCoordinator(
        druidCoordinatorConfig,
        new ZkPathsConfig()
        {
          @Override
          public String getBase()
          {
            return "druid";
          }
        },
        configManager,
        segmentsMetadataManager,
        serverInventoryView,
        metadataRuleManager,
        () -> curator,
        serviceEmitter,
        scheduledExecutorFactory,
        null,
        null,
        new NoopServiceAnnouncer()
        {
          @Override
          public void announce(DruidNode node)
          {
            // count down when this coordinator becomes the leader
            leaderAnnouncerLatch.countDown();
          }

          @Override
          public void unannounce(DruidNode node)
          {
            leaderUnannouncerLatch.countDown();
          }
        },
        druidNode,
        loadManagementPeons,
        null,
        new HashSet<>(),
        groups,
        new CostBalancerStrategyFactory(),
        EasyMock.createNiceMock(LookupCoordinatorManager.class),
        new TestDruidLeaderSelector(),
        null,
        ZkEnablementConfig.ENABLED
    );
    coordinator.start();

    // Wait until group 1 duty ran for latch1 to countdown
    latch1.await();
    // Wait until group 2 duty ran for latch2 to countdown
    latch2.await();
  }

  /**
   * Registers a listener on {@code pathChildrenCache} that simulates a server
   * completing each assigned load: it adds the segment to {@code server},
   * deletes the load-queue znode, and counts down the returned latch. Fails the
   * test if more than {@code latchCount} assignments arrive.
   */
  private CountDownLatch createCountDownLatchAndSetPathChildrenCacheListenerWithLatch(
      int latchCount,
      PathChildrenCache pathChildrenCache,
      Map<String, DataSegment> segments,
      DruidServer server
  )
  {
    final CountDownLatch countDownLatch = new CountDownLatch(latchCount);
    pathChildrenCache.getListenable().addListener(
        (CuratorFramework client, PathChildrenCacheEvent event) -> {
          if (CuratorUtils.isChildAdded(event)) {
            DataSegment segment = findSegmentRelatedToCuratorEvent(segments, event);
            if (segment != null && server.getSegment(segment.getId()) == null) {
              if (countDownLatch.getCount() > 0) {
                server.addDataSegment(segment);
                curator.delete().guaranteed().forPath(event.getData().getPath());
                countDownLatch.countDown();
              } else {
                Assert.fail("The segment path " + event.getData().getPath() + " is not expected");
              }
            }
          }
        }
    );
    return countDownLatch;
  }

  /** Stubs every read path of the segments-metadata manager and snapshot mocks with {@code dataSource}'s contents. */
  private void setupSegmentsMetadataMock(DruidDataSource dataSource)
  {
    EasyMock.expect(segmentsMetadataManager.isPollingDatabasePeriodically()).andReturn(true).anyTimes();
    EasyMock
        .expect(segmentsMetadataManager.iterateAllUsedSegments())
        .andReturn(dataSource.getSegments())
        .anyTimes();
    EasyMock
        .expect(segmentsMetadataManager.getImmutableDataSourcesWithAllUsedSegments())
        .andReturn(Collections.singleton(dataSource.toImmutableDruidDataSource()))
        .anyTimes();
    DataSourcesSnapshot dataSourcesSnapshot =
        new DataSourcesSnapshot(ImmutableMap.of(dataSource.getName(), dataSource.toImmutableDruidDataSource()));
    EasyMock
        .expect(segmentsMetadataManager.getSnapshotOfDataSourcesWithAllUsedSegments())
        .andReturn(dataSourcesSnapshot)
        .anyTimes();
    EasyMock
        .expect(segmentsMetadataManager.retrieveAllDataSourceNames())
        .andReturn(Collections.singleton(dataSource.getName()))
        .anyTimes();
    EasyMock.replay(segmentsMetadataManager);

    EasyMock
        .expect(this.dataSourcesSnapshot.iterateAllUsedSegmentsInSnapshot())
        .andReturn(dataSource.getSegments())
        .anyTimes();
    EasyMock
        .expect(this.dataSourcesSnapshot.getDataSourcesWithAllUsedSegments())
        .andReturn(Collections.singleton(dataSource.toImmutableDruidDataSource()))
        .anyTimes();
    EasyMock.replay(this.dataSourcesSnapshot);
  }

  /** Returns the segment whose map key appears in the event's znode path, or null if none matches. */
  @Nullable
  private static DataSegment findSegmentRelatedToCuratorEvent(
      Map<String, DataSegment> dataSegments,
      PathChildrenCacheEvent event
  )
  {
    return dataSegments
        .entrySet()
        .stream()
        .filter(x -> event.getData().getPath().contains(x.getKey()))
        .map(Map.Entry::getValue)
        .findFirst()
        .orElse(null);
  }

  /** Leader selector that immediately elects the registering listener as "what:1234". */
  private static class TestDruidLeaderSelector implements DruidLeaderSelector
  {
    private volatile Listener listener;
    private volatile String leader;

    @Override
    public String getCurrentLeader()
    {
      return leader;
    }

    @Override
    public boolean isLeader()
    {
      return leader != null;
    }

    @Override
    public int localTerm()
    {
      return 0;
    }

    @Override
    public void registerListener(Listener listener)
    {
      this.listener = listener;
      leader = "what:1234";
      listener.becomeLeader();
    }

    @Override
    public void unregisterListener()
    {
      leader = null;
      listener.stopBeingLeader();
    }
  }

  /** Emitter that counts down {@link #latch} on each "segment/count" metric, marking a completed coordinator run. */
  private static class LatchableServiceEmitter extends ServiceEmitter
  {
    private CountDownLatch latch;

    private LatchableServiceEmitter()
    {
      super("", "", null);
    }

    @Override
    public void emit(Event event)
    {
      if (latch != null && "segment/count".equals(event.toMap().get("metric"))) {
        latch.countDown();
      }
    }
  }
}
package org.jgroups.blocks.executor;

import org.jgroups.JChannel;
import org.jgroups.protocols.Executing;
import org.jgroups.util.FutureListener;
import org.jgroups.util.NotifyingFuture;
import org.jgroups.util.Streamable;
import org.jgroups.util.Util;

import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.AbstractQueuedSynchronizer;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

/**
 * This is a JGroups implementation of an ExecutorService, where the consumers
 * are running on any number of nodes. The nodes should run
 * {@link ExecutionRunner} to start picking up requests.
 * <p>
 * Every future object returned will be a {@link NotifyingFuture} which
 * allows for not having to query the future and have a callback instead. This
 * can then be used as a workflow to submit other tasks sequentially or also to
 * query the future for the value at that time.
 * <p>
 * Every callable or runnable submitted must be either {@link Serializable} or
 * {@link Streamable}. Also the value returned from
 * a callable must be {@link Serializable} or
 * {@link Streamable}. Unfortunately if the value returned is not serializable
 * then a {@link NotSerializableException} will be thrown as the cause.
 * @author wburns
 * @since 2.12.0
 */
public class ExecutionService extends AbstractExecutorService {
    /** Channel that task submit/cancel {@code ExecutorEvent}s are sent down. */
    protected JChannel ch;
    /** The {@link Executing} protocol found in the channel's stack; handles the actual distribution. */
    protected Executing _execProt;

    /** Guards {@link #_unfinishedFutures}. */
    protected Lock _unfinishedLock = new ReentrantLock();
    /** Signalled whenever a future is removed from {@link #_unfinishedFutures} (see awaitTermination). */
    protected Condition _unfinishedCondition = _unfinishedLock.newCondition();
    /** Futures that have been submitted to the channel but have not yet completed. */
    protected Set<Future<?>> _unfinishedFutures = new HashSet<>();

    /** Flipped to true (once) by {@link #shutdown()} / {@link #shutdownNow()}; execute() rejects afterwards. */
    protected AtomicBoolean _shutdown = new AtomicBoolean(false);

    /** No-arg constructor: {@link #setChannel(JChannel)} must be called before use. */
    public ExecutionService() {
    }

    public ExecutionService(JChannel ch) {
        setChannel(ch);
    }

    /**
     * Wires this service to the given channel.
     * @param ch the channel whose protocol stack must contain an {@link Executing}
     *           protocol (or a subclass of it)
     * @throws IllegalStateException if no such protocol is present in the stack
     */
    public void setChannel(JChannel ch) {
        this.ch=ch;
        _execProt=ch.getProtocolStack().findProtocol(Executing.class);
        if(_execProt == null)
            throw new IllegalStateException("Channel configuration must include a executing protocol " +
                                              "(subclass of " + Executing.class.getName() + ")");
    }

    // @see java.util.concurrent.AbstractExecutorService#submit(java.lang.Runnable, java.lang.Object)
    @Override
    public <T> NotifyingFuture<T> submit(Runnable task, T result) {
        // This cast is okay cause we control creation of the task
        // (newTaskFor below always returns a DistributedFuture, which is a NotifyingFuture)
        return (NotifyingFuture<T>)super.submit(task, result);
    }

    // @see java.util.concurrent.AbstractExecutorService#submit(java.util.concurrent.Callable)
    @Override
    public <T> NotifyingFuture<T> submit(Callable<T> task) {
        // This cast is okay cause we control creation of the task
        return (NotifyingFuture<T>)super.submit(task);
    }

    /**
     * This is basically a copy of the FutureTask in java.util.concurrent but
     * added serializable to it. Also added in the NotifyingFuture
     * so that the channel can update the future when the value is calculated.
     *
     * @param <V> the result type returned by {@code get}
     * @author wburns
     */
    public static class DistributedFuture<V> implements RunnableFuture<V>,
            ExecutorNotification, NotifyingFuture<V> {
        // @see java.lang.Object#toString()
        @Override
        public String toString() {
            return "DistributedFuture [callable=" + sync.callable + "]";
        }

        /** Synchronization control for FutureTask */
        protected final Sync<V> sync;

        /** The following values are only used on the client side */
        private final JChannel channel;
        private final Set<Future<?>> _unfinishedFutures;
        private final Lock _unfinishedLock;
        private final Condition _unfinishedCondition;
        // Completion callback; volatile so a listener set after completion is still seen.
        private volatile FutureListener<V> _listener;

        /**
         * Creates a <tt>FutureTask</tt> that will upon running, execute the
         * given <tt>Callable</tt>.
         *
         * @param channel The channel that messages are sent down
         * @param unfinishedLock The lock which protects the futuresToFinish
         *        set object.
         * @param condition The condition to signal when this future finishes
         * @param futuresToFinish The set to remove this future from when
         *        it is finished.
         * @param callable The callable to actually run on the server side
         * @throws NullPointerException if callable is null
         */
        public DistributedFuture(JChannel channel, Lock unfinishedLock,
                                 Condition condition, Set<Future<?>> futuresToFinish,
                                 Callable<V> callable) {
            if (callable == null)
                throw new NullPointerException();
            sync = new Sync<>(this, callable);
            this.channel = channel;
            // We keep the real copy to update the outside
            _unfinishedFutures = futuresToFinish;
            _unfinishedLock = unfinishedLock;
            _unfinishedCondition = condition;
        }

        /**
         * Creates a <tt>FutureTask</tt> that will upon running, execute the
         * given <tt>Runnable</tt>, and arrange that <tt>get</tt> will return the
         * given result on successful completion.
         *
         * @param channel The channel that messages are sent down
         * @param unfinishedLock The lock which protects the futuresToFinish
         *        set object.
         * @param condition The condition to signal when this future finishes
         * @param futuresToFinish The set to remove this future from when
         *        it is finished.
         * @param runnable the runnable task
         * @param result the result to return on successful completion. If
         *        you don't need a particular result, consider using
         *        constructions of the form:
         *        <tt>Future&lt;?&gt; f = new FutureTask&lt;Object&gt;(runnable, null)</tt>
         * @throws NullPointerException if runnable is null
         *         (NOTE(review): a null runnable is not rejected here; the NPE would
         *         only surface later when RunnableAdapter.call() invokes task.run())
         */
        public DistributedFuture(JChannel channel, Lock unfinishedLock,
                                 Condition condition, Set<Future<?>> futuresToFinish,
                                 Runnable runnable, V result) {
            sync = new Sync<>(this, new RunnableAdapter<>(runnable, result));
            this.channel = channel;
            // We keep the real copy to update the outside
            _unfinishedFutures = futuresToFinish;
            _unfinishedLock = unfinishedLock;
            _unfinishedCondition = condition;
        }

        /** @return the callable this future will run (for a Runnable submission, a {@link RunnableAdapter}) */
        public Callable<V> getCallable() {
            return sync.callable;
        }

        public boolean isCancelled() {
            return sync.innerIsCancelled();
        }

        public boolean isDone() {
            return sync.innerIsDone();
        }

        /**
         * Attempts to cancel this task. On the submitting side (channel != null)
         * the cancellation is routed down the channel as a TASK_CANCEL event;
         * otherwise the local sync state is cancelled directly.
         */
        public boolean cancel(boolean mayInterruptIfRunning) {
            if (sync.innerIsDone()) {
                return false;
            }

            // This will only happen on calling side since it is transient
            if (channel != null) {
                return (Boolean)channel.down(new ExecutorEvent(
                    ExecutorEvent.TASK_CANCEL, new Object[] {this, mayInterruptIfRunning}));
            }
            return sync.innerCancel(mayInterruptIfRunning);
        }

        /**
         * @throws CancellationException {@inheritDoc}
         */
        public V get() throws InterruptedException, ExecutionException {
            return sync.innerGet();
        }

        /**
         * @throws CancellationException {@inheritDoc}
         */
        public V get(long timeout, TimeUnit unit)
                throws InterruptedException, ExecutionException, TimeoutException {
            return sync.innerGet(unit.toNanos(timeout));
        }

        /**
         * Protected method invoked when this task transitions to state
         * <tt>isDone</tt> (whether normally or via cancellation). The
         * default implementation does nothing. Subclasses may override
         * this method to invoke completion callbacks or perform
         * bookkeeping. Note that you can query status inside the
         * implementation of this method to determine whether this task
         * has been cancelled.
         * <p>
         * Here it removes this future from the service's unfinished set,
         * wakes any awaitTermination() waiters, and fires the listener.
         */
        protected void done() {
            _unfinishedLock.lock();
            try {
                _unfinishedFutures.remove(this);
                _unfinishedCondition.signalAll();
            }
            finally {
                _unfinishedLock.unlock();
            }
            // We assign the listener to a local variable so we don't have to
            // worry about it becoming null inside the if
            FutureListener<V> listener = _listener;
            // We don't want this to run on server
            if (listener != null) {
                listener.futureDone(this);
            }
        }

        @Override
        public NotifyingFuture<V> setListener(FutureListener<V> listener) {
            _listener = listener;
            // If already finished, notify immediately instead of never.
            if (sync.innerIsDone()) {
                _listener.futureDone(this);
            }
            return this;
        }

        /**
         * Sets the result of this Future to the given value unless
         * this future has already been set or has been cancelled.
         * This method is invoked internally by the <tt>run</tt> method
         * upon successful completion of the computation.
         * @param v the value
         */
        protected void set(V v) {
            sync.innerSet(v);
        }

        /**
         * Causes this future to report an <tt>ExecutionException</tt>
         * with the given throwable as its cause, unless this Future has
         * already been set or has been cancelled.
         * This method is invoked internally by the <tt>run</tt> method
         * upon failure of the computation.
         * @param t the cause of failure
         */
        protected void setException(Throwable t) {
            sync.innerSetException(t);
        }

        // The following (duplicated) doc comment can be removed once
        //
        // 6270645: Javadoc comments should be inherited from most derived
        // superinterface or superclass
        // is fixed.
        /**
         * Sets this Future to the result of its computation
         * unless it has been cancelled.
         */
        public void run() {
            sync.innerRun();
        }

        /**
         * Synchronization control for FutureTask. For clarity, all inner
         * class support methods are same as outer, prefixed with "inner".
         * <p>
         * NOTE(review): unlike the JDK original this comment was copied from
         * (which required a non-static inner class to reach the protected
         * <tt>done</tt> method), this version IS static and instead keeps an
         * explicit reference to the owning future in {@link #future}.
         *
         * Uses AQS sync state to represent run status
         */
        protected static final class Sync<V> extends AbstractQueuedSynchronizer {
            private static final long serialVersionUID = -7828117401763700385L;

            /** State value representing that task is running */
            protected static final int RUNNING   = 1;
            /** State value representing that task ran */
            protected static final int RAN       = 2;
            /** State value representing that task was cancelled */
            protected static final int CANCELLED = 4;

            /** The containing future */
            protected final DistributedFuture<V> future;
            /** The underlying callable */
            protected final Callable<V> callable;
            /** The result to return from get() */
            protected V result;
            /** The exception to throw from get() */
            protected Throwable exception;

            /**
             * The thread running task. When nulled after set/cancel, this
             * indicates that the results are accessible. Must be
             * volatile, to ensure visibility upon completion.
             */
            protected transient volatile Thread runner;

            public Sync(DistributedFuture<V> future, Callable<V> callable) {
                this.future = future;
                this.callable = callable;
            }

            // Terminal-state test: RAN and CANCELLED are distinct bits of the state int.
            private static boolean ranOrCancelled(int state) {
                return (state & (RAN | CANCELLED)) != 0;
            }

            /**
             * Implements AQS base acquire to succeed if ran or cancelled
             */
            protected int tryAcquireShared(int ignore) {
                return innerIsDone()? 1 : -1;
            }

            /**
             * Implements AQS base release to always signal after setting
             * final done status by nulling runner thread.
             */
            protected boolean tryReleaseShared(int ignore) {
                runner = null;
                return true;
            }

            boolean innerIsCancelled() {
                return getState() == CANCELLED;
            }

            boolean innerIsDone() {
                return ranOrCancelled(getState()) && runner == null;
            }

            // Blocks until done (via AQS shared acquire), then reports the outcome.
            V innerGet() throws InterruptedException, ExecutionException {
                acquireSharedInterruptibly(0);
                if (getState() == CANCELLED)
                    throw new CancellationException();
                if (exception != null)
                    throw new ExecutionException(exception);
                return result;
            }

            // Timed variant of innerGet(); nanosTimeout is in nanoseconds.
            V innerGet(long nanosTimeout) throws InterruptedException,
                    ExecutionException, TimeoutException {
                if (!tryAcquireSharedNanos(0, nanosTimeout))
                    throw new TimeoutException();
                if (getState() == CANCELLED)
                    throw new CancellationException();
                if (exception != null)
                    throw new ExecutionException(exception);
                return result;
            }

            // CAS loop: publish the result and move to RAN unless already terminal.
            void innerSet(V v) {
                for (;;) {
                    int s = getState();
                    if (s == RAN)
                        return;
                    if (s == CANCELLED) {
                        // aggressively release to set runner to null,
                        // in case we are racing with a cancel request
                        // that will try to interrupt runner
                        releaseShared(0);
                        return;
                    }
                    if (compareAndSetState(s, RAN)) {
                        result = v;
                        releaseShared(0);
                        future.done();
                        return;
                    }
                }
            }

            // CAS loop: record the failure cause and move to RAN unless already terminal.
            void innerSetException(Throwable t) {
                for (;;) {
                    int s = getState();
                    if (s == RAN)
                        return;
                    if (s == CANCELLED) {
                        // aggressively release to set runner to null,
                        // in case we are racing with a cancel request
                        // that will try to interrupt runner
                        releaseShared(0);
                        return;
                    }
                    if (compareAndSetState(s, RAN)) {
                        exception = t;
                        result = null;
                        releaseShared(0);
                        future.done();
                        return;
                    }
                }
            }

            // Moves to CANCELLED unless already terminal; optionally interrupts the runner.
            boolean innerCancel(boolean mayInterruptIfRunning) {
                for (;;) {
                    int s = getState();
                    if (ranOrCancelled(s))
                        return false;
                    if (compareAndSetState(s, CANCELLED))
                        break;
                }
                if (mayInterruptIfRunning) {
                    Thread r = runner;
                    if (r != null)
                        r.interrupt();
                }
                releaseShared(0);
                future.done();
                return true;
            }

            // Runs the callable once; only the thread that wins the 0->RUNNING CAS executes it.
            void innerRun() {
                if (!compareAndSetState(0, RUNNING))
                    return;
                try {
                    runner = Thread.currentThread();
                    if (getState() == RUNNING) // recheck after setting thread
                        innerSet(callable.call());
                    else
                        releaseShared(0); // cancel
                }
                catch (Throwable ex) {
                    innerSetException(ex);
                }
            }

            // Runs the callable without recording a result and resets state to 0,
            // so the task could run again (FutureTask's periodic-run support).
            boolean innerRunAndReset() {
                if (!compareAndSetState(0, RUNNING))
                    return false;
                try {
                    runner = Thread.currentThread();
                    if (getState() == RUNNING)
                        callable.call(); // don't set result
                    runner = null;
                    return compareAndSetState(RUNNING, 0);
                }
                catch (Throwable ex) {
                    innerSetException(ex);
                    return false;
                }
            }
        }

        // @see org.jgroups.blocks.executor.ExecutorNotification#resultReturned(java.lang.Object)
        @SuppressWarnings("unchecked")
        @Override
        public void resultReturned(Object obj) {
            set((V)obj);
        }

        // @see org.jgroups.blocks.executor.ExecutorNotification#throwableEncountered(java.lang.Throwable)
        @Override
        public void throwableEncountered(Throwable t) {
            setException(t);
        }

        /**
         * Notification callback (presumably fired when the task's execution was
         * interrupted remotely -- confirm against {@code ExecutorNotification}).
         * Removes this future from the unfinished set, wakes waiters, and fires
         * the listener -- like {@link #done()}, but without changing sync state.
         */
        @Override
        public void interrupted(Runnable runnable) {
            _unfinishedLock.lock();
            try {
                _unfinishedFutures.remove(this);
                _unfinishedCondition.signalAll();
            }
            finally {
                _unfinishedLock.unlock();
            }

            // We assign the listener to a local variable so we don't have to
            // worry about it becoming null inside the if
            FutureListener<V> listener = _listener;
            // We don't want this to run on server
            if (listener != null) {
                listener.futureDone(this);
            }
        }
    }

    // @see java.util.concurrent.ExecutorService#shutdown()
    @Override
    public void shutdown() {
        // Same path as shutdownNow(), but without interrupting running tasks;
        // the returned list of unstarted tasks is deliberately discarded here.
        _realShutdown(false);
    }

    /**
     * Marks the service as shut down, snapshots the unfinished futures under
     * the lock, and sends an ALL_TASK_CANCEL event down the channel.
     * @param interrupt whether running tasks should be interrupted
     * @return the list of tasks that never commenced execution, as returned by
     *         the channel's handling of the event
     */
    @SuppressWarnings("unchecked")
    private List<Runnable> _realShutdown(boolean interrupt) {
        _shutdown.set(true);
        _unfinishedLock.lock();
        Set<Future<?>> futures;
        try {
            // Copy so the protocol can consume the set without racing our bookkeeping.
            futures = new HashSet<>(_unfinishedFutures);
        }
        finally {
            _unfinishedLock.unlock();
        }
        return (List<Runnable>)ch.down(new ExecutorEvent(
            ExecutorEvent.ALL_TASK_CANCEL, new Object[]{futures, interrupt}));
    }

    // @see java.util.concurrent.ExecutorService#shutdownNow()
    @Override
    public List<Runnable> shutdownNow() {
        return _realShutdown(true);
    }

    // @see java.util.concurrent.ExecutorService#isShutdown()
    @Override
    public boolean isShutdown() {
        return _shutdown.get();
    }

    // @see java.util.concurrent.ExecutorService#isTerminated()
    @Override
    public boolean isTerminated() {
        // Terminated == shut down AND no futures still outstanding.
        if (_shutdown.get()) {
            _unfinishedLock.lock();
            try {
                return _unfinishedFutures.isEmpty();
            }
            finally {
                _unfinishedLock.unlock();
            }
        }
        return false;
    }

    // @see java.util.concurrent.ExecutorService#awaitTermination(long, java.util.concurrent.TimeUnit)
    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit)
            throws InterruptedException {
        long nanoTimeWait = unit.toNanos(timeout);
        _unfinishedLock.lock();
        try {
            // Each completed future signals the condition (see DistributedFuture.done());
            // awaitNanos returns the remaining time, so spurious wakeups are handled.
            while (!_unfinishedFutures.isEmpty()) {
                if ((nanoTimeWait = _unfinishedCondition.awaitNanos(
                        nanoTimeWait)) <= 0) {
                    return false;
                }
            }
        }
        finally {
            _unfinishedLock.unlock();
        }
        return true;
    }

    // @see java.util.concurrent.AbstractExecutorService#invokeAny(java.util.Collection)
    @Override
    public <T> T invokeAny(Collection<? extends Callable<T>> tasks)
            throws InterruptedException, ExecutionException {
        try {
            return doInvokeAny(tasks, false, 0);
        }
        catch (TimeoutException cannotHappen) {
            // Untimed doInvokeAny never throws TimeoutException.
            assert false;
            return null;
        }
    }

    // @see java.util.concurrent.AbstractExecutorService#invokeAny(java.util.Collection, long, java.util.concurrent.TimeUnit)
    @Override
    public <T> T invokeAny(Collection<? extends Callable<T>> tasks,
                           long timeout, TimeUnit unit)
            throws InterruptedException, ExecutionException, TimeoutException {
        return doInvokeAny(tasks, true, unit.toNanos(timeout));
    }

    /**
     * the main mechanics of invokeAny.
     * This was essentially copied from {@link AbstractExecutorService}
     * doInvokeAny except that we replaced the {@link ExecutorCompletionService}
     * with an {@link ExecutionCompletionService}.
     */
    private <T> T doInvokeAny(Collection<? extends Callable<T>> tasks,
                              boolean timed, long nanos)
            throws InterruptedException, ExecutionException, TimeoutException {
        if (tasks == null)
            throw new NullPointerException();
        int ntasks = tasks.size();
        if (ntasks == 0)
            throw new IllegalArgumentException();
        List<Future<T>> futures= new ArrayList<>(ntasks);
        CompletionService<T> ecs =
            new ExecutionCompletionService<>(this);

        // For efficiency, especially in executors with limited
        // parallelism, check to see if previously submitted tasks are
        // done before submitting more of them. This interleaving
        // plus the exception mechanics account for messiness of main
        // loop.

        try {
            // Record exceptions so that if we fail to obtain any
            // result, we can throw the last exception we got.
            ExecutionException ee = null;
            long lastTime = (timed)? System.nanoTime() : 0;
            Iterator<? extends Callable<T>> it = tasks.iterator();

            // Start one task for sure; the rest incrementally
            futures.add(ecs.submit(it.next()));
            --ntasks;
            int active = 1;

            for (;;) {
                Future<T> f = ecs.poll();
                if (f == null) {
                    if (ntasks > 0) {
                        --ntasks;
                        futures.add(ecs.submit(it.next()));
                        ++active;
                    }
                    else if (active == 0)
                        break;
                    else if (timed) {
                        f = ecs.poll(nanos, TimeUnit.NANOSECONDS);
                        if (f == null)
                            throw new TimeoutException();
                        // Deduct the time spent polling from the remaining budget.
                        long now = System.nanoTime();
                        nanos -= now - lastTime;
                        lastTime = now;
                    }
                    else
                        f = ecs.take();
                }
                if (f != null) {
                    --active;
                    try {
                        // First successful result wins.
                        return f.get();
                    }
                    catch (InterruptedException ie) {
                        throw ie;
                    }
                    catch (ExecutionException eex) {
                        ee = eex;
                    }
                    catch (RuntimeException rex) {
                        ee = new ExecutionException(rex);
                    }
                }
            }

            // All tasks failed; report the last failure (or a bare one if none captured).
            if (ee == null)
                ee = new ExecutionException() {
                    private static final long serialVersionUID = 200818694545553992L;
                };
            throw ee;
        }
        finally {
            // Whatever happened, nothing else should be waiting on the losers.
            for (Future<T> f : futures)
                f.cancel(true);
        }
    }

    // @see java.util.concurrent.Executor#execute(java.lang.Runnable)
    @Override
    public void execute(Runnable command) {
        if (!_shutdown.get()) {
            Object serializeCheck;
            DistributedFuture<?> distFuture = null;
            // If it is wrapped by our future, then we have to make sure to
            // check the actual callable/runnable given to us for serialization
            if (command instanceof DistributedFuture) {
                distFuture = (DistributedFuture<?>)command;
                serializeCheck = distFuture.getCallable();
                if (serializeCheck instanceof RunnableAdapter) {
                    serializeCheck = ((RunnableAdapter<?>)serializeCheck).task;
                }
            }
            else {
                serializeCheck = command;
            }

            if (serializeCheck instanceof Serializable ||
                    serializeCheck instanceof Streamable) {
                if (distFuture != null) {
                    // Register the future for result/cancel notifications and
                    // track it so awaitTermination() knows it is outstanding.
                    _execProt.addExecutorListener(distFuture, distFuture);
                    _unfinishedLock.lock();
                    try {
                        _unfinishedFutures.add(distFuture);
                    }
                    finally {
                        _unfinishedLock.unlock();
                    }
                }
                ch.down(new ExecutorEvent(ExecutorEvent.TASK_SUBMIT, command));
            }
            else {
                throw new IllegalArgumentException(
                    "Command was not Serializable or Streamable - " + serializeCheck);
            }
        }
        else {
            throw new RejectedExecutionException();
        }
    }

    /**
     * This is copied from {@link java.util.concurrent.Executors} class which
     * contains RunnableAdapter. However that adapter isn't serializable, and
     * is final and package level so we can't reference.
     */
    protected static final class RunnableAdapter<T> implements Callable<T>, Streamable {
        protected Runnable task;
        protected T result;

        /** Public no-arg constructor required for Streamable deserialization. */
        public RunnableAdapter() {
        }

        protected RunnableAdapter(Runnable task, T result) {
            this.task = task;
            this.result = result;
        }

        public T call() {
            task.run();
            return result;
        }

        @Override
        public void writeTo(DataOutput out) throws Exception {
            // Serialize the task and then the fixed result; wrap non-IO failures
            // so callers see a consistent IOException.
            try {
                Util.writeObject(task, out);
            }
            catch (IOException e) {
                throw e;
            }
            catch (Exception e) {
                throw new IOException("Exception encountered while writing execution runnable", e);
            }

            try {
                Util.writeObject(result, out);
            }
            catch (IOException e) {
                throw e;
            }
            catch (Exception e) {
                throw new IOException("Exception encountered while writing execution result", e);
            }
        }

        @SuppressWarnings("unchecked")
        @Override
        public void readFrom(DataInput in) throws Exception {
            // We can't use Util.readObject since it's size is limited to 2^15-1
            // The runner could be larger than that possibly
            // (NOTE(review): the code below does call Util.readObject -- the
            // comment above appears stale or refers to a different overload;
            // verify against org.jgroups.util.Util.)
            try {
                task = (Runnable)Util.readObject(in);
            }
            catch (IOException e) {
                throw e;
            }
            catch (Exception e) {
                throw new IOException("Exception encountered while reading execution runnable", e);
            }

            try {
                result = (T)Util.readObject(in);
            }
            catch (IOException e) {
                throw e;
            }
            catch (Exception e) {
                throw new IOException("Exception encountered while reading execution result", e);
            }
        }
    }

    // @see java.util.concurrent.AbstractExecutorService#newTaskFor(java.lang.Runnable, java.lang.Object)
    @Override
    protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T value) {
        return new DistributedFuture<>(ch, _unfinishedLock, _unfinishedCondition,
                                       _unfinishedFutures, runnable, value);
    }

    // @see java.util.concurrent.AbstractExecutorService#newTaskFor(java.util.concurrent.Callable)
    @Override
    protected <T> RunnableFuture<T> newTaskFor(Callable<T> callable) {
        return new DistributedFuture<>(ch, _unfinishedLock, _unfinishedCondition,
                                       _unfinishedFutures, callable);
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.synapse.implementation; import com.azure.core.annotation.BodyParam; import com.azure.core.annotation.Delete; import com.azure.core.annotation.ExpectedResponses; import com.azure.core.annotation.Get; import com.azure.core.annotation.HeaderParam; import com.azure.core.annotation.Headers; import com.azure.core.annotation.Host; import com.azure.core.annotation.HostParam; import com.azure.core.annotation.PathParam; import com.azure.core.annotation.Put; import com.azure.core.annotation.QueryParam; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceInterface; import com.azure.core.annotation.ServiceMethod; import com.azure.core.annotation.UnexpectedResponseExceptionType; import com.azure.core.http.rest.PagedFlux; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.http.rest.Response; import com.azure.core.http.rest.RestProxy; import com.azure.core.management.exception.ManagementException; import com.azure.core.util.Context; import com.azure.core.util.FluxUtil; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.synapse.fluent.WorkspaceManagedSqlServerVulnerabilityAssessmentsClient; import com.azure.resourcemanager.synapse.fluent.models.ServerVulnerabilityAssessmentInner; import com.azure.resourcemanager.synapse.models.ServerVulnerabilityAssessmentListResult; import com.azure.resourcemanager.synapse.models.VulnerabilityAssessmentName; import reactor.core.publisher.Mono; /** * An instance of this class provides access to all the operations defined in * WorkspaceManagedSqlServerVulnerabilityAssessmentsClient. 
*/ public final class WorkspaceManagedSqlServerVulnerabilityAssessmentsClientImpl implements WorkspaceManagedSqlServerVulnerabilityAssessmentsClient { private final ClientLogger logger = new ClientLogger(WorkspaceManagedSqlServerVulnerabilityAssessmentsClientImpl.class); /** The proxy service used to perform REST calls. */ private final WorkspaceManagedSqlServerVulnerabilityAssessmentsService service; /** The service client containing this operation class. */ private final SynapseManagementClientImpl client; /** * Initializes an instance of WorkspaceManagedSqlServerVulnerabilityAssessmentsClientImpl. * * @param client the instance of the service client containing this operation class. */ WorkspaceManagedSqlServerVulnerabilityAssessmentsClientImpl(SynapseManagementClientImpl client) { this.service = RestProxy .create( WorkspaceManagedSqlServerVulnerabilityAssessmentsService.class, client.getHttpPipeline(), client.getSerializerAdapter()); this.client = client; } /** * The interface defining all the services for * SynapseManagementClientWorkspaceManagedSqlServerVulnerabilityAssessments to be used by the proxy service to * perform REST calls. 
*/ @Host("{$host}") @ServiceInterface(name = "SynapseManagementCli") private interface WorkspaceManagedSqlServerVulnerabilityAssessmentsService { @Headers({"Content-Type: application/json"}) @Get( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces" + "/{workspaceName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<ServerVulnerabilityAssessmentInner>> get( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @PathParam("vulnerabilityAssessmentName") VulnerabilityAssessmentName vulnerabilityAssessmentName, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Put( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces" + "/{workspaceName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}") @ExpectedResponses({200, 201}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<ServerVulnerabilityAssessmentInner>> createOrUpdate( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @PathParam("vulnerabilityAssessmentName") VulnerabilityAssessmentName vulnerabilityAssessmentName, @BodyParam("application/json") ServerVulnerabilityAssessmentInner parameters, @HeaderParam("Accept") String accept, Context context); @Headers({"Accept: application/json;q=0.9", "Content-Type: application/json"}) @Delete( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces" + 
"/{workspaceName}/vulnerabilityAssessments/{vulnerabilityAssessmentName}") @ExpectedResponses({200, 204}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<Void>> delete( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @PathParam("vulnerabilityAssessmentName") VulnerabilityAssessmentName vulnerabilityAssessmentName, Context context); @Headers({"Content-Type: application/json"}) @Get( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces" + "/{workspaceName}/vulnerabilityAssessments") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<ServerVulnerabilityAssessmentListResult>> list( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Get("{nextLink}") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<ServerVulnerabilityAssessmentListResult>> listNext( @PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint, @HeaderParam("Accept") String accept, Context context); } /** * Get workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return workspace managed sql server's vulnerability assessment along with {@link Response} on successful * completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<ServerVulnerabilityAssessmentInner>> getWithResponseAsync( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } if (vulnerabilityAssessmentName == null) { return Mono .error( new IllegalArgumentException( "Parameter vulnerabilityAssessmentName is required and cannot be null.")); } final String apiVersion = "2021-06-01"; final String accept = "application/json"; return FluxUtil .withContext( context -> service .get( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, vulnerabilityAssessmentName, accept, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Get workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. 
* @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return workspace managed sql server's vulnerability assessment along with {@link Response} on successful * completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<ServerVulnerabilityAssessmentInner>> getWithResponseAsync( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } if (vulnerabilityAssessmentName == null) { return Mono .error( new IllegalArgumentException( "Parameter vulnerabilityAssessmentName is required and cannot be null.")); } final String apiVersion = "2021-06-01"; final String accept = "application/json"; context = this.client.mergeContext(context); return service .get( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, vulnerabilityAssessmentName, accept, context); } /** * Get workspace managed sql server's vulnerability assessment. 
* * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return workspace managed sql server's vulnerability assessment on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<ServerVulnerabilityAssessmentInner> getAsync( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName) { return getWithResponseAsync(resourceGroupName, workspaceName, vulnerabilityAssessmentName) .flatMap( (Response<ServerVulnerabilityAssessmentInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Get workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return workspace managed sql server's vulnerability assessment. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public ServerVulnerabilityAssessmentInner get( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName) { return getAsync(resourceGroupName, workspaceName, vulnerabilityAssessmentName).block(); } /** * Get workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return workspace managed sql server's vulnerability assessment along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<ServerVulnerabilityAssessmentInner> getWithResponse( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName, Context context) { return getWithResponseAsync(resourceGroupName, workspaceName, vulnerabilityAssessmentName, context).block(); } /** * Create or Update workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @param parameters Properties for vulnerability assessment. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
* @return a server vulnerability assessment along with {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<ServerVulnerabilityAssessmentInner>> createOrUpdateWithResponseAsync( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName, ServerVulnerabilityAssessmentInner parameters) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } if (vulnerabilityAssessmentName == null) { return Mono .error( new IllegalArgumentException( "Parameter vulnerabilityAssessmentName is required and cannot be null.")); } if (parameters == null) { return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null.")); } else { parameters.validate(); } final String apiVersion = "2021-06-01"; final String accept = "application/json"; return FluxUtil .withContext( context -> service .createOrUpdate( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, vulnerabilityAssessmentName, parameters, accept, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Create or Update workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. 
* @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @param parameters Properties for vulnerability assessment. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a server vulnerability assessment along with {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<ServerVulnerabilityAssessmentInner>> createOrUpdateWithResponseAsync( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName, ServerVulnerabilityAssessmentInner parameters, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } if (vulnerabilityAssessmentName == null) { return Mono .error( new IllegalArgumentException( "Parameter vulnerabilityAssessmentName is required and cannot be null.")); } if (parameters == null) { return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null.")); } else { parameters.validate(); } final String apiVersion = "2021-06-01"; final String accept = "application/json"; context = 
this.client.mergeContext(context); return service .createOrUpdate( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, vulnerabilityAssessmentName, parameters, accept, context); } /** * Create or Update workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @param parameters Properties for vulnerability assessment. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a server vulnerability assessment on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<ServerVulnerabilityAssessmentInner> createOrUpdateAsync( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName, ServerVulnerabilityAssessmentInner parameters) { return createOrUpdateWithResponseAsync( resourceGroupName, workspaceName, vulnerabilityAssessmentName, parameters) .flatMap( (Response<ServerVulnerabilityAssessmentInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Create or Update workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @param parameters Properties for vulnerability assessment. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. 
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a server vulnerability assessment. */ @ServiceMethod(returns = ReturnType.SINGLE) public ServerVulnerabilityAssessmentInner createOrUpdate( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName, ServerVulnerabilityAssessmentInner parameters) { return createOrUpdateAsync(resourceGroupName, workspaceName, vulnerabilityAssessmentName, parameters).block(); } /** * Create or Update workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @param parameters Properties for vulnerability assessment. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a server vulnerability assessment along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<ServerVulnerabilityAssessmentInner> createOrUpdateWithResponse( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName, ServerVulnerabilityAssessmentInner parameters, Context context) { return createOrUpdateWithResponseAsync( resourceGroupName, workspaceName, vulnerabilityAssessmentName, parameters, context) .block(); } /** * Remove workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. 
* @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<Void>> deleteWithResponseAsync( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } if (vulnerabilityAssessmentName == null) { return Mono .error( new IllegalArgumentException( "Parameter vulnerabilityAssessmentName is required and cannot be null.")); } final String apiVersion = "2021-06-01"; return FluxUtil .withContext( context -> service .delete( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, vulnerabilityAssessmentName, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Remove workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. 
* @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response} on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<Void>> deleteWithResponseAsync( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } if (vulnerabilityAssessmentName == null) { return Mono .error( new IllegalArgumentException( "Parameter vulnerabilityAssessmentName is required and cannot be null.")); } final String apiVersion = "2021-06-01"; context = this.client.mergeContext(context); return service .delete( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, vulnerabilityAssessmentName, context); } /** * Remove workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. 
* @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return A {@link Mono} that completes when a successful response is received. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Void> deleteAsync( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName) { return deleteWithResponseAsync(resourceGroupName, workspaceName, vulnerabilityAssessmentName) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Remove workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) public void delete( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName) { deleteAsync(resourceGroupName, workspaceName, vulnerabilityAssessmentName).block(); } /** * Remove workspace managed sql server's vulnerability assessment. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param vulnerabilityAssessmentName The name of the vulnerability assessment. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. 
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> deleteWithResponse( String resourceGroupName, String workspaceName, VulnerabilityAssessmentName vulnerabilityAssessmentName, Context context) { return deleteWithResponseAsync(resourceGroupName, workspaceName, vulnerabilityAssessmentName, context).block(); } /** * Lists the vulnerability assessment policies associated with a workspace managed sql server. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of the server's vulnerability assessments along with {@link PagedResponse} on successful * completion of {@link Mono}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<ServerVulnerabilityAssessmentInner>> listSinglePageAsync( String resourceGroupName, String workspaceName) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } final String apiVersion = "2021-06-01"; final String accept = "application/json"; return FluxUtil .withContext( context -> service .list( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, accept, context)) .<PagedResponse<ServerVulnerabilityAssessmentInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Lists the vulnerability assessment policies associated with a workspace managed sql server. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
* @return a list of the server's vulnerability assessments along with {@link PagedResponse} on successful * completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<ServerVulnerabilityAssessmentInner>> listSinglePageAsync( String resourceGroupName, String workspaceName, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } final String apiVersion = "2021-06-01"; final String accept = "application/json"; context = this.client.mergeContext(context); return service .list( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); } /** * Lists the vulnerability assessment policies associated with a workspace managed sql server. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of the server's vulnerability assessments. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<ServerVulnerabilityAssessmentInner> listAsync(String resourceGroupName, String workspaceName) { return new PagedFlux<>( () -> listSinglePageAsync(resourceGroupName, workspaceName), nextLink -> listNextSinglePageAsync(nextLink)); } /** * Lists the vulnerability assessment policies associated with a workspace managed sql server. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of the server's vulnerability assessments. */ @ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<ServerVulnerabilityAssessmentInner> listAsync( String resourceGroupName, String workspaceName, Context context) { return new PagedFlux<>( () -> listSinglePageAsync(resourceGroupName, workspaceName, context), nextLink -> listNextSinglePageAsync(nextLink, context)); } /** * Lists the vulnerability assessment policies associated with a workspace managed sql server. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of the server's vulnerability assessments. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<ServerVulnerabilityAssessmentInner> list(String resourceGroupName, String workspaceName) { return new PagedIterable<>(listAsync(resourceGroupName, workspaceName)); } /** * Lists the vulnerability assessment policies associated with a workspace managed sql server. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of the server's vulnerability assessments. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<ServerVulnerabilityAssessmentInner> list( String resourceGroupName, String workspaceName, Context context) { return new PagedIterable<>(listAsync(resourceGroupName, workspaceName, context)); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of the server's vulnerability assessments along with {@link PagedResponse} on successful * completion of {@link Mono}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<ServerVulnerabilityAssessmentInner>> listNextSinglePageAsync(String nextLink) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } final String accept = "application/json"; return FluxUtil .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context)) .<PagedResponse<ServerVulnerabilityAssessmentInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of the server's vulnerability assessments along with {@link PagedResponse} on successful * completion of {@link Mono}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<ServerVulnerabilityAssessmentInner>> listNextSinglePageAsync( String nextLink, Context context) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } final String accept = "application/json"; context = this.client.mergeContext(context); return service .listNext(nextLink, this.client.getEndpoint(), accept, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)); } }
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.identitymanagement.model; import java.io.Serializable; /** * <p> * Contains the results of a simulation. * </p> * <p> * This data type is used by the return parameter of <code> * SimulatePolicy </code> . * </p> */ public class EvaluationResult implements Serializable, Cloneable { /** * The name of the API action tested on the indicated resource. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>3 - 128<br/> */ private String evalActionName; /** * The ARN of the resource that the indicated API action was tested on. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 2048<br/> */ private String evalResourceName; /** * The result of the simulation. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>allowed, explicitDeny, implicitDeny */ private String evalDecision; /** * A list of the statements in the input policies that determine the * result for this scenario. Remember that even if multiple statements * allow the action on the resource, if only one statement denies that * action, then the explicit deny overrides any allow, and the deny * statement is the only entry included in the result. */ private com.amazonaws.internal.ListWithAutoConstructFlag<Statement> matchedStatements; /** * A list of context keys that are required by the included input * policies but that were not provided by one of the input parameters. 
To * discover the context keys used by a set of policies, you can call * <a>GetContextKeysForCustomPolicy</a> or * <a>GetContextKeysForPrincipalPolicy</a>. <caution> <p>If the response * includes any keys in this list, then the reported results might be * untrustworthy because the simulation could not completely evaluate all * of the conditions specified in the policies that would occur in a real * world request. </caution> */ private com.amazonaws.internal.ListWithAutoConstructFlag<String> missingContextValues; /** * Additional details about the results of the evaluation decision. When * there are both IAM policies and resource policies, this parameter * explains how each set of policies contributes to the final evaluation * decision. When simulating cross-account access to a resource, both the * resource-based policy and the caller's IAM policy must grant access. * See <ulink * href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_compare-resource-policies.html">How * IAM Roles Differ from Resource-based Policies</ulink> */ private java.util.Map<String,String> evalDecisionDetails; /** * The name of the API action tested on the indicated resource. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>3 - 128<br/> * * @return The name of the API action tested on the indicated resource. */ public String getEvalActionName() { return evalActionName; } /** * The name of the API action tested on the indicated resource. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>3 - 128<br/> * * @param evalActionName The name of the API action tested on the indicated resource. */ public void setEvalActionName(String evalActionName) { this.evalActionName = evalActionName; } /** * The name of the API action tested on the indicated resource. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>3 - 128<br/> * * @param evalActionName The name of the API action tested on the indicated resource. 
* * @return A reference to this updated object so that method calls can be chained * together. */ public EvaluationResult withEvalActionName(String evalActionName) { this.evalActionName = evalActionName; return this; } /** * The ARN of the resource that the indicated API action was tested on. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 2048<br/> * * @return The ARN of the resource that the indicated API action was tested on. */ public String getEvalResourceName() { return evalResourceName; } /** * The ARN of the resource that the indicated API action was tested on. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 2048<br/> * * @param evalResourceName The ARN of the resource that the indicated API action was tested on. */ public void setEvalResourceName(String evalResourceName) { this.evalResourceName = evalResourceName; } /** * The ARN of the resource that the indicated API action was tested on. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 2048<br/> * * @param evalResourceName The ARN of the resource that the indicated API action was tested on. * * @return A reference to this updated object so that method calls can be chained * together. */ public EvaluationResult withEvalResourceName(String evalResourceName) { this.evalResourceName = evalResourceName; return this; } /** * The result of the simulation. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>allowed, explicitDeny, implicitDeny * * @return The result of the simulation. * * @see PolicyEvaluationDecisionType */ public String getEvalDecision() { return evalDecision; } /** * The result of the simulation. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>allowed, explicitDeny, implicitDeny * * @param evalDecision The result of the simulation. 
* * @see PolicyEvaluationDecisionType */ public void setEvalDecision(String evalDecision) { this.evalDecision = evalDecision; } /** * The result of the simulation. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>allowed, explicitDeny, implicitDeny * * @param evalDecision The result of the simulation. * * @return A reference to this updated object so that method calls can be chained * together. * * @see PolicyEvaluationDecisionType */ public EvaluationResult withEvalDecision(String evalDecision) { this.evalDecision = evalDecision; return this; } /** * The result of the simulation. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>allowed, explicitDeny, implicitDeny * * @param evalDecision The result of the simulation. * * @see PolicyEvaluationDecisionType */ public void setEvalDecision(PolicyEvaluationDecisionType evalDecision) { this.evalDecision = evalDecision.toString(); } /** * The result of the simulation. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Allowed Values: </b>allowed, explicitDeny, implicitDeny * * @param evalDecision The result of the simulation. * * @return A reference to this updated object so that method calls can be chained * together. * * @see PolicyEvaluationDecisionType */ public EvaluationResult withEvalDecision(PolicyEvaluationDecisionType evalDecision) { this.evalDecision = evalDecision.toString(); return this; } /** * A list of the statements in the input policies that determine the * result for this scenario. Remember that even if multiple statements * allow the action on the resource, if only one statement denies that * action, then the explicit deny overrides any allow, and the deny * statement is the only entry included in the result. * * @return A list of the statements in the input policies that determine the * result for this scenario. 
Remember that even if multiple statements * allow the action on the resource, if only one statement denies that * action, then the explicit deny overrides any allow, and the deny * statement is the only entry included in the result. */ public java.util.List<Statement> getMatchedStatements() { if (matchedStatements == null) { matchedStatements = new com.amazonaws.internal.ListWithAutoConstructFlag<Statement>(); matchedStatements.setAutoConstruct(true); } return matchedStatements; } /** * A list of the statements in the input policies that determine the * result for this scenario. Remember that even if multiple statements * allow the action on the resource, if only one statement denies that * action, then the explicit deny overrides any allow, and the deny * statement is the only entry included in the result. * * @param matchedStatements A list of the statements in the input policies that determine the * result for this scenario. Remember that even if multiple statements * allow the action on the resource, if only one statement denies that * action, then the explicit deny overrides any allow, and the deny * statement is the only entry included in the result. */ public void setMatchedStatements(java.util.Collection<Statement> matchedStatements) { if (matchedStatements == null) { this.matchedStatements = null; return; } com.amazonaws.internal.ListWithAutoConstructFlag<Statement> matchedStatementsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Statement>(matchedStatements.size()); matchedStatementsCopy.addAll(matchedStatements); this.matchedStatements = matchedStatementsCopy; } /** * A list of the statements in the input policies that determine the * result for this scenario. Remember that even if multiple statements * allow the action on the resource, if only one statement denies that * action, then the explicit deny overrides any allow, and the deny * statement is the only entry included in the result. 
* <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setMatchedStatements(java.util.Collection)} or * {@link #withMatchedStatements(java.util.Collection)} if you want to * override the existing values. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param matchedStatements A list of the statements in the input policies that determine the * result for this scenario. Remember that even if multiple statements * allow the action on the resource, if only one statement denies that * action, then the explicit deny overrides any allow, and the deny * statement is the only entry included in the result. * * @return A reference to this updated object so that method calls can be chained * together. */ public EvaluationResult withMatchedStatements(Statement... matchedStatements) { if (getMatchedStatements() == null) setMatchedStatements(new java.util.ArrayList<Statement>(matchedStatements.length)); for (Statement value : matchedStatements) { getMatchedStatements().add(value); } return this; } /** * A list of the statements in the input policies that determine the * result for this scenario. Remember that even if multiple statements * allow the action on the resource, if only one statement denies that * action, then the explicit deny overrides any allow, and the deny * statement is the only entry included in the result. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param matchedStatements A list of the statements in the input policies that determine the * result for this scenario. Remember that even if multiple statements * allow the action on the resource, if only one statement denies that * action, then the explicit deny overrides any allow, and the deny * statement is the only entry included in the result. * * @return A reference to this updated object so that method calls can be chained * together. 
*/ public EvaluationResult withMatchedStatements(java.util.Collection<Statement> matchedStatements) { if (matchedStatements == null) { this.matchedStatements = null; } else { com.amazonaws.internal.ListWithAutoConstructFlag<Statement> matchedStatementsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Statement>(matchedStatements.size()); matchedStatementsCopy.addAll(matchedStatements); this.matchedStatements = matchedStatementsCopy; } return this; } /** * A list of context keys that are required by the included input * policies but that were not provided by one of the input parameters. To * discover the context keys used by a set of policies, you can call * <a>GetContextKeysForCustomPolicy</a> or * <a>GetContextKeysForPrincipalPolicy</a>. <caution> <p>If the response * includes any keys in this list, then the reported results might be * untrustworthy because the simulation could not completely evaluate all * of the conditions specified in the policies that would occur in a real * world request. </caution> * * @return A list of context keys that are required by the included input * policies but that were not provided by one of the input parameters. To * discover the context keys used by a set of policies, you can call * <a>GetContextKeysForCustomPolicy</a> or * <a>GetContextKeysForPrincipalPolicy</a>. <caution> <p>If the response * includes any keys in this list, then the reported results might be * untrustworthy because the simulation could not completely evaluate all * of the conditions specified in the policies that would occur in a real * world request. 
</caution> */ public java.util.List<String> getMissingContextValues() { if (missingContextValues == null) { missingContextValues = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(); missingContextValues.setAutoConstruct(true); } return missingContextValues; } /** * A list of context keys that are required by the included input * policies but that were not provided by one of the input parameters. To * discover the context keys used by a set of policies, you can call * <a>GetContextKeysForCustomPolicy</a> or * <a>GetContextKeysForPrincipalPolicy</a>. <caution> <p>If the response * includes any keys in this list, then the reported results might be * untrustworthy because the simulation could not completely evaluate all * of the conditions specified in the policies that would occur in a real * world request. </caution> * * @param missingContextValues A list of context keys that are required by the included input * policies but that were not provided by one of the input parameters. To * discover the context keys used by a set of policies, you can call * <a>GetContextKeysForCustomPolicy</a> or * <a>GetContextKeysForPrincipalPolicy</a>. <caution> <p>If the response * includes any keys in this list, then the reported results might be * untrustworthy because the simulation could not completely evaluate all * of the conditions specified in the policies that would occur in a real * world request. 
</caution> */ public void setMissingContextValues(java.util.Collection<String> missingContextValues) { if (missingContextValues == null) { this.missingContextValues = null; return; } com.amazonaws.internal.ListWithAutoConstructFlag<String> missingContextValuesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(missingContextValues.size()); missingContextValuesCopy.addAll(missingContextValues); this.missingContextValues = missingContextValuesCopy; } /** * A list of context keys that are required by the included input * policies but that were not provided by one of the input parameters. To * discover the context keys used by a set of policies, you can call * <a>GetContextKeysForCustomPolicy</a> or * <a>GetContextKeysForPrincipalPolicy</a>. <caution> <p>If the response * includes any keys in this list, then the reported results might be * untrustworthy because the simulation could not completely evaluate all * of the conditions specified in the policies that would occur in a real * world request. </caution> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setMissingContextValues(java.util.Collection)} or * {@link #withMissingContextValues(java.util.Collection)} if you want to * override the existing values. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param missingContextValues A list of context keys that are required by the included input * policies but that were not provided by one of the input parameters. To * discover the context keys used by a set of policies, you can call * <a>GetContextKeysForCustomPolicy</a> or * <a>GetContextKeysForPrincipalPolicy</a>. <caution> <p>If the response * includes any keys in this list, then the reported results might be * untrustworthy because the simulation could not completely evaluate all * of the conditions specified in the policies that would occur in a real * world request. 
</caution> * * @return A reference to this updated object so that method calls can be chained * together. */ public EvaluationResult withMissingContextValues(String... missingContextValues) { if (getMissingContextValues() == null) setMissingContextValues(new java.util.ArrayList<String>(missingContextValues.length)); for (String value : missingContextValues) { getMissingContextValues().add(value); } return this; } /** * A list of context keys that are required by the included input * policies but that were not provided by one of the input parameters. To * discover the context keys used by a set of policies, you can call * <a>GetContextKeysForCustomPolicy</a> or * <a>GetContextKeysForPrincipalPolicy</a>. <caution> <p>If the response * includes any keys in this list, then the reported results might be * untrustworthy because the simulation could not completely evaluate all * of the conditions specified in the policies that would occur in a real * world request. </caution> * <p> * Returns a reference to this object so that method calls can be chained together. * * @param missingContextValues A list of context keys that are required by the included input * policies but that were not provided by one of the input parameters. To * discover the context keys used by a set of policies, you can call * <a>GetContextKeysForCustomPolicy</a> or * <a>GetContextKeysForPrincipalPolicy</a>. <caution> <p>If the response * includes any keys in this list, then the reported results might be * untrustworthy because the simulation could not completely evaluate all * of the conditions specified in the policies that would occur in a real * world request. </caution> * * @return A reference to this updated object so that method calls can be chained * together. 
*/ public EvaluationResult withMissingContextValues(java.util.Collection<String> missingContextValues) { if (missingContextValues == null) { this.missingContextValues = null; } else { com.amazonaws.internal.ListWithAutoConstructFlag<String> missingContextValuesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(missingContextValues.size()); missingContextValuesCopy.addAll(missingContextValues); this.missingContextValues = missingContextValuesCopy; } return this; } /** * Additional details about the results of the evaluation decision. When * there are both IAM policies and resource policies, this parameter * explains how each set of policies contributes to the final evaluation * decision. When simulating cross-account access to a resource, both the * resource-based policy and the caller's IAM policy must grant access. * See <ulink * href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_compare-resource-policies.html">How * IAM Roles Differ from Resource-based Policies</ulink> * * @return Additional details about the results of the evaluation decision. When * there are both IAM policies and resource policies, this parameter * explains how each set of policies contributes to the final evaluation * decision. When simulating cross-account access to a resource, both the * resource-based policy and the caller's IAM policy must grant access. * See <ulink * href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_compare-resource-policies.html">How * IAM Roles Differ from Resource-based Policies</ulink> */ public java.util.Map<String,String> getEvalDecisionDetails() { if (evalDecisionDetails == null) { evalDecisionDetails = new java.util.HashMap<String,String>(); } return evalDecisionDetails; } /** * Additional details about the results of the evaluation decision. When * there are both IAM policies and resource policies, this parameter * explains how each set of policies contributes to the final evaluation * decision. 
When simulating cross-account access to a resource, both the * resource-based policy and the caller's IAM policy must grant access. * See <ulink * href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_compare-resource-policies.html">How * IAM Roles Differ from Resource-based Policies</ulink> * * @param evalDecisionDetails Additional details about the results of the evaluation decision. When * there are both IAM policies and resource policies, this parameter * explains how each set of policies contributes to the final evaluation * decision. When simulating cross-account access to a resource, both the * resource-based policy and the caller's IAM policy must grant access. * See <ulink * href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_compare-resource-policies.html">How * IAM Roles Differ from Resource-based Policies</ulink> */ public void setEvalDecisionDetails(java.util.Map<String,String> evalDecisionDetails) { this.evalDecisionDetails = evalDecisionDetails; } /** * Additional details about the results of the evaluation decision. When * there are both IAM policies and resource policies, this parameter * explains how each set of policies contributes to the final evaluation * decision. When simulating cross-account access to a resource, both the * resource-based policy and the caller's IAM policy must grant access. * See <ulink * href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_compare-resource-policies.html">How * IAM Roles Differ from Resource-based Policies</ulink> * <p> * Returns a reference to this object so that method calls can be chained together. * * @param evalDecisionDetails Additional details about the results of the evaluation decision. When * there are both IAM policies and resource policies, this parameter * explains how each set of policies contributes to the final evaluation * decision. 
When simulating cross-account access to a resource, both the * resource-based policy and the caller's IAM policy must grant access. * See <ulink * href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_compare-resource-policies.html">How * IAM Roles Differ from Resource-based Policies</ulink> * * @return A reference to this updated object so that method calls can be chained * together. */ public EvaluationResult withEvalDecisionDetails(java.util.Map<String,String> evalDecisionDetails) { setEvalDecisionDetails(evalDecisionDetails); return this; } /** * Additional details about the results of the evaluation decision. When * there are both IAM policies and resource policies, this parameter * explains how each set of policies contributes to the final evaluation * decision. When simulating cross-account access to a resource, both the * resource-based policy and the caller's IAM policy must grant access. * See <ulink * href="http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_compare-resource-policies.html">How * IAM Roles Differ from Resource-based Policies</ulink> * <p> * The method adds a new key-value pair into EvalDecisionDetails * parameter, and returns a reference to this object so that method calls * can be chained together. * * @param key The key of the entry to be added into EvalDecisionDetails. * @param value The corresponding value of the entry to be added into EvalDecisionDetails. */ public EvaluationResult addEvalDecisionDetailsEntry(String key, String value) { if (null == this.evalDecisionDetails) { this.evalDecisionDetails = new java.util.HashMap<String,String>(); } if (this.evalDecisionDetails.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.evalDecisionDetails.put(key, value); return this; } /** * Removes all the entries added into EvalDecisionDetails. * <p> * Returns a reference to this object so that method calls can be chained together. 
*/ public EvaluationResult clearEvalDecisionDetailsEntries() { this.evalDecisionDetails = null; return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getEvalActionName() != null) sb.append("EvalActionName: " + getEvalActionName() + ","); if (getEvalResourceName() != null) sb.append("EvalResourceName: " + getEvalResourceName() + ","); if (getEvalDecision() != null) sb.append("EvalDecision: " + getEvalDecision() + ","); if (getMatchedStatements() != null) sb.append("MatchedStatements: " + getMatchedStatements() + ","); if (getMissingContextValues() != null) sb.append("MissingContextValues: " + getMissingContextValues() + ","); if (getEvalDecisionDetails() != null) sb.append("EvalDecisionDetails: " + getEvalDecisionDetails() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getEvalActionName() == null) ? 0 : getEvalActionName().hashCode()); hashCode = prime * hashCode + ((getEvalResourceName() == null) ? 0 : getEvalResourceName().hashCode()); hashCode = prime * hashCode + ((getEvalDecision() == null) ? 0 : getEvalDecision().hashCode()); hashCode = prime * hashCode + ((getMatchedStatements() == null) ? 0 : getMatchedStatements().hashCode()); hashCode = prime * hashCode + ((getMissingContextValues() == null) ? 0 : getMissingContextValues().hashCode()); hashCode = prime * hashCode + ((getEvalDecisionDetails() == null) ? 
0 : getEvalDecisionDetails().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof EvaluationResult == false) return false; EvaluationResult other = (EvaluationResult)obj; if (other.getEvalActionName() == null ^ this.getEvalActionName() == null) return false; if (other.getEvalActionName() != null && other.getEvalActionName().equals(this.getEvalActionName()) == false) return false; if (other.getEvalResourceName() == null ^ this.getEvalResourceName() == null) return false; if (other.getEvalResourceName() != null && other.getEvalResourceName().equals(this.getEvalResourceName()) == false) return false; if (other.getEvalDecision() == null ^ this.getEvalDecision() == null) return false; if (other.getEvalDecision() != null && other.getEvalDecision().equals(this.getEvalDecision()) == false) return false; if (other.getMatchedStatements() == null ^ this.getMatchedStatements() == null) return false; if (other.getMatchedStatements() != null && other.getMatchedStatements().equals(this.getMatchedStatements()) == false) return false; if (other.getMissingContextValues() == null ^ this.getMissingContextValues() == null) return false; if (other.getMissingContextValues() != null && other.getMissingContextValues().equals(this.getMissingContextValues()) == false) return false; if (other.getEvalDecisionDetails() == null ^ this.getEvalDecisionDetails() == null) return false; if (other.getEvalDecisionDetails() != null && other.getEvalDecisionDetails().equals(this.getEvalDecisionDetails()) == false) return false; return true; } @Override public EvaluationResult clone() { try { return (EvaluationResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* $Id$ */

package org.apache.fop.afp.modca;

import java.io.IOException;
import java.io.OutputStream;
import java.util.List;

import org.apache.fop.afp.AFPDataObjectInfo;
import org.apache.fop.afp.Factory;
import org.apache.fop.afp.fonts.AFPFont;
import org.apache.fop.afp.modca.triplets.AbstractTriplet;
import org.apache.fop.afp.modca.triplets.EncodingTriplet;
import org.apache.fop.afp.modca.triplets.FullyQualifiedNameTriplet;
import org.apache.fop.afp.modca.triplets.ObjectClassificationTriplet;
import org.apache.fop.afp.util.BinaryUtils;
import org.apache.fop.apps.MimeConstants;
import org.apache.fop.fonts.FontType;
import org.apache.fop.render.afp.AFPFontConfig;

/**
 * An Active Environment Group (AEG) is associated with each page,
 * and is contained in the page's begin-end envelope in the data stream.
 * The active environment group contains layout and formatting information
 * that defines the measurement units and size of the page, and may contain
 * resource information.
 *
 * Any objects that are required for page presentation and that are to be
 * treated as resource objects must be mapped with a map structured field
 * in the AEG. The scope of an active environment group is the scope of its
 * containing page or overlay.
 *
 */
public final class ActiveEnvironmentGroup extends AbstractEnvironmentGroup {

    /** The collection of MapCodedFont objects (also holds MapDataResource
     * entries for TrueType fonts — see {@link #createFont}) */
    private final List/*<MapCodedFonts>*/ mapCodedFonts = new java.util.ArrayList/*<MapCodedFonts>*/();

    /** the collection of MapPageSegments objects; lazily created by
     * {@link #needMapPageSegment()} */
    private List mapPageSegments;

    /** the Object Area Descriptor for the active environment group */
    private ObjectAreaDescriptor objectAreaDescriptor;

    /** the Object Area Position for the active environment group */
    private ObjectAreaPosition objectAreaPosition;

    /** the PresentationTextDescriptor for the active environment group */
    private PresentationTextDescriptor presentationTextDataDescriptor;

    /** the PageDescriptor for the active environment group */
    private PageDescriptor pageDescriptor;

    /** the resource manager used to create the descriptor/position objects */
    private final Factory factory;

    /** the single Map Data Resource used to map all TrueType fonts;
     * created on demand in {@link #createFont} */
    private MapDataResource mdr;

    /**
     * Constructor for the ActiveEnvironmentGroup, this takes a
     * name parameter which must be 8 characters long.
     *
     * @param factory the object factory
     * @param name the active environment group name
     * @param width the page width
     * @param height the page height
     * @param widthRes the page width resolution
     * @param heightRes the page height resolution
     */
    public ActiveEnvironmentGroup(Factory factory,
            String name, int width, int height, int widthRes, int heightRes) {
        super(name);

        this.factory = factory;

        // Create PageDescriptor
        this.pageDescriptor
            = factory.createPageDescriptor(width, height, widthRes, heightRes);

        // Create ObjectAreaDescriptor
        this.objectAreaDescriptor
            = factory.createObjectAreaDescriptor(width, height, widthRes, heightRes);

        // Create PresentationTextDataDescriptor
        this.presentationTextDataDescriptor
            = factory.createPresentationTextDataDescriptor(width, height,
                widthRes, heightRes);
    }

    /**
     * Set the position of the object area
     *
     * @param x the x offset
     * @param y the y offset
     * @param rotation the rotation
     */
    public void setObjectAreaPosition(int x, int y, int rotation) {
        this.objectAreaPosition = factory.createObjectAreaPosition(x, y, rotation);
    }

    /**
     * Accessor method to obtain the PageDescriptor object of the
     * active environment group.
     *
     * @return the page descriptor object
     */
    public PageDescriptor getPageDescriptor() {
        return pageDescriptor;
    }

    /**
     * Accessor method to obtain the PresentationTextDataDescriptor object of
     * the active environment group.
     *
     * @return the presentation text descriptor
     */
    public PresentationTextDescriptor getPresentationTextDataDescriptor() {
        return presentationTextDataDescriptor;
    }

    /** {@inheritDoc} */
    public void writeContent(OutputStream os) throws IOException {
        // NOTE(review): the write order below (triplets, maps, descriptors,
        // position) appears to be mandated by the MO:DCA structured-field
        // layout — do not reorder without consulting the spec.
        super.writeTriplets(os);

        writeObjects(mapCodedFonts, os);
        writeObjects(mapDataResources, os);
        writeObjects(mapPageOverlays, os);
        writeObjects(mapPageSegments, os);

        if (pageDescriptor != null) {
            pageDescriptor.writeToStream(os);
        }
        // the position is only written when both descriptor and position exist
        if (objectAreaDescriptor != null && objectAreaPosition != null) {
            objectAreaDescriptor.writeToStream(os);
            objectAreaPosition.writeToStream(os);
        }
        if (presentationTextDataDescriptor != null) {
            presentationTextDataDescriptor.writeToStream(os);
        }
    }

    /** {@inheritDoc} */
    protected void writeStart(OutputStream os) throws IOException {
        // 17-byte Begin Active Environment Group structured field introducer
        byte[] data = new byte[17];
        copySF(data, Type.BEGIN, Category.ACTIVE_ENVIRONMENT_GROUP);
        os.write(data);
    }

    /** {@inheritDoc} */
    protected void writeEnd(OutputStream os) throws IOException {
        // 17-byte End Active Environment Group structured field introducer
        byte[] data = new byte[17];
        copySF(data, Type.END, Category.ACTIVE_ENVIRONMENT_GROUP);
        os.write(data);
    }

    /**
     * Method to create a map coded font object
     *
     * @param fontRef the font number used as the resource identifier
     * @param font the font
     * @param size the point size of the font (in millipoints)
     * @param orientation the orientation of the font (e.g. 0, 90, 180, 270)
     */
    public void createFont(int fontRef, AFPFont font, int size, int orientation) {
        if (font.getFontType() == FontType.TRUETYPE) {
            // TrueType fonts are mapped through a single shared
            // Map Data Resource rather than a MapCodedFont.
            if (mdr == null) {
                mdr = factory.createMapDataResource();
                mapCodedFonts.add(mdr);
            }
            // 1200 = UTF-16BE CCSID used for TrueType text
            mdr.addTriplet(new EncodingTriplet(1200));

            // Prefer the TrueType Collection name when the font is part of a TTC
            String name = font.getFontName();
            if (((AFPFontConfig.AFPTrueTypeFont)font).getTTC() != null) {
                name = ((AFPFontConfig.AFPTrueTypeFont)font).getTTC();
            }

            mdr.setFullyQualifiedName(FullyQualifiedNameTriplet.TYPE_DATA_OBJECT_EXTERNAL_RESOURCE_REF,
                    FullyQualifiedNameTriplet.FORMAT_CHARSTR, name, true);

            // internal reference: the font number doubles as the resource id
            mdr.addTriplet(new FontFullyQualifiedNameTriplet((byte) fontRef));
            setupTruetypeMDR(mdr, false);
            // millipoints -> points
            mdr.addTriplet(new DataObjectFontTriplet(size / 1000));
            mdr.finishElement();
        } else {
            MapCodedFont mapCodedFont = getCurrentMapCodedFont();
            if (mapCodedFont == null) {
                mapCodedFont = factory.createMapCodedFont();
                mapCodedFonts.add(mapCodedFont);
            }

            try {
                mapCodedFont.addFont(fontRef, font, size, orientation);
            } catch (MaximumSizeExceededException msee) {
                // The current MapCodedFont is full: start a new one and retry
                mapCodedFont = factory.createMapCodedFont();
                mapCodedFonts.add(mapCodedFont);

                try {
                    mapCodedFont.addFont(fontRef, font, size, orientation);
                } catch (MaximumSizeExceededException ex) {
                    // Should never happen (but log just in case)
                    LOG.error("createFont():: resulted in a MaximumSizeExceededException");
                }
            }
        }
    }

    /**
     * Configures the given structured object with the object-classification
     * triplet for a data-object (TrueType) font.
     *
     * @param mdr the structured object to configure
     * @param res whether the font is a print-file-level resource
     */
    public static void setupTruetypeMDR(AbstractTripletStructuredObject mdr, boolean res) {
        AFPDataObjectInfo dataInfo = new AFPDataObjectInfo();
        dataInfo.setMimeType(MimeConstants.MIME_AFP_TRUETYPE);
        mdr.setObjectClassification(ObjectClassificationTriplet.CLASS_DATA_OBJECT_FONT,
                dataInfo.getObjectType(), res, false, res);
    }

    /**
     * A Fully Qualified Name triplet that carries a one-byte internal
     * resource reference (the font number) instead of a character string.
     */
    public static class FontFullyQualifiedNameTriplet extends AbstractTriplet {
        // the one-byte internal resource reference (font number)
        private byte fqName;

        /**
         * @param fqName the one-byte font reference to embed
         */
        public FontFullyQualifiedNameTriplet(byte fqName) {
            super(FULLY_QUALIFIED_NAME);
            this.fqName = fqName;
        }

        /** @return the total triplet length in bytes (header + payload) */
        public int getDataLength() {
            return 5;
        }

        /** Writes the 5-byte triplet: length, id, FQN type, format, reference. */
        public void writeToStream(OutputStream os) throws IOException {
            byte[] data = getData();
            data[2] = FullyQualifiedNameTriplet.TYPE_DATA_OBJECT_INTERNAL_RESOURCE_REF;
            data[3] = FullyQualifiedNameTriplet.FORMAT_CHARSTR;
            data[4] = fqName;
            os.write(data);
        }
    }

    /**
     * A Data Object Font Descriptor triplet carrying the point size
     * (in twentieths of a point) for a TrueType font mapping.
     */
    static class DataObjectFontTriplet extends AbstractTriplet {
        // point size in whole points (converted to 1/20 points on write)
        private int pointSize;

        /**
         * @param size the point size of the font (whole points)
         */
        public DataObjectFontTriplet(int size) {
            super(DATA_OBJECT_FONT_DESCRIPTOR);
            pointSize = size;
        }

        /** @return the total triplet length in bytes */
        public int getDataLength() {
            return 16;
        }

        /**
         * Writes the descriptor payload. Field offsets follow the MO:DCA
         * Data Object Font Descriptor layout — NOTE(review): offset meanings
         * below are taken from the inline labels; verify against the spec
         * before changing.
         */
        public void writeToStream(OutputStream os) throws IOException {
            byte[] data = getData();
            data[3] = 0x20;

            // vertical font size in 1/20 points, big-endian
            byte[] pointSizeBytes = BinaryUtils.convert(pointSize * 20, 2);
            data[4] = pointSizeBytes[0]; //vfs
            data[5] = pointSizeBytes[1];
            // data[6] = pointSizeBytes[0]; //hsf
            // data[7] = pointSizeBytes[1];

            //charrot
            data[11] = 0x03; //encenv
            data[13] = 0x01; //encid
            os.write(data);
        }
    }

    /**
     * Getter method for the most recent MapCodedFont added to the
     * Active Environment Group (returns null if no MapCodedFonts exist)
     *
     * @return the most recent Map Coded Font.
     */
    private MapCodedFont getCurrentMapCodedFont() {
        int size = mapCodedFonts.size();
        if (size > 0) {
            // the list is append-only, so the last entry is the current one
            return (MapCodedFont)mapCodedFonts.get(size - 1);
        } else {
            return null;
        }
    }

    /**
     * Add map page segment.
     * @param name of segment to add
     */
    public void addMapPageSegment(String name) {
        try {
            needMapPageSegment().addPageSegment(name);
        } catch (MaximumSizeExceededException e) {
            //Should not happen, handled internally
            throw new IllegalStateException("Internal error: " + e.getMessage());
        }
    }

    /**
     * Returns the most recently added MapPageSegment, or null when none
     * exist (or the list was never created).
     */
    private MapPageSegment getCurrentMapPageSegment() {
        return (MapPageSegment)getLastElement(this.mapPageSegments);
    }

    /**
     * Returns a MapPageSegment with remaining capacity, lazily creating the
     * backing list and/or a new segment when the current one is full.
     */
    private MapPageSegment needMapPageSegment() {
        if (this.mapPageSegments == null) {
            this.mapPageSegments = new java.util.ArrayList();
        }
        MapPageSegment seg = getCurrentMapPageSegment();
        if (seg == null || seg.isFull()) {
            seg = new MapPageSegment();
            this.mapPageSegments.add(seg);
        }
        return seg;
    }
}
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.controls.resultset; import org.eclipse.core.runtime.IAdaptable; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.DBPContextProvider; import org.jkiss.dbeaver.model.DBPDataSource; import org.jkiss.dbeaver.model.DBUtils; import org.jkiss.dbeaver.model.data.*; import org.jkiss.dbeaver.model.exec.*; import org.jkiss.dbeaver.model.impl.local.LocalResultSetMeta; import org.jkiss.dbeaver.model.struct.DBSDataContainer; import org.jkiss.dbeaver.model.data.DBDAttributeFilter; import org.jkiss.dbeaver.model.struct.DBSObject; import org.jkiss.utils.CommonUtils; import java.util.ArrayList; import java.util.List; /** * Client-side data container. * Wraps RSV model and original data container. 
*/ public class ResultSetDataContainer implements DBSDataContainer, DBPContextProvider, IAdaptable, DBDAttributeFilter { private static final Log log = Log.getLog(ResultSetDataContainer.class); private final IResultSetController controller; private final DBSDataContainer dataContainer; private final ResultSetModel model; private ResultSetDataContainerOptions options; private boolean filterAttributes; public ResultSetDataContainer(IResultSetController controller, ResultSetDataContainerOptions options) { this.controller = controller; this.dataContainer = controller.getDataContainer(); this.model = controller.getModel(); this.options = options; } @Override public String getDescription() { return dataContainer.getDescription(); } @Override public DBSObject getParentObject() { return dataContainer.getParentObject(); } @Override public DBPDataSource getDataSource() { return dataContainer.getDataSource(); } @Override public int getSupportedFeatures() { return DATA_SELECT | DATA_COUNT; } public ResultSetDataContainerOptions getOptions() { return options; } @Override public DBCStatistics readData(DBCExecutionSource source, DBCSession session, DBDDataReceiver dataReceiver, DBDDataFilter dataFilter, long firstRow, long maxRows, long flags, int fetchSize) throws DBCException { filterAttributes = proceedSelectedRowsOnly(flags); if (filterAttributes || proceedSelectedColumnsOnly(flags)) { long startTime = System.currentTimeMillis(); DBCStatistics statistics = new DBCStatistics(); statistics.setExecuteTime(System.currentTimeMillis() - startTime); //LocalSta ModelResultSet resultSet = new ModelResultSet(session, flags); long resultCount = 0; try { dataReceiver.fetchStart(session, resultSet, firstRow, maxRows); while (resultSet.nextRow()) { if (!proceedSelectedRowsOnly(flags) || options.getSelectedRows().contains(resultCount)) { dataReceiver.fetchRow(session, resultSet); } resultCount++; } } finally { try { dataReceiver.fetchEnd(session, resultSet); } catch (DBCException e) { 
log.error("Error while finishing result set fetch", e); //$NON-NLS-1$ } resultSet.close(); dataReceiver.close(); } statistics.setFetchTime(System.currentTimeMillis() - startTime); statistics.setRowsFetched(resultCount); return statistics; } else { return dataContainer.readData(source, session, dataReceiver, dataFilter, firstRow, maxRows, flags, fetchSize); } } private boolean proceedSelectedColumnsOnly(long flags) { return (flags & DBSDataContainer.FLAG_USE_SELECTED_COLUMNS) != 0 && !CommonUtils.isEmpty(options.getSelectedColumns()); } private boolean proceedSelectedRowsOnly(long flags) { return (flags & DBSDataContainer.FLAG_USE_SELECTED_ROWS) != 0 && !CommonUtils.isEmpty(options.getSelectedRows()); } @Override public long countData(DBCExecutionSource source, DBCSession session, DBDDataFilter dataFilter, long flags) throws DBCException { if (proceedSelectedRowsOnly(flags)) { return options.getSelectedRows().size(); } else if (proceedSelectedColumnsOnly(flags)) { return model.getRowCount(); } else { return dataContainer.countData(source, session, dataFilter, flags); } } @Override public String getName() { return dataContainer.getName(); } @Override public boolean isPersisted() { return dataContainer.isPersisted(); } @Override public <T> T getAdapter(Class<T> adapter) { if (adapter.isInstance(dataContainer)) { return adapter.cast(dataContainer); } return null; } @Nullable @Override public DBCExecutionContext getExecutionContext() { return controller.getExecutionContext(); } @Override public DBDAttributeBinding[] filterAttributeBindings(DBDAttributeBinding[] attributes) { DBDDataFilter dataFilter = model.getDataFilter(); List<DBDAttributeBinding> filtered = new ArrayList<>(); for (DBDAttributeBinding attr : attributes) { DBDAttributeConstraint ac = dataFilter.getConstraint(attr); if (ac != null && !ac.isVisible()) { continue; } if (!filterAttributes || options.getSelectedColumns().contains(attr.getName())) { filtered.add(attr); } } filtered.sort((o1, o2) -> { 
DBDAttributeConstraint c1 = dataFilter.getConstraint(o1.getName()); DBDAttributeConstraint c2 = dataFilter.getConstraint(o2.getName()); return c1 == null || c2 == null ? 0 : c1.getVisualPosition() - c2.getVisualPosition(); }); return filtered.toArray(new DBDAttributeBinding[0]); } private class ModelResultSet implements DBCResultSet { private final DBCSession session; private final long flags; private ResultSetRow curRow; ModelResultSet(DBCSession session, long flags) { this.session = session; this.flags = flags; } @Override public DBCSession getSession() { return session; } @Override public DBCStatement getSourceStatement() { return null; } @Override public Object getAttributeValue(int index) throws DBCException { return model.getCellValue(model.getVisibleAttribute(index), curRow); } @Override public Object getAttributeValue(String name) throws DBCException { DBDAttributeBinding attr = DBUtils.findObject(model.getVisibleAttributes(), name); if (attr == null) { throw new DBCException("Attribute '" + name + "' not found"); } return model.getCellValue(attr, curRow); } @Override public DBDValueMeta getAttributeValueMeta(int index) throws DBCException { return null; } @Override public DBDValueMeta getRowMeta() throws DBCException { return null; } @Override public boolean nextRow() throws DBCException { if (curRow == null) { if (model.getRowCount() == 0) { return false; } curRow = model.getRow(0); } else { if (curRow.getRowNumber() >= model.getRowCount() - 1) { return false; } curRow = model.getRow(curRow.getRowNumber() + 1); } return true; } @Override public boolean moveTo(int position) throws DBCException { if (position >= model.getRowCount() - 1) { return false; } curRow = model.getRow(position); return true; } @NotNull @Override public DBCResultSetMetaData getMeta() throws DBCException { List<DBDAttributeBinding> attributes = model.getVisibleAttributes(); List<DBCAttributeMetaData> meta = new ArrayList<>(attributes.size()); for (DBDAttributeBinding attribute : 
attributes) { DBCAttributeMetaData metaAttribute = attribute.getMetaAttribute(); if (metaAttribute != null) { meta.add(metaAttribute); } } return new CustomResultSetMeta(meta); } @Override public String getResultSetName() throws DBCException { return "ClientResults"; } @Override public Object getFeature(String name) { if (FEATURE_NAME_LOCAL.equals(name)) { return true; } return null; } @Override public void close() { // do nothing } private class CustomResultSetMeta extends LocalResultSetMeta { public CustomResultSetMeta(List<DBCAttributeMetaData> meta) { super(meta); } } } }
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.guvnor.common.services.project.backend.server; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.Collection; import java.util.Set; import javax.enterprise.inject.Instance; import org.apache.maven.project.MavenProject; import org.appformer.maven.integration.embedder.MavenProjectLoader; import org.appformer.maven.integration.embedder.MavenSettings; import org.guvnor.common.services.project.model.GAV; import org.guvnor.common.services.project.model.MavenRepositoryMetadata; import org.guvnor.common.services.project.model.MavenRepositorySource; import org.guvnor.common.services.project.model.Project; import org.guvnor.common.services.project.preferences.GAVPreferences; import org.guvnor.common.services.shared.preferences.WorkbenchPreferenceScopeResolutionStrategies; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import org.uberfire.io.IOService; import org.uberfire.java.nio.file.Path; import static org.guvnor.common.services.project.backend.server.MavenLocalRepositoryUtils.*; import static org.guvnor.common.services.project.backend.server.RepositoryResolverTestUtils.*; import 
static org.junit.Assert.*; import static org.mockito.Mockito.*; @RunWith(MockitoJUnitRunner.class) // *** NOTE *** // The transient Maven Repository used by these tests is only cleared after all the tests have ran. // Therefore each test should use a unique GAV to avoid potential conflicts between tests. public class ProjectRepositoryResolverImplTest { @Mock private IOService ioService; @Mock private Instance<GAVPreferences> gavPreferencesProvider; @Mock private GAVPreferences gavPreferences; @Mock private WorkbenchPreferenceScopeResolutionStrategies scopeResolutionStrategies; private ProjectRepositoryResolverImpl service; private static java.nio.file.Path m2Folder = null; private static java.nio.file.Path settingsXmlPath = null; @BeforeClass public static void setupSystemProperties() { //These are not needed for the tests System.setProperty( "org.uberfire.nio.git.daemon.enabled", "false" ); System.setProperty( "org.uberfire.nio.git.ssh.enabled", "false" ); System.setProperty( "org.uberfire.sys.repo.monitor.disabled", "true" ); } @BeforeClass public static void setupMavenRepository() { try { m2Folder = Files.createTempDirectory( "temp-m2" ); settingsXmlPath = generateSettingsXml( m2Folder ); } catch ( IOException ioe ) { fail( ioe.getMessage() ); } } @Before public void setup() { service = new ProjectRepositoryResolverImpl( ioService, gavPreferencesProvider, scopeResolutionStrategies ); doReturn( gavPreferences ).when( gavPreferencesProvider ).get(); } @AfterClass public static void teardownMavenRepository() { tearDownMavenRepository( m2Folder ); MavenSettings.reinitSettings(); } @Test public void testGetRemoteRepositoriesMetaData_WithoutExplicitProjectRepository() { final Project project = mock( Project.class ); final org.uberfire.backend.vfs.Path pomXmlPath = mock( org.uberfire.backend.vfs.Path.class ); final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + " <groupId>org.guvnor</groupId>\n" + " <artifactId>test</artifactId>\n" + " <version>0.0.1</version>\n" + "</project>"; when( project.getPomXMLPath() ).thenReturn( pomXmlPath ); when( pomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" ); when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml ); final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY ); try { System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() ); final Set<MavenRepositoryMetadata> metadata = service.getRemoteRepositoriesMetaData( project ); assertNotNull( metadata ); assertEquals( 6, metadata.size() ); assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata ); assertContainsRepository( "jboss-developer-repository-group", "https://repository.jboss.org/nexus/content/groups/developer/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", "http://repository.jboss.org/nexus/content/groups/public/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-origin-repository-group", "https://origin-repository.jboss.org/nexus/content/groups/ea/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", "https://repository.jboss.org/nexus/content/repositories/snapshots/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "central", "https://repo.maven.apache.org/maven2", MavenRepositorySource.PROJECT, metadata ); } finally { resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath ); } } @Test public void testGetRemoteRepositoriesMetaData_WithExplicitProjectRepository() { final Project project = mock( Project.class ); final 
org.uberfire.backend.vfs.Path pomXmlPath = mock( org.uberfire.backend.vfs.Path.class ); final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + " <groupId>org.guvnor</groupId>\n" + " <artifactId>test</artifactId>\n" + " <version>0.0.2</version>\n" + " <repositories>\n" + " <repository>\n" + " <id>explicit-repo</id>\n" + " <name>Explicit Repository</name>\n" + " <url>http://localhost/maven2/</url>\n" + " </repository>\n" + " </repositories>\n" + "</project>"; when( project.getPomXMLPath() ).thenReturn( pomXmlPath ); when( pomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" ); when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml ); final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY ); try { System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() ); final Set<MavenRepositoryMetadata> metadata = service.getRemoteRepositoriesMetaData( project ); assertNotNull( metadata ); assertEquals( 7, metadata.size() ); assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata ); assertContainsRepository( "jboss-developer-repository-group", "https://repository.jboss.org/nexus/content/groups/developer/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", "http://repository.jboss.org/nexus/content/groups/public/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-origin-repository-group", "https://origin-repository.jboss.org/nexus/content/groups/ea/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", 
"https://repository.jboss.org/nexus/content/repositories/snapshots/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "central", "https://repo.maven.apache.org/maven2", MavenRepositorySource.PROJECT, metadata ); assertContainsRepository( "explicit-repo", "http://localhost/maven2/", MavenRepositorySource.PROJECT, metadata ); } finally { resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath ); } } @Test public void testGetRemoteRepositoriesMetaData_WithDistributionManagementRepository() { final Project project = mock( Project.class ); final org.uberfire.backend.vfs.Path pomXmlPath = mock( org.uberfire.backend.vfs.Path.class ); final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + " <groupId>org.guvnor</groupId>\n" + " <artifactId>test</artifactId>\n" + " <version>0.0.3</version>\n" + " <distributionManagement>\n" + " <repository>\n" + " <id>distribution-repo</id>\n" + " <name>Distribution Repository</name>\n" + " <url>http://distribution-host/maven2/</url>\n" + " </repository>\n" + " </distributionManagement>\n" + "</project>"; when( project.getPomXMLPath() ).thenReturn( pomXmlPath ); when( pomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" ); when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml ); final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY ); try { System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() ); final Set<MavenRepositoryMetadata> metadata = service.getRemoteRepositoriesMetaData( project ); assertNotNull( metadata ); assertEquals( 7, metadata.size() ); assertContainsRepository( "local", m2Folder.toString(), 
MavenRepositorySource.LOCAL, metadata ); assertContainsRepository( "jboss-developer-repository-group", "https://repository.jboss.org/nexus/content/groups/developer/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", "http://repository.jboss.org/nexus/content/groups/public/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-origin-repository-group", "https://origin-repository.jboss.org/nexus/content/groups/ea/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", "https://repository.jboss.org/nexus/content/repositories/snapshots/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "central", "https://repo.maven.apache.org/maven2", MavenRepositorySource.PROJECT, metadata ); assertContainsRepository( "distribution-repo", "http://distribution-host/maven2/", MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata ); } finally { resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath ); } } @Test public void testGetRemoteRepositoriesMetaData_WithDistributionManagementSnapshotRepository_NonSnapshotVersion() { final Project project = mock( Project.class ); final org.uberfire.backend.vfs.Path pomXmlPath = mock( org.uberfire.backend.vfs.Path.class ); final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + " <groupId>org.guvnor</groupId>\n" + " <artifactId>test</artifactId>\n" + " <version>0.0.4</version>\n" + " <distributionManagement>\n" + " <snapshotRepository>\n" + " <id>distribution-repo</id>\n" + " <name>Distribution Repository</name>\n" + " <url>http://distribution-host/maven2/</url>\n" + " </snapshotRepository>\n" + " 
</distributionManagement>\n" + "</project>"; when( project.getPomXMLPath() ).thenReturn( pomXmlPath ); when( pomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" ); when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml ); final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY ); try { System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() ); final Set<MavenRepositoryMetadata> metadata = service.getRemoteRepositoriesMetaData( project ); assertNotNull( metadata ); assertEquals( 6, metadata.size() ); assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata ); assertContainsRepository( "jboss-developer-repository-group", "https://repository.jboss.org/nexus/content/groups/developer/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", "http://repository.jboss.org/nexus/content/groups/public/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-origin-repository-group", "https://origin-repository.jboss.org/nexus/content/groups/ea/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", "https://repository.jboss.org/nexus/content/repositories/snapshots/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "central", "https://repo.maven.apache.org/maven2", MavenRepositorySource.PROJECT, metadata ); } finally { resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath ); } } @Test public void testGetRemoteRepositoriesMetaData_WithDistributionManagementSnapshotRepository_SnapshotVersion() { final Project project = mock( Project.class ); final org.uberfire.backend.vfs.Path pomXmlPath = mock( org.uberfire.backend.vfs.Path.class ); final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + " <groupId>org.guvnor</groupId>\n" + " <artifactId>test</artifactId>\n" + " <version>0.0.1-SNAPSHOT</version>\n" + " <distributionManagement>\n" + " <snapshotRepository>\n" + " <id>distribution-repo</id>\n" + " <name>Distribution Repository</name>\n" + " <url>http://distribution-host/maven2/</url>\n" + " </snapshotRepository>\n" + " </distributionManagement>\n" + "</project>"; when( project.getPomXMLPath() ).thenReturn( pomXmlPath ); when( pomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" ); when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml ); final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY ); try { System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() ); final Set<MavenRepositoryMetadata> metadata = service.getRemoteRepositoriesMetaData( project ); assertNotNull( metadata ); assertEquals( 7, metadata.size() ); assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata ); assertContainsRepository( "jboss-developer-repository-group", "https://repository.jboss.org/nexus/content/groups/developer/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", "http://repository.jboss.org/nexus/content/groups/public/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-origin-repository-group", "https://origin-repository.jboss.org/nexus/content/groups/ea/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "jboss-public-repository-group", "https://repository.jboss.org/nexus/content/repositories/snapshots/", MavenRepositorySource.SETTINGS, metadata ); assertContainsRepository( "central", "https://repo.maven.apache.org/maven2", 
MavenRepositorySource.PROJECT, metadata ); assertContainsRepository( "distribution-repo", "http://distribution-host/maven2/", MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata ); } finally { resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath ); } } @Test public void testGetRepositoriesResolvingArtifact_NewGAV_NotInstalledNotDeployed() { final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY ); try { final GAV gav = new GAV( "org.guvnor", "test", "0.0.5" ); System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() ); final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav ); assertNotNull( metadata ); assertEquals( 0, metadata.size() ); } finally { resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath ); } } @Test public void testGetRepositoriesResolvingArtifact_NewGAV_IsInstalledNotDeployed() { final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY ); try { final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + " <groupId>org.guvnor</groupId>\n" + " <artifactId>test</artifactId>\n" + " <version>0.0.6</version>\n" + "</project>"; final GAV gav = new GAV( "org.guvnor", "test", "0.0.6" ); System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() ); final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) ); final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream ); installArtifact( mavenProject, pomXml ); final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav ); 
assertNotNull( metadata ); assertEquals( 1, metadata.size() ); assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata ); } finally { resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath ); } } @Test public void testGetRepositoriesResolvingArtifact_NewGAV_IsInstalledIsDeployed() { final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY ); java.nio.file.Path remoteRepositoryFolder = null; try { remoteRepositoryFolder = Files.createTempDirectory( "distribution-repo" ); final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + " <groupId>org.guvnor</groupId>\n" + " <artifactId>test</artifactId>\n" + " <version>0.0.7</version>\n" + " <distributionManagement>\n" + " <repository>\n" + " <id>distribution-repo</id>\n" + " <name>Distribution Repository</name>\n" + " <url>file://" + remoteRepositoryFolder.toString() + "</url>\n" + " </repository>\n" + " </distributionManagement>\n" + "</project>"; final GAV gav = new GAV( "org.guvnor", "test", "0.0.7" ); System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() ); final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) ); final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream ); installArtifact( mavenProject, pomXml ); deployArtifact( mavenProject, pomXml ); final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav ); assertNotNull( metadata ); assertEquals( 1, metadata.size() ); assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata ); } catch ( IOException ioe ) { fail( 
ioe.getMessage() ); } finally { tearDownMavenRepository( remoteRepositoryFolder ); resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath ); } } @Test public void testGetRepositoriesResolvingArtifact_NewGAV_NotInstalledIsDeployed() { final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY ); java.nio.file.Path remoteRepositoryFolder = null; try { remoteRepositoryFolder = Files.createTempDirectory( "distribution-repo" ); final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n" + " <modelVersion>4.0.0</modelVersion>\n" + " <groupId>org.guvnor</groupId>\n" + " <artifactId>test</artifactId>\n" + " <version>0.0.8</version>\n" + " <distributionManagement>\n" + " <repository>\n" + " <id>distribution-repo</id>\n" + " <name>Distribution Repository</name>\n" + " <url>file://" + remoteRepositoryFolder.toString() + "</url>\n" + " </repository>\n" + " </distributionManagement>\n" + "</project>"; final GAV gav = new GAV( "org.guvnor", "test", "0.0.8" ); System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() ); final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) ); final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream ); deployArtifact( mavenProject, pomXml ); final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav ); assertNotNull( metadata ); assertEquals( 0, metadata.size() ); } catch ( IOException ioe ) { fail( ioe.getMessage() ); } finally { tearDownMavenRepository( remoteRepositoryFolder ); resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath ); } } @Test public void 
testGetRepositoriesResolvingArtifact_NewGAV_IsInstalledIsDeployed_Filtered() {
    // Installed locally AND deployed remotely, but the query is filtered to LOCAL only.
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    java.nio.file.Path remoteRepositoryFolder = null;
    try {
        remoteRepositoryFolder = Files.createTempDirectory( "distribution-repo" );
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.9</version>\n"
                + " <distributionManagement>\n"
                + " <repository>\n"
                + " <id>distribution-repo</id>\n"
                + " <name>Distribution Repository</name>\n"
                + " <url>file://" + remoteRepositoryFolder.toString() + "</url>\n"
                + " </repository>\n"
                + " </distributionManagement>\n"
                + "</project>";
        final GAV gav = new GAV( "org.guvnor", "test", "0.0.9" );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        deployArtifact( mavenProject, pomXml );
        final Set<MavenRepositoryMetadata> metadata =
                service.getRepositoriesResolvingArtifact( gav,
                                                          new MavenRepositoryMetadata( "local", m2Folder.toString(), MavenRepositorySource.LOCAL ) );
        assertNotNull( metadata );
        assertEquals( 1, metadata.size() );
        assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata );
    } catch ( IOException ioe ) {
        fail( ioe.getMessage() );
    } finally {
        tearDownMavenRepository( remoteRepositoryFolder );
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ExplicitGAV_NotInstalledNotDeployed() {
    // GAV is nowhere resolvable; expect no repositories.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path pomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
            + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
            + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
            + " <modelVersion>4.0.0</modelVersion>\n"
            + " <groupId>org.guvnor</groupId>\n"
            + " <artifactId>test</artifactId>\n"
            + " <version>0.0.10</version>\n"
            + "</project>";
    final GAV gav = new GAV( "org.guvnor", "test", "0.0.10" );
    when( project.getPomXMLPath() ).thenReturn( pomXmlPath );
    when( pomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
    when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    try {
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav, project );
        assertNotNull( metadata );
        assertEquals( 0, metadata.size() );
    } finally {
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ExplicitGAV_IsInstalledNotDeployed() {
    // Installed into the local repository only; expect exactly the LOCAL repository.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    try {
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.11</version>\n"
                + "</project>";
        final GAV gav = new GAV( "org.guvnor", "test", "0.0.11" );
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav, project );
        assertNotNull( metadata );
        assertEquals( 1, metadata.size() );
        assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata );
    } finally {
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ExplicitGAV_IsInstalledIsDeployed() {
    // Installed AND deployed; expect LOCAL plus DISTRIBUTION_MANAGEMENT repositories.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    java.nio.file.Path remoteRepositoryFolder = null;
    try {
        remoteRepositoryFolder = Files.createTempDirectory( "distribution-repo" );
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.12</version>\n"
                + " <distributionManagement>\n"
                + " <repository>\n"
                + " <id>distribution-repo</id>\n"
                + " <name>Distribution Repository</name>\n"
                + " <url>file://" + remoteRepositoryFolder.toString() + "</url>\n"
                + " </repository>\n"
                + " </distributionManagement>\n"
                + "</project>";
        final GAV gav = new GAV( "org.guvnor", "test", "0.0.12" );
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        deployArtifact( mavenProject, pomXml );
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav, project );
        assertNotNull( metadata );
        assertEquals( 2, metadata.size() );
        assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata );
        assertContainsRepository( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata );
    } catch ( IOException ioe ) {
        fail( ioe.getMessage() );
    } finally {
        tearDownMavenRepository( remoteRepositoryFolder );
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ExplicitGAV_NotInstalledIsDeployed() {
    // Deployed remotely only; resolving twice must give the same single result.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    java.nio.file.Path remoteRepositoryFolder = null;
    try {
        remoteRepositoryFolder = Files.createTempDirectory( "distribution-repo" );
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.13</version>\n"
                + " <distributionManagement>\n"
                + " <repository>\n"
                + " <id>distribution-repo</id>\n"
                + " <name>Distribution Repository</name>\n"
                + " <url>file://" + remoteRepositoryFolder.toString() + "</url>\n"
                + " </repository>\n"
                + " </distributionManagement>\n"
                + "</project>";
        final GAV gav = new GAV( "org.guvnor", "test", "0.0.13" );
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        deployArtifact( mavenProject, pomXml );
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav, project );
        assertNotNull( metadata );
        assertEquals( 1, metadata.size() );
        assertContainsRepository( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata );
        final Set<MavenRepositoryMetadata> metadata2 = service.getRepositoriesResolvingArtifact( gav, project );
        assertNotNull( metadata2 );
        assertEquals( 1, metadata2.size() );
        assertContainsRepository( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata2 );
    } catch ( IOException ioe ) {
        fail( ioe.getMessage() );
    } finally {
        tearDownMavenRepository( remoteRepositoryFolder );
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ExplicitGAV_IsInstalledIsDeployed_Filtered() {
    // Installed AND deployed, but filtered to the DISTRIBUTION_MANAGEMENT repository only.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    java.nio.file.Path remoteRepositoryFolder = null;
    try {
        remoteRepositoryFolder = Files.createTempDirectory( "distribution-repo" );
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.14</version>\n"
                + " <distributionManagement>\n"
                + " <repository>\n"
                + " <id>distribution-repo</id>\n"
                + " <name>Distribution Repository</name>\n"
                + " <url>file://" + remoteRepositoryFolder.toString() + "</url>\n"
                + " </repository>\n"
                + " </distributionManagement>\n"
                + "</project>";
        final GAV gav = new GAV( "org.guvnor", "test", "0.0.14" );
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        deployArtifact( mavenProject, pomXml );
        final Set<MavenRepositoryMetadata> metadata =
                service.getRepositoriesResolvingArtifact( gav,
                                                          project,
                                                          new MavenRepositoryMetadata( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT ) );
        assertNotNull( metadata );
        assertEquals( 1, metadata.size() );
        assertContainsRepository( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata );
    } catch ( IOException ioe ) {
        fail( ioe.getMessage() );
    } finally {
        tearDownMavenRepository( remoteRepositoryFolder );
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ImplicitGAV_NotInstalledNotDeployed() {
    // GAV taken from the pom.xml itself; nothing resolvable anywhere.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path pomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
            + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
            + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
            + " <modelVersion>4.0.0</modelVersion>\n"
            + " <groupId>org.guvnor</groupId>\n"
            + " <artifactId>test</artifactId>\n"
            + " <version>0.0.15</version>\n"
            + "</project>";
    when( project.getPomXMLPath() ).thenReturn( pomXmlPath );
    when( pomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
    when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    try {
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( pomXml );
        assertNotNull( metadata );
        assertEquals( 0, metadata.size() );
    } finally {
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ImplicitGAV_IsInstalledNotDeployed() {
    // Installed locally only; expect exactly the LOCAL repository.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    try {
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.16</version>\n"
                + "</project>";
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( pomXml );
        assertNotNull( metadata );
        assertEquals( 1, metadata.size() );
        assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata );
    } finally {
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ImplicitGAV_IsInstalledIsDeployed() {
    // Installed AND deployed; expect LOCAL plus DISTRIBUTION_MANAGEMENT repositories.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    java.nio.file.Path remoteRepositoryFolder = null;
    try {
        remoteRepositoryFolder = Files.createTempDirectory( "distribution-repo" );
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.17</version>\n"
                + " <distributionManagement>\n"
                + " <repository>\n"
                + " <id>distribution-repo</id>\n"
                + " <name>Distribution Repository</name>\n"
                + " <url>file://" + remoteRepositoryFolder.toString() + "</url>\n"
                + " </repository>\n"
                + " </distributionManagement>\n"
                + "</project>";
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        deployArtifact( mavenProject, pomXml );
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( pomXml );
        assertNotNull( metadata );
        assertEquals( 2, metadata.size() );
        assertContainsRepository( "local", m2Folder.toString(), MavenRepositorySource.LOCAL, metadata );
        assertContainsRepository( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata );
    } catch ( IOException ioe ) {
        fail( ioe.getMessage() );
    } finally {
        tearDownMavenRepository( remoteRepositoryFolder );
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ImplicitGAV_NotInstalledIsDeployed() {
    // Deployed remotely only; resolving twice must give the same single result.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    java.nio.file.Path remoteRepositoryFolder = null;
    try {
        remoteRepositoryFolder = Files.createTempDirectory( "distribution-repo" );
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.18</version>\n"
                + " <distributionManagement>\n"
                + " <repository>\n"
                + " <id>distribution-repo</id>\n"
                + " <name>Distribution Repository</name>\n"
                + " <url>file://" + remoteRepositoryFolder.toString() + "</url>\n"
                + " </repository>\n"
                + " </distributionManagement>\n"
                + "</project>";
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        deployArtifact( mavenProject, pomXml );
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( pomXml );
        assertNotNull( metadata );
        assertEquals( 1, metadata.size() );
        assertContainsRepository( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata );
        final Set<MavenRepositoryMetadata> metadata2 = service.getRepositoriesResolvingArtifact( pomXml );
        assertNotNull( metadata2 );
        assertEquals( 1, metadata2.size() );
        assertContainsRepository( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata2 );
    } catch ( IOException ioe ) {
        fail( ioe.getMessage() );
    } finally {
        tearDownMavenRepository( remoteRepositoryFolder );
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_ImplicitGAV_IsInstalledIsDeployed_Filtered() {
    // Installed AND deployed, but filtered to the DISTRIBUTION_MANAGEMENT repository only.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    java.nio.file.Path remoteRepositoryFolder = null;
    try {
        remoteRepositoryFolder = Files.createTempDirectory( "distribution-repo" );
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.19</version>\n"
                + " <distributionManagement>\n"
                + " <repository>\n"
                + " <id>distribution-repo</id>\n"
                + " <name>Distribution Repository</name>\n"
                + " <url>file://" + remoteRepositoryFolder.toString() + "</url>\n"
                + " </repository>\n"
                + " </distributionManagement>\n"
                + "</project>";
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        deployArtifact( mavenProject, pomXml );
        final Set<MavenRepositoryMetadata> metadata =
                service.getRepositoriesResolvingArtifact( pomXml,
                                                          new MavenRepositoryMetadata( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT ) );
        assertNotNull( metadata );
        assertEquals( 1, metadata.size() );
        assertContainsRepository( "distribution-repo", "file://" + remoteRepositoryFolder.toString(), MavenRepositorySource.DISTRIBUTION_MANAGEMENT, metadata );
    } catch ( IOException ioe ) {
        fail( ioe.getMessage() );
    } finally {
        tearDownMavenRepository( remoteRepositoryFolder );
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_Disabled1() {
    // With the conflicting-GAV check disabled the resolver reports no repositories.
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    try {
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.20</version>\n"
                + "</project>";
        final GAV gav = new GAV( "org.guvnor", "test", "0.0.20" );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        doReturn( true ).when( gavPreferences ).isConflictingGAVCheckDisabled();
        //Re-instantiate service to pick-up System Property
        service = new ProjectRepositoryResolverImpl( ioService,
                                                     gavPreferencesProvider,
                                                     scopeResolutionStrategies );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        //Without being disabled this would return one resolved (LOCAL) Repository
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav );
        assertNotNull( metadata );
        assertEquals( 0, metadata.size() );
    } finally {
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_Disabled2() {
    // Disabled check, explicit GAV + Project variant.
    final Project project = mock( Project.class );
    doReturn( "default://master@a/a%20b" ).when( project ).getIdentifier();
    doCallRealMethod().when( project ).getEncodedIdentifier();
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    try {
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.21</version>\n"
                + "</project>";
        final GAV gav = new GAV( "org.guvnor", "test", "0.0.21" );
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        doReturn( true ).when( gavPreferences ).isConflictingGAVCheckDisabled();
        //Re-instantiate service to pick-up System Property
        service = new ProjectRepositoryResolverImpl( ioService,
                                                     gavPreferencesProvider,
                                                     scopeResolutionStrategies );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        //Without being disabled this would return one resolved (LOCAL) Repository
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( gav, project );
        assertNotNull( metadata );
        assertEquals( 0, metadata.size() );
    } finally {
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

@Test
public void testGetRepositoriesResolvingArtifact_Disabled3() {
    // Disabled check, implicit GAV (pom.xml String) variant.
    final Project project = mock( Project.class );
    final org.uberfire.backend.vfs.Path vfsPomXmlPath = mock( org.uberfire.backend.vfs.Path.class );
    final String oldSettingsXmlPath = System.getProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY );
    try {
        final String pomXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n"
                + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n"
                + " <modelVersion>4.0.0</modelVersion>\n"
                + " <groupId>org.guvnor</groupId>\n"
                + " <artifactId>test</artifactId>\n"
                + " <version>0.0.22</version>\n"
                + "</project>";
        when( project.getPomXMLPath() ).thenReturn( vfsPomXmlPath );
        when( vfsPomXmlPath.toURI() ).thenReturn( "default://p0/pom.xml" );
        when( ioService.readAllString( any( Path.class ) ) ).thenReturn( pomXml );
        System.setProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, settingsXmlPath.toString() );
        doReturn( true ).when( gavPreferences ).isConflictingGAVCheckDisabled();
        //Re-instantiate service to pick-up System Property
        service = new ProjectRepositoryResolverImpl( ioService,
                                                     gavPreferencesProvider,
                                                     scopeResolutionStrategies );
        final InputStream pomStream = new ByteArrayInputStream( pomXml.getBytes( StandardCharsets.UTF_8 ) );
        final MavenProject mavenProject = MavenProjectLoader.parseMavenPom( pomStream );
        installArtifact( mavenProject, pomXml );
        //Without being disabled this would return one resolved (LOCAL) Repository
        final Set<MavenRepositoryMetadata> metadata = service.getRepositoriesResolvingArtifact( pomXml );
        assertNotNull( metadata );
        assertEquals( 0, metadata.size() );
    } finally {
        resetSystemProperty( MavenSettings.CUSTOM_SETTINGS_PROPERTY, oldSettingsXmlPath );
    }
}

// Asserts that a repository with the given id/url/source is present in the metadata.
private void assertContainsRepository( final String id,
                                       final String url,
                                       final MavenRepositorySource source,
                                       final Collection<MavenRepositoryMetadata> metadata ) {
    for ( MavenRepositoryMetadata md : metadata ) {
        if ( md.getId().equals( id ) && md.getUrl().equals( url ) && md.getSource().equals( source ) ) {
            return;
        }
    }
    fail( "Repository Id '" + id + "' not found." );
}

// Restores (or clears) a System Property captured before the test mutated it.
private void resetSystemProperty( final String systemPropertyName,
                                  final String oldSystemPropertyValue ) {
    if ( oldSystemPropertyValue != null ) {
        System.setProperty( systemPropertyName, oldSystemPropertyValue );
    } else {
        System.clearProperty( systemPropertyName );
    }
}
}
/* * Licensed to GraphHopper GmbH under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * GraphHopper GmbH licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.graphhopper.jsprit.analysis.toolbox; import com.graphhopper.jsprit.core.problem.VehicleRoutingProblem; import com.graphhopper.jsprit.core.problem.job.Job; import com.graphhopper.jsprit.core.problem.job.Service; import com.graphhopper.jsprit.core.problem.job.Shipment; import com.graphhopper.jsprit.core.problem.solution.VehicleRoutingProblemSolution; import com.graphhopper.jsprit.core.problem.solution.route.VehicleRoute; import com.graphhopper.jsprit.core.problem.solution.route.activity.DeliveryActivity; import com.graphhopper.jsprit.core.problem.solution.route.activity.PickupActivity; import com.graphhopper.jsprit.core.problem.solution.route.activity.TourActivity; import com.graphhopper.jsprit.core.problem.solution.route.activity.TourActivity.JobActivity; import com.graphhopper.jsprit.core.problem.vehicle.Vehicle; import com.graphhopper.jsprit.core.util.Time; import org.graphstream.graph.Edge; import org.graphstream.graph.Graph; import org.graphstream.graph.Node; import org.graphstream.graph.implementations.MultiGraph; import org.graphstream.ui.swingViewer.ViewPanel; import org.graphstream.ui.view.View; import org.graphstream.ui.view.Viewer; import javax.swing.*; import java.awt.*; public class 
GraphStreamViewer { public static class StyleSheets { public static String BLUE_FOREST = "graph { fill-color: #141F2E; }" + "node {" + " size: 7px, 7px;" + " fill-color: #A0FFA0;" + " text-alignment: at-right;" + " stroke-mode: plain;" + " stroke-color: #999;" + " stroke-width: 1.0;" + " text-font: couriernew;" + " text-offset: 2,-5;" + " text-size: 8;" + "}" + "node.pickup {" + " fill-color: #6CC644;" + "}" + "node.delivery {" + " fill-color: #f93;" + "}" + "node.pickupInRoute {" + " fill-color: #6CC644;" + " stroke-mode: plain;" + " stroke-color: #333;" + " stroke-width: 2.0;" + "}" + "node.deliveryInRoute {" + " fill-color: #f93;" + " stroke-mode: plain;" + " stroke-color: #333;" + " stroke-width: 2.0;" + "}" + "node.depot {" + " fill-color: #BD2C00;" + " size: 10px, 10px;" + " shape: box;" + "}" + "node.removed {" + " fill-color: #FF8080;" + " size: 10px, 10px;" + " stroke-mode: plain;" + " stroke-color: #CCF;" + " stroke-width: 2.0;" + " shadow-mode: gradient-radial;" + " shadow-width: 10px; shadow-color: #EEF, #000; shadow-offset: 0px;" + "}" + "edge {" + " fill-color: #D3D3D3;" + " arrow-size: 6px,3px;" + "}" + // "edge.inserted {" + // " fill-color: #A0FFA0;" + // " arrow-size: 6px,3px;" + // " shadow-mode: gradient-radial;" + // " shadow-width: 10px; shadow-color: #EEF, #000; shadow-offset: 0px;" + // "}" + // "edge.removed {" + // " fill-color: #FF0000;" + // " arrow-size: 6px,3px;" + // " shadow-mode: gradient-radial;" + // " shadow-width: 10px; shadow-color: #EEF, #000; shadow-offset: 0px;" + // "}" + "edge.shipment {" + " fill-color: #999;" + " arrow-size: 6px,3px;" + "}"; @SuppressWarnings("UnusedDeclaration") public static String SIMPLE_WHITE = "node {" + " size: 10px, 10px;" + " fill-color: #6CC644;" + " text-alignment: at-right;" + " stroke-mode: plain;" + " stroke-color: #999;" + " stroke-width: 1.0;" + " text-font: couriernew;" + " text-offset: 2,-5;" + " text-size: 8;" + "}" + "node.pickup {" + " fill-color: #6CC644;" + "}" + "node.delivery {" + 
" fill-color: #f93;" + "}" + "node.pickupInRoute {" + " fill-color: #6CC644;" + " stroke-mode: plain;" + " stroke-color: #333;" + " stroke-width: 2.0;" + "}" + "node.deliveryInRoute {" + " fill-color: #f93;" + " stroke-mode: plain;" + " stroke-color: #333;" + " stroke-width: 2.0;" + "}" + "node.depot {" + " fill-color: #BD2C00;" + " size: 10px, 10px;" + " shape: box;" + "}" + "node.removed {" + " fill-color: #BD2C00;" + " size: 10px, 10px;" + " stroke-mode: plain;" + " stroke-color: #333;" + " stroke-width: 2.0;" + "}" + "edge {" + " fill-color: #333;" + " arrow-size: 6px,3px;" + "}" + "edge.shipment {" + " fill-color: #999;" + " arrow-size: 6px,3px;" + "}"; } public static Graph createMultiGraph(String name, String style) { Graph g = new MultiGraph(name); g.addAttribute("ui.quality"); g.addAttribute("ui.antialias"); g.addAttribute("ui.stylesheet", style); return g; } public static ViewPanel createEmbeddedView(Graph graph, double scaling) { Viewer viewer = new Viewer(graph, Viewer.ThreadingModel.GRAPH_IN_ANOTHER_THREAD); ViewPanel view = viewer.addDefaultView(false); view.setPreferredSize(new Dimension((int) (698 * scaling), (int) (440 * scaling))); return view; } public static String STYLESHEET = "node {" + " size: 10px, 10px;" + " fill-color: #6CC644;" + " text-alignment: at-right;" + " stroke-mode: plain;" + " stroke-color: #999;" + " stroke-width: 1.0;" + " text-font: couriernew;" + " text-offset: 2,-5;" + " text-size: 8;" + "}" + "node.pickup {" + " fill-color: #6CC644;" + "}" + "node.delivery {" + " fill-color: #f93;" + "}" + "node.pickupInRoute {" + " fill-color: #6CC644;" + " stroke-mode: plain;" + " stroke-color: #333;" + " stroke-width: 2.0;" + "}" + "node.deliveryInRoute {" + " fill-color: #f93;" + " stroke-mode: plain;" + " stroke-color: #333;" + " stroke-width: 2.0;" + "}" + "node.depot {" + " fill-color: #BD2C00;" + " size: 10px, 10px;" + " shape: box;" + "}" + "node.removed {" + " fill-color: #BD2C00;" + " size: 10px, 10px;" + " stroke-mode: plain;" 
+ " stroke-color: #333;" + " stroke-width: 2.0;" + "}" + "edge {" + " fill-color: #333;" + " arrow-size: 6px,3px;" + "}" + "edge.shipment {" + " fill-color: #999;" + " arrow-size: 6px,3px;" + "}"; public static enum Label { NO_LABEL, ID, JOB_NAME, ARRIVAL_TIME, DEPARTURE_TIME, ACTIVITY } private static class Center { final double x; final double y; public Center(double x, double y) { super(); this.x = x; this.y = y; } } private Label label = Label.NO_LABEL; private long renderDelay_in_ms = 0; private boolean renderShipments = false; private Center center; private VehicleRoutingProblem vrp; private VehicleRoutingProblemSolution solution; private double zoomFactor; private double scaling = 1.0; public GraphStreamViewer(VehicleRoutingProblem vrp) { super(); this.vrp = vrp; } public GraphStreamViewer(VehicleRoutingProblem vrp, VehicleRoutingProblemSolution solution) { super(); this.vrp = vrp; this.solution = solution; } public GraphStreamViewer labelWith(Label label) { this.label = label; return this; } public GraphStreamViewer setRenderDelay(long ms) { this.renderDelay_in_ms = ms; return this; } public GraphStreamViewer setRenderShipments(boolean renderShipments) { this.renderShipments = renderShipments; return this; } public GraphStreamViewer setGraphStreamFrameScalingFactor(double factor) { this.scaling = factor; return this; } /** * Sets the camera-view. Center describes the center-focus of the camera and zoomFactor its * zoomFactor. * <p> * <p>a zoomFactor < 1 zooms in and > 1 out. 
* * @param centerX x coordinate of center * @param centerY y coordinate of center * @param zoomFactor zoom factor * @return the viewer */ public GraphStreamViewer setCameraView(double centerX, double centerY, double zoomFactor) { center = new Center(centerX, centerY); this.zoomFactor = zoomFactor; return this; } public void display() { System.setProperty("org.graphstream.ui.renderer", "org.graphstream.ui.j2dviewer.J2DGraphRenderer"); Graph g = createMultiGraph("g"); ViewPanel view = createEmbeddedView(g, scaling); createJFrame(view, scaling); render(g, view); } private JFrame createJFrame(ViewPanel view, double scaling) { JFrame jframe = new JFrame(); JPanel basicPanel = new JPanel(); basicPanel.setLayout(new BoxLayout(basicPanel, BoxLayout.Y_AXIS)); //result-panel JPanel resultPanel = createResultPanel(); //graphstream-panel JPanel graphStreamPanel = new JPanel(); graphStreamPanel.setPreferredSize(new Dimension((int) (800 * scaling), (int) (460 * scaling))); graphStreamPanel.setBackground(Color.WHITE); JPanel graphStreamBackPanel = new JPanel(); graphStreamBackPanel.setPreferredSize(new Dimension((int) (700 * scaling), (int) (450 * scaling))); graphStreamBackPanel.setBorder(BorderFactory.createLineBorder(Color.LIGHT_GRAY, 1)); graphStreamBackPanel.setBackground(Color.WHITE); graphStreamBackPanel.add(view); graphStreamPanel.add(graphStreamBackPanel); //setup basicPanel basicPanel.add(resultPanel); basicPanel.add(graphStreamPanel); // basicPanel.add(legendPanel); //put it together jframe.add(basicPanel); //conf jframe jframe.setSize((int) (800 * scaling), (int) (580 * scaling)); jframe.setLocationRelativeTo(null); jframe.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); jframe.setVisible(true); jframe.pack(); jframe.setTitle("jsprit - GraphStream"); return jframe; } private Graph createMultiGraph(String name) { return GraphStreamViewer.createMultiGraph(name, STYLESHEET); } private void render(Graph g, ViewPanel view) { if (center != null) { 
view.resizeFrame(view.getWidth(), view.getHeight()); alignCamera(view); } for (Vehicle vehicle : vrp.getVehicles()) { renderVehicle(g, vehicle, label); sleep(renderDelay_in_ms); } for (Job j : vrp.getJobs().values()) { if (j instanceof Service) { renderService(g, (Service) j, label); } else if (j instanceof Shipment) { renderShipment(g, (Shipment) j, label, renderShipments); } sleep(renderDelay_in_ms); } if (solution != null) { int routeId = 1; for (VehicleRoute route : solution.getRoutes()) { renderRoute(g, route, routeId, renderDelay_in_ms, label); sleep(renderDelay_in_ms); routeId++; } } } private void alignCamera(View view) { view.getCamera().setViewCenter(center.x, center.y, 0); view.getCamera().setViewPercent(zoomFactor); } private JLabel createEmptyLabel() { JLabel emptyLabel1 = new JLabel(); emptyLabel1.setPreferredSize(new Dimension((int) (40 * scaling), (int) (25 * scaling))); return emptyLabel1; } private JPanel createResultPanel() { int width = 800; int height = 50; JPanel panel = new JPanel(); panel.setPreferredSize(new Dimension((int) (width * scaling), (int) (height * scaling))); panel.setBackground(Color.WHITE); JPanel subpanel = new JPanel(); subpanel.setLayout(new FlowLayout()); subpanel.setPreferredSize(new Dimension((int) (700 * scaling), (int) (40 * scaling))); subpanel.setBackground(Color.WHITE); subpanel.setBorder(BorderFactory.createLineBorder(Color.LIGHT_GRAY, 1)); Font font = Font.decode("couriernew"); JLabel jobs = new JLabel("jobs"); jobs.setFont(font); jobs.setPreferredSize(new Dimension((int) (40 * scaling), (int) (25 * scaling))); int noJobs = 0; if (this.vrp != null) noJobs = this.vrp.getJobs().values().size(); JFormattedTextField nJobs = new JFormattedTextField(noJobs); nJobs.setFont(font); nJobs.setEditable(false); nJobs.setBorder(BorderFactory.createEmptyBorder()); nJobs.setBackground(new Color(230, 230, 230)); JLabel costs = new JLabel("costs"); costs.setFont(font); costs.setPreferredSize(new Dimension((int) (40 * scaling), (int) 
(25 * scaling))); JFormattedTextField costsVal = new JFormattedTextField(getSolutionCosts()); costsVal.setFont(font); costsVal.setEditable(false); costsVal.setBorder(BorderFactory.createEmptyBorder()); costsVal.setBackground(new Color(230, 230, 230)); JLabel vehicles = new JLabel("routes"); vehicles.setFont(font); vehicles.setPreferredSize(new Dimension((int) (40 * scaling), (int) (25 * scaling))); // vehicles.setForeground(Color.DARK_GRAY); JFormattedTextField vehVal = new JFormattedTextField(getNoRoutes()); vehVal.setFont(font); vehVal.setEditable(false); vehVal.setBorder(BorderFactory.createEmptyBorder()); // vehVal.setForeground(Color.DARK_GRAY); vehVal.setBackground(new Color(230, 230, 230)); //platzhalter JLabel placeholder1 = new JLabel(); placeholder1.setPreferredSize(new Dimension((int) (60 * scaling), (int) (25 * scaling))); JLabel emptyLabel1 = createEmptyLabel(); subpanel.add(jobs); subpanel.add(nJobs); subpanel.add(emptyLabel1); subpanel.add(costs); subpanel.add(costsVal); JLabel emptyLabel2 = createEmptyLabel(); subpanel.add(emptyLabel2); subpanel.add(vehicles); subpanel.add(vehVal); panel.add(subpanel); return panel; } private Integer getNoRoutes() { if (solution != null) return solution.getRoutes().size(); return 0; } private Double getSolutionCosts() { if (solution != null) return solution.getCost(); return 0.0; } private void renderShipment(Graph g, Shipment shipment, Label label, boolean renderShipments) { Node n1 = g.addNode(makeId(shipment.getId(), shipment.getPickupLocation().getId())); if (label.equals(Label.ID)) n1.addAttribute("ui.label", shipment.getId()); n1.addAttribute("x", shipment.getPickupLocation().getCoordinate().getX()); n1.addAttribute("y", shipment.getPickupLocation().getCoordinate().getY()); n1.setAttribute("ui.class", "pickup"); Node n2 = g.addNode(makeId(shipment.getId(), shipment.getDeliveryLocation().getId())); if (label.equals(Label.ID)) n2.addAttribute("ui.label", shipment.getId()); n2.addAttribute("x", 
shipment.getDeliveryLocation().getCoordinate().getX()); n2.addAttribute("y", shipment.getDeliveryLocation().getCoordinate().getY()); n2.setAttribute("ui.class", "delivery"); if (renderShipments) { Edge s = g.addEdge(shipment.getId(), makeId(shipment.getId(), shipment.getPickupLocation().getId()), makeId(shipment.getId(), shipment.getDeliveryLocation().getId()), true); s.addAttribute("ui.class", "shipment"); } } private void sleep(long renderDelay_in_ms2) { try { Thread.sleep(renderDelay_in_ms2); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } private void renderService(Graph g, Service service, Label label) { Node n = g.addNode(makeId(service.getId(), service.getLocation().getId())); if (label.equals(Label.ID)) n.addAttribute("ui.label", service.getId()); n.addAttribute("x", service.getLocation().getCoordinate().getX()); n.addAttribute("y", service.getLocation().getCoordinate().getY()); if (service.getType().equals("pickup")) n.setAttribute("ui.class", "pickup"); if (service.getType().equals("delivery")) n.setAttribute("ui.class", "delivery"); } private String makeId(String id, String locationId) { return id + "_" + locationId; } private void renderVehicle(Graph g, Vehicle vehicle, Label label) { String nodeId = makeId(vehicle.getId(), vehicle.getStartLocation().getId()); Node vehicleStart = g.addNode(nodeId); if (label.equals(Label.ID)) vehicleStart.addAttribute("ui.label", "depot"); // if(label.equals(Label.ACTIVITY)) n.addAttribute("ui.label", "start"); vehicleStart.addAttribute("x", vehicle.getStartLocation().getCoordinate().getX()); vehicleStart.addAttribute("y", vehicle.getStartLocation().getCoordinate().getY()); vehicleStart.setAttribute("ui.class", "depot"); if (!vehicle.getStartLocation().getId().equals(vehicle.getEndLocation().getId())) { Node vehicleEnd = g.addNode(makeId(vehicle.getId(), vehicle.getEndLocation().getId())); if (label.equals(Label.ID)) vehicleEnd.addAttribute("ui.label", "depot"); 
vehicleEnd.addAttribute("x", vehicle.getEndLocation().getCoordinate().getX()); vehicleEnd.addAttribute("y", vehicle.getEndLocation().getCoordinate().getY()); vehicleEnd.setAttribute("ui.class", "depot"); } } private void renderRoute(Graph g, VehicleRoute route, int routeId, long renderDelay_in_ms, Label label) { int vehicle_edgeId = 1; String prevIdentifier = makeId(route.getVehicle().getId(), route.getVehicle().getStartLocation().getId()); if (label.equals(Label.ACTIVITY) || label.equals(Label.JOB_NAME)) { Node n = g.getNode(prevIdentifier); n.addAttribute("ui.label", "start"); } for (TourActivity act : route.getActivities()) { Job job = ((JobActivity) act).getJob(); String currIdentifier = makeId(job.getId(), act.getLocation().getId()); if (label.equals(Label.ACTIVITY)) { Node actNode = g.getNode(currIdentifier); actNode.addAttribute("ui.label", act.getName()); } else if (label.equals(Label.JOB_NAME)) { Node actNode = g.getNode(currIdentifier); actNode.addAttribute("ui.label", job.getName()); } else if (label.equals(Label.ARRIVAL_TIME)) { Node actNode = g.getNode(currIdentifier); actNode.addAttribute("ui.label", Time.parseSecondsToTime(act.getArrTime())); } else if (label.equals(Label.DEPARTURE_TIME)) { Node actNode = g.getNode(currIdentifier); actNode.addAttribute("ui.label", Time.parseSecondsToTime(act.getEndTime())); } g.addEdge(makeEdgeId(routeId, vehicle_edgeId), prevIdentifier, currIdentifier, true); if (act instanceof PickupActivity) g.getNode(currIdentifier).addAttribute("ui.class", "pickupInRoute"); else if (act instanceof DeliveryActivity) g.getNode(currIdentifier).addAttribute("ui.class", "deliveryInRoute"); prevIdentifier = currIdentifier; vehicle_edgeId++; sleep(renderDelay_in_ms); } if (route.getVehicle().isReturnToDepot()) { String lastIdentifier = makeId(route.getVehicle().getId(), route.getVehicle().getEndLocation().getId()); g.addEdge(makeEdgeId(routeId, vehicle_edgeId), prevIdentifier, lastIdentifier, true); } } private String makeEdgeId(int 
routeId, int vehicle_edgeId) { return Integer.valueOf(routeId).toString() + "." + Integer.valueOf(vehicle_edgeId).toString(); } // public void saveAsPNG(String filename){ // // } }
/*
 * Copyright (C) 2010-2011, Gostai S.A.S.
 *
 * This software is provided "as is" without warranty of any kind,
 * either expressed or implied, including but not limited to the
 * implied warranties of fitness for a particular purpose.
 *
 * See the LICENSE file for more information.
 */

package examples.urbiplay;

import java.io.IOException;
import java.io.*;

import urbi.*;

/**
 * Replays a recorded URBI motion file. Each record is sent to a robot through
 * a {@link UClient} connection, or dumped to stdout when the robot argument is
 * {@code '-'} (human-readable) or {@code '+'} (urbi-script format).
 *
 * <p>File layout (little-endian): a header of {@code "URBI"}, a device count,
 * then per device a NUL-terminated name, a 2-byte id and a 1-byte type;
 * followed by records of 4-byte timestamp, 2-byte device id, 2 padding bytes
 * and a 4-byte IEEE-754 float value.
 */
public class URBIPlay {

    static {
        System.loadLibrary("urbijava");
    }

    /** '-' = human-readable stdout dump, '+' = urbi-script stdout dump. */
    static public char dumpMode;
    /** Number of devices declared in the file header. */
    static public int devCount = 0;
    /** Input stream over the recorded file. */
    static public BufferedInputStream in;
    /** Device names indexed by the ids used in the data records. */
    static public String devices[];
    /** Connection to the robot, or null when dumping to stdout. */
    static public UClient robot = null;

    /**
     * Converts a little-endian byte array to an int. The byte at index 3
     * keeps its sign (it is the most significant byte of a 4-byte value);
     * all other bytes are treated as unsigned. Bytes beyond index 3 in a
     * longer array contribute nothing when they are zero, matching the
     * original floating-point implementation.
     */
    private static int convertByteToInt(byte[] b) {
        int res = 0;
        for (int i = b.length - 1; i >= 0; i--) {
            // Shifts replace the original Math.pow(256, i) double arithmetic.
            res += ((i == 3 ? b[i] : (b[i] & 0xff)) << (8 * i));
        }
        return res;
    }

    /**
     * Converts a little-endian 2-byte array to a short. The byte at index 1
     * keeps its sign; index 0 is treated as unsigned.
     */
    private static short convertByteToShort(byte[] b) {
        short res = 0;
        for (int i = b.length - 1; i >= 0; i--) {
            res += ((i == 1 ? b[i] : (b[i] & 0xff)) << (8 * i));
        }
        return res;
    }

    /**
     * Reads exactly {@code len} bytes from {@link #in} into {@code b}.
     * The original code ignored the return value of read(), silently
     * accepting short reads; a truncated stream now raises EOFException.
     *
     * @throws IOException on read error or premature end of stream
     */
    private static void readFully(byte[] b, int off, int len) throws IOException {
        while (len > 0) {
            int n = in.read(b, off, len);
            if (n < 0)
                throw new EOFException("unexpected end of file");
            off += n;
            len -= n;
        }
    }

    /**
     * Parses the file header, filling {@link #devCount} and {@link #devices}.
     *
     * @return false if the magic is wrong or the stream is truncated
     */
    private static boolean parseHeader() {
        byte[] buff1 = new byte[4];
        byte[] buff2 = new byte[4];
        try {
            readFully(buff1, 0, 4);
            if (!new String(buff1).equals("URBI"))
                return false;
            readFully(buff2, 0, 4);
            devCount = convertByteToInt(buff2);
            devices = new String[devCount];
            for (int i = 0; i < devCount; i++) {
                // NOTE(review): names longer than 255 bytes would overflow this
                // buffer, as in the original code — TODO confirm the format caps
                // device-name length.
                byte[] device = new byte[256];
                byte[] tmp2 = new byte[2];
                byte[] tmp3 = new byte[4];
                int off = 0;
                do {
                    readFully(device, off, 1);
                    off++;
                } while (device[off - 1] != 0);
                devices[i] = new String(device, 0, off - 1);
                readFully(tmp2, 0, 2); // device id: consumed but unused here
                readFully(tmp3, 0, 1); // device type: consumed but unused here
            }
        } catch (IOException e) {
            return false;
        }
        return true;
    }

    /**
     * Replays every record of the stream, pacing the commands against the
     * recorded timestamps when talking to a robot, and printing a rough
     * commands-per-second figure to stderr every 1000 records.
     */
    private static void play() {
        int timestamp;
        long prevtime = 0;
        long starttime = 0;
        long ttime = 0;
        long tick = 0;
        boolean pending = false;
        // Buffers hoisted out of the loop; the original reallocated (and then
        // pointlessly nulled) them on every iteration.
        byte[] tmp1 = new byte[4];
        byte[] tmp2 = new byte[2];
        byte[] tmp3 = new byte[4];
        byte[] tmp4 = new byte[2];
        try {
            // available()-based termination is kept from the original; it is
            // adequate for a plain file stream.
            while (in.available() > 0) {
                readFully(tmp1, 0, 4);
                timestamp = convertByteToInt(tmp1);
                if (starttime == 0)
                    starttime = System.currentTimeMillis() - timestamp - 1;
                long sleepstop = timestamp + starttime;
                readFully(tmp2, 0, 2);
                short id = convertByteToShort(tmp2);
                // FIX: the original tested 'id > devCount', letting id == devCount
                // (and negative ids) index out of bounds.
                if (id < 0 || id >= devCount) {
                    System.err.println("device id " + id + " not found");
                    continue;
                }
                String device = devices[id];
                readFully(tmp4, 0, 2); // 2 padding bytes
                readFully(tmp3, 0, 4);
                float angle = Float.intBitsToFloat(convertByteToInt(tmp3));
                if (robot != null) {
                    // FIX: compute the delay once so a clock tick between the
                    // check and the sleep cannot produce a negative argument
                    // (Thread.sleep throws on negative durations).
                    long delay = sleepstop - System.currentTimeMillis();
                    if (delay > 0) {
                        if (pending) {
                            robot.send("noop;");
                            pending = false;
                        }
                        try {
                            Thread.sleep(delay);
                        } catch (InterruptedException e) {
                            // Preserve the interrupt status instead of swallowing it.
                            Thread.currentThread().interrupt();
                        }
                    }
                    pending = true;
                    robot.send(device + ".val = " + angle + "&");
                } else {
                    if (dumpMode == '-') {
                        System.out.println("[" + timestamp + "] " + device + ".val = " + angle);
                    } else {
                        if (timestamp != prevtime && prevtime != 0)
                            System.out.println("noop;");
                        prevtime = timestamp;
                        System.out.println(device + ".val = " + angle + "&");
                    }
                }
                if (tick % 1000 == 0) {
                    if (tick != 0) {
                        // FIX: the original divided two integers before the float
                        // assignment, truncating the rate.
                        float cps = 1000000f / (System.currentTimeMillis() - ttime);
                        System.err.println(cps + " cps");
                    }
                    ttime = System.currentTimeMillis();
                }
                tick++;
            }
        } catch (IOException e) {
            System.err.println("Error while reading");
        }
        if (robot != null && pending)
            robot.send("noop;");
        if (robot == null && dumpMode != '-')
            System.out.println("noop;");
    }

    /**
     * Entry point: {@code urbiplay robot file [loop]}. Pass '-' or '+' as the
     * robot name to dump to stdout instead of connecting.
     */
    public static void main(String[] args) {
        if (args.length < 2) {
            System.err.println("usage: urbiplay robot file [loop] ");
            System.err.println("\t Pass '-' as 'robotname' to dump to stdout in human-readable format,");
            System.err.println("\t or '+' to dump to stdout in urbi format.");
            System.exit(1);
        }
        // Parsed for argument validation; looping is not implemented yet.
        int loop = 0;
        if (args.length >= 3)
            loop = Integer.parseInt(args[2]);
        if (args[0].equals("-") || args[0].equals("+")) {
            robot = null;
            dumpMode = args[0].charAt(0);
        } else {
            robot = new UClient(args[0]);
            robot.send("motoron;");
        }
        try {
            in = new BufferedInputStream(new FileInputStream(args[1]));
        } catch (FileNotFoundException e) {
            System.err.println("error opening file");
            System.exit(2);
        }
        if (!parseHeader()) {
            System.err.println("error parsing header");
            System.exit(3);
        }
        play();
        try {
            in.close();
        } catch (IOException e) {
            // Best-effort close; nothing useful to do on failure.
        }
        urbi.execute();
    }
}
package com.github.sarxos.webcam.ds.raspberrypi.codes; import java.awt.Transparency; import java.awt.color.ColorSpace; import java.awt.image.BufferedImage; import java.awt.image.ColorModel; import java.awt.image.ComponentColorModel; import java.awt.image.ComponentSampleModel; import java.awt.image.DataBuffer; import java.awt.image.DataBufferByte; import java.awt.image.Raster; import java.awt.image.SampleModel; import java.awt.image.WritableRaster; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.zip.DataFormatException; import java.util.zip.Inflater; public class PNGDecoder { public enum TextureFormat { ALPHA, LUMINANCE, RGB, RGBA, ABGR } private static final byte[] SIGNATURE = { (byte) 137, 80, 78, 71, 13, 10, 26, 10 }; private static final int IHDR = 0x49484452; private static final int PLTE = 0x504C5445; private static final int tRNS = 0x74524E53; private static final int IDAT = 0x49444154; // private static final int IEND = 0x49454E44; public static final byte COLOR_GREYSCALE = 0; public static final byte COLOR_TRUECOLOR = 2; public static final byte COLOR_INDEXED = 3; public static final byte COLOR_GREYALPHA = 4; public static final byte COLOR_TRUEALPHA = 6; // ************************default decode image settings******************** private static final int DATA_TYPE = DataBuffer.TYPE_BYTE; private static final ColorSpace COLOR_SPACE = ColorSpace.getInstance(ColorSpace.CS_sRGB); private static int[] OFFSET = new int[] { 0 }; private static final int[] BITS = { 8, 8, 8 }; private static int[] BAND_OFFSETS = new int[] { 0, 1, 2 }; // ************************************************************************ private final InputStream input; // private final CRC32 crc; private final byte[] buffer; private int chunkLength; private int chunkType; private int chunkRemaining; private int width; private int height; private int bitdepth; private int colorType; 
private int bytesPerPixel; private byte[] palette; private byte[] paletteA; private byte[] transPixel; private boolean validPNG = false; /** * * Creates a new instance of PNGDecoder. * * @param input * png stream * @throws IOException */ public PNGDecoder(InputStream input) throws IOException { this.input = input; // this.crc = new CRC32(); this.buffer = new byte[4096]; int read = input.read(buffer, 0, SIGNATURE.length);// just skip sign // no check if (read != SIGNATURE.length || !checkSignature(buffer)) { // throw new IOException("Not a valid PNG file"); return; } else { validPNG = true; } openChunk(IHDR); readIHDR(); closeChunk(); searchIDAT: for (;;) { openChunk(); switch (chunkType) { case IDAT: break searchIDAT; case PLTE: readPLTE(); break; case tRNS: readtRNS(); break; } closeChunk(); } if (colorType == COLOR_INDEXED && palette == null) { throw new IOException("Missing PLTE chunk"); } } public int getHeight() { return height; } public int getWidth() { return width; } public boolean hasAlpha() { return colorType == COLOR_TRUEALPHA || paletteA != null || transPixel != null; } public boolean isRGB() { return colorType == COLOR_TRUEALPHA || colorType == COLOR_TRUECOLOR || colorType == COLOR_INDEXED; } /** * read just one png file, if invalid, return null * * @return * @throws IOException */ public final BufferedImage decode() throws IOException { if (!validPNG) { return null; } ColorModel cmodel = new ComponentColorModel(COLOR_SPACE, BITS, false, false, Transparency.OPAQUE, DATA_TYPE); SampleModel smodel = new ComponentSampleModel(DATA_TYPE, width, height, 3, width * 3, BAND_OFFSETS); byte[] bytes = new byte[width * height * 3];// must new each time! 
byte[][] data = new byte[][] { bytes }; ByteBuffer buffer = ByteBuffer.wrap(bytes); decode(buffer, TextureFormat.RGB); DataBufferByte dbuf = new DataBufferByte(data, bytes.length, OFFSET); WritableRaster raster = Raster.createWritableRaster(smodel, dbuf, null); BufferedImage bi = new BufferedImage(cmodel, raster, false, null); bi.flush(); return bi; } /** * decode image data to buffer, mapping to awt type byte. if rgb, * r,g,b,r,g,b...,r,g,b,... if abgr, a,b,g,r.... if rgba, r,g,b,a.... * * @param buffer * @param stride * @param fmt * @throws IOException */ private final void decode(ByteBuffer buffer, TextureFormat fmt) throws IOException { byte[] curLine = new byte[width * bytesPerPixel + 1]; byte[] prevLine = new byte[width * bytesPerPixel + 1]; final Inflater inflater = new Inflater(); try { for (int y = 0; y < height; y++) { readChunkUnzip(inflater, curLine, 0, curLine.length); unfilter(curLine, prevLine); switch (colorType) { case COLOR_TRUECOLOR: switch (fmt) { case ABGR: copyRGBtoABGR(buffer, curLine); break; case RGBA: copyRGBtoRGBA(buffer, curLine); break; case RGB: copy(buffer, curLine); break; default: throw new UnsupportedOperationException("Unsupported format for this image"); } break; case COLOR_TRUEALPHA: switch (fmt) { case ABGR: copyRGBAtoABGR(buffer, curLine); break; case RGBA: copy(buffer, curLine); break; case RGB: copyRGBAtoRGB(buffer, curLine); break; default: throw new UnsupportedOperationException("Unsupported format for this image"); } break; case COLOR_GREYSCALE: switch (fmt) { case LUMINANCE: case ALPHA: copy(buffer, curLine); break; default: throw new UnsupportedOperationException("Unsupported format for this image"); } break; case COLOR_INDEXED: switch (fmt) { case ABGR: copyPALtoABGR(buffer, curLine); break; case RGBA: copyPALtoRGBA(buffer, curLine); break; default: throw new UnsupportedOperationException("Unsupported format for this image"); } break; default: throw new UnsupportedOperationException("Not yet implemented"); } byte[] tmp 
= curLine; curLine = prevLine; prevLine = tmp; } } finally { inflater.end(); } } private void copy(ByteBuffer buffer, byte[] curLine) { buffer.put(curLine, 1, curLine.length - 1); } private void copyRGBtoABGR(ByteBuffer buffer, byte[] curLine) { if (transPixel != null) { byte tr = transPixel[1]; byte tg = transPixel[3]; byte tb = transPixel[5]; for (int i = 1, n = curLine.length; i < n; i += 3) { byte r = curLine[i]; byte g = curLine[i + 1]; byte b = curLine[i + 2]; byte a = (byte) 0xFF; if (r == tr && g == tg && b == tb) { a = 0; } buffer.put(a).put(b).put(g).put(r); } } else { for (int i = 1, n = curLine.length; i < n; i += 3) { buffer.put((byte) 0xFF).put(curLine[i + 2]).put(curLine[i + 1]).put(curLine[i]); } } } private void copyRGBtoRGBA(ByteBuffer buffer, byte[] curLine) { if (transPixel != null) { byte tr = transPixel[1]; byte tg = transPixel[3]; byte tb = transPixel[5]; for (int i = 1, n = curLine.length; i < n; i += 3) { byte r = curLine[i]; byte g = curLine[i + 1]; byte b = curLine[i + 2]; byte a = (byte) 0xFF; if (r == tr && g == tg && b == tb) { a = 0; } buffer.put(r).put(g).put(b).put(a); } } else { for (int i = 1, n = curLine.length; i < n; i += 3) { buffer.put(curLine[i]).put(curLine[i + 1]).put(curLine[i + 2]).put((byte) 0xFF); } } } private void copyRGBAtoABGR(ByteBuffer buffer, byte[] curLine) { for (int i = 1, n = curLine.length; i < n; i += 4) { buffer.put(curLine[i + 3]).put(curLine[i + 2]).put(curLine[i + 1]).put(curLine[i]); } } private void copyRGBAtoRGB(ByteBuffer buffer, byte[] curLine) { for (int i = 1, n = curLine.length; i < n; i += 4) { buffer.put(curLine[i]).put(curLine[i + 1]).put(curLine[i + 2]); } } private void copyPALtoABGR(ByteBuffer buffer, byte[] curLine) { if (paletteA != null) { for (int i = 1, n = curLine.length; i < n; i += 1) { int idx = curLine[i] & 255; byte r = palette[idx * 3 + 0]; byte g = palette[idx * 3 + 1]; byte b = palette[idx * 3 + 2]; byte a = paletteA[idx]; buffer.put(a).put(b).put(g).put(r); } } else { for 
(int i = 1, n = curLine.length; i < n; i += 1) { int idx = curLine[i] & 255; byte r = palette[idx * 3 + 0]; byte g = palette[idx * 3 + 1]; byte b = palette[idx * 3 + 2]; byte a = (byte) 0xFF; buffer.put(a).put(b).put(g).put(r); } } } private void copyPALtoRGBA(ByteBuffer buffer, byte[] curLine) { if (paletteA != null) { for (int i = 1, n = curLine.length; i < n; i += 1) { int idx = curLine[i] & 255; byte r = palette[idx * 3 + 0]; byte g = palette[idx * 3 + 1]; byte b = palette[idx * 3 + 2]; byte a = paletteA[idx]; buffer.put(r).put(g).put(b).put(a); } } else { for (int i = 1, n = curLine.length; i < n; i += 1) { int idx = curLine[i] & 255; byte r = palette[idx * 3 + 0]; byte g = palette[idx * 3 + 1]; byte b = palette[idx * 3 + 2]; byte a = (byte) 0xFF; buffer.put(r).put(g).put(b).put(a); } } } private void unfilter(byte[] curLine, byte[] prevLine) throws IOException { switch (curLine[0]) { case 0: // none break; case 1: unfilterSub(curLine); break; case 2: unfilterUp(curLine, prevLine); break; case 3: unfilterAverage(curLine, prevLine); break; case 4: unfilterPaeth(curLine, prevLine); break; default: throw new IOException("invalide filter type in scanline: " + curLine[0]); } } private void unfilterSub(byte[] curLine) { final int bpp = this.bytesPerPixel; final int lineSize = width * bpp; for (int i = bpp + 1; i <= lineSize; ++i) { curLine[i] += curLine[i - bpp]; } } private void unfilterUp(byte[] curLine, byte[] prevLine) { final int bpp = this.bytesPerPixel; final int lineSize = width * bpp; for (int i = 1; i <= lineSize; ++i) { curLine[i] += prevLine[i]; } } private void unfilterAverage(byte[] curLine, byte[] prevLine) { final int bpp = this.bytesPerPixel; final int lineSize = width * bpp; int i; for (i = 1; i <= bpp; ++i) { curLine[i] += (byte) ((prevLine[i] & 0xFF) >>> 1); } for (; i <= lineSize; ++i) { curLine[i] += (byte) (((prevLine[i] & 0xFF) + (curLine[i - bpp] & 0xFF)) >>> 1); } } private void unfilterPaeth(byte[] curLine, byte[] prevLine) { final int bpp 
= this.bytesPerPixel; final int lineSize = width * bpp; int i; for (i = 1; i <= bpp; ++i) { curLine[i] += prevLine[i]; } for (; i <= lineSize; ++i) { int a = curLine[i - bpp] & 255; int b = prevLine[i] & 255; int c = prevLine[i - bpp] & 255; int p = a + b - c; int pa = p - a; if (pa < 0) pa = -pa; int pb = p - b; if (pb < 0) pb = -pb; int pc = p - c; if (pc < 0) pc = -pc; if (pa <= pb && pa <= pc) c = a; else if (pb <= pc) c = b; curLine[i] += (byte) c; } } private void readIHDR() throws IOException { checkChunkLength(13); readChunk(buffer, 0, 13); width = readInt(buffer, 0); height = readInt(buffer, 4); bitdepth = buffer[8] & 255; colorType = buffer[9] & 255; if (bitdepth != 8) { throw new IOException("Unsupported bit depth: " + bitdepth); } switch (colorType) { case COLOR_GREYSCALE: bytesPerPixel = 1; break; case COLOR_TRUECOLOR: bytesPerPixel = 3; break; case COLOR_TRUEALPHA: bytesPerPixel = 4; break; case COLOR_INDEXED: bytesPerPixel = 1; break; default: throw new IOException("unsupported color format"); } if (buffer[10] != 0) { throw new IOException("unsupported compression method"); } if (buffer[11] != 0) { throw new IOException("unsupported filtering method"); } if (buffer[12] != 0) { throw new IOException("unsupported interlace method"); } } private void readPLTE() throws IOException { int paletteEntries = chunkLength / 3; if (paletteEntries < 1 || paletteEntries > 256 || (chunkLength % 3) != 0) { throw new IOException("PLTE chunk has wrong length"); } palette = new byte[paletteEntries * 3]; readChunk(palette, 0, palette.length); } private void readtRNS() throws IOException { switch (colorType) { case COLOR_GREYSCALE: checkChunkLength(2); transPixel = new byte[2]; readChunk(transPixel, 0, 2); break; case COLOR_TRUECOLOR: checkChunkLength(6); transPixel = new byte[6]; readChunk(transPixel, 0, 6); break; case COLOR_INDEXED: if (palette == null) { throw new IOException("tRNS chunk without PLTE chunk"); } paletteA = new byte[palette.length / 3]; 
Arrays.fill(paletteA, (byte) 0xFF); readChunk(paletteA, 0, paletteA.length); break; default: // just ignore it } } private void closeChunk() throws IOException { if (chunkRemaining > 0) { // just skip the rest and the CRC skip(chunkRemaining + 4); } else { readFully(buffer, 0, 4); // read crc /* int expectedCrc = */readInt(buffer, 0); /* * int computedCrc = (int) crc.getValue(); if (computedCrc != expectedCrc) { * throw new IOException("Invalid CRC"); } */ } chunkRemaining = 0; chunkLength = 0; chunkType = 0; } private void openChunk() throws IOException { readFully(buffer, 0, 8); chunkLength = readInt(buffer, 0); chunkType = readInt(buffer, 4); chunkRemaining = chunkLength; // crc.reset(); // crc.update(buffer, 4, 4); // only chunkType } private void openChunk(int expected) throws IOException { openChunk(); if (chunkType != expected) { throw new IOException("Expected chunk: " + Integer.toHexString(expected)); } } private void checkChunkLength(int expected) throws IOException { if (chunkLength != expected) { throw new IOException("Chunk has wrong size"); } } private int readChunk(byte[] buffer, int offset, int length) throws IOException { if (length > chunkRemaining) { length = chunkRemaining; } readFully(buffer, offset, length); // crc.update(buffer, offset, length); chunkRemaining -= length; return length; } private void refillInflater(Inflater inflater) throws IOException { while (chunkRemaining == 0) { closeChunk(); openChunk(IDAT); } int read = readChunk(buffer, 0, buffer.length); inflater.setInput(buffer, 0, read); } private void readChunkUnzip(Inflater inflater, byte[] buffer, int offset, int length) throws IOException { assert (buffer != this.buffer); try { do { int read = inflater.inflate(buffer, offset, length); if (read <= 0) { if (inflater.finished()) { throw new EOFException(); } if (inflater.needsInput()) { refillInflater(inflater); } else { throw new IOException("Can't inflate " + length + " bytes"); } } else { offset += read; length -= read; } } 
while (length > 0); } catch (DataFormatException ex) { throw (IOException) (new IOException("inflate error").initCause(ex)); } } private void readFully(byte[] buffer, int offset, int length) throws IOException { do { int read = input.read(buffer, offset, length); if (read < 0) { throw new EOFException(); } offset += read; length -= read; } while (length > 0); } private int readInt(byte[] buffer, int offset) { return ((buffer[offset]) << 24) | ((buffer[offset + 1] & 255) << 16) | ((buffer[offset + 2] & 255) << 8) | ((buffer[offset + 3] & 255)); } private void skip(long amount) throws IOException { while (amount > 0) { long skipped = input.skip(amount); if (skipped < 0) { throw new EOFException(); } amount -= skipped; } } private static boolean checkSignature(byte[] buffer) { for (int i = 0; i < SIGNATURE.length; i++) { if (buffer[i] != SIGNATURE[i]) { return false; } } return true; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.chukwa.util;

import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

/*>>>
import checkers.nullness.quals.*;
import checkers.regex.quals.*;
*/

/**
 * Utility methods for regular expressions, most notably for testing whether
 * a string is a syntactically valid regular expression.
 */
public class RegexUtil {

  /**
   * A checked version of {@link PatternSyntaxException}.
   * <p>
   * This exception is useful when an illegal regex is detected but the
   * contextual information to report a helpful error message is not available
   * at the current depth in the call stack. By using a checked
   * PatternSyntaxException the error must be handled up the call stack where
   * a better error message can be reported.
   * <p>
   * Typical usage is:
   * <pre>
   * void myMethod(...) throws CheckedPatternSyntaxException {
   *   ...
   *   if (! isRegex(myString)) {
   *     throw new CheckedPatternSyntaxException(...);
   *   }
   *   ... Pattern.compile(myString) ...
   * </pre>
   *
   * Simply calling <tt>Pattern.compile</tt> would have a similar effect,
   * in that <tt>PatternSyntaxException</tt> would be thrown at run time if
   * <tt>myString</tt> is not a regular expression. There are two problems
   * with such an approach. First, a client of <tt>myMethod</tt> might
   * forget to handle the exception, since <tt>PatternSyntaxException</tt>
   * is not checked. Also, the Regex Checker would issue a warning about
   * the call to <tt>Pattern.compile</tt> that might throw an exception.
   * The above usage pattern avoids both problems.
   *
   * @see PatternSyntaxException
   */
  public static class CheckedPatternSyntaxException extends Exception {

    private static final long serialVersionUID = 6266881831979001480L;

    /** The unchecked exception this checked exception wraps and delegates to. */
    private final PatternSyntaxException pse;

    /**
     * Constructs a new CheckedPatternSyntaxException equivalent to the
     * given {@link PatternSyntaxException}.
     * <p>
     * Consider calling this constructor with the result of
     * {@link RegexUtil#regexError}.
     * @param pse is PatternSyntaxException object
     */
    public CheckedPatternSyntaxException(PatternSyntaxException pse) {
      this.pse = pse;
    }

    /**
     * Constructs a new CheckedPatternSyntaxException.
     *
     * @param desc A description of the error
     * @param regex The erroneous pattern
     * @param index The approximate index in the pattern of the error,
     *              or {@code -1} if the index is not known
     */
    public CheckedPatternSyntaxException(String desc, String regex, int index) {
      this(new PatternSyntaxException(desc, regex, index));
    }

    /**
     * Retrieves the description of the error.
     *
     * @return The description of the error
     */
    public String getDescription() {
      return pse.getDescription();
    }

    /**
     * Retrieves the error index.
     *
     * @return The approximate index in the pattern of the error, or {@code -1}
     *         if the index is not known
     */
    public int getIndex() {
      return pse.getIndex();
    }

    /**
     * Returns a multi-line string containing the description of the syntax
     * error and its index, the erroneous regular-expression pattern, and a
     * visual indication of the error index within the pattern.
     *
     * @return The full detail message
     */
    public String getMessage() {
      return pse.getMessage();
    }

    /**
     * Retrieves the erroneous regular-expression pattern.
     *
     * @return The erroneous pattern
     */
    public String getPattern() {
      return pse.getPattern();
    }
  }

  /** Utility class; not instantiable. */
  private RegexUtil() {
    throw new AssertionError("Class RegexUtil shouldn't be instantiated");
  }

  /**
   * Returns true if the argument is a syntactically valid regular
   * expression.
   * @param s is regular expression
   * @return true if {@code s} is a valid regular expression
   */
  public static boolean isRegex(String s) {
    return isRegex(s, 0);
  }

  /**
   * Returns true if the argument is a syntactically valid regular
   * expression with at least the given number of groups.
   * @param s is regular expression
   * @param groups is number of groups to match
   * @return true if {@code s} is a valid regular expression containing at
   *         least {@code groups} capturing groups
   */
  /*>>>
  @SuppressWarnings("regex")    // RegexUtil
  */
  /*@Pure*/
  public static boolean isRegex(String s, int groups) {
    Pattern p;
    try {
      p = Pattern.compile(s);
    } catch (PatternSyntaxException e) {
      // Not a valid regex at all.
      return false;
    }
    return getGroupCount(p) >= groups;
  }

  /**
   * Returns true if the argument, taken as a one-character string, is a
   * syntactically valid regular expression.
   * @param c is a character
   * @return true if {@code c} by itself is a valid regular expression
   */
  public static boolean isRegex(char c) {
    return isRegex(Character.toString(c));
  }

  /**
   * Returns null if the argument is a syntactically valid regular
   * expression. Otherwise returns a string describing why the argument is
   * not a regex.
   * @param s is regular expression
   * @return null if s is a regular expression, otherwise an error description
   */
  public static String regexError(String s) {
    return regexError(s, 0);
  }

  /**
   * Returns null if the argument is a syntactically valid regular
   * expression with at least the given number of groups. Otherwise returns
   * a string describing why the argument is not a regex.
   * @param s is regular expression
   * @param groups is number of groups to match
   * @return null if s is a regular expression, otherwise an error description
   */
  public static String regexError(String s, int groups) {
    try {
      Pattern p = Pattern.compile(s);
      int actualGroups = getGroupCount(p);
      if (actualGroups < groups) {
        return regexErrorMessage(s, groups, actualGroups);
      }
    } catch (PatternSyntaxException e) {
      return e.getMessage();
    }
    return null;
  }

  /**
   * Returns null if the argument is a syntactically valid regular
   * expression. Otherwise returns a PatternSyntaxException describing
   * why the argument is not a regex.
   * @param s is regular expression
   * @return null if s is a regular expression, otherwise the exception
   *         describing the syntax error
   */
  public static PatternSyntaxException regexException(String s) {
    return regexException(s, 0);
  }

  /**
   * Returns null if the argument is a syntactically valid regular
   * expression with at least the given number of groups. Otherwise returns a
   * PatternSyntaxException describing why the argument is not a regex.
   * @param s is regular expression
   * @param groups is number of groups to match
   * @return null if s is a regular expression, otherwise the exception
   *         describing the syntax error or group-count shortfall
   */
  public static PatternSyntaxException regexException(String s, int groups) {
    try {
      Pattern p = Pattern.compile(s);
      int actualGroups = getGroupCount(p);
      if (actualGroups < groups) {
        // Too few groups is reported with index -1 since there is no single
        // offending character position in the pattern.
        return new PatternSyntaxException(regexErrorMessage(s, groups, actualGroups), s, -1);
      }
    } catch (PatternSyntaxException pse) {
      return pse;
    }
    return null;
  }

  /**
   * Returns the argument as a {@code @Regex String} if it is a regex,
   * otherwise throws an error. The purpose of this method is to suppress Regex
   * Checker warnings. Once the Regex Checker supports flow-sensitivity, it
   * should be very rarely needed.
   * @param s is a regular expression
   * @return {@code s} itself, if it is a syntactically valid regular
   *         expression; otherwise an {@link Error} is thrown
   */
  public static String asRegex(String s) {
    return asRegex(s, 0);
  }

  /**
   * Returns the argument as a {@code @Regex(groups) String} if it is a regex
   * with at least the given number of groups, otherwise throws an error. The
   * purpose of this method is to suppress Regex Checker warnings. Once the
   * Regex Checker supports flow-sensitivity, it should be very rarely needed.
   * @param s is a regular expression
   * @param groups is number of group to match
   * @return {@code s} itself, if it is a syntactically valid regular
   *         expression with at least {@code groups} capturing groups;
   *         otherwise an {@link Error} is thrown
   */
  public static String asRegex(String s, int groups) {
    try {
      Pattern p = Pattern.compile(s);
      int actualGroups = getGroupCount(p);
      if (actualGroups < groups) {
        throw new Error(regexErrorMessage(s, groups, actualGroups));
      }
      return s;
    } catch (PatternSyntaxException e) {
      throw new Error(e);
    }
  }

  /**
   * Generates an error message for s when expectedGroups are needed, but s
   * only has actualGroups.
   */
  private static String regexErrorMessage(String s, int expectedGroups,
      int actualGroups) {
    return "regex \"" + s + "\" has " + actualGroups + " groups, but "
        + expectedGroups + " groups are needed.";
  }

  /**
   * Returns the count of capturing groups in the argument pattern.
   */
  private static int getGroupCount(Pattern p) {
    // A matcher over any input reports the pattern's group count.
    return p.matcher("").groupCount();
  }
}
/**
 * Copyright (c) 2012-2017 Netflix, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.msl.crypto;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Random;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;

import com.netflix.msl.MslConstants.CipherSpec;
import com.netflix.msl.MslCryptoException;
import com.netflix.msl.MslEncodingException;
import com.netflix.msl.MslError;
import com.netflix.msl.crypto.MslCiphertextEnvelope.Version;
import com.netflix.msl.entityauth.EntityAuthenticationScheme;
import com.netflix.msl.io.MslEncoderException;
import com.netflix.msl.io.MslEncoderFactory;
import com.netflix.msl.io.MslEncoderFormat;
import com.netflix.msl.io.MslObject;
import com.netflix.msl.test.ExpectedMslException;
import com.netflix.msl.util.MockMslContext;
import com.netflix.msl.util.MslContext;

/**
 * MSL encryption envelope unit tests.
 *
 * Runs as a JUnit suite of two inner classes: {@link Version1} exercises the
 * key-ID-based envelope constructor and {@link Version2} exercises the
 * cipher-spec-based constructor across every {@link CipherSpec} value.
 *
 * @author Wesley Miaw &lt;wmiaw@netflix.com&gt;
 */
@RunWith(Suite.class)
@SuiteClasses({MslCiphertextEnvelopeSuite.Version1.class,
               MslCiphertextEnvelopeSuite.Version2.class})
public class MslCiphertextEnvelopeSuite {
    // MSL object field names used by the envelope encoding.
    /** Key version. */
    private final static String KEY_VERSION = "version";
    /** Key key ID. */
    private final static String KEY_KEY_ID = "keyid";
    /** Key cipherspec. */
    private final static String KEY_CIPHERSPEC = "cipherspec";
    /** Key initialization vector. */
    private final static String KEY_IV = "iv";
    /** Key ciphertext. */
    private final static String KEY_CIPHERTEXT = "ciphertext";
    /** Key SHA-256. */
    private final static String KEY_SHA256 = "sha256";

    /** MSL encoder format. */
    private final static MslEncoderFormat ENCODER_FORMAT = MslEncoderFormat.JSON;

    /** Key ID. */
    private final static String KEY_ID = "keyid";

    // Shared fixture data; filled with random bytes in setup().
    private static final byte[] IV = new byte[16];
    private static final byte[] CIPHERTEXT = new byte[32];

    /** MSL encoder factory. */
    private static MslEncoderFactory encoder;

    @BeforeClass
    public static void setup() throws MslEncodingException, MslCryptoException {
        // Randomize the IV and ciphertext so encode/parse round-trips are not
        // accidentally passing on all-zero data.
        final Random random = new Random();
        random.nextBytes(IV);
        random.nextBytes(CIPHERTEXT);
        final MslContext ctx = new MockMslContext(EntityAuthenticationScheme.PSK, false);
        encoder = ctx.getMslEncoderFactory();
    }

    @AfterClass
    public static void teardown() {
        // Teardown causes problems because the data is shared by the inner
        // classes, so don't do any cleanup.
    }

    /** Tests for version 1 envelopes, which are identified by a key ID. */
    public static class Version1 {
        @Rule
        public ExpectedMslException thrown = ExpectedMslException.none();

        @Test
        public void ctors() throws MslCryptoException, MslEncodingException, MslEncoderException {
            // Construct, verify accessors, then round-trip through encode/parse.
            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(KEY_ID, IV, CIPHERTEXT);
            assertEquals(KEY_ID, envelope.getKeyId());
            assertNull(envelope.getCipherSpec());
            assertArrayEquals(IV, envelope.getIv());
            assertArrayEquals(CIPHERTEXT, envelope.getCiphertext());
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            assertNotNull(encode);

            final MslObject mo = encoder.parseObject(encode);
            final MslCiphertextEnvelope moEnvelope = new MslCiphertextEnvelope(mo);
            assertEquals(envelope.getKeyId(), moEnvelope.getKeyId());
            assertEquals(envelope.getCipherSpec(), moEnvelope.getCipherSpec());
            assertArrayEquals(envelope.getIv(), moEnvelope.getIv());
            assertArrayEquals(envelope.getCiphertext(), moEnvelope.getCiphertext());
            final byte[] moEncode = moEnvelope.toMslEncoding(encoder, ENCODER_FORMAT);
            assertArrayEquals(encode, moEncode);
        }

        @Test
        public void ctorsNullIv() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // A null IV is permitted and must survive the encode/parse round-trip.
            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(KEY_ID, null, CIPHERTEXT);
            assertEquals(KEY_ID, envelope.getKeyId());
            assertNull(envelope.getCipherSpec());
            assertNull(envelope.getIv());
            assertArrayEquals(CIPHERTEXT, envelope.getCiphertext());
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            assertNotNull(encode);

            final MslObject mo = encoder.parseObject(encode);
            final MslCiphertextEnvelope moEnvelope = new MslCiphertextEnvelope(mo);
            assertEquals(envelope.getKeyId(), moEnvelope.getKeyId());
            assertEquals(envelope.getCipherSpec(), moEnvelope.getCipherSpec());
            assertArrayEquals(envelope.getIv(), moEnvelope.getIv());
            assertArrayEquals(envelope.getCiphertext(), moEnvelope.getCiphertext());
            final byte[] moEncode = moEnvelope.toMslEncoding(encoder, ENCODER_FORMAT);
            assertArrayEquals(encode, moEncode);
        }

        @Test
        public void encode() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // Version 1 encodings carry the key ID but no cipher spec.
            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(KEY_ID, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            assertEquals(KEY_ID, mo.getString(KEY_KEY_ID));
            assertFalse(mo.has(KEY_CIPHERSPEC));
            assertArrayEquals(IV, mo.getBytes(KEY_IV));
            assertArrayEquals(CIPHERTEXT, mo.getBytes(KEY_CIPHERTEXT));
        }

        @Test
        public void encodeNullIv() throws MslCryptoException, MslEncodingException, MslEncoderException {
            // With a null IV the IV field must be omitted from the encoding.
            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(KEY_ID, null, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            assertEquals(KEY_ID, mo.getString(KEY_KEY_ID));
            assertFalse(mo.has(KEY_CIPHERSPEC));
            assertFalse(mo.has(KEY_IV));
            assertArrayEquals(CIPHERTEXT, mo.getBytes(KEY_CIPHERTEXT));
        }

        @Test
        public void missingKeyId() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // Removing the key ID must fail parsing with MSL_PARSE_ERROR.
            thrown.expect(MslEncodingException.class);
            thrown.expectMslError(MslError.MSL_PARSE_ERROR);

            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(KEY_ID, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            mo.remove(KEY_KEY_ID);
            new MslCiphertextEnvelope(mo);
        }

        @Test
        public void missingCiphertext() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // Removing the ciphertext must fail parsing with MSL_PARSE_ERROR.
            thrown.expect(MslEncodingException.class);
            thrown.expectMslError(MslError.MSL_PARSE_ERROR);

            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(KEY_ID, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            mo.remove(KEY_CIPHERTEXT);
            new MslCiphertextEnvelope(mo);
        }

        @Test
        public void missingSha256() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // Removing the SHA-256 field must fail parsing with MSL_PARSE_ERROR.
            thrown.expect(MslEncodingException.class);
            thrown.expectMslError(MslError.MSL_PARSE_ERROR);

            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(KEY_ID, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            mo.remove(KEY_SHA256);
            new MslCiphertextEnvelope(mo);
        }

        @Test
        public void incorrectSha256() throws MslEncoderException, MslCryptoException, MslEncodingException {
            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(KEY_ID, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            // Corrupt the hash; parsing must still succeed with the original
            // field values intact (the hash value is not validated here).
            final byte[] hash = mo.getBytes(KEY_SHA256);
            assertNotNull(hash);
            hash[0] += 1;
            mo.put(KEY_SHA256, hash);
            final MslCiphertextEnvelope moEnvelope = new MslCiphertextEnvelope(mo);
            assertEquals(KEY_ID, moEnvelope.getKeyId());
            assertNull(moEnvelope.getCipherSpec());
            assertArrayEquals(IV, moEnvelope.getIv());
            assertArrayEquals(CIPHERTEXT, moEnvelope.getCiphertext());
        }
    }

    /**
     * Tests for version 2 envelopes, which are identified by a cipher spec.
     * Parameterized over every {@link CipherSpec} value.
     */
    @RunWith(Parameterized.class)
    public static class Version2 {
        @Rule
        public ExpectedMslException thrown = ExpectedMslException.none();

        @Parameters
        public static Collection<Object[]> data() {
            // One test run per cipher specification.
            final List<Object[]> params = new ArrayList<Object[]>();
            for (final CipherSpec spec : CipherSpec.values())
                params.add(new Object[] { spec });
            return params;
        }

        /** Cipher specification. */
        private final CipherSpec cipherSpec;

        /**
         * Create a new Version 2 test set with the provided cipher
         * specification.
         *
         * @param cipherSpec the cipher specification.
         */
        public Version2(final CipherSpec cipherSpec) {
            this.cipherSpec = cipherSpec;
        }

        @Test
        public void ctors() throws MslCryptoException, MslEncodingException, MslEncoderException {
            // Construct, verify accessors, then round-trip through encode/parse.
            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, IV, CIPHERTEXT);
            assertNull(envelope.getKeyId());
            assertEquals(cipherSpec, envelope.getCipherSpec());
            assertArrayEquals(IV, envelope.getIv());
            assertArrayEquals(CIPHERTEXT, envelope.getCiphertext());
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            assertNotNull(encode);

            final MslObject mo = encoder.parseObject(encode);
            final MslCiphertextEnvelope moEnvelope = new MslCiphertextEnvelope(mo);
            assertEquals(envelope.getKeyId(), moEnvelope.getKeyId());
            assertEquals(envelope.getCipherSpec(), moEnvelope.getCipherSpec());
            assertArrayEquals(envelope.getIv(), moEnvelope.getIv());
            assertArrayEquals(envelope.getCiphertext(), moEnvelope.getCiphertext());
            final byte[] moEncode = moEnvelope.toMslEncoding(encoder, ENCODER_FORMAT);
            assertArrayEquals(encode, moEncode);
        }

        @Test
        public void ctorsNullIv() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // A null IV is permitted and must survive the encode/parse round-trip.
            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, null, CIPHERTEXT);
            assertNull(envelope.getKeyId());
            assertEquals(cipherSpec, envelope.getCipherSpec());
            assertNull(envelope.getIv());
            assertArrayEquals(CIPHERTEXT, envelope.getCiphertext());
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            assertNotNull(encode);

            final MslObject mo = encoder.parseObject(encode);
            final MslCiphertextEnvelope moEnvelope = new MslCiphertextEnvelope(mo);
            assertEquals(envelope.getKeyId(), moEnvelope.getKeyId());
            assertEquals(envelope.getCipherSpec(), moEnvelope.getCipherSpec());
            assertArrayEquals(envelope.getIv(), moEnvelope.getIv());
            assertArrayEquals(envelope.getCiphertext(), moEnvelope.getCiphertext());
            final byte[] moEncode = moEnvelope.toMslEncoding(encoder, ENCODER_FORMAT);
            assertArrayEquals(encode, moEncode);
        }

        @Test
        public void encode() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // Version 2 encodings carry the version number and cipher spec
            // but no key ID.
            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            assertEquals(Version.V2.intValue(), mo.getInt(KEY_VERSION));
            assertFalse(mo.has(KEY_KEY_ID));
            assertEquals(cipherSpec.toString(), mo.getString(KEY_CIPHERSPEC));
            assertArrayEquals(IV, mo.getBytes(KEY_IV));
            assertArrayEquals(CIPHERTEXT, mo.getBytes(KEY_CIPHERTEXT));
        }

        @Test
        public void encodeNullIv() throws MslCryptoException, MslEncodingException, MslEncoderException {
            // With a null IV the IV field must be omitted from the encoding.
            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, null, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            assertEquals(Version.V2.intValue(), mo.getInt(KEY_VERSION));
            assertFalse(mo.has(KEY_KEY_ID));
            assertEquals(cipherSpec.toString(), mo.getString(KEY_CIPHERSPEC));
            assertFalse(mo.has(KEY_IV));
            assertArrayEquals(CIPHERTEXT, mo.getBytes(KEY_CIPHERTEXT));
        }

        @Test
        public void misingVersion() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // Removing the version field must fail parsing with MSL_PARSE_ERROR.
            thrown.expect(MslEncodingException.class);
            thrown.expectMslError(MslError.MSL_PARSE_ERROR);

            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            mo.remove(KEY_VERSION);
            new MslCiphertextEnvelope(mo);
        }

        @Test
        public void invalidVersion() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // A non-numeric version must fail parsing with MSL_PARSE_ERROR.
            thrown.expect(MslEncodingException.class);
            thrown.expectMslError(MslError.MSL_PARSE_ERROR);

            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            mo.put(KEY_VERSION, "x");
            new MslCiphertextEnvelope(mo);
        }

        @Test
        public void unknownVersion() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // A numeric but unrecognized version must fail with
            // UNIDENTIFIED_CIPHERTEXT_ENVELOPE.
            thrown.expect(MslCryptoException.class);
            thrown.expectMslError(MslError.UNIDENTIFIED_CIPHERTEXT_ENVELOPE);

            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            mo.put(KEY_VERSION, -1);
            new MslCiphertextEnvelope(mo);
        }

        @Test
        public void missingCipherSpec() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // Removing the cipher spec must fail parsing with MSL_PARSE_ERROR.
            thrown.expect(MslEncodingException.class);
            thrown.expectMslError(MslError.MSL_PARSE_ERROR);

            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            mo.remove(KEY_CIPHERSPEC);
            new MslCiphertextEnvelope(mo);
        }

        @Test
        public void invalidCipherSpec() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // An unrecognized cipher spec value must fail with
            // UNIDENTIFIED_CIPHERSPEC.
            thrown.expect(MslCryptoException.class);
            thrown.expectMslError(MslError.UNIDENTIFIED_CIPHERSPEC);

            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            mo.put(KEY_CIPHERSPEC, "x");
            new MslCiphertextEnvelope(mo);
        }

        @Test
        public void missingCiphertext() throws MslEncoderException, MslCryptoException, MslEncodingException {
            // Removing the ciphertext must fail parsing with MSL_PARSE_ERROR.
            thrown.expect(MslEncodingException.class);
            thrown.expectMslError(MslError.MSL_PARSE_ERROR);

            final MslCiphertextEnvelope envelope = new MslCiphertextEnvelope(cipherSpec, IV, CIPHERTEXT);
            final byte[] encode = envelope.toMslEncoding(encoder, ENCODER_FORMAT);
            final MslObject mo = encoder.parseObject(encode);
            mo.remove(KEY_CIPHERTEXT);
            new MslCiphertextEnvelope(mo);
        }
    }
}
package com.j256.simplecsv.processor;

import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import com.j256.simplecsv.common.CsvColumn;
import com.j256.simplecsv.common.CsvField;
import com.j256.simplecsv.converter.Converter;
import com.j256.simplecsv.converter.ConverterUtils;
import com.j256.simplecsv.converter.VoidConverter;

/**
 * Information about a particular CSV column used internally to keep track of the CSV columns.
 * 
 * @param <T>
 *            The type of the column whose information we are storing in here.
 * @author graywatson
 */
@SuppressWarnings("deprecation")
public class ColumnInfo<T> {

	private final String fieldName;
	private final Class<T> type;
	// may be null
	private final Field field;
	// may be null
	private final Method getMethod;
	// may be null
	private final Method setMethod;
	private final Converter<T, ?> converter;
	private final Object configInfo;
	private final String columnName;
	private int position;
	private final boolean mustNotBeBlank;
	private final boolean trimInput;
	private final boolean needsQuotes;
	private final String defaultValue;
	private final boolean mustBeSupplied;
	private final String afterColumn;

	private ColumnInfo(String fieldName, Class<T> type, Field field, Method getMethod, Method setMethod,
			Converter<T, ?> converter, String format, long converterFlags, String columnName, boolean mustNotBeBlank,
			boolean trimInput, String defaultValue, boolean mustBeSupplied, String afterColumn) {
		this.fieldName = fieldName;
		this.type = type;
		this.field = field;
		this.getMethod = getMethod;
		this.setMethod = setMethod;
		this.converter = converter;
		this.columnName = columnName;
		this.mustNotBeBlank = mustNotBeBlank;
		this.trimInput = trimInput;
		this.defaultValue = defaultValue;
		this.mustBeSupplied = mustBeSupplied;
		this.afterColumn = afterColumn;
		// now that we have setup this class we can call configure the converter to get our config-info
		// NOTE(review): this leaks a partially-constructed `this` to the converter; existing converters
		// appear to depend on the fields assigned above, so the call order here must not change.
		this.configInfo = converter.configure(format, converterFlags, this);
		@SuppressWarnings("unchecked")
		Converter<Object, Object> castConverter = (Converter<Object, Object>) converter;
		this.needsQuotes = castConverter.isNeedsQuotes(configInfo);
	}

	/**
	 * Get the value associated with this field from the object parameter either by getting from the field or calling
	 * the get method.
	 */
	public T getValue(Object obj) throws IllegalAccessException, InvocationTargetException {
		if (field == null) {
			@SuppressWarnings("unchecked")
			T cast = (T) getMethod.invoke(obj);
			return cast;
		} else {
			@SuppressWarnings("unchecked")
			T cast = (T) field.get(obj);
			return cast;
		}
	}

	/**
	 * Set the value associated with this field from the object parameter either by setting via the field or calling the
	 * set method.
	 */
	public void setValue(Object obj, T value) throws IllegalAccessException, InvocationTargetException {
		if (field == null) {
			setMethod.invoke(obj, value);
		} else {
			field.set(obj, value);
		}
	}

	/**
	 * Name of the java field or the get/set methods.
	 */
	public String getFieldName() {
		return fieldName;
	}

	/**
	 * Java type of the column value.
	 */
	public Class<T> getType() {
		return type;
	}

	/**
	 * Associated reflection field or null if using get/set method.
	 */
	public Field getField() {
		return field;
	}

	/**
	 * Associated reflection get/is method or null if using field.
	 */
	public Method getGetMethod() {
		return getMethod;
	}

	/**
	 * Associated reflection set method or null if using field.
	 */
	public Method getSetMethod() {
		return setMethod;
	}

	/**
	 * Returns the converter class associated with the column.
	 */
	public Converter<T, ?> getConverter() {
		return converter;
	}

	/**
	 * Returns the configuration information associated with the column, if any.
	 */
	public Object getConfigInfo() {
		return configInfo;
	}

	/**
	 * Returns the header name for this column.
	 * 
	 * @see CsvColumn#columnName()
	 */
	public String getColumnName() {
		return columnName;
	}

	/**
	 * Returns the position the column appears in the file.
	 */
	public int getPosition() {
		return position;
	}

	/**
	 * Set the position the column appears in the file.
	 */
	public void setPosition(int position) {
		this.position = position;
	}

	/**
	 * Returns whether this column is required.
	 * 
	 * @see CsvColumn#mustNotBeBlank()
	 */
	public boolean isMustNotBeBlank() {
		return mustNotBeBlank;
	}

	/**
	 * Returns whether this column should be trimmed when read.
	 * 
	 * @see CsvColumn#trimInput()
	 */
	public boolean isTrimInput() {
		return trimInput;
	}

	/**
	 * Returns whether this column should be surrounded by quotes or not.
	 */
	public boolean isNeedsQuotes() {
		return needsQuotes;
	}

	/**
	 * Returns the default string for the column or null if none.
	 * 
	 * @see CsvColumn#defaultValue()
	 */
	public String getDefaultValue() {
		return defaultValue;
	}

	/**
	 * Returns whether the column is optional or not.
	 * 
	 * @see CsvColumn#mustBeSupplied()
	 */
	public boolean isMustBeSupplied() {
		return mustBeSupplied;
	}

	/**
	 * Column name that we come after to have the order not be field or method position based.
	 * 
	 * @see CsvColumn#afterColumn()
	 */
	public String getAfterColumn() {
		return afterColumn;
	}

	/**
	 * Make a column-info instance from a Java Field annotated with {@link CsvColumn}.
	 */
	public static <T> ColumnInfo<T> fromAnnotation(CsvColumn csvColumn, String fieldName, Class<T> type, Field field,
			Method getMethod, Method setMethod, Converter<T, ?> converter) {
		return fromAnnotation(csvColumn.converterClass(), csvColumn.format(), csvColumn.converterFlags(),
				csvColumn.columnName(), csvColumn.defaultValue(), csvColumn.afterColumn(), csvColumn.mustNotBeBlank(),
				csvColumn.mustBeSupplied(), csvColumn.trimInput(), fieldName, type, field, getMethod, setMethod,
				converter);
	}

	/**
	 * Make a column-info instance from a Java Field annotated with the deprecated {@link CsvField}.
	 */
	public static <T> ColumnInfo<T> fromAnnotation(CsvField csvField, String fieldName, Class<T> type, Field field,
			Method getMethod, Method setMethod, Converter<T, ?> converter) {
		// CsvField has no afterColumn setting so null is passed
		return fromAnnotation(csvField.converterClass(), csvField.format(), csvField.converterFlags(),
				csvField.columnName(), csvField.defaultValue(), null, csvField.mustNotBeBlank(),
				csvField.mustBeSupplied(), csvField.trimInput(), fieldName, type, field, getMethod, setMethod,
				converter);
	}

	/**
	 * Common implementation shared by the two annotation factory methods. Resolves the converter, normalizes
	 * annotation default-marker values to their effective values, and builds the instance.
	 * (Renamed from the misspelled {@code fromAnnoation}; private, so no callers outside this class are affected.)
	 */
	private static <T> ColumnInfo<T> fromAnnotation(Class<? extends Converter<?, ?>> converterClass, String format,
			long converterFlags, String columnName, String defaultValue, String afterColumn, boolean mustNotBeBlank,
			boolean mustBeSupplied, boolean trimInput, String fieldName, Class<T> type, Field field, Method getMethod,
			Method setMethod, Converter<T, ?> converter) {
		if (converterClass == VoidConverter.class) {
			if (converter == null) {
				throw new IllegalArgumentException("No converter available for type: " + type);
			} else {
				// use the passed in one
			}
		} else {
			@SuppressWarnings("unchecked")
			Converter<T, Object> castConverter =
					(Converter<T, Object>) ConverterUtils.constructConverter(converterClass);
			converter = castConverter;
		}
		// the annotation DEFAULT_VALUE marker means "not set"; map it to the effective value
		if (format != null && format.equals(CsvColumn.DEFAULT_VALUE)) {
			format = null;
		}
		if (columnName != null && columnName.equals(CsvColumn.DEFAULT_VALUE)) {
			columnName = fieldName;
		}
		if (defaultValue != null && defaultValue.equals(CsvColumn.DEFAULT_VALUE)) {
			defaultValue = null;
		}
		if (afterColumn != null && afterColumn.equals(CsvColumn.DEFAULT_VALUE)) {
			afterColumn = null;
		}
		return new ColumnInfo<T>(fieldName, type, field, getMethod, setMethod, converter, format, converterFlags,
				columnName, mustNotBeBlank, trimInput, defaultValue, mustBeSupplied, afterColumn);
	}

	/**
	 * For testing purposes.
	 */
	public static <T> ColumnInfo<T> forTests(Converter<T, ?> converter, Class<?> type, String format,
			long converterFlags) {
		@SuppressWarnings("unchecked")
		Class<T> castType = (Class<T>) type;
		return new ColumnInfo<T>("name", castType, null, null, null, converter, format, converterFlags, "name", false,
				false, null, false, null);
	}

	@Override
	public String toString() {
		return "ColumnInfo [name=" + fieldName + ", type=" + type + "]";
	}
}
/*
 * Copyright (c) 2021, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package boofcv.alg.feature.detect.intensity.impl;

import boofcv.struct.image.GrayF32;
import boofcv.struct.image.GrayS16;
import boofcv.struct.image.GrayS32;
import pabeles.concurrency.GrowArray;
import javax.annotation.Generated;
//CONCURRENT_INLINE import boofcv.concurrency.BoofConcurrency;

/**
 * <p>
 * Implementation of {@link ImplSsdCornerBase} for {@link GrayS16}.
 * </p>
 *
 * <p>DO NOT MODIFY. Automatically generated code created by GenerateImplSsdCorner</p>
 *
 * @author Peter Abeles
 */
@Generated("boofcv.alg.feature.detect.intensity.impl.GenerateImplSsdCorner")
public class ImplSsdCorner_S16 extends ImplSsdCornerBox<GrayS16, GrayS32> {

	// Per-call scratch buffers; regrown when the image shape changes.
	private GrowArray<WorkSpace> workspaces = new GrowArray<>(() -> new WorkSpace(0));
	// Converts the summed XX/XY/YY products into a corner intensity score.
	private CornerIntensity_S32 intensity;

	public ImplSsdCorner_S16( int windowRadius, CornerIntensity_S32 intensity ) {
		super(windowRadius, GrayS16.class, GrayS32.class);
		this.intensity = intensity;
	}

	@Override
	protected void setImageShape( int imageWidth, int imageHeight ) {
		super.setImageShape(imageWidth, imageHeight);
		// Workspace rows must be as wide as the image; rebuild the pool.
		workspaces = new GrowArray<>(() -> new WorkSpace(imageWidth));
	}

	/**
	 * Compute the derivative sum along the x-axis while taking advantage of duplicate
	 * calculations for each window.
	 */
	@Override
	protected void horizontal() {
		short[] dataX = derivX.data;
		short[] dataY = derivY.data;

		int[] hXX = horizXX.data;
		int[] hXY = horizXY.data;
		int[] hYY = horizYY.data;

		final int imgHeight = derivX.getHeight();
		final int imgWidth = derivX.getWidth();

		int windowWidth = radius*2 + 1;
		int radp1 = radius + 1;

		//CONCURRENT_BELOW BoofConcurrency.loopFor(0,imgHeight,row->{
		for (int row = 0; row < imgHeight; row++) {
			int pix = row*imgWidth;
			int end = pix + windowWidth;

			// Sum of the products over the first full window in this row.
			int totalXX = 0;
			int totalXY = 0;
			int totalYY = 0;

			int indexX = derivX.startIndex + row*derivX.stride;
			int indexY = derivY.startIndex + row*derivY.stride;

			for (; pix < end; pix++) {
				short dx = dataX[indexX++];
				short dy = dataY[indexY++];

				totalXX += dx*dx;
				totalXY += dx*dy;
				totalYY += dy*dy;
			}

			hXX[pix - radp1] = totalXX;
			hXY[pix - radp1] = totalXY;
			hYY[pix - radp1] = totalYY;

			end = row*imgWidth + imgWidth;
			// Slide the window across the rest of the row: subtract the element
			// leaving the window and add the element entering it.
			for (; pix < end; pix++, indexX++, indexY++) {
				short dx = dataX[indexX - windowWidth];
				short dy = dataY[indexY - windowWidth];

				// saving these multiplications in an array to avoid recalculating them made
				// the algorithm about 50% slower
				totalXX -= dx*dx;
				totalXY -= dx*dy;
				totalYY -= dy*dy;

				dx = dataX[indexX];
				dy = dataY[indexY];

				totalXX += dx*dx;
				totalXY += dx*dy;
				totalYY += dy*dy;

				hXX[pix - radius] = totalXX;
				hXY[pix - radius] = totalXY;
				hYY[pix - radius] = totalYY;
			}
		}
		//CONCURRENT_ABOVE });
	}

	/**
	 * Compute the derivative sum along the y-axis while taking advantage of duplicate
	 * calculations for each window and avoiding cache misses. Then compute the eigen values
	 */
	@Override
	protected void vertical( GrayF32 intensity ) {
		int[] hXX = horizXX.data;
		int[] hXY = horizXY.data;
		int[] hYY = horizYY.data;
		final float[] inten = intensity.data;

		final int imgHeight = horizXX.getHeight();
		final int imgWidth = horizXX.getWidth();

		final int kernelWidth = radius*2 + 1;

		final int startX = radius;
		final int endX = imgWidth - radius;

		final int backStep = kernelWidth*imgWidth;

		WorkSpace work = workspaces.grow(); //CONCURRENT_REMOVE_LINE
		//CONCURRENT_BELOW BoofConcurrency.loopBlocks(radius,imgHeight-radius,workspaces,(work,y0,y1)->{
		int y0 = radius, y1 = imgHeight - radius;
		final int[] tempXX = work.xx;
		final int[] tempXY = work.yy;
		final int[] tempYY = work.zz;

		// First output row: compute full vertical sums for every column.
		for (int x = startX; x < endX; x++) {
			// defines the A matrix, from which the eigenvalues are computed
			int srcIndex = x + (y0 - radius)*imgWidth;
			int destIndex = imgWidth*y0 + x;
			int totalXX = 0, totalXY = 0, totalYY = 0;

			int indexEnd = srcIndex + imgWidth*kernelWidth;
			for (; srcIndex < indexEnd; srcIndex += imgWidth) {
				totalXX += hXX[srcIndex];
				totalXY += hXY[srcIndex];
				totalYY += hYY[srcIndex];
			}

			tempXX[x] = totalXX;
			tempXY[x] = totalXY;
			tempYY[x] = totalYY;

			// compute the eigen values
			inten[destIndex] = this.intensity.compute(totalXX, totalXY, totalYY);
			destIndex += imgWidth;
		}

		// change the order it is processed in to reduce cache misses
		// Remaining rows: slide each column sum down by one row (subtract the
		// row leaving the window, add the row entering it).
		for (int y = y0 + 1; y < y1; y++) {
			int srcIndex = (y + radius)*imgWidth + startX;
			int destIndex = y*imgWidth + startX;

			for (int x = startX; x < endX; x++, srcIndex++, destIndex++) {
				int totalXX = tempXX[x] - hXX[srcIndex - backStep];
				tempXX[x] = totalXX += hXX[srcIndex];
				int totalXY = tempXY[x] - hXY[srcIndex - backStep];
				tempXY[x] = totalXY += hXY[srcIndex];
				int totalYY = tempYY[x] - hYY[srcIndex - backStep];
				tempYY[x] = totalYY += hYY[srcIndex];

				inten[destIndex] = this.intensity.compute(totalXX, totalXY, totalYY);
			}
		}
		//CONCURRENT_INLINE });
	}

	// Reusable per-thread scratch rows for the vertical pass.
	// NOTE(review): tempXY maps to `yy` and tempYY to `zz` above — the field
	// names are positional slots, not XX/XY/YY labels.
	private static class WorkSpace {
		public final int[] xx;
		public final int[] yy;
		public final int[] zz;

		public WorkSpace( int size ) {
			xx = new int[size];
			yy = new int[size];
			zz = new int[size];
		}
	}
}
/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.auraframework.impl; import java.lang.ref.WeakReference; import java.util.Collection; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; import org.apache.log4j.Logger; import org.auraframework.Aura; import org.auraframework.builder.CacheBuilder; import org.auraframework.cache.Cache; import org.auraframework.def.ApplicationDef; import org.auraframework.def.ComponentDef; import org.auraframework.def.DefDescriptor; import org.auraframework.def.Definition; import org.auraframework.ds.serviceloader.AuraServiceProvider; import org.auraframework.impl.cache.CacheImpl; import org.auraframework.service.CachingService; import org.auraframework.service.DefinitionService; import org.auraframework.system.DependencyEntry; import org.auraframework.system.SourceListener; import com.google.common.base.Optional; import aQute.bnd.annotation.component.Component; @Component (provide=AuraServiceProvider.class) public class CachingServiceImpl implements CachingService { private static final long serialVersionUID = -3311707270226573084L; /** Default size of definition caches, in number of entries */ private final static int DEFINITION_CACHE_SIZE = 6 * 1024; /** Default size of dependency caches, in number of entries */ private 
final static int DEPENDENCY_CACHE_SIZE = 1024; /** Default size of descriptor filter caches, in number of entries */ private final static int FILTER_CACHE_SIZE = 2048; /** Default size of string caches, in number of entries */ private final static int STRING_CACHE_SIZE = 100; /** Default size of client lib caches, in number of entries */ private final static int CLIENT_LIB_CACHE_SIZE = 30; private final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(); private final WriteLock wLock = rwLock.writeLock(); @Override public <K, T> CacheBuilder<K, T> getCacheBuilder() { return new CacheImpl.Builder<>(); } private final Cache<DefDescriptor<?>, Boolean> existsCache; private final Cache<DefDescriptor<?>, Optional<? extends Definition>> defsCache; private final Cache<String, String> stringsCache; private final Cache<String, Set<DefDescriptor<?>>> descriptorFilterCache; private final Cache<String, DependencyEntry> depsCache; private final Cache<String, String> clientLibraryOutputCache; private final Cache<String, Set<String>> clientLibraryUrlsCache; private final Cache<DefDescriptor.DescriptorKey, DefDescriptor<? extends Definition>> defDescriptorByNameCache; private static final Logger logger = Logger.getLogger(CachingServiceImpl.class); public CachingServiceImpl() { int size = getCacheSize("aura.cache.existsCacheSize", DEFINITION_CACHE_SIZE); existsCache = this.<DefDescriptor<?>, Boolean> getCacheBuilder() .setInitialSize(size) .setMaximumSize(size) .setRecordStats(true) .setName("existsCache") .setSoftValues(true).build(); size = getCacheSize("aura.cache.defsCacheSize", DEFINITION_CACHE_SIZE); defsCache = this .<DefDescriptor<?>, Optional<? 
extends Definition>> getCacheBuilder() .setInitialSize(size) .setMaximumSize(size) .setRecordStats(true) .setName("defsCache") .setSoftValues(true).build(); size = getCacheSize("aura.cache.stringsCacheSize", STRING_CACHE_SIZE); stringsCache = this.<String, String> getCacheBuilder() .setInitialSize(size) .setMaximumSize(size) .setRecordStats(true) .setName("stringsCache") .setSoftValues(true).build(); size = getCacheSize("aura.cache.filterCacheSize", FILTER_CACHE_SIZE); descriptorFilterCache = this .<String, Set<DefDescriptor<?>>> getCacheBuilder() .setInitialSize(size) .setMaximumSize(size) .setRecordStats(true) .setName("descriptorFilterCache") .setSoftValues(true).build(); size = getCacheSize("aura.cache.depsCacheSize", DEPENDENCY_CACHE_SIZE); depsCache = this.<String, DependencyEntry> getCacheBuilder() .setInitialSize(size) .setMaximumSize(size) .setRecordStats(true) .setName("depsCache") .setSoftValues(true).build(); size = getCacheSize("aura.cache.clientLibraryOutputCacheSize", CLIENT_LIB_CACHE_SIZE); clientLibraryOutputCache = this.<String, String> getCacheBuilder() .setInitialSize(size) .setMaximumSize(size) .setSoftValues(true) .setName("clientLibraryOutputCache") .setRecordStats(true).build(); size = getCacheSize("aura.cache.clientLibraryUrlsCacheSize", CLIENT_LIB_CACHE_SIZE); clientLibraryUrlsCache = this.<String, Set<String>> getCacheBuilder() .setInitialSize(size) .setMaximumSize(size) .setSoftValues(true) .setName("clientLibraryUrlsCache") .setRecordStats(true).build(); size = getCacheSize("aura.cache.defDescByNameCacheSize", 1024 * 20); defDescriptorByNameCache = this.<DefDescriptor.DescriptorKey, DefDescriptor<? extends Definition>> getCacheBuilder() .setInitialSize(512) .setMaximumSize(size) .setConcurrencyLevel(20) .setName("defDescByNameCache") .build(); } @Override public final Cache<DefDescriptor<?>, Boolean> getExistsCache() { return existsCache; } @Override public final Cache<DefDescriptor<?>, Optional<? 
extends Definition>> getDefsCache() { return defsCache; } @Override public final Cache<String, String> getStringsCache() { return stringsCache; } @Override public final Cache<String, Set<DefDescriptor<?>>> getDescriptorFilterCache() { return descriptorFilterCache; } @Override public final Cache<String, DependencyEntry> getDepsCache() { return depsCache; } @Override public final Cache<String, String> getClientLibraryOutputCache() { return clientLibraryOutputCache; } @Override public final Cache<String, Set<String>> getClientLibraryUrlsCache() { return clientLibraryUrlsCache; } @Override public final Cache<DefDescriptor.DescriptorKey, DefDescriptor<? extends Definition>> getDefDescriptorByNameCache() { return defDescriptorByNameCache; } @Override public Lock getReadLock() { return rwLock.readLock(); } @Override public Lock getWriteLock() { return rwLock.writeLock(); } /** * The driver for cache-consistency management in response to source changes. MDR drives the process, will notify * all registered listeners while write blocking, then invalidate it's own caches. If this routine can't acquire the * lock , it will log it as an non-fatal error, as it only results in staleness. * * @param listeners - collections of listeners to notify of source changes * @param source - DefDescriptor that changed - for granular cache clear (currently not considered here, but other * listeners may make use of it) * @param event - what type of event triggered the change */ @Override public void notifyDependentSourceChange( Collection<WeakReference<SourceListener>> listeners, DefDescriptor<?> source, SourceListener.SourceMonitorEvent event, String filePath) { boolean haveLock = false; try { // We have now eliminated all known deadlocks, but for production // safety, we never want to block forever haveLock = wLock.tryLock(5, TimeUnit.SECONDS); // If this occurs, we have a new deadlock. 
But it only means // temporary cache staleness, so it is not fatal if (!haveLock) { logger.error("Couldn't acquire cache clear lock in a reasonable time. Cache may be stale until next clear."); return; } // successfully acquired the lock, start clearing caches // notify provided listeners, presumably to clear caches for (WeakReference<SourceListener> i : listeners) { SourceListener sl = i.get(); if (sl != null) { sl.onSourceChanged(source, event, filePath); } } // lastly, clear MDR's static caches invalidateSourceRelatedCaches(source); } catch (InterruptedException e) { } finally { if (haveLock) { wLock.unlock(); } } } private void invalidateSourceRelatedCaches(DefDescriptor<?> descriptor) { depsCache.invalidateAll(); descriptorFilterCache.invalidateAll(); stringsCache.invalidateAll(); if (descriptor == null) { defsCache.invalidateAll(); existsCache.invalidateAll(); } else { DefinitionService ds = Aura.getDefinitionService(); DefDescriptor<ComponentDef> cdesc = ds.getDefDescriptor(descriptor, "markup", ComponentDef.class); DefDescriptor<ApplicationDef> adesc = ds.getDefDescriptor( descriptor, "markup", ApplicationDef.class); defsCache.invalidate(descriptor); existsCache.invalidate(descriptor); defsCache.invalidate(cdesc); existsCache.invalidate(cdesc); defsCache.invalidate(adesc); existsCache.invalidate(adesc); switch (descriptor.getDefType()) { case NAMESPACE: // invalidate all DDs with the same namespace if its a namespace DD invalidateScope(descriptor, true, false); break; case LAYOUTS: invalidateScope(descriptor, true, true); break; case INCLUDE: invalidateSourceRelatedCaches(descriptor.getBundle()); break; default: } } } private void invalidateScope(DefDescriptor<?> descriptor, boolean clearNamespace, boolean clearName) { final Set<DefDescriptor<?>> defsKeySet = defsCache.getKeySet(); final String namespace = descriptor.getNamespace(); final String name = descriptor.getName(); for (DefDescriptor<?> dd : defsKeySet) { boolean sameNamespace = 
namespace.equals(dd.getNamespace()); boolean sameName = name.equals(dd.getName()); boolean shouldClear = (clearNamespace && clearName) ? (clearNamespace && sameNamespace) && (clearName && sameName) : (clearNamespace && sameNamespace) || (clearName && sameName); if (shouldClear) { defsCache.invalidate(dd); existsCache.invalidate(dd); } } } /** * Computes a size for a given cache. The defaults can be overridden * with system properties. */ private int getCacheSize(String propName, int defaultSize) { String prop = System.getProperty(propName); if (prop == null) { prop = System.getProperty("aura.cache.defaultCacheSize"); } if (prop != null && !prop.isEmpty()) { try { return Integer.parseInt(prop); } catch (NumberFormatException e) { // ne'ermind, use the default } } return defaultSize; } }
/* * Copyright (c) 2000-2003 Yale University. All rights reserved. * * THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE EXPRESSLY * DISCLAIMED. IN NO EVENT SHALL YALE UNIVERSITY OR ITS EMPLOYEES BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED, THE COSTS OF * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED IN ADVANCE OF THE POSSIBILITY OF SUCH * DAMAGE. * * Redistribution and use of this software in source or binary forms, * with or without modification, are permitted, provided that the * following conditions are met: * * 1. Any redistribution must include the above copyright notice and * disclaimer and this list of conditions in any related documentation * and, if feasible, in the redistributed software. * * 2. Any redistribution must include the acknowledgment, "This product * includes software developed by Yale University," in any related * documentation and, if feasible, in the redistributed software. * * 3. The names "Yale" and "Yale University" must not be used to endorse * or promote products derived from this software. 
*/ package edu.yale.its.tp.cas.client.taglib; import java.io.*; import java.util.*; import javax.servlet.*; import javax.servlet.http.*; import javax.servlet.jsp.*; import javax.servlet.jsp.tagext.*; import edu.yale.its.tp.cas.client.*; import javax.xml.parsers.ParserConfigurationException; import org.xml.sax.SAXException; /** * <p>Authentication tag for use with the Yale Central Authentication * Service.</p> * * <p>Typical usage involves placing the tag at the top of the page. * The tag checks to determine if the attribute referenced by id/scope * exists; if it does, the tag has no runtime effect. If the attribute * does not exist, however, a CAS authentication is necessary: * if no ticket is present, we redirect to CAS, and if a ticket is * present, we validate it. Upon successful CAS authentication (either * by a pre-existing attribute or through CAS directly), we store the * NetID in the attribute referenced by id/scope.</p> * * @author Shawn Bayern * @author Drew Mazurek */ public class AuthTag extends TagSupport { //********************************************************************* // Internal state private String var; // tag attribute private int scope; // tag attribute private String casLogin, casValidate, service; // from children private List acceptedProxies; // from children private HttpServletRequest request; private HttpServletResponse response; //********************************************************************* // Tag logic public int doStartTag() throws JspException { // retrieve and save the request and response objects request = (HttpServletRequest) pageContext.getRequest(); response = (HttpServletResponse) pageContext.getResponse(); // reset invocation-specific state casLogin = null; casValidate = null; try { service = Util.getService( request, (String) pageContext.getServletContext().getInitParameter( "edu.yale.its.tp.cas.serverName")); } catch (ServletException ex) { throw new JspException(ex); } acceptedProxies = new ArrayList(); return 
EVAL_BODY_INCLUDE; } public int doEndTag() throws JspTagException { try { // if our attribute's already present, don't do anything if (pageContext.getAttribute(var, scope) != null) return EVAL_PAGE; // otherwise, we need to authenticate via CAS String ticket = request.getParameter("ticket"); // no ticket? redirect... if (ticket == null || ticket.equals("")) { if (casLogin == null) throw new JspTagException( "for pages that expect to be called without 'ticket' parameter, " + "cas:auth must have a cas:loginUrl subtag"); response.sendRedirect(casLogin + "?service=" + service); return SKIP_PAGE; } // Yay, ticket! Validate it. String netid = getAuthenticatedNetid(ticket); if (netid == null) throw new JspTagException("Unexpected CAS authentication error"); // Store the authenticate user in the id/scope attribute pageContext.setAttribute(var, netid, scope); return EVAL_PAGE; } catch (IOException ex) { throw new JspTagException(ex.getMessage()); } catch (SAXException ex) { throw new JspTagException(ex.getMessage()); } catch (ParserConfigurationException ex) { throw new JspTagException(ex.getMessage()); } } //********************************************************************* // Attribute accessors public void setVar(String var) { this.var = var; } public void setScope(String scope) { if (scope.equals("page")) this.scope = PageContext.PAGE_SCOPE; else if (scope.equals("request")) this.scope = PageContext.REQUEST_SCOPE; else if (scope.equals("session")) this.scope = PageContext.SESSION_SCOPE; else if (scope.equals("application")) this.scope = PageContext.APPLICATION_SCOPE; else throw new IllegalArgumentException("invalid scope"); } //********************************************************************* // Accessors for child tags public void setCasLogin(String url) { casLogin = url; } public void setCasValidate(String url) { casValidate = url; } public void addAuthorizedProxy(String proxyId) { acceptedProxies.add(proxyId); } public void setService(String service) { 
this.service = service; } //********************************************************************* // Constructor and lifecycle management public AuthTag() { super(); init(); } // Releases any resources we may have (or inherit) public void release() { super.release(); init(); } // clears any internal state we might have private void init() { var = null; scope = PageContext.PAGE_SCOPE; casLogin = null; casValidate = null; acceptedProxies = null; } //********************************************************************* // Utility methods private String getAuthenticatedNetid(String ticket) throws ParserConfigurationException, SAXException, IOException, JspTagException { ProxyTicketValidator pv = new ProxyTicketValidator(); pv.setCasValidateUrl(casValidate); pv.setServiceTicket(ticket); pv.setService(service); pv.validate(); if (!pv.isAuthenticationSuccesful()) throw new JspTagException( "CAS authentication error: " + pv.getErrorCode()); if (pv.getProxyList().size() != 0) { // ticket was proxied if (acceptedProxies.size() == 0) throw new JspTagException("this page does not accept proxied tickets"); else if (!acceptedProxies.contains(pv.getProxyList().get(0))) throw new JspTagException( "unauthorized top-level proxy: '" + pv.getProxyList().get(0) + "'"); } return pv.getUser(); } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.xray.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result object for the GetSamplingRules operation: a page of sampling rule
 * records plus an optional token for fetching the next page.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetSamplingRules" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetSamplingRulesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Rule definitions and metadata. */
    private java.util.List<SamplingRuleRecord> samplingRuleRecords;

    /** Pagination token. */
    private String nextToken;

    /**
     * @return Rule definitions and metadata.
     */
    public java.util.List<SamplingRuleRecord> getSamplingRuleRecords() {
        return samplingRuleRecords;
    }

    /**
     * Replaces the rule record list with a defensive copy of the supplied
     * collection (or null).
     *
     * @param samplingRuleRecords
     *        Rule definitions and metadata.
     */
    public void setSamplingRuleRecords(java.util.Collection<SamplingRuleRecord> samplingRuleRecords) {
        this.samplingRuleRecords = (samplingRuleRecords == null)
                ? null
                : new java.util.ArrayList<SamplingRuleRecord>(samplingRuleRecords);
    }

    /**
     * Appends the given records to any existing list (this does NOT replace
     * previously-set values; use {@link #setSamplingRuleRecords(java.util.Collection)}
     * or {@link #withSamplingRuleRecords(java.util.Collection)} to override).
     *
     * @param samplingRuleRecords
     *        Rule definitions and metadata.
     * @return this object, for call chaining.
     */
    public GetSamplingRulesResult withSamplingRuleRecords(SamplingRuleRecord... samplingRuleRecords) {
        if (this.samplingRuleRecords == null) {
            this.samplingRuleRecords = new java.util.ArrayList<SamplingRuleRecord>(samplingRuleRecords.length);
        }
        java.util.Collections.addAll(this.samplingRuleRecords, samplingRuleRecords);
        return this;
    }

    /**
     * Replaces the rule record list and returns this object for chaining.
     *
     * @param samplingRuleRecords
     *        Rule definitions and metadata.
     * @return this object, for call chaining.
     */
    public GetSamplingRulesResult withSamplingRuleRecords(java.util.Collection<SamplingRuleRecord> samplingRuleRecords) {
        setSamplingRuleRecords(samplingRuleRecords);
        return this;
    }

    /**
     * @param nextToken
     *        Pagination token.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return Pagination token.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token and returns this object for chaining.
     *
     * @param nextToken
     *        Pagination token.
     * @return this object, for call chaining.
     */
    public GetSamplingRulesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSamplingRuleRecords() != null)
            sb.append("SamplingRuleRecords: ").append(getSamplingRuleRecords()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof GetSamplingRulesResult))
            return false;

        GetSamplingRulesResult other = (GetSamplingRulesResult) obj;
        // Objects.equals reproduces the original null-xor-then-equals checks.
        return java.util.Objects.equals(getSamplingRuleRecords(), other.getSamplingRuleRecords())
                && java.util.Objects.equals(getNextToken(), other.getNextToken());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + java.util.Objects.hashCode(getSamplingRuleRecords());
        hashCode = prime * hashCode + java.util.Objects.hashCode(getNextToken());
        return hashCode;
    }

    @Override
    public GetSamplingRulesResult clone() {
        try {
            return (GetSamplingRulesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}