lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
apache-2.0
|
1f775395ae5dd060de0303f221f15fd985f025da
| 0
|
ShortMap/ShortMap,ShortMap/ShortMap,aseldawy/spatialhadoop,aseldawy/spatialhadoop,aseldawy/spatialhadoop,aseldawy/spatialhadoop,ShortMap/ShortMap,ShortMap/ShortMap,ShortMap/ShortMap,ShortMap/ShortMap,aseldawy/spatialhadoop,ShortMap/ShortMap,ShortMap/ShortMap,aseldawy/spatialhadoop,aseldawy/spatialhadoop,ShortMap/ShortMap,aseldawy/spatialhadoop,aseldawy/spatialhadoop
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.filecache;
import java.io.*;
import java.util.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.util.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.FileSystem;
import java.net.URI;
/**
* Distribute application-specific large, read-only files efficiently.
*
* <p><code>DistributedCache</code> is a facility provided by the Map-Reduce
* framework to cache files (text, archives, jars etc.) needed by applications.
* </p>
*
* <p>Applications specify the files, via urls (hdfs:// or http://) to be cached
* via the {@link org.apache.hadoop.mapred.JobConf}.
* The <code>DistributedCache</code> assumes that the
* files specified via hdfs:// urls are already present on the
* {@link FileSystem} at the path specified by the url.</p>
*
* <p>The framework will copy the necessary files on to the slave node before
* any tasks for the job are executed on that node. Its efficiency stems from
* the fact that the files are only copied once per job and the ability to
* cache archives which are un-archived on the slaves.</p>
*
* <p><code>DistributedCache</code> can be used to distribute simple, read-only
* data/text files and/or more complex types such as archives, jars etc.
* Archives (zip, tar and tgz/tar.gz files) are un-archived at the slave nodes.
* Jars may be optionally added to the classpath of the tasks, a rudimentary
* software distribution mechanism. Files have execution permissions.
* Optionally users can also direct it to symlink the distributed cache file(s)
* into the working directory of the task.</p>
*
* <p><code>DistributedCache</code> tracks modification timestamps of the cache
* files. Clearly the cache files should not be modified by the application
* or externally while the job is executing.</p>
*
* <p>Here is an illustrative example on how to use the
* <code>DistributedCache</code>:</p>
* <p><blockquote><pre>
* // Setting up the cache for the application
*
* 1. Copy the requisite files to the <code>FileSystem</code>:
*
* $ bin/hadoop fs -copyFromLocal lookup.dat /myapp/lookup.dat
* $ bin/hadoop fs -copyFromLocal map.zip /myapp/map.zip
* $ bin/hadoop fs -copyFromLocal mylib.jar /myapp/mylib.jar
* $ bin/hadoop fs -copyFromLocal mytar.tar /myapp/mytar.tar
* $ bin/hadoop fs -copyFromLocal mytgz.tgz /myapp/mytgz.tgz
* $ bin/hadoop fs -copyFromLocal mytargz.tar.gz /myapp/mytargz.tar.gz
*
* 2. Setup the application's <code>JobConf</code>:
*
* JobConf job = new JobConf();
* DistributedCache.addCacheFile(new URI("/myapp/lookup.dat#lookup.dat"),
* job);
 * DistributedCache.addCacheArchive(new URI("/myapp/map.zip"), job);
 * DistributedCache.addFileToClassPath(new Path("/myapp/mylib.jar"), job);
 * DistributedCache.addCacheArchive(new URI("/myapp/mytar.tar"), job);
 * DistributedCache.addCacheArchive(new URI("/myapp/mytgz.tgz"), job);
 * DistributedCache.addCacheArchive(new URI("/myapp/mytargz.tar.gz"), job);
*
* 3. Use the cached files in the {@link org.apache.hadoop.mapred.Mapper}
* or {@link org.apache.hadoop.mapred.Reducer}:
*
* public static class MapClass extends MapReduceBase
* implements Mapper<K, V, K, V> {
*
* private Path[] localArchives;
* private Path[] localFiles;
*
* public void configure(JobConf job) {
* // Get the cached archives/files
* localArchives = DistributedCache.getLocalCacheArchives(job);
* localFiles = DistributedCache.getLocalCacheFiles(job);
* }
*
* public void map(K key, V value,
* OutputCollector<K, V> output, Reporter reporter)
* throws IOException {
* // Use data from the cached archives/files here
* // ...
* // ...
* output.collect(k, v);
* }
* }
*
* </pre></blockquote></p>
* It is also very common to use the DistributedCache by using
* {@link org.apache.hadoop.util.GenericOptionsParser}.
*
* This class includes methods that should be used by users
* (specifically those mentioned in the example above, as well
* as {@link DistributedCache#addArchiveToClassPath(Path, Configuration)}),
* as well as methods intended for use by the MapReduce framework
* (e.g., {@link org.apache.hadoop.mapred.JobClient}). For implementation
* details, see {@link TrackerDistributedCacheManager} and
* {@link TaskDistributedCacheManager}.
*
* @see TrackerDistributedCacheManager
* @see TaskDistributedCacheManager
* @see org.apache.hadoop.mapred.JobConf
* @see org.apache.hadoop.mapred.JobClient
*/
public class DistributedCache {
  /**
   * Warning: {@link #CACHE_FILES_SIZES} is not a *public* constant.
   */
  public static final String CACHE_FILES_SIZES = "mapred.cache.files.filesizes";
  /**
   * Warning: {@link #CACHE_ARCHIVES_SIZES} is not a *public* constant.
   */
  public static final String CACHE_ARCHIVES_SIZES =
    "mapred.cache.archives.filesizes";
  /**
   * Warning: {@link #CACHE_ARCHIVES_TIMESTAMPS} is not a *public* constant.
   **/
  public static final String CACHE_ARCHIVES_TIMESTAMPS = "mapred.cache.archives.timestamps";
  /**
   * Warning: {@link #CACHE_FILES_TIMESTAMPS} is not a *public* constant.
   **/
  public static final String CACHE_FILES_TIMESTAMPS = "mapred.cache.files.timestamps";
  /**
   * Warning: {@link #CACHE_ARCHIVES} is not a *public* constant.
   **/
  public static final String CACHE_ARCHIVES = "mapred.cache.archives";
  /**
   * Warning: {@link #CACHE_FILES} is not a *public* constant.
   **/
  public static final String CACHE_FILES = "mapred.cache.files";
  /**
   * Warning: {@link #CACHE_LOCALARCHIVES} is not a *public* constant.
   **/
  public static final String CACHE_LOCALARCHIVES = "mapred.cache.localArchives";
  /**
   * Warning: {@link #CACHE_LOCALFILES} is not a *public* constant.
   **/
  public static final String CACHE_LOCALFILES = "mapred.cache.localFiles";
  /**
   * Warning: {@link #CACHE_SYMLINK} is not a *public* constant.
   **/
  public static final String CACHE_SYMLINK = "mapred.create.symlink";

  // Configuration keys holding the classpath contributions of cached files
  // and archives. Private because only this class reads/writes them; the
  // literal values are kept exactly as before for configuration
  // compatibility.
  private static final String CLASSPATH_FILES_PROPERTY =
    "mapred.job.classpath.files";
  private static final String CLASSPATH_ARCHIVES_PROPERTY =
    "mapred.job.classpath.archives";

  /**
   * Returns {@link FileStatus} of a given cache file on hdfs. Internal to
   * MapReduce.
   * @param conf configuration
   * @param cache cache file
   * @return <code>FileStatus</code> of a given cache file on hdfs
   * @throws IOException if the FileSystem cannot be reached
   */
  public static FileStatus getFileStatus(Configuration conf, URI cache)
    throws IOException {
    FileSystem fileSystem = FileSystem.get(cache, conf);
    return fileSystem.getFileStatus(new Path(cache.getPath()));
  }

  /**
   * Returns mtime of a given cache file on hdfs. Internal to MapReduce.
   * @param conf configuration
   * @param cache cache file
   * @return mtime of a given cache file on hdfs
   * @throws IOException if the FileSystem cannot be reached
   */
  public static long getTimestamp(Configuration conf, URI cache)
    throws IOException {
    return getFileStatus(conf, cache).getModificationTime();
  }

  /**
   * This method creates symlinks for all files in a given dir in another
   * directory.
   * @param conf the configuration
   * @param jobCacheDir the target directory for creating symlinks
   * @param workDir the directory in which the symlinks are created
   * @throws IOException if symlink creation fails
   * @deprecated Internal to MapReduce framework. Use DistributedCacheManager
   * instead.
   */
  @Deprecated
  public static void createAllSymlink(Configuration conf, File jobCacheDir, File workDir)
    throws IOException {
    TrackerDistributedCacheManager.createAllSymlink(conf, jobCacheDir, workDir);
  }

  /**
   * Set the configuration with the given set of archives. Intended
   * to be used by user code.
   * @param archives The list of archives that need to be localized
   * @param conf Configuration which will be changed
   */
  public static void setCacheArchives(URI[] archives, Configuration conf) {
    conf.set(CACHE_ARCHIVES, StringUtils.uriToString(archives));
  }

  /**
   * Set the configuration with the given set of files. Intended to be
   * used by user code.
   * @param files The list of files that need to be localized
   * @param conf Configuration which will be changed
   */
  public static void setCacheFiles(URI[] files, Configuration conf) {
    conf.set(CACHE_FILES, StringUtils.uriToString(files));
  }

  /**
   * Get cache archives set in the Configuration. Used by
   * internal DistributedCache and MapReduce code.
   * @param conf The configuration which contains the archives
   * @return An array of the caches set in the Configuration
   * @throws IOException if a stored string is not a valid URI
   */
  public static URI[] getCacheArchives(Configuration conf) throws IOException {
    return StringUtils.stringToURI(conf.getStrings(CACHE_ARCHIVES));
  }

  /**
   * Get cache files set in the Configuration. Used by internal
   * DistributedCache and MapReduce code.
   * @param conf The configuration which contains the files
   * @return An array of the files set in the Configuration
   * @throws IOException if a stored string is not a valid URI
   */
  public static URI[] getCacheFiles(Configuration conf) throws IOException {
    return StringUtils.stringToURI(conf.getStrings(CACHE_FILES));
  }

  /**
   * Return the path array of the localized caches. Intended to be used
   * by user code.
   * @param conf Configuration that contains the localized archives
   * @return A path array of localized caches
   * @throws IOException declared for compatibility; not thrown here
   */
  public static Path[] getLocalCacheArchives(Configuration conf)
    throws IOException {
    return StringUtils.stringToPath(conf.getStrings(CACHE_LOCALARCHIVES));
  }

  /**
   * Return the path array of the localized files. Intended to be used
   * by user code.
   * @param conf Configuration that contains the localized files
   * @return A path array of localized files
   * @throws IOException declared for compatibility; not thrown here
   */
  public static Path[] getLocalCacheFiles(Configuration conf)
    throws IOException {
    return StringUtils.stringToPath(conf.getStrings(CACHE_LOCALFILES));
  }

  /**
   * Parse a list of strings into longs.
   * @param strs the list of strings to parse
   * @return a list of longs that were parsed. same length as strs.
   */
  private static long[] parseTimestamps(String[] strs) {
    if (strs == null) {
      return null;
    }
    long[] result = new long[strs.length];
    for (int i = 0; i < strs.length; ++i) {
      result[i] = Long.parseLong(strs[i]);
    }
    return result;
  }

  /**
   * Get the timestamps of the archives. Used by internal
   * DistributedCache and MapReduce code.
   * @param conf The configuration which stored the timestamps
   * @return a long array of timestamps
   */
  public static long[] getArchiveTimestamps(Configuration conf) {
    return parseTimestamps(conf.getStrings(CACHE_ARCHIVES_TIMESTAMPS));
  }

  /**
   * Get the timestamps of the files. Used by internal
   * DistributedCache and MapReduce code.
   * @param conf The configuration which stored the timestamps
   * @return a long array of timestamps
   */
  public static long[] getFileTimestamps(Configuration conf) {
    return parseTimestamps(conf.getStrings(CACHE_FILES_TIMESTAMPS));
  }

  /**
   * This is to check the timestamp of the archives to be localized.
   * Used by internal MapReduce code.
   * @param conf Configuration which stores the timestamps
   * @param timestamps comma separated list of timestamps of archives.
   * The order should be the same as the order in which the archives are added.
   */
  public static void setArchiveTimestamps(Configuration conf, String timestamps) {
    conf.set(CACHE_ARCHIVES_TIMESTAMPS, timestamps);
  }

  /**
   * This is to check the timestamp of the files to be localized.
   * Used by internal MapReduce code.
   * @param conf Configuration which stores the timestamps
   * @param timestamps comma separated list of timestamps of files.
   * The order should be the same as the order in which the files are added.
   */
  public static void setFileTimestamps(Configuration conf, String timestamps) {
    conf.set(CACHE_FILES_TIMESTAMPS, timestamps);
  }

  /**
   * Set the conf to contain the location for localized archives. Used
   * by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local archives
   */
  public static void setLocalArchives(Configuration conf, String str) {
    conf.set(CACHE_LOCALARCHIVES, str);
  }

  /**
   * Set the conf to contain the location for localized files. Used
   * by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local files
   */
  public static void setLocalFiles(Configuration conf, String str) {
    conf.set(CACHE_LOCALFILES, str);
  }

  /**
   * Append {@code value} to the comma-separated list stored under
   * {@code key}, creating the entry if it is absent. Shared by all of the
   * add* methods below so the append idiom lives in one place.
   */
  private static void appendToCommaList(Configuration conf, String key,
                                        String value) {
    String existing = conf.get(key);
    conf.set(key, existing == null ? value : existing + "," + value);
  }

  /**
   * Add an archive that has been localized to the conf. Used
   * by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local archives
   */
  public static void addLocalArchives(Configuration conf, String str) {
    appendToCommaList(conf, CACHE_LOCALARCHIVES, str);
  }

  /**
   * Add a file that has been localized to the conf. Used
   * by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local files
   */
  public static void addLocalFiles(Configuration conf, String str) {
    appendToCommaList(conf, CACHE_LOCALFILES, str);
  }

  /**
   * Add an archive to be localized to the conf. Intended to
   * be used by user code.
   * @param uri The uri of the cache to be localized
   * @param conf Configuration to add the cache to
   */
  public static void addCacheArchive(URI uri, Configuration conf) {
    appendToCommaList(conf, CACHE_ARCHIVES, uri.toString());
  }

  /**
   * Add a file to be localized to the conf. Intended
   * to be used by user code.
   * @param uri The uri of the cache to be localized
   * @param conf Configuration to add the cache to
   */
  public static void addCacheFile(URI uri, Configuration conf) {
    appendToCommaList(conf, CACHE_FILES, uri.toString());
  }

  /**
   * Append {@code entry} to the path-separator-separated classpath list
   * stored under {@code property}. Shared by the file and archive classpath
   * methods.
   */
  private static void appendToClassPathProperty(Configuration conf,
                                                String property,
                                                String entry) {
    String classpath = conf.get(property);
    conf.set(property, classpath == null
        ? entry
        : classpath + System.getProperty("path.separator") + entry);
  }

  /**
   * Parse the classpath list stored under {@code property} into Paths.
   * @return the parsed paths, or null when the property is unset
   */
  private static Path[] getClassPathProperty(Configuration conf,
                                             String property) {
    String classpath = conf.get(property);
    if (classpath == null) {
      return null;
    }
    // StringTokenizer (rather than String.split) is kept deliberately: it
    // silently skips empty entries, matching the historical behavior.
    StringTokenizer tokenizer =
        new StringTokenizer(classpath, System.getProperty("path.separator"));
    Path[] paths = new Path[tokenizer.countTokens()];
    for (int i = 0; i < paths.length; i++) {
      paths[i] = new Path(tokenizer.nextToken());
    }
    return paths;
  }

  /**
   * Add a file path to the current set of classpath entries. It adds the file
   * to cache as well. Intended to be used by user code.
   *
   * @deprecated
   *
   * Please use {@link #addFileToClassPath(Path, Configuration, FileSystem)}
   * instead. The {@code FileSystem} should be obtained within an
   * appropriate {@code doAs}.
   *
   * @param file Path of the file to be added
   * @param conf Configuration that contains the classpath setting
   */
  @Deprecated
  public static void addFileToClassPath(Path file, Configuration conf)
    throws IOException {
    addFileToClassPath(file, conf, file.getFileSystem(conf));
  }

  /**
   * Add a file path to the current set of classpath entries. It adds the file
   * to cache as well. Intended to be used by user code.
   *
   * @param file Path of the file to be added
   * @param conf Configuration that contains the classpath setting
   * @param fs FileSystem with respect to which {@code file} should
   *        be interpreted.
   */
  public static void addFileToClassPath
    (Path file, Configuration conf, FileSystem fs)
    throws IOException {
    appendToClassPathProperty(conf, CLASSPATH_FILES_PROPERTY,
        file.toUri().getPath());
    addCacheFile(fs.makeQualified(file).toUri(), conf);
  }

  /**
   * Get the file entries in classpath as an array of Path.
   * Used by internal DistributedCache code.
   *
   * @param conf Configuration that contains the classpath setting
   * @return the file classpath entries, or null when none are set
   */
  public static Path[] getFileClassPaths(Configuration conf) {
    return getClassPathProperty(conf, CLASSPATH_FILES_PROPERTY);
  }

  /**
   * Add an archive path to the current set of classpath entries. It adds the
   * archive to cache as well. Intended to be used by user code.
   *
   * @deprecated
   *
   * Please use {@link #addArchiveToClassPath(Path, Configuration, FileSystem)}
   * instead. The {@code FileSystem} should be obtained within an
   * appropriate {@code doAs}.
   *
   * @param archive Path of the archive to be added
   * @param conf Configuration that contains the classpath setting
   */
  @Deprecated
  public static void addArchiveToClassPath
    (Path archive, Configuration conf)
    throws IOException {
    addArchiveToClassPath(archive, conf, archive.getFileSystem(conf));
  }

  /**
   * Add an archive path to the current set of classpath entries. It adds the
   * archive to cache as well. Intended to be used by user code.
   *
   * @param archive Path of the archive to be added
   * @param conf Configuration that contains the classpath setting
   * @param fs FileSystem with respect to which {@code archive} should be interpreted.
   */
  public static void addArchiveToClassPath
    (Path archive, Configuration conf, FileSystem fs)
    throws IOException {
    appendToClassPathProperty(conf, CLASSPATH_ARCHIVES_PROPERTY,
        archive.toUri().getPath());
    addCacheArchive(fs.makeQualified(archive).toUri(), conf);
  }

  /**
   * Get the archive entries in classpath as an array of Path.
   * Used by internal DistributedCache code.
   *
   * @param conf Configuration that contains the classpath setting
   * @return the archive classpath entries, or null when none are set
   */
  public static Path[] getArchiveClassPaths(Configuration conf) {
    return getClassPathProperty(conf, CLASSPATH_ARCHIVES_PROPERTY);
  }

  /**
   * This method allows you to create symlinks in the current working directory
   * of the task to all the cache files/archives.
   * Intended to be used by user code.
   * @param conf the jobconf
   */
  public static void createSymlink(Configuration conf) {
    conf.set(CACHE_SYMLINK, "yes");
  }

  /**
   * This method checks to see if symlinks are to be created for the
   * localized cache files in the current working directory.
   * Used by internal DistributedCache code.
   * @param conf the jobconf
   * @return true if symlinks are to be created - else return false
   */
  public static boolean getSymlink(Configuration conf) {
    return "yes".equals(conf.get(CACHE_SYMLINK));
  }

  /**
   * This method checks if there is a conflict in the fragment names
   * of the uris. Also makes sure that each uri has a fragment. It
   * is only to be called if you want to create symlinks for
   * the various archives and files. May be used by user code.
   * @param uriFiles The uri array of urifiles
   * @param uriArchives the uri array of uri archives
   * @return true when every uri has a fragment and no two fragments
   *         collide (case-insensitively); also true when both arrays are null
   */
  public static boolean checkURIs(URI[] uriFiles, URI[] uriArchives) {
    if ((uriFiles == null) && (uriArchives == null)) {
      return true;
    }
    // Fragments are compared case-insensitively across both arrays.
    Set<String> fragments = new HashSet<String>();
    return collectFragments(uriFiles, fragments)
        && collectFragments(uriArchives, fragments);
  }

  /**
   * Add the lower-cased fragments of {@code uris} to {@code fragments}.
   * @return false when a uri has no fragment or its fragment duplicates one
   *         already seen; true otherwise (including when uris is null)
   */
  private static boolean collectFragments(URI[] uris, Set<String> fragments) {
    if (uris == null) {
      return true;
    }
    for (URI uri : uris) {
      String fragment = uri.getFragment();
      if (fragment == null) {
        return false;
      }
      // NOTE(review): default-locale toLowerCase() is preserved for behavior
      // compatibility; Locale.ROOT would be safer under e.g. Turkish locales.
      String lowerCaseFragment = fragment.toLowerCase();
      if (!fragments.add(lowerCaseFragment)) {
        return false;
      }
    }
    return true;
  }
}
|
src/mapred/org/apache/hadoop/filecache/DistributedCache.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.filecache;
import java.io.*;
import java.util.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.util.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.FileSystem;
import java.net.URI;
/**
* Distribute application-specific large, read-only files efficiently.
*
* <p><code>DistributedCache</code> is a facility provided by the Map-Reduce
* framework to cache files (text, archives, jars etc.) needed by applications.
* </p>
*
* <p>Applications specify the files, via urls (hdfs:// or http://) to be cached
* via the {@link org.apache.hadoop.mapred.JobConf}.
* The <code>DistributedCache</code> assumes that the
* files specified via hdfs:// urls are already present on the
* {@link FileSystem} at the path specified by the url.</p>
*
* <p>The framework will copy the necessary files on to the slave node before
* any tasks for the job are executed on that node. Its efficiency stems from
* the fact that the files are only copied once per job and the ability to
* cache archives which are un-archived on the slaves.</p>
*
* <p><code>DistributedCache</code> can be used to distribute simple, read-only
* data/text files and/or more complex types such as archives, jars etc.
* Archives (zip, tar and tgz/tar.gz files) are un-archived at the slave nodes.
* Jars may be optionally added to the classpath of the tasks, a rudimentary
* software distribution mechanism. Files have execution permissions.
* Optionally users can also direct it to symlink the distributed cache file(s)
* into the working directory of the task.</p>
*
* <p><code>DistributedCache</code> tracks modification timestamps of the cache
* files. Clearly the cache files should not be modified by the application
* or externally while the job is executing.</p>
*
* <p>Here is an illustrative example on how to use the
* <code>DistributedCache</code>:</p>
* <p><blockquote><pre>
* // Setting up the cache for the application
*
* 1. Copy the requisite files to the <code>FileSystem</code>:
*
* $ bin/hadoop fs -copyFromLocal lookup.dat /myapp/lookup.dat
* $ bin/hadoop fs -copyFromLocal map.zip /myapp/map.zip
* $ bin/hadoop fs -copyFromLocal mylib.jar /myapp/mylib.jar
* $ bin/hadoop fs -copyFromLocal mytar.tar /myapp/mytar.tar
* $ bin/hadoop fs -copyFromLocal mytgz.tgz /myapp/mytgz.tgz
* $ bin/hadoop fs -copyFromLocal mytargz.tar.gz /myapp/mytargz.tar.gz
*
* 2. Setup the application's <code>JobConf</code>:
*
* JobConf job = new JobConf();
* DistributedCache.addCacheFile(new URI("/myapp/lookup.dat#lookup.dat"),
* job);
 * DistributedCache.addCacheArchive(new URI("/myapp/map.zip"), job);
 * DistributedCache.addFileToClassPath(new Path("/myapp/mylib.jar"), job);
 * DistributedCache.addCacheArchive(new URI("/myapp/mytar.tar"), job);
 * DistributedCache.addCacheArchive(new URI("/myapp/mytgz.tgz"), job);
 * DistributedCache.addCacheArchive(new URI("/myapp/mytargz.tar.gz"), job);
*
* 3. Use the cached files in the {@link org.apache.hadoop.mapred.Mapper}
* or {@link org.apache.hadoop.mapred.Reducer}:
*
* public static class MapClass extends MapReduceBase
* implements Mapper<K, V, K, V> {
*
* private Path[] localArchives;
* private Path[] localFiles;
*
* public void configure(JobConf job) {
* // Get the cached archives/files
* localArchives = DistributedCache.getLocalCacheArchives(job);
* localFiles = DistributedCache.getLocalCacheFiles(job);
* }
*
* public void map(K key, V value,
* OutputCollector<K, V> output, Reporter reporter)
* throws IOException {
* // Use data from the cached archives/files here
* // ...
* // ...
* output.collect(k, v);
* }
* }
*
* </pre></blockquote></p>
* It is also very common to use the DistributedCache by using
* {@link org.apache.hadoop.util.GenericOptionsParser}.
*
* This class includes methods that should be used by users
* (specifically those mentioned in the example above, as well
* as {@link DistributedCache#addArchiveToClassPath(Path, Configuration)}),
* as well as methods intended for use by the MapReduce framework
* (e.g., {@link org.apache.hadoop.mapred.JobClient}). For implementation
* details, see {@link TrackerDistributedCacheManager} and
* {@link TaskDistributedCacheManager}.
*
* @see TrackerDistributedCacheManager
* @see TaskDistributedCacheManager
* @see org.apache.hadoop.mapred.JobConf
* @see org.apache.hadoop.mapred.JobClient
*/
public class DistributedCache {
/**
* Warning: {@link #CACHE_FILES_SIZES} is not a *public* constant.
*/
public static final String CACHE_FILES_SIZES = "mapred.cache.files.filesizes";
/**
* Warning: {@link #CACHE_ARCHIVES_SIZES} is not a *public* constant.
*/
public static final String CACHE_ARCHIVES_SIZES =
"mapred.cache.archives.filesizes";
/**
* Warning: {@link #CACHE_ARCHIVES_TIMESTAMPS} is not a *public* constant.
**/
public static final String CACHE_ARCHIVES_TIMESTAMPS = "mapred.cache.archives.timestamps";
/**
* Warning: {@link #CACHE_FILES_TIMESTAMPS} is not a *public* constant.
**/
public static final String CACHE_FILES_TIMESTAMPS = "mapred.cache.files.timestamps";
/**
* Warning: {@link #CACHE_ARCHIVES} is not a *public* constant.
**/
public static final String CACHE_ARCHIVES = "mapred.cache.archives";
/**
* Warning: {@link #CACHE_FILES} is not a *public* constant.
**/
public static final String CACHE_FILES = "mapred.cache.files";
/**
* Warning: {@link #CACHE_LOCALARCHIVES} is not a *public* constant.
**/
public static final String CACHE_LOCALARCHIVES = "mapred.cache.localArchives";
/**
* Warning: {@link #CACHE_LOCALFILES} is not a *public* constant.
**/
public static final String CACHE_LOCALFILES = "mapred.cache.localFiles";
/**
* Warning: {@link #CACHE_SYMLINK} is not a *public* constant.
**/
public static final String CACHE_SYMLINK = "mapred.create.symlink";
/**
* Returns {@link FileStatus} of a given cache file on hdfs. Internal to
* MapReduce.
* @param conf configuration
* @param cache cache file
* @return <code>FileStatus</code> of a given cache file on hdfs
* @throws IOException
*/
public static FileStatus getFileStatus(Configuration conf, URI cache)
throws IOException {
FileSystem fileSystem = FileSystem.get(cache, conf);
return fileSystem.getFileStatus(new Path(cache.getPath()));
}
/**
* Returns mtime of a given cache file on hdfs. Internal to MapReduce.
* @param conf configuration
* @param cache cache file
* @return mtime of a given cache file on hdfs
* @throws IOException
*/
public static long getTimestamp(Configuration conf, URI cache)
throws IOException {
return getFileStatus(conf, cache).getModificationTime();
}
/**
* This method create symlinks for all files in a given dir in another directory
* @param conf the configuration
* @param jobCacheDir the target directory for creating symlinks
* @param workDir the directory in which the symlinks are created
* @throws IOException
* @deprecated Internal to MapReduce framework. Use DistributedCacheManager
* instead.
*/
public static void createAllSymlink(Configuration conf, File jobCacheDir, File workDir)
throws IOException{
TrackerDistributedCacheManager.createAllSymlink(conf, jobCacheDir, workDir);
}
/**
* Set the configuration with the given set of archives. Intended
* to be used by user code.
* @param archives The list of archives that need to be localized
* @param conf Configuration which will be changed
*/
public static void setCacheArchives(URI[] archives, Configuration conf) {
String sarchives = StringUtils.uriToString(archives);
conf.set(CACHE_ARCHIVES, sarchives);
}
/**
* Set the configuration with the given set of files. Intended to be
* used by user code.
* @param files The list of files that need to be localized
* @param conf Configuration which will be changed
*/
public static void setCacheFiles(URI[] files, Configuration conf) {
String sfiles = StringUtils.uriToString(files);
conf.set(CACHE_FILES, sfiles);
}
private static Path[] parsePaths(String[] strs) {
if (strs == null) {
return null;
}
Path[] result = new Path[strs.length];
for(int i=0; i < strs.length; ++i) {
result[i] = new Path(strs[i]);
}
return result;
}
/**
* Get cache archives set in the Configuration. Used by
* internal DistributedCache and MapReduce code.
* @param conf The configuration which contains the archives
* @return An array of the caches set in the Configuration
* @throws IOException
*/
public static URI[] getCacheArchives(Configuration conf) throws IOException {
return StringUtils.stringToURI(conf.getStrings(CACHE_ARCHIVES));
}
/**
* Get cache files set in the Configuration. Used by internal
* DistributedCache and MapReduce code.
* @param conf The configuration which contains the files
 * @return An array of the files set in the Configuration
* @throws IOException
*/
public static URI[] getCacheFiles(Configuration conf) throws IOException {
return StringUtils.stringToURI(conf.getStrings(CACHE_FILES));
}
/**
* Return the path array of the localized caches. Intended to be used
* by user code.
* @param conf Configuration that contains the localized archives
* @return A path array of localized caches
* @throws IOException
*/
public static Path[] getLocalCacheArchives(Configuration conf)
throws IOException {
return StringUtils.stringToPath(conf
.getStrings(CACHE_LOCALARCHIVES));
}
/**
* Return the path array of the localized files. Intended to be used
* by user code.
* @param conf Configuration that contains the localized files
* @return A path array of localized files
* @throws IOException
*/
public static Path[] getLocalCacheFiles(Configuration conf)
throws IOException {
return StringUtils.stringToPath(conf.getStrings(CACHE_LOCALFILES));
}
/**
* Parse a list of strings into longs.
* @param strs the list of strings to parse
* @return a list of longs that were parsed. same length as strs.
*/
private static long[] parseTimestamps(String[] strs) {
if (strs == null) {
return null;
}
long[] result = new long[strs.length];
for(int i=0; i < strs.length; ++i) {
result[i] = Long.parseLong(strs[i]);
}
return result;
}
  /**
   * Get the timestamps of the archives. Used by internal
   * DistributedCache and MapReduce code.
   * @param conf The configuration which stored the timestamps
   * @return a long array of timestamps, or null when no timestamps are stored
   */
  public static long[] getArchiveTimestamps(Configuration conf) {
    return parseTimestamps(conf.getStrings(CACHE_ARCHIVES_TIMESTAMPS));
  }
  /**
   * Get the timestamps of the files. Used by internal
   * DistributedCache and MapReduce code.
   * @param conf The configuration which stored the timestamps
   * @return a long array of timestamps, or null when no timestamps are stored
   */
  public static long[] getFileTimestamps(Configuration conf) {
    return parseTimestamps(conf.getStrings(CACHE_FILES_TIMESTAMPS));
  }
  /**
   * Record the timestamps of the archives to be localized, so their
   * freshness can be checked later. Used by internal MapReduce code.
   * @param conf Configuration which stores the timestamps
   * @param timestamps comma separated list of timestamps of archives.
   * The order should be the same as the order in which the archives are added.
   */
  public static void setArchiveTimestamps(Configuration conf, String timestamps) {
    conf.set(CACHE_ARCHIVES_TIMESTAMPS, timestamps);
  }
  /**
   * Record the timestamps of the files to be localized, so their
   * freshness can be checked later. Used by internal MapReduce code.
   * @param conf Configuration which stores the timestamps
   * @param timestamps comma separated list of timestamps of files.
   * The order should be the same as the order in which the files are added.
   */
  public static void setFileTimestamps(Configuration conf, String timestamps) {
    conf.set(CACHE_FILES_TIMESTAMPS, timestamps);
  }
  /**
   * Set the conf to contain the location for localized archives,
   * replacing any previous value. Used by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local archives
   */
  public static void setLocalArchives(Configuration conf, String str) {
    conf.set(CACHE_LOCALARCHIVES, str);
  }
  /**
   * Set the conf to contain the location for localized files,
   * replacing any previous value. Used by internal DistributedCache code.
   * @param conf The conf to modify to contain the localized caches
   * @param str a comma separated list of local files
   */
  public static void setLocalFiles(Configuration conf, String str) {
    conf.set(CACHE_LOCALFILES, str);
  }
/**
* Add a archive that has been localized to the conf. Used
* by internal DistributedCache code.
* @param conf The conf to modify to contain the localized caches
* @param str a comma separated list of local archives
*/
public static void addLocalArchives(Configuration conf, String str) {
String archives = conf.get(CACHE_LOCALARCHIVES);
conf.set(CACHE_LOCALARCHIVES, archives == null ? str
: archives + "," + str);
}
/**
* Add a file that has been localized to the conf.. Used
* by internal DistributedCache code.
* @param conf The conf to modify to contain the localized caches
* @param str a comma separated list of local files
*/
public static void addLocalFiles(Configuration conf, String str) {
String files = conf.get(CACHE_LOCALFILES);
conf.set(CACHE_LOCALFILES, files == null ? str
: files + "," + str);
}
/**
* Add a archives to be localized to the conf. Intended to
* be used by user code.
* @param uri The uri of the cache to be localized
* @param conf Configuration to add the cache to
*/
public static void addCacheArchive(URI uri, Configuration conf) {
String archives = conf.get(CACHE_ARCHIVES);
conf.set(CACHE_ARCHIVES, archives == null ? uri.toString()
: archives + "," + uri.toString());
}
/**
* Add a file to be localized to the conf. Intended
* to be used by user code.
* @param uri The uri of the cache to be localized
* @param conf Configuration to add the cache to
*/
public static void addCacheFile(URI uri, Configuration conf) {
String files = conf.get(CACHE_FILES);
conf.set(CACHE_FILES, files == null ? uri.toString() : files + ","
+ uri.toString());
}
  /**
   * Add a file path to the current set of classpath entries. It adds the file
   * to cache as well. Intended to be used by user code.
   *
   * @deprecated
   *
   * Please use {@link #addFileToClassPath(Path, Configuration, FileSystem)}
   * instead. The {@code FileSystem} should be obtained within an
   * appropriate {@code doAs}.
   *
   * @param file Path of the file to be added
   * @param conf Configuration that contains the classpath setting
   * @throws IOException if the default FileSystem cannot be obtained
   */
  @Deprecated
  public static void addFileToClassPath(Path file, Configuration conf)
    throws IOException {
	  // Delegates to the FileSystem-aware overload using the default FS.
	  addFileToClassPath(file, conf, FileSystem.get(conf));
  }
/**
* Add a file path to the current set of classpath entries. It adds the file
* to cache as well. Intended to be used by user code.
*
* @param file Path of the file to be added
* @param conf Configuration that contains the classpath setting
* @param fs FileSystem with respect to which {@code archivefile} should
* be interpreted.
*/
public static void addFileToClassPath
(Path file, Configuration conf, FileSystem fs)
throws IOException {
String filepath = file.toUri().getPath();
String classpath = conf.get("mapred.job.classpath.files");
conf.set("mapred.job.classpath.files", classpath == null
? filepath
: classpath + System.getProperty("path.separator") + filepath);
URI uri = fs.makeQualified(file).toUri();
addCacheFile(uri, conf);
}
/**
* Get the file entries in classpath as an array of Path.
* Used by internal DistributedCache code.
*
* @param conf Configuration that contains the classpath setting
*/
public static Path[] getFileClassPaths(Configuration conf) {
String classpath = conf.get("mapred.job.classpath.files");
if (classpath == null)
return null;
ArrayList<Object> list = Collections.list(new StringTokenizer(classpath, System
.getProperty("path.separator")));
Path[] paths = new Path[list.size()];
for (int i = 0; i < list.size(); i++) {
paths[i] = new Path((String) list.get(i));
}
return paths;
}
  /**
   * Add an archive path to the current set of classpath entries. It adds the
   * archive to cache as well. Intended to be used by user code.
   *
   * @deprecated
   *
   * Please use {@link #addArchiveToClassPath(Path, Configuration, FileSystem)}
   * instead. The {@code FileSystem} should be obtained within an
   * appropriate {@code doAs}.
   *
   * @param archive Path of the archive to be added
   * @param conf Configuration that contains the classpath setting
   * @throws IOException if the default FileSystem cannot be obtained
   */
  @Deprecated
  public static void addArchiveToClassPath
    (Path archive, Configuration conf)
    throws IOException {
	  // Delegates to the FileSystem-aware overload using the default FS.
	  addArchiveToClassPath(archive, conf, FileSystem.get(conf));
  }
/**
* Add an archive path to the current set of classpath entries. It adds the
* archive to cache as well. Intended to be used by user code.
*
* @param archive Path of the archive to be added
* @param conf Configuration that contains the classpath setting
* @param fs FileSystem with respect to which {@code archive} should be interpreted.
*/
public static void addArchiveToClassPath
(Path archive, Configuration conf, FileSystem fs)
throws IOException {
String archivepath = archive.toUri().getPath();
String classpath = conf.get("mapred.job.classpath.archives");
conf.set("mapred.job.classpath.archives", classpath == null
? archivepath
: classpath + System.getProperty("path.separator") + archivepath);
URI uri = fs.makeQualified(archive).toUri();
addCacheArchive(uri, conf);
}
/**
* Get the archive entries in classpath as an array of Path.
* Used by internal DistributedCache code.
*
* @param conf Configuration that contains the classpath setting
*/
public static Path[] getArchiveClassPaths(Configuration conf) {
String classpath = conf.get("mapred.job.classpath.archives");
if (classpath == null)
return null;
ArrayList<Object> list = Collections.list(new StringTokenizer(classpath, System
.getProperty("path.separator")));
Path[] paths = new Path[list.size()];
for (int i = 0; i < list.size(); i++) {
paths[i] = new Path((String) list.get(i));
}
return paths;
}
  /**
   * This method allows you to create symlinks in the current working directory
   * of the task to all the cache files/archives.
   * Intended to be used by user code.
   * Internally this just records the "yes" flag read back by
   * {@link #getSymlink(Configuration)}.
   * @param conf the jobconf
   */
  public static void createSymlink(Configuration conf){
    conf.set(CACHE_SYMLINK, "yes");
  }
/**
* This method checks to see if symlinks are to be create for the
* localized cache files in the current working directory
* Used by internal DistributedCache code.
* @param conf the jobconf
* @return true if symlinks are to be created- else return false
*/
public static boolean getSymlink(Configuration conf){
String result = conf.get(CACHE_SYMLINK);
if ("yes".equals(result)){
return true;
}
return false;
}
/**
* This method checks if there is a conflict in the fragment names
* of the uris. Also makes sure that each uri has a fragment. It
* is only to be called if you want to create symlinks for
* the various archives and files. May be used by user code.
* @param uriFiles The uri array of urifiles
* @param uriArchives the uri array of uri archives
*/
public static boolean checkURIs(URI[] uriFiles, URI[] uriArchives) {
if ((uriFiles == null) && (uriArchives == null)){
return true;
}
// check if fragment is null for any uri
// also check if there are any conflicts in fragment names
Set<String> fragments = new HashSet<String>();
// iterate over file uris
if (uriFiles != null) {
for (int i = 0; i < uriFiles.length; i++) {
String fragment = uriFiles[i].getFragment();
if (fragment == null) {
return false;
}
String lowerCaseFragment = fragment.toLowerCase();
if (fragments.contains(lowerCaseFragment)) {
return false;
}
fragments.add(lowerCaseFragment);
}
}
// iterate over archive uris
if (uriArchives != null) {
for (int i = 0; i < uriArchives.length; i++) {
String fragment = uriArchives[i].getFragment();
if (fragment == null) {
return false;
}
String lowerCaseFragment = fragment.toLowerCase();
if (fragments.contains(lowerCaseFragment)) {
return false;
}
fragments.add(lowerCaseFragment);
}
}
return true;
}
}
|
commit fdc3b0490b88162235b6d267cbbae48e2dbe3bc4
Author: Krishna Ramachandran <ramach@yahoo-inc.com>
Date: Thu Feb 17 18:03:39 2011 -0800
Fix Ticket 4337918 - [STRIPING] default name node is viewfs:/// and if we pass
updated YAHOO-CHANGES.txt
+++ b/YAHOO-CHANGES.txt
+ Bug:4337918 Fix [STRIPING] default name node is viewfs:/// if we pass
+ hdfs://path giving error - fix in DistributedCache(ramach)
+
git-svn-id: 7e7a2d564dde2945c3a26e08d1235fcc0cb7aba1@1077793 13f79535-47bb-0310-9956-ffa450edef68
|
src/mapred/org/apache/hadoop/filecache/DistributedCache.java
|
commit fdc3b0490b88162235b6d267cbbae48e2dbe3bc4 Author: Krishna Ramachandran <ramach@yahoo-inc.com> Date: Thu Feb 17 18:03:39 2011 -0800
|
|
Java
|
apache-2.0
|
8d93dcf63355e5b8b61c634b0ae67fc96eb7de6c
| 0
|
osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi,osgi/osgi
|
package org.osgi.service.enocean;
/**
 * This interface represents an EnOcean Host, a device that offers EnOcean
 * networking features.
 *
 * @version 1.0
 * @author $Id$
 */
public interface EnOceanHost {
	/**
	 * repeater level to disable repeating; this is the default.
	 */
	public static int REPEATER_LEVEL_OFF = 0;
	/**
	 * repeater level to repeat every telegram at most once.
	 */
	public static int REPEATER_LEVEL_ONE = 1;
	/**
	 * repeater level to repeat every telegram at most twice.
	 */
	public static int REPEATER_LEVEL_TWO = 2;
	// Flash memory area selectors; presumably mirror the ESP3 memory types
	// -- TODO confirm the exact semantics against the ESP3 specification.
	public static int FLASH_TYPE_FLASH = 0;
	public static int FLASH_TYPE_RAM_0 = 1;
	public static int FLASH_TYPE_DATA = 2;
	public static int FLASH_TYPE_IDATA = 3;
	public static int FLASH_TYPE_XDATA = 4;
	/**
	 * A sub-interface that is used to access the chip's filter values.
	 *
	 * @author $Id$
	 */
	public interface EnOceanHostFilter {
		/**
		 * The type of the filter (chip, rorg, dbm)
		 * @return the type of this filter
		 */
		public int getType();
		/**
		 * The value of the filter (a chip id, a particular rorg, a power level)
		 * @return the value of this filter
		 */
		public int getValue();
		/**
		 * The status of this filter; is it enabled or disabled ?
		 * @return true if the filter is enabled, false otherwise
		 */
		public boolean isEnabled();
	}
	/**
	 * Reset the EnOcean Host (cf. ESP3 command 0x02: C0_WR_RESET)
	 * @throws EnOceanException if the command fails
	 */
	public void reset() throws EnOceanException;
	/**
	 * Returns the chip's application version info (cf. ESP3 command 0x03: C0_RD_VERSION)
	 * @return a String object containing the application version info.
	 * @throws EnOceanException if the command fails
	 */
	public String appVersion() throws EnOceanException;
	/**
	 * Returns the chip's API version info (cf. ESP3 command 0x03: C0_RD_VERSION)
	 * @return a String object containing the API version info.
	 * @throws EnOceanException if the command fails
	 */
	public String apiVersion() throws EnOceanException;
	/**
	 * Gets the BASE_ID of the chip, if set (cf. ESP3 command 0x08: C0_RD_IDBASE)
	 * @return the BASE_ID of the device as defined in EnOcean specification
	 * @throws EnOceanException if the command fails
	 */
	public int getBaseID() throws EnOceanException;
	/**
	 * Sets the base ID of the device, may be used up to 10 times (cf. ESP3 command 0x07: C0_WR_IDBASE)
	 * @param baseID to be set.
	 * @throws EnOceanException if the command fails
	 */
	public void setBaseID(int baseID) throws EnOceanException;
	/**
	 * Sets the repeater level on the host (cf. ESP3 command 0x09: C0_WR_REPEATER)
	 * @param level one value in REPEATER_LEVEL_XXX
	 * @throws EnOceanException if the command fails
	 */
	public void setRepeaterLevel(int level) throws EnOceanException;
	/**
	 * Gets the current repeater level of the host (cf. ESP3 command 0x0A: C0_RD_REPEATER)
	 * @return REPEATER_LEVEL_XXX depending on the repeater level.
	 * @throws EnOceanException if the command fails
	 */
	public int getRepeaterLevel() throws EnOceanException;
	/**
	 * Retrieves the SENDER_ID associated with the given servicePID, if existing on this chip.
	 * @param servicePID the service PID the SENDER_ID was registered under
	 *            -- NOTE(review): confirm against the registration code
	 * @return the SENDER_ID associated with servicePID
	 */
	public int getSenderId(String servicePID);
}
|
org.osgi.service.enocean/src/org/osgi/service/enocean/EnOceanHost.java
|
package org.osgi.service.enocean;
/**
 * This interface represents an EnOcean Host, a device that offers EnOcean
 * networking features.
 *
 * @version 1.0
 * @author $Id$
 */
public interface EnOceanHost {
	/**
	 * repeater level to disable repeating; this is the default.
	 */
	public static int REPEATER_LEVEL_OFF = 0;
	/**
	 * repeater level to repeat every telegram at most once.
	 */
	public static int REPEATER_LEVEL_ONE = 1;
	/**
	 * repeater level to repeat every telegram at most twice.
	 */
	public static int REPEATER_LEVEL_TWO = 2;
	// Flash memory area selectors; presumably mirror the ESP3 memory types
	// -- TODO confirm the exact semantics against the ESP3 specification.
	public static int FLASH_TYPE_FLASH = 0;
	public static int FLASH_TYPE_RAM_0 = 1;
	public static int FLASH_TYPE_DATA = 2;
	public static int FLASH_TYPE_IDATA = 3;
	public static int FLASH_TYPE_XDATA = 4;
	/**
	 * A sub-interface that is used to access the chip's filter values.
	 *
	 * @author $Id$
	 */
	public interface EnOceanHostFilter {
		/**
		 * The type of the filter (chip, rorg, dbm)
		 * @return the type of this filter
		 */
		public int getType();
		/**
		 * The value of the filter (a chip id, a particular rorg, a power level)
		 * @return the value of this filter
		 */
		public int getValue();
		/**
		 * The status of this filter; is it enabled or disabled ?
		 * @return true if the filter is enabled, false otherwise
		 */
		public boolean isEnabled();
	}
	/**
	 * Reset the EnOcean Host (cf. ESP3 command 0x02: C0_WR_RESET)
	 * @throws EnOceanException if the command fails
	 */
	public void reset() throws EnOceanException;
	/**
	 * Returns the chip's application version info (cf. ESP3 command 0x03: C0_RD_VERSION)
	 * @return a String object containing the application version info.
	 * @throws EnOceanException if the command fails
	 */
	public String appVersion() throws EnOceanException;
	/**
	 * Returns the chip's API version info (cf. ESP3 command 0x03: C0_RD_VERSION)
	 * @return a String object containing the API version info.
	 * @throws EnOceanException if the command fails
	 */
	public String apiVersion() throws EnOceanException;
	/**
	 * Gets the BASE_ID of the chip, if set (cf. ESP3 command 0x08: C0_RD_IDBASE)
	 * @return the BASE_ID of the device as defined in EnOcean specification
	 * @throws EnOceanException if the command fails
	 */
	public int getBaseID() throws EnOceanException;
	/**
	 * Sets the base ID of the device, may be used up to 10 times (cf. ESP3 command 0x07: C0_WR_IDBASE)
	 * @param baseID to be set.
	 * @throws EnOceanException if the command fails
	 */
	public void setBaseID(int baseID) throws EnOceanException;
	/**
	 * Sets the repeater level on the host (cf. ESP3 command 0x09: C0_WR_REPEATER)
	 * @param level one value in REPEATER_LEVEL_XXX
	 * @throws EnOceanException if the command fails
	 */
	public void setRepeaterLevel(int level) throws EnOceanException;
	/**
	 * Gets the current repeater level of the host (cf. ESP3 command 0x0A: C0_RD_REPEATER)
	 * @return REPEATER_LEVEL_XXX depending on the repeater level.
	 * @throws EnOceanException if the command fails
	 */
	public int getRepeaterLevel() throws EnOceanException;
}
|
EnOceanHost: add getSenderID method
|
org.osgi.service.enocean/src/org/osgi/service/enocean/EnOceanHost.java
|
EnOceanHost: add getSenderID method
|
|
Java
|
apache-2.0
|
64bb795da430206c1f0d5e2744653b2cdff2b729
| 0
|
akalberer/deep-runtime-library,ursgraf/runtime-library,deepjava/runtime-library,AaronLeander/runtime-library
|
package ch.ntb.inf.deep.runtime.mpc555;
import ch.ntb.inf.deep.unsafe.*;
/* changes:
* 11.11.10 NTB/Urs Graf creation
*/
public class Kernel implements ntbMpc555HB {
	static int loopAddr;	// address of loop(), recorded by boot() and entered by the static initializer
	static int cmdAddr;	// address of a pending command to run from loop(); -1 means none

	// Endless idle loop: when a command address has been posted in cmdAddr,
	// branch to it through the link register, then clear the request.
	private static void loop() { // endless loop
		while (true) {
			if (cmdAddr != -1) {
				US.PUTSPR(LR, cmdAddr);
				US.ASM("bclrl always, 0");
				cmdAddr = -1;
			}
		}
	}

	/**
	 * @return system time in us
	 */
	public static long time() {
		long time = (long)US.GETSPR(TBUread) << 32;
		time |= US.GETSPR(TBLread) & 0xffffffffL;
		return time;
	}

	/**
	 * blinks LED on MPIOSM pin 15, nTimes with approx. 100us high time and 100us low time, blocks for 1s
	 */
	public static void blink(int nTimes) {
		US.PUT2(MPIOSMDDR, US.GET2(MPIOSMDDR) | 0x8000);	// pin 15 as output
		int delay = 200000;
		for (int i = 0; i < nTimes; i++) {
			US.PUT2(MPIOSMDR, US.GET2(MPIOSMDR) | 0x8000);
			for (int k = 0; k < delay; k++);	// busy wait, LED on
			US.PUT2(MPIOSMDR, US.GET2(MPIOSMDR) & ~0x8000);
			for (int k = 0; k < delay; k++);	// busy wait, LED off
		}
		for (int k = 0; k < (10 * delay + nTimes * 2 * delay); k++);
//		for (int k = 0; k < (2000000); k++);
	}

	// Frame check sequence over [begin, end); not implemented yet, always
	// reports success by returning 0.
	private static short FCS(int begin, int end) {
		return 0;
	}

	// Boot sequence: configures clocks, chip selects and pads, verifies and
	// zeroes the class variables of every class listed in the system table,
	// then runs each class initializer (recording loop()'s address when the
	// kernel's own initializer is encountered).
	private static void boot() {
//		blink(1);
		US.PUT4(SIUMCR, 0x00040000); // internal arb., no data show cycles, BDM operation, CS functions,
		// output FREEZE, no lock, use data & address bus, use as RSTCONF, no reserv. logic
		US.PUT4(PLPRCR, 0x00900000); // MF = 9, 40MHz operation with 4MHz quarz
		int reg;
		do reg = US.GET4(PLPRCR); while ((reg & (1 << 16)) == 0); // wait for PLL to lock
		US.PUT4(UMCR, 0); // enable IMB clock, no int. multiplexing, full speed
		US.PUTSPR(158, 0x800); // take out of serialized mode
		US.PUTSPR(638, 0x800); // enable internal flash
		// configure CS for external Flash
		US.PUT4(BR0, 0x01000003); // chip select base address reg external Flash,
		// base address = 1000000H, 32 bit port, no write protect, WE activ, no burst accesses, valid
		US.PUT4(OR0, 0x0ffc00020); // address mask = 4MB, adress type mask = 0,
		// CS normal timing, CS and addr. same timing, 2 wait states
		// configure CS for external RAM
		US.PUT4(BR1, 0x00800003); // chip select base address reg external RAM,
		// base address = 800000H, 32 bit port, no write protect, WE activ, no burst accesses, valid
		US.PUT4(OR1, 0x0ffe00020); //address mask = 2MB, adress type mask = 0,
		// CS normal timing, CS and addr. same timing, 2 wait states
		US.PUT2(PDMCR, 0); // configure pads, slow slew rate, enable pull-ups
		US.PUT4(SCCR, 0x081210300); // enable clock out and engineering clock, EECLK = 10MHz
		US.PUT2(TBSCR, 1); // time base, no interrupts, stop time base while freeze, enable
		short reset = US.GET2(RSR);
		if ((reset & (1<<5 | 1<<15)) != 0) { // boot from flash
			US.PUT4(SYPCR, 0); // noch korrigieren (TODO: still to be corrected)
			/* SYS.PUT4(DMBR, pDMBRRom);
			SYS.PUT4(DMOR, pDMOR);
			SYS.GET(sysTabAdrRom + stoSysTabSize, sysTabSize);
			SYS.MOVE(sysTabAdrRom, sysTabAdr, sysTabSize)*/
		}
//		SetFPSCR;
		int classConstOffset = US.GET4(sysTabBaseAddr) * 4 + 4;
		int state = 0;
		int kernelClinitAddr = US.GET4(sysTabBaseAddr + stKernelClinitAddr);
		// first pass: verify checksums and zero class variables
		while (true) {
			// get addresses of classes from system table
			int constBlkBase = US.GET4(sysTabBaseAddr + classConstOffset);
			if (constBlkBase == 0) break;
			// check integrity of constant block for each class
			int constBlkSize = US.GET4(constBlkBase);
			if (FCS(constBlkBase, constBlkBase + constBlkSize) != 0) while(true) blink(1);
			// check integrity of code block for each class
			int codeBase = US.GET4(constBlkBase + cblkCodeBaseOffset);
			int codeSize = US.GET4(constBlkBase + cblkCodeSizeOffset);
			if (FCS(codeBase, codeBase + codeSize) != 0) while(true) blink(2);
			// initialize class variables
			int varBase = US.GET4(constBlkBase + cblkVarBaseOffset);
			int varSize = US.GET4(constBlkBase + cblkVarSizeOffset);
			int begin = varBase;
			int end = varBase + varSize;
			while (begin < end) {US.PUT4(begin, 0); begin += 4;}
			state++;
			classConstOffset += 4;
		}
		// second pass: run each class initializer
		classConstOffset = US.GET4(sysTabBaseAddr) * 4 + 4;
		while (true) {
			// get addresses of classes from system table
			int constBlkBase = US.GET4(sysTabBaseAddr + classConstOffset);
			if (constBlkBase == 0) break;
			// initialize classes
			int clinitAddr = US.GET4(constBlkBase + cblkClinitAddrOffset);
			if (clinitAddr != -1) {
				if (clinitAddr != kernelClinitAddr) { // skip kernel
					US.PUTSPR(LR, clinitAddr);
					US.ASM("bclrl always, 0");
				} else { // kernel
					loopAddr = US.ADR_OF_METHOD("ch/ntb/inf/deep/runtime/mpc555/Kernel/loop");
				}
			}
			// the direct call to clinitAddr destroys volatile registers, hence make sure
			// the variable classConstOffset is forced into nonvolatile register
			// this is done by call to empty()
			empty();
			classConstOffset += 4;
		}
	}

	// Intentionally empty; see the comment in boot() above.
	private static void empty() {
	}

	// Class initializer: runs boot(), then branches to loop() and never returns.
	static {
		boot();
		cmdAddr = -1; // must be after class variables are zeroed by boot
		US.ASM("mtspr EIE, r0"); // presumably re-enables interrupts (EIE) -- confirm against MPC555 manual
		US.PUTSPR(LR, loopAddr);
		US.ASM("bclrl always, 0");
	}
}
|
src/ch/ntb/inf/deep/runtime/mpc555/Kernel.java
|
package ch.ntb.inf.deep.runtime.mpc555;
import ch.ntb.inf.deep.unsafe.*;
/* changes:
* 11.11.10 NTB/Urs Graf creation
*/
public class Kernel implements ntbMpc555HB {
	static int loopAddr;	// address of loop(), recorded by boot() and entered by the static initializer
	static int cmdAddr;	// address of a pending command to run from loop(); -1 means none

	// Endless idle loop: when a command address has been posted in cmdAddr,
	// branch to it through the link register, then clear the request.
	private static void loop() { // endless loop
		while (true) {
			if (cmdAddr != -1) {
				US.PUTSPR(LR, cmdAddr);
				US.ASM("bclrl always, 0");
				cmdAddr = -1;
			}
		}
	}

	/**
	 * @return system time in us
	 */
	public static long time() {
		long time = (long)US.GETSPR(TBUread) << 32;
		time |= US.GETSPR(TBLread) & 0xffffffffL;
		return time;
	}

	/**
	 * blinks LED on MPIOSM pin 15, nTimes with approx. 100us high time and 100us low time, blocks for 1s
	 */
	public static void blink(int nTimes) {
		US.PUT2(MPIOSMDDR, US.GET2(MPIOSMDDR) | 0x8000);	// pin 15 as output
		int delay = 200000;
		for (int i = 0; i < nTimes; i++) {
			US.PUT2(MPIOSMDR, US.GET2(MPIOSMDR) | 0x8000);
			for (int k = 0; k < delay; k++);	// busy wait, LED on
			US.PUT2(MPIOSMDR, US.GET2(MPIOSMDR) & ~0x8000);
			for (int k = 0; k < delay; k++);	// busy wait, LED off
		}
		for (int k = 0; k < (10 * delay + nTimes * 2 * delay); k++);
//		for (int k = 0; k < (2000000); k++);
	}

	// Frame check sequence over [begin, end); not implemented yet, always
	// reports success by returning 0.
	private static short FCS(int begin, int end) {
		return 0;
	}

	// Boot sequence: configures clocks, chip selects and pads, then in a
	// single pass over the system table verifies checksums, zeroes class
	// variables and runs each class initializer (recording loop()'s address
	// when the kernel's own initializer is encountered).
	private static void boot() {
//		blink(1);
		US.PUT4(SIUMCR, 0x00040000); // internal arb., no data show cycles, BDM operation, CS functions,
		// output FREEZE, no lock, use data & address bus, use as RSTCONF, no reserv. logic
		US.PUT4(PLPRCR, 0x00900000); // MF = 9, 40MHz operation with 4MHz quarz
		int reg;
		do reg = US.GET4(PLPRCR); while ((reg & (1 << 16)) == 0); // wait for PLL to lock
		US.PUT4(UMCR, 0); // enable IMB clock, no int. multiplexing, full speed
		US.PUTSPR(158, 0x800); // take out of serialized mode
		US.PUTSPR(638, 0x800); // enable internal flash
		// configure CS for external Flash
		US.PUT4(BR0, 0x01000003); // chip select base address reg external Flash,
		// base address = 1000000H, 32 bit port, no write protect, WE activ, no burst accesses, valid
		US.PUT4(OR0, 0x0ffc00020); // address mask = 4MB, adress type mask = 0,
		// CS normal timing, CS and addr. same timing, 2 wait states
		// configure CS for external RAM
		US.PUT4(BR1, 0x00800003); // chip select base address reg external RAM,
		// base address = 800000H, 32 bit port, no write protect, WE activ, no burst accesses, valid
		US.PUT4(OR1, 0x0ffe00020); //address mask = 2MB, adress type mask = 0,
		// CS normal timing, CS and addr. same timing, 2 wait states
		US.PUT2(PDMCR, 0); // configure pads, slow slew rate, enable pull-ups
		US.PUT4(SCCR, 0x081210300); // enable clock out and engineering clock, EECLK = 10MHz
		US.PUT2(TBSCR, 1); // time base, no interrupts, stop time base while freeze, enable
		short reset = US.GET2(RSR);
		if ((reset & (1<<5 | 1<<15)) != 0) { // boot from flash
			US.PUT4(SYPCR, 0); // noch korrigieren (TODO: still to be corrected)
			/* SYS.PUT4(DMBR, pDMBRRom);
			SYS.PUT4(DMOR, pDMOR);
			SYS.GET(sysTabAdrRom + stoSysTabSize, sysTabSize);
			SYS.MOVE(sysTabAdrRom, sysTabAdr, sysTabSize)*/
		}
//		SetFPSCR;
		int classConstOffset = US.GET4(sysTabBaseAddr) * 4 + 4;
		int state = 0;
		int kernelClinitAddr = US.GET4(sysTabBaseAddr + stKernelClinitAddr);
		while (true) {
			// !!! Kernel needs to call at least one method in this loop, as the variables in boot
			// must go into nonvolatile registers
			// get addresses of classes from system table
			int constBlkBase = US.GET4(sysTabBaseAddr + classConstOffset);
			if (constBlkBase == 0) break;
			// check integrity of constant block for each class
			int constBlkSize = US.GET4(constBlkBase);
			if (FCS(constBlkBase, constBlkBase + constBlkSize) != 0) while(true) blink(1);
			// check integrity of code block for each class
			int codeBase = US.GET4(constBlkBase + cblkCodeBaseOffset);
			int codeSize = US.GET4(constBlkBase + cblkCodeSizeOffset);
			if (FCS(codeBase, codeBase + codeSize) != 0) while(true) blink(2);
			// initialize class variables
			int varBase = US.GET4(constBlkBase + cblkVarBaseOffset);
			int varSize = US.GET4(constBlkBase + cblkVarSizeOffset);
			int begin = varBase;
			int end = varBase + varSize;
			while (begin < end) {US.PUT4(begin, 0); begin += 4;}
			// initialize classes
			int clinitAddr = US.GET4(constBlkBase + cblkClinitAddrOffset);
			if (clinitAddr != -1) {
				if (clinitAddr != kernelClinitAddr) { // skip kernel
					US.PUTSPR(LR, clinitAddr);
					US.ASM("bclrl always, 0");
				} else { // kernel
					loopAddr = US.ADR_OF_METHOD("ch/ntb/inf/deep/runtime/mpc555/Kernel/loop");
				}
			}
			state++;
			classConstOffset += 4;
		}
	}

	// Class initializer: runs boot(), then branches to loop() and never returns.
	static {
		boot();
		cmdAddr = -1; // must be after class variables are zeroed by boot
		US.ASM("mtspr EIE, r0"); // presumably re-enables interrupts (EIE) -- confirm against MPC555 manual
		US.PUTSPR(LR, loopAddr);
		US.ASM("bclrl always, 0");
	}
}
|
init of classes changed
|
src/ch/ntb/inf/deep/runtime/mpc555/Kernel.java
|
init of classes changed
|
|
Java
|
apache-2.0
|
e030047844cfb933718397cb62e4758db6674097
| 0
|
jvd10/apollo,ApolloDev/apollo,ApolloDev/apollo,ApolloDev/apollo,jvd10/apollo,jvd10/apollo,jvd10/apollo,ApolloDev/apollo
|
package edu.pitt.apollo.db;
//import edu.pitt.apollo.types.UrlOutputResource;
//import edu.pitt.apollo.types.VisualizerResult;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;
import com.mysql.jdbc.Statement;
import edu.pitt.apollo.types.v2_0_1.ApolloSoftwareTypeEnum;
import edu.pitt.apollo.types.v2_0_1.Authentication;
import edu.pitt.apollo.types.v2_0_1.Role;
import edu.pitt.apollo.types.v2_0_1.RunSimulationMessage;
import edu.pitt.apollo.types.v2_0_1.RunVisualizationMessage;
import edu.pitt.apollo.types.v2_0_1.ServiceRegistrationRecord;
import edu.pitt.apollo.types.v2_0_1.SoftwareIdentification;
/**
*
* Author: Nick Millett Email: nick.millett@gmail.com Date: May 17, 2013 Time:
* 4:35:10 PM Class: DbUtils IDE: NetBeans 6.9.1
*/
public class ApolloDbUtils {
	// JDBC connection, lazily (re)established by getConn()/establishDbConn().
	Connection dbcon = null;
	// Connection settings loaded in the constructor (keys: class, url, user, password).
	Properties properties;
	// public final int RECORD_NOT_FOUND = -1;
	// public final int PASSWORD_NOT_CORRECT = -2;
	// public final int RECORD_ALREADY_EXISTS = -3;
	// public final int SOFTWARE_ID_RECORD_NOT_FOUND = -4;
	// public final int KEY_NOT_FOUND = -5;
	// public final int RECORD_NOT_INSERTED = -6;
	// Presumably the storage format of cached content (text, url, zip) -- confirm against callers.
	public enum DbContentDataFormatEnum {
		TEXT, URL, ZIP,
	};
	// Presumably the kind of cached content -- confirm against callers.
	public enum DbContentDataType {
		CONFIGURATION_FILE
	};
public ApolloDbUtils(File databasePropertiesFile) throws IOException {
FileInputStream fis = new FileInputStream(databasePropertiesFile);
properties = new Properties();
properties.load(fis);
fis.close();
}
public String getSoftwareIdAsDbString(SoftwareIdentification softwareIdentification) {
return softwareIdentification.getSoftwareDeveloper() + "#" + softwareIdentification.getSoftwareName() + "#"
+ softwareIdentification.getSoftwareVersion();
}
private void establishDbConn() throws ClassNotFoundException, SQLException {
String dbClass = properties.getProperty("class");
String url = properties.getProperty("url");
String user = properties.getProperty("user");
String password = properties.getProperty("password");
try {
if (dbcon != null) {
dbcon.close();
}
} catch (SQLException e) {
// who cares, making a new one anyway
}
dbcon = null;
try {
Class.forName(dbClass);
dbcon = DriverManager.getConnection(url, user, password);
} catch (SQLException e) {
throw new SQLException("Error getting connection to database: " + url + " using username " + user
+ ". Specific error was:\n" + e.getMessage());
}
}
	/**
	 * JSON-encodes the given object (with Jackson default typing enabled)
	 * into an in-memory byte stream.
	 *
	 * @param obj the object to serialize
	 * @return the encoded bytes, or null when serialization fails
	 *         (the error is only logged to stderr -- callers must handle null)
	 */
	private ByteArrayOutputStream getJsonBytes(Object obj) {
		try {
			ObjectMapper mapper = new ObjectMapper();
			mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
			mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			mapper.writeValue(baos, obj);
			return baos;
		} catch (IOException ex) {
			System.err.println("IO Exception JSON encoding and getting bytes from RunSimulationMessage");
			return null;
		}
	}
public Connection getConn() throws ClassNotFoundException, SQLException {
if (dbcon == null) {
establishDbConn();
} else {
boolean connIsValid = false;
try {
connIsValid = dbcon.isValid(1000);
} catch (SQLException e1) {
// who cares, we are making a new one anyway!
}
if (!connIsValid) {
establishDbConn();
}
}
return dbcon;
}
// public Map<String, String> getStoredRuns() throws SQLException,
// ClassNotFoundException {
// try {
// String query = "SELECT LABEL,MD5HASHOFCONFIGURATIONFILE from run";
// PreparedStatement pstmt = getConn().prepareStatement(query);
// ResultSet rs = pstmt.executeQuery();
//
// Map<String, String> hashLabelMap = new HashMap<String, String>();
// while (rs.next()) {
// String hash = rs.getString("md5HashOfConfigurationFile");
// String label = rs.getString("label");
// if (label != null) {
// hashLabelMap.put(label, hash);
// }
// }
// return hashLabelMap;
// } catch (SQLException e) {
// throw new
// SQLException("Error retreiving all stored run hashes. Specific error was:\n"
// + e.getMessage());
// }
// }
	/**
	 * Looks up the primary key of the user with the given requester id and
	 * verifies the supplied password against the stored one.
	 * NOTE(review): the password is compared as plain text, and the statement
	 * and result set are never closed -- consider hashing and try/finally.
	 *
	 * @param requesterId id of the user to look up
	 * @param requesterPassword password to verify
	 * @return the users.id value of the matching row
	 * @throws ApolloDatabaseUserPasswordException if the user exists but the password differs
	 * @throws ApolloDatabaseKeyNotFoundException if no user with requesterId exists
	 */
	public int getUserKey(String requesterId, String requesterPassword) throws SQLException, ClassNotFoundException,
			ApolloDatabaseUserPasswordException, ApolloDatabaseKeyNotFoundException {
		String query = "SELECT id, requester_password FROM users WHERE requester_id = ?";
		PreparedStatement pstmt = getConn().prepareStatement(query);
		pstmt.setString(1, requesterId);
		ResultSet rs = pstmt.executeQuery();
		if (rs.next()) {
			String password = rs.getString("requester_password");
			if (requesterPassword.equals(password)) {
				return rs.getInt("id");
			} else {
				throw new ApolloDatabaseUserPasswordException("No entry in the users table where requester_id = " + requesterId
						+ " for the given password.");
			}
		} else {
			throw new ApolloDatabaseKeyNotFoundException("No entry in the users table where requester_id = " + requesterId);
		}
	}
	/**
	 * Inserts a new user row and returns its generated key.
	 * Insertion proceeds only when getUserKey() reports the user key
	 * doesn't exist.
	 * NOTE(review): when the user exists but the password differs
	 * (ApolloDatabaseUserPasswordException), control still falls through to
	 * the INSERT -- this looks unintended; confirm desired behavior.
	 *
	 * @param requesterId id of the user to create
	 * @param requesterPassword password to store
	 * @param email contact email to store
	 * @return the generated users.id value (via LAST_INSERT_ID)
	 * @throws ApolloDatabaseRecordAlreadyExistsException if the user already exists
	 */
	public int addUser(String requesterId, String requesterPassword, String email) throws SQLException, ClassNotFoundException,
			ApolloDatabaseRecordAlreadyExistsException {
		// check authorization?!
		try {
			int userKey = getUserKey(requesterId, requesterPassword);
			throw new ApolloDatabaseRecordAlreadyExistsException("User " + requesterId + " already exists in the database.");
		} catch (ApolloDatabaseKeyNotFoundException e) {
			// good this means the user doesn't already exist
		} catch (ApolloDatabaseUserPasswordException e) {
			// NOTE(review): user exists with a different password, yet we
			// continue to the INSERT below -- see method javadoc
		}
		String query = "INSERT INTO users (requester_id,requester_password,requester_email) VALUES (?,?,?)";
		PreparedStatement pstmt = getConn().prepareStatement(query);
		pstmt.setString(1, requesterId);
		pstmt.setString(2, requesterPassword);
		pstmt.setString(3, email);
		pstmt.execute();
		query = "SELECT LAST_INSERT_ID()";
		pstmt = getConn().prepareStatement(query);
		ResultSet rs = pstmt.executeQuery();
		rs.next();
		return rs.getInt(1);
	}
public int getSoftwareIdentificationKey(SoftwareIdentification softwareIdentification) throws SQLException,
        ClassNotFoundException, ApolloDatabaseKeyNotFoundException {
    // Return the primary key of the software_identification row matching all
    // four identifying fields; throws when no such row exists.
    String developer = softwareIdentification.getSoftwareDeveloper();
    String name = softwareIdentification.getSoftwareName();
    String version = softwareIdentification.getSoftwareVersion();
    String type = softwareIdentification.getSoftwareType().toString();
    String query = "SELECT id FROM software_identification where developer = ? and name = ? and version = ? and service_type = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    pstmt.setString(1, developer);
    pstmt.setString(2, name);
    pstmt.setString(3, version);
    pstmt.setString(4, type);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No entry in the software_identification table where developer = "
                + developer + " and name = " + name + " and version = " + version + " and service_type = " + type);
    }
    return rs.getInt(1);
}
private String getMd5(Object object) {
    // JSON-encode the object and hash the encoding; identical messages thus
    // share a hash (used to de-duplicate runs).
    ByteArrayOutputStream json = getJsonBytes(object);
    if (json == null) {
        // BUG FIX: getJsonBytes() returns null on serialization failure; the
        // original then threw a bare NullPointerException here.
        throw new IllegalStateException("Unable to JSON-encode object for MD5 hashing");
    }
    return DigestUtils.md5Hex(json.toString());
}
public int getRunKey(RunSimulationMessage runSimulationMessage) throws SQLException, ClassNotFoundException,
        ApolloDatabaseException {
    // Return the id of the run whose message hashes to the same MD5, inserting
    // a new run row (owned by the authenticated user) when none exists yet.
    Authentication auth = runSimulationMessage.getAuthentication();
    int userKey = getUserKey(auth.getRequesterId(), auth.getRequesterPassword());
    int softwareKey = getSoftwareIdentificationKey(runSimulationMessage.getSimulatorIdentification());
    String hash = getMd5(runSimulationMessage);
    PreparedStatement select = getConn().prepareStatement("SELECT id FROM run WHERE md5_hash_of_run_message = ?");
    select.setString(1, hash);
    ResultSet existing = select.executeQuery();
    if (existing.next()) {
        return existing.getInt(1);
    }
    PreparedStatement insert = getConn().prepareStatement(
            "INSERT INTO run (requester_id, software_id, md5_hash_of_run_message) VALUES (?,?,?)",
            Statement.RETURN_GENERATED_KEYS);
    insert.setInt(1, userKey);
    insert.setInt(2, softwareKey);
    insert.setString(3, hash);
    insert.execute();
    ResultSet keys = insert.getGeneratedKeys();
    keys.next();
    return keys.getInt(1);
}
public int addRole(SoftwareIdentification softwareIdentification, boolean canRun, boolean canViewCache) throws SQLException,
        ClassNotFoundException, ApolloDatabaseException {
    // Return the key of the (software, can_run, can_view_cached_results) role,
    // inserting the role row first if it does not exist yet.
    int softwareIdKey;
    try {
        softwareIdKey = getSoftwareIdentificationKey(softwareIdentification);
    } catch (ApolloDatabaseKeyNotFoundException e) {
        throw new ApolloDatabaseKeyNotFoundException(
                "The softwareIdentifiation object provided to addRole() does not have an entry in the software_identification table. Error was: "
                        + e.getMessage());
    }
    try {
        // reuse an identical existing role if there is one
        return getRoleKey(softwareIdKey, canRun, canViewCache);
    } catch (ApolloDatabaseKeyNotFoundException e) {
        // no such role yet -- fall through and insert one
    }
    PreparedStatement pstmt = getConn().prepareStatement(
            "INSERT INTO ROLES (software_id, can_run, can_view_cached_results) values (?, ?, ?)",
            Statement.RETURN_GENERATED_KEYS);
    try {
        pstmt.setInt(1, softwareIdKey);
        pstmt.setBoolean(2, canRun);
        pstmt.setBoolean(3, canViewCache);
        pstmt.execute();
        ResultSet keys = pstmt.getGeneratedKeys();
        if (!keys.next()) {
            throw new ApolloDatabaseException("No primary key returned from addRole().");
        }
        return keys.getInt(1);
    } finally {
        pstmt.close();
    }
}
private int getRoleKey(int softwareIdKey, boolean canRun, boolean canViewCache) throws SQLException, ClassNotFoundException,
        ApolloDatabaseKeyNotFoundException {
    // Look up the id of the role row matching (software_id, can_run,
    // can_view_cached_results); throws ApolloDatabaseKeyNotFoundException when
    // absent (addRole() relies on that to decide whether to insert).
    if (softwareIdKey < 1) {
        throw new ApolloDatabaseKeyNotFoundException("getRoleKey() called with invalid softwareIdKey: " + softwareIdKey);
    }
    String query = "SELECT id FROM roles WHERE software_id = ? AND can_run = ? AND can_view_cached_results = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    try {
        pstmt.setInt(1, softwareIdKey);
        pstmt.setBoolean(2, canRun);
        pstmt.setBoolean(3, canViewCache);
        ResultSet rs = pstmt.executeQuery();
        // BUG FIX: the original branches were inverted -- it threw "not found"
        // when a row WAS found, and called rs.getInt(1) on an empty ResultSet
        // when nothing matched.
        if (rs.next()) {
            return rs.getInt(1);
        }
        throw new ApolloDatabaseKeyNotFoundException("No entry found in the roles table where software_id = "
                + softwareIdKey + " and can_run = " + canRun + " and can_view_cached_resuls = " + canViewCache);
    } finally {
        pstmt.close();
    }
}
public Map<Integer, ServiceRegistrationRecord> getRegisteredSoftware() throws SQLException, ClassNotFoundException {
    // Build software_identification.id -> ServiceRegistrationRecord for every
    // registered service, attaching each service's admin requester_id.
    Map<Integer, ServiceRegistrationRecord> result = new HashMap<Integer, ServiceRegistrationRecord>();
    // First collect the users that administer some software: users.id -> requester_id.
    String query = "SELECT u.id, u.requester_id FROM users u, software_identification s WHERE " + "s.admin_id = u.id";
    Map<Integer, String> userIdMap = new HashMap<Integer, String>();
    PreparedStatement pstmt = getConn().prepareStatement(query);
    try {
        ResultSet rs = pstmt.executeQuery();
        while (rs.next()) {
            userIdMap.put(rs.getInt(1), rs.getString(2));
        }
    } finally {
        pstmt.close();
    }
    query = "SELECT id, developer, name, version, service_type, wsdl_url, admin_id FROM software_identification";
    pstmt = getConn().prepareStatement(query);
    try {
        ResultSet rs = pstmt.executeQuery();
        while (rs.next()) {
            ServiceRegistrationRecord srr = new ServiceRegistrationRecord();
            srr.setSoftwareIdentification(new SoftwareIdentification());
            srr.setAuthentication(new Authentication());
            int id = rs.getInt(1);
            srr.getSoftwareIdentification().setSoftwareDeveloper(rs.getString(2));
            srr.getSoftwareIdentification().setSoftwareName(rs.getString(3));
            // BUG FIX: the original never set the version and read every column
            // after "name" one position too early (service_type from column 4 --
            // the version --, wsdl_url from 5, admin_id from 6).
            srr.getSoftwareIdentification().setSoftwareVersion(rs.getString(4));
            srr.getSoftwareIdentification().setSoftwareType(ApolloSoftwareTypeEnum.valueOf(rs.getString(5)));
            srr.setUrl(rs.getString(6));
            srr.getAuthentication().setRequesterId(userIdMap.get(rs.getInt(7)));
            srr.getAuthentication().setRequesterPassword("");
            result.put(id, srr);
        }
    } finally {
        pstmt.close();
    }
    return result;
}
// okay so user A can see...?
// NOTE(review): unimplemented stub -- the intended query (commented out below)
// would collect the Role rows linked to userId via user_roles. Until it is
// restored, this method always returns null; callers must expect that.
private List<Role> getUserRoles(int userId) throws SQLException, ClassNotFoundException {
// List<Role> roles = new ArrayList<Role>();
//
// String query =
// "SELECT r.software_id, r.can_run, r.can_view_cached_results from roles r, user_roles ur where "
// + "r.id = ur.role_id AND ur.user_id = ?";
// PreparedStatement pstmt = getConn().prepareStatement(query);
// try {
// pstmt.setInt(1, userId);
// ResultSet rs = pstmt.executeQuery();
// while (rs.next()) {
// Role r = new Role();
// r.setSoftwareIdentification(getSoftwareIdentification(rs.getInt(1)));
// r.setCanRun(rs.getBoolean(2));
// r.setCanViewCachedResults(rs.getBoolean(3));
// roles.add(r);
// }
// } finally {
// pstmt.close();
// }
// return roles;
return null;
}
// NOTE(review): unimplemented stub -- ignores both arguments and always
// returns 0 (no user_roles row is ever inserted).
private int addUserRole(int userId, int roleId) {
return 0;
}
public int addTextDataContent(InputStream content) throws SQLException, ClassNotFoundException, IOException {
    // Drain the stream into a string and delegate to the String overload.
    String text = IOUtils.toString(content);
    return addTextDataContent(text);
}
public int addTextDataContent(String content) throws SQLException, ClassNotFoundException {
    // Store text content in run_data_content, de-duplicated by MD5: when an
    // identical blob is already present, return its existing id.
    String md5 = DigestUtils.md5Hex(content);
    String query = "SELECT id FROM run_data_content where md5_hash_of_content = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    try {
        pstmt.setString(1, md5);
        ResultSet rs = pstmt.executeQuery();
        if (rs.next()) {
            // no need to store the data twice
            return rs.getInt(1);
        }
    } finally {
        // BUG FIX: the original only closed this statement on the cache-miss
        // path, leaking it whenever the content was already stored.
        pstmt.close();
    }
    query = "INSERT INTO run_data_content (text_content, md5_hash_of_content) values (?,?)";
    pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
    try {
        pstmt.setString(1, content);
        pstmt.setString(2, md5);
        pstmt.execute();
        ResultSet rs = pstmt.getGeneratedKeys();
        rs.next();
        return rs.getInt(1);
    } finally {
        pstmt.close();
    }
}
public int associateContentWithRunId(int runKey, int dataContentKey, DbContentDataFormatEnum dataFormat, String dataLabel,
        DbContentDataType dataType, String dataSourceSoftware, String dataDestinationSoftware) throws ClassNotFoundException,
        SQLException, ApolloDatabaseKeyNotFoundException {
    // Resolve the run-data description key for the five axes, then delegate to
    // the (runKey, contentKey, descriptionId) overload.
    final int runDataDescriptionId;
    try {
        runDataDescriptionId = getRunDataDescriptionId(dataFormat, dataLabel, dataType, dataSourceSoftware,
                dataDestinationSoftware);
    } catch (ApolloDatabaseKeyNotFoundException e) {
        throw new ApolloDatabaseKeyNotFoundException(
                "Unable to associate data content with run id. The runDataDescriptionId was not found. Error was: "
                        + e.getMessage());
    }
    return associateContentWithRunId(runKey, dataContentKey, runDataDescriptionId);
}
public int associateContentWithRunId(int runKey, int dataContentKey, int runDataDescriptionId) throws ClassNotFoundException,
        SQLException, ApolloDatabaseKeyNotFoundException {
    // Link (run, description, content) in run_data, returning the existing row
    // id when the association is already present.
    if (runDataDescriptionId < 0) {
        throw new ApolloDatabaseKeyNotFoundException("associateContentWithRunId() called with an invalid key: " + runKey);
    }
    String query = "SELECT id FROM run_data WHERE run_id = ? AND description_id = ? and content_id = ?";
    PreparedStatement pstmt = null;
    try {
        pstmt = getConn().prepareStatement(query);
        pstmt.setInt(1, runKey);
        pstmt.setInt(2, runDataDescriptionId);
        pstmt.setInt(3, dataContentKey);
        ResultSet rs = pstmt.executeQuery();
        if (rs.next()) {
            return rs.getInt(1);
        }
    } finally {
        // BUG FIX: the original called pstmt.close() unconditionally; if
        // prepareStatement() threw, pstmt was still null and the finally block
        // raised a NullPointerException that masked the real error.
        if (pstmt != null) {
            pstmt.close();
        }
    }
    query = "INSERT INTO run_data (run_id, description_id, content_id) values (?,?,?)";
    pstmt = null;
    try {
        pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
        pstmt.setInt(1, runKey);
        pstmt.setInt(2, runDataDescriptionId);
        pstmt.setInt(3, dataContentKey);
        pstmt.execute();
        ResultSet rs = pstmt.getGeneratedKeys();
        rs.next();
        return rs.getInt(1);
    } finally {
        if (pstmt != null) {
            pstmt.close();
        }
    }
}
public int getRunDataDescriptionId(DbContentDataFormatEnum dataFormat, String dataLabel, DbContentDataType dataType,
        String dataSourceSoftware, String dataDestinationSoftware) throws SQLException, ClassNotFoundException,
        ApolloDatabaseKeyNotFoundException {
    // Look up the run_data_description id matching all five description axes;
    // throws when no matching view row exists.
    String query = "SELECT v.run_data_description_id FROM run_data_description_view v WHERE "
            + "v.format = ? AND v.label = ? and v.type = ? and v.source_software = ? and v.destination_software = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    int param = 0;
    pstmt.setString(++param, dataFormat.toString());
    pstmt.setString(++param, dataLabel);
    pstmt.setString(++param, dataType.toString());
    pstmt.setString(++param, dataSourceSoftware);
    pstmt.setString(++param, dataDestinationSoftware);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No entry found in run_data_description_view where format = "
                + dataFormat.toString() + " and label = " + dataLabel + " and type = " + dataType.toString()
                + " and source_software = " + dataSourceSoftware + " and destination_software = " + dataDestinationSoftware);
    }
    return rs.getInt(1);
}
// NOTE(review): unfinished implementation ("not done yet" below) -- it always
// returns -1, swallows every exception, and requests NO_GENERATED_KEYS while
// still calling getGeneratedKeys(). Do not rely on this method until completed.
public int addRunDataDescription(String description, String dataFormat, String dataLabel, String dataType,
String dataSourceSoftware, String dataDestinationSoftware) throws SQLException, ClassNotFoundException {
int runDataDescriptionKey = -1;
String query = "INSERT INTO run_data_description SET label = ?";
PreparedStatement pstmt = getConn().prepareStatement(query, Statement.NO_GENERATED_KEYS);
try {
pstmt.setString(1, description);
pstmt.execute();
ResultSet rs = pstmt.getGeneratedKeys();
if (rs.next()) {
runDataDescriptionKey = rs.getInt(1);
}
query = "INSERT INTO run_data_description_axis_value (run_data_description_id, run_data_description_axis_id, value) values (?,?,?)";
pstmt.setInt(1, runDataDescriptionKey);
// pstmt.setIn
// not done yet
return -1;
} catch (Exception e) {
// NOTE(review): exception intentionally ignored while this stub is unfinished.
}
return -1;
}
public SoftwareIdentification getSoftwareIdentification(int i) throws ApolloDatabaseKeyNotFoundException, SQLException,
        ClassNotFoundException {
    // Fetch the software_identification row with the given primary key and
    // expand it into a SoftwareIdentification object.
    String query = "SELECT developer, name, version, service_type FROM software_identification WHERE " + "id = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    pstmt.setInt(1, i);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No entry found in software_identification where id = " + i);
    }
    SoftwareIdentification identification = new SoftwareIdentification();
    identification.setSoftwareDeveloper(rs.getString(1));
    identification.setSoftwareName(rs.getString(2));
    identification.setSoftwareVersion(rs.getString(3));
    identification.setSoftwareType(ApolloSoftwareTypeEnum.fromValue(rs.getString(4)));
    return identification;
}
public String getUrlForSoftwareIdentification(SoftwareIdentification softwareIdentification) throws SQLException,
        ApolloDatabaseKeyNotFoundException, ClassNotFoundException {
    // Return the registered WSDL URL for the given software identification.
    String developer = softwareIdentification.getSoftwareDeveloper();
    String name = softwareIdentification.getSoftwareName();
    String version = softwareIdentification.getSoftwareVersion();
    String type = softwareIdentification.getSoftwareType().toString();
    PreparedStatement pstmt = getConn().prepareStatement(
            "SELECT wsdl_url FROM software_identification WHERE developer = ? and name = ? and version = ? and service_type = ?");
    pstmt.setString(1, developer);
    pstmt.setString(2, name);
    pstmt.setString(3, version);
    pstmt.setString(4, type);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException("Unable to get wsdl_url from software_identification where developer = "
                + developer + " and name = " + name + " and version = " + version + " and service_type = " + type);
    }
    return rs.getString(1);
}
public String getUrlForSoftwareIdentification(int softwareIdentificaitonKey) throws ApolloDatabaseKeyNotFoundException,
        SQLException, ClassNotFoundException {
    // Return the registered WSDL URL for a software_identification primary key.
    PreparedStatement pstmt = getConn().prepareStatement("SELECT wsdl_url FROM software_identification WHERE id = ?");
    pstmt.setInt(1, softwareIdentificaitonKey);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException(
                "Unable to get wsdl_url from software_identification where software_identification.id = "
                        + softwareIdentificaitonKey);
    }
    return rs.getString(1);
}
// get files
// Insert a new run row for this simulation message and return its generated key.
// Throws ApolloDatabaseRecordNotInsertedException when no key is returned.
public int addSimulationRun(RunSimulationMessage runSimulationMessage) throws ApolloDatabaseRecordNotInsertedException,
SQLException, ClassNotFoundException, ApolloDatabaseKeyNotFoundException {
int softwareKey = getSoftwareIdentificationKey(runSimulationMessage.getSimulatorIdentification());
String query = "INSERT INTO run (md5_hash_of_run_message, software_id, requester_id, last_service_to_be_called) VALUES (?, ?, ?, ?)";
PreparedStatement pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
pstmt.setString(1, getMd5(runSimulationMessage));
pstmt.setInt(2, softwareKey);
// NOTE(review): requester_id is hard-coded to 1 instead of being resolved from
// the message's Authentication (compare getRunKey()) -- confirm this is intended.
pstmt.setInt(3, 1);
pstmt.setInt(4, 4); // 4 is translator
pstmt.execute();
ResultSet rs = pstmt.getGeneratedKeys();
if (rs.next()) {
return rs.getInt(1);
} else {
throw new ApolloDatabaseRecordNotInsertedException("Record not inserted!");
}
}
public int getSimulationRunId(RunSimulationMessage runSimulationMessage) throws ApolloDatabaseKeyNotFoundException,
        SQLException, ClassNotFoundException {
    // Locate the run row keyed by (message hash, software key, requester 1).
    int softwareKey = getSoftwareIdentificationKey(runSimulationMessage.getSimulatorIdentification());
    String md5Hash = getMd5(runSimulationMessage);
    PreparedStatement pstmt = getConn().prepareStatement(
            "SELECT id FROM run WHERE md5_hash_of_run_message = ? AND software_id = ? and requester_id = ?");
    pstmt.setString(1, md5Hash);
    pstmt.setInt(2, softwareKey);
    pstmt.setInt(3, 1);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No id found for simulation run where md5_hash_of_run_message = "
                + md5Hash + " and softare_id = " + softwareKey + " and requester_id = 1");
    }
    return rs.getInt(1);
}
/**
*
* @param runId
* @param softwareIdentificationKey
* @return The number of rows that were updated (either 1 or 0).
* @throws SQLException
* @throws ClassNotFoundException
*/
public int updateLastServiceToBeCalledForRun(Integer runId, Integer softwareIdentificationKey) throws SQLException,
        ClassNotFoundException {
    // Record which service will be invoked next for the run; returns the number
    // of rows updated (1 when the run exists, otherwise 0).
    PreparedStatement pstmt = getConn().prepareStatement("UPDATE run SET last_service_to_be_called = ? WHERE id = ?");
    pstmt.setInt(1, softwareIdentificationKey);
    pstmt.setInt(2, runId);
    return pstmt.executeUpdate();
}
public int updateLastServiceToBeCalledForRun(Integer runId, SoftwareIdentification softwareIdentification)
        throws ApolloDatabaseKeyNotFoundException, SQLException, ClassNotFoundException {
    // Convenience overload: resolve the software key, then delegate.
    return updateLastServiceToBeCalledForRun(runId, getSoftwareIdentificationKey(softwareIdentification));
}
public int getIdOfLastServiceToBeCalledForRun(Integer runId) throws ApolloDatabaseKeyNotFoundException, SQLException,
        ClassNotFoundException {
    // Read the last_service_to_be_called column for the given run id.
    PreparedStatement pstmt = getConn().prepareStatement("SELECT last_service_to_be_called FROM run WHERE id = ?");
    pstmt.setInt(1, runId);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No last_service_to_be_called found for simulation run where id = "
                + runId);
    }
    return rs.getInt(1);
}
public SoftwareIdentification getLastServiceToBeCalledForRun(Integer runId) throws ApolloDatabaseKeyNotFoundException,
        SQLException, ClassNotFoundException {
    // Resolve the software id recorded for the run, then expand it into a full
    // SoftwareIdentification object.
    int softwareId = getIdOfLastServiceToBeCalledForRun(runId);
    PreparedStatement pstmt = getConn().prepareStatement(
            "SELECT developer, name, version, service_type FROM software_identification WHERE id = ?");
    pstmt.setInt(1, softwareId);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No software identification found for id = " + softwareId);
    }
    SoftwareIdentification softwareIdentification = new SoftwareIdentification();
    softwareIdentification.setSoftwareDeveloper(rs.getString("developer"));
    softwareIdentification.setSoftwareName(rs.getString("name"));
    softwareIdentification.setSoftwareVersion(rs.getString("version"));
    softwareIdentification.setSoftwareType(ApolloSoftwareTypeEnum.valueOf(rs.getString("service_type")));
    return softwareIdentification;
}
public int getNewSimulationGroupId() throws SQLException, ClassNotFoundException, ApolloDatabaseRecordNotInsertedException {
    // Insert an empty row into simulation_groups and return its generated id.
    // BUG FIX: "INSERT INTO simulation_groups DEFAULT" is not valid MySQL syntax
    // (the file uses the MySQL driver); "INSERT INTO simulation_groups () VALUES ()"
    // inserts a row consisting entirely of column defaults.
    String query = "INSERT INTO simulation_groups () VALUES ()";
    PreparedStatement pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
    pstmt.execute();
    ResultSet rs = pstmt.getGeneratedKeys();
    if (rs.next()) {
        return rs.getInt(1);
    } else {
        throw new ApolloDatabaseRecordNotInsertedException("Unable to create new simulation group, insert failed.");
    }
}
public void addRunIdsToSimulationGroup(int simulationGroupId, List<String> simulationRunIds) throws SQLException,
        ClassNotFoundException {
    // Insert one membership row per run id into the group-definition table.
    PreparedStatement pstmt = getConn().prepareStatement(
            "INSERT INTO simulation_groups_definition (simulation_group_id, run_id) VALUES (?,?)");
    for (int i = 0; i < simulationRunIds.size(); i++) {
        pstmt.setInt(1, simulationGroupId);
        pstmt.setInt(2, Integer.valueOf(simulationRunIds.get(i)));
        pstmt.execute();
    }
}
public int addVisualizationRun(RunVisualizationMessage runVisualizationMessage) throws ApolloDatabaseKeyNotFoundException,
        SQLException, ClassNotFoundException, ApolloDatabaseRecordNotInsertedException {
    // Create a simulation group for the runs to be visualized, then insert the
    // visualization run row referencing that group; returns the new run key.
    int softwareKey = getSoftwareIdentificationKey(runVisualizationMessage.getVisualizerIdentification());
    int simulationGroupId = getNewSimulationGroupId();
    addRunIdsToSimulationGroup(simulationGroupId, runVisualizationMessage.getSimulationRunIds());
    // BUG FIX: the original listed five columns but only four "?" placeholders,
    // so pstmt.setInt(5, ...) raised an SQLException on every call.
    String query = "INSERT INTO run (md5_hash_of_run_message, software_id, requester_id, last_service_to_be_called, simulation_group_id) VALUES (?, ?, ?, ?, ?)";
    PreparedStatement pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
    pstmt.setString(1, getMd5(runVisualizationMessage));
    pstmt.setInt(2, softwareKey);
    // NOTE(review): requester_id hard-coded to 1, as in addSimulationRun() --
    // confirm this is intended.
    pstmt.setInt(3, 1);
    pstmt.setInt(4, 4); // 4 is translator
    pstmt.setInt(5, simulationGroupId);
    pstmt.execute();
    ResultSet rs = pstmt.getGeneratedKeys();
    if (rs.next()) {
        return rs.getInt(1);
    } else {
        throw new ApolloDatabaseRecordNotInsertedException("Record not inserted!");
    }
}
public void removeRunData(int runId) throws SQLException, ClassNotFoundException {
    // Delete everything belonging to a run: its run_data associations, any data
    // content no longer referenced by another run, its simulation group (if
    // any), its time series, and finally the run row itself.
    String query = "SELECT content_id FROM run_data WHERE run_id = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    pstmt.setInt(1, runId);
    ResultSet rs = pstmt.executeQuery();
    while (rs.next()) {
        int contentId = rs.getInt(1);
        // BUG FIX: the original inner query checked only "run_id <> ?" without
        // restricting to this content_id, i.e. it asked "does any OTHER run have
        // ANY content at all", which almost always kept every blob alive.
        String innerQuery = "SELECT content_id FROM run_data WHERE run_id <> ? AND content_id = ?";
        PreparedStatement innerPstmt = getConn().prepareStatement(innerQuery);
        innerPstmt.setInt(1, runId);
        innerPstmt.setInt(2, contentId);
        ResultSet innerRs = innerPstmt.executeQuery();
        if (!innerRs.next()) {
            // content is not used by any other run, delete it
            String deleteQuery = "DELETE FROM run_data_content WHERE id = ?";
            PreparedStatement deletePstmt = getConn().prepareStatement(deleteQuery);
            deletePstmt.setInt(1, contentId);
            deletePstmt.execute();
        }
    }
    query = "DELETE FROM run_data WHERE run_id = ?";
    pstmt = getConn().prepareStatement(query);
    pstmt.setInt(1, runId);
    pstmt.execute();
    query = "SELECT simulation_group_id FROM run WHERE id = ?";
    pstmt = getConn().prepareStatement(query);
    pstmt.setInt(1, runId);
    rs = pstmt.executeQuery();
    if (rs.next()) {
        int simulationGroupId = rs.getInt(1);
        // simulation_group_id is nullable; only visualization runs have one
        if (!rs.wasNull()) {
            String innerQuery = "DELETE FROM simulation_groups_definition WHERE simulation_group_id = ?";
            pstmt = getConn().prepareStatement(innerQuery);
            pstmt.setInt(1, simulationGroupId);
            pstmt.execute();
            innerQuery = "DELETE FROM simulation_groups WHERE id = ?";
            pstmt = getConn().prepareStatement(innerQuery);
            pstmt.setInt(1, simulationGroupId);
            pstmt.execute();
        }
    }
    query = "DELETE FROM time_series WHERE run_id = ?";
    pstmt = getConn().prepareStatement(query);
    pstmt.setInt(1, runId);
    pstmt.execute();
    query = "DELETE FROM run WHERE id = ?";
    pstmt = getConn().prepareStatement(query);
    pstmt.setInt(1, runId);
    pstmt.execute();
}
}
|
apollo-ws/service-skeletons/java/trunk/apollo-db-common/src/main/java/edu/pitt/apollo/db/ApolloDbUtils.java
|
package edu.pitt.apollo.db;
//import edu.pitt.apollo.types.UrlOutputResource;
//import edu.pitt.apollo.types.VisualizerResult;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;
import com.mysql.jdbc.Statement;
import edu.pitt.apollo.types.v2_0_1.ApolloSoftwareTypeEnum;
import edu.pitt.apollo.types.v2_0_1.Authentication;
import edu.pitt.apollo.types.v2_0_1.Role;
import edu.pitt.apollo.types.v2_0_1.RunSimulationMessage;
import edu.pitt.apollo.types.v2_0_1.RunVisualizationMessage;
import edu.pitt.apollo.types.v2_0_1.ServiceRegistrationRecord;
import edu.pitt.apollo.types.v2_0_1.SoftwareIdentification;
import edu.pitt.apollo.types.v2_0_1.UrlOutputResource;
import edu.pitt.apollo.types.v2_0_1.VisualizerResult;
/**
 *
 * Author: Nick Millett Email: nick.millett@gmail.com Date: May 17, 2013 Time:
 * 4:35:10 PM Class: ApolloDbUtils IDE: NetBeans 6.9.1
 */
public class ApolloDbUtils {
Connection dbcon = null;
Properties properties;
// public final int RECORD_NOT_FOUND = -1;
// public final int PASSWORD_NOT_CORRECT = -2;
// public final int RECORD_ALREADY_EXISTS = -3;
// public final int SOFTWARE_ID_RECORD_NOT_FOUND = -4;
// public final int KEY_NOT_FOUND = -5;
// public final int RECORD_NOT_INSERTED = -6;
// Formats in which a piece of run data content may be stored/referenced.
public enum DbContentDataFormatEnum {
TEXT, URL, ZIP,
};
// Logical kinds of run data content recognized by the database layer.
public enum DbContentDataType {
CONFIGURATION_FILE
};
public ApolloDbUtils(File databasePropertiesFile) throws IOException {
    // Load JDBC connection settings (class/url/user/password) from the given
    // properties file; the actual connection is opened lazily by getConn().
    FileInputStream fis = new FileInputStream(databasePropertiesFile);
    try {
        properties = new Properties();
        properties.load(fis);
    } finally {
        // BUG FIX: the original left the stream open when load() threw.
        fis.close();
    }
}
public String getSoftwareIdAsDbString(SoftwareIdentification softwareIdentification) {
    // Canonical "developer#name#version" form used as a database string key.
    StringBuilder sb = new StringBuilder();
    sb.append(softwareIdentification.getSoftwareDeveloper()).append('#');
    sb.append(softwareIdentification.getSoftwareName()).append('#');
    sb.append(softwareIdentification.getSoftwareVersion());
    return sb.toString();
}
private void establishDbConn() throws ClassNotFoundException, SQLException {
    // (Re)open the JDBC connection described by the loaded properties, closing
    // any previous connection first.
    String dbClass = properties.getProperty("class");
    String url = properties.getProperty("url");
    String user = properties.getProperty("user");
    String password = properties.getProperty("password");
    if (dbcon != null) {
        try {
            dbcon.close();
        } catch (SQLException ignored) {
            // we are replacing the connection anyway
        }
    }
    dbcon = null;
    Class.forName(dbClass);
    try {
        dbcon = DriverManager.getConnection(url, user, password);
    } catch (SQLException e) {
        // BUG FIX: preserve the original exception as the cause instead of
        // flattening it to just its message.
        throw new SQLException("Error getting connection to database: " + url + " using username " + user
                + ". Specific error was:\n" + e.getMessage(), e);
    }
}
// Serialize an object to JSON (with default typing enabled so concrete class
// names are recorded) and return the encoded bytes.
// NOTE(review): returns null on serialization failure after printing to
// stderr; callers such as getMd5() must guard against that.
private ByteArrayOutputStream getJsonBytes(Object obj) {
try {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
mapper.writeValue(baos, obj);
return baos;
} catch (IOException ex) {
System.err.println("IO Exception JSON encoding and getting bytes from RunSimulationMessage");
return null;
}
}
public Connection getConn() throws ClassNotFoundException, SQLException {
    // Lazily create the connection, transparently replacing it when it no
    // longer validates (e.g. after a server-side timeout).
    if (dbcon != null) {
        boolean valid = false;
        try {
            valid = dbcon.isValid(1000);
        } catch (SQLException ignored) {
            // treat an un-checkable connection as invalid
        }
        if (valid) {
            return dbcon;
        }
    }
    establishDbConn();
    return dbcon;
}
// public Map<String, String> getStoredRuns() throws SQLException,
// ClassNotFoundException {
// try {
// String query = "SELECT LABEL,MD5HASHOFCONFIGURATIONFILE from run";
// PreparedStatement pstmt = getConn().prepareStatement(query);
// ResultSet rs = pstmt.executeQuery();
//
// Map<String, String> hashLabelMap = new HashMap<String, String>();
// while (rs.next()) {
// String hash = rs.getString("md5HashOfConfigurationFile");
// String label = rs.getString("label");
// if (label != null) {
// hashLabelMap.put(label, hash);
// }
// }
// return hashLabelMap;
// } catch (SQLException e) {
// throw new
// SQLException("Error retreiving all stored run hashes. Specific error was:\n"
// + e.getMessage());
// }
// }
public int getUserKey(String requesterId, String requesterPassword) throws SQLException, ClassNotFoundException,
        ApolloDatabaseUserPasswordException, ApolloDatabaseKeyNotFoundException {
    // Authenticate requesterId/requesterPassword against the users table and
    // return the matching primary key.
    String query = "SELECT id, requester_password FROM users WHERE requester_id = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    pstmt.setString(1, requesterId);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No entry in the users table where requester_id = " + requesterId);
    }
    String storedPassword = rs.getString("requester_password");
    if (!requesterPassword.equals(storedPassword)) {
        throw new ApolloDatabaseUserPasswordException("No entry in the users table where requester_id = " + requesterId
                + " for the given password.");
    }
    return rs.getInt("id");
}
// user key doesn't exist
public int addUser(String requesterId, String requesterPassword, String email) throws SQLException, ClassNotFoundException,
        ApolloDatabaseRecordAlreadyExistsException {
    // Create a new row in the users table and return its generated primary key.
    // Refuses to create a duplicate requester_id.
    try {
        getUserKey(requesterId, requesterPassword);
        // lookup succeeded -> the user already exists with this password
        throw new ApolloDatabaseRecordAlreadyExistsException("User " + requesterId + " already exists in the database.");
    } catch (ApolloDatabaseKeyNotFoundException e) {
        // good: the user does not exist yet, fall through and insert it
    } catch (ApolloDatabaseUserPasswordException e) {
        // BUG FIX: the requester_id exists but the supplied password is wrong.
        // The original fell through here and inserted a SECOND row with the same
        // requester_id; report the existing record instead.
        throw new ApolloDatabaseRecordAlreadyExistsException("User " + requesterId
                + " already exists in the database (password mismatch).");
    }
    String query = "INSERT INTO users (requester_id,requester_password,requester_email) VALUES (?,?,?)";
    // Use the driver's generated-key support instead of a separate
    // "SELECT LAST_INSERT_ID()" round trip (racy if the connection is shared).
    PreparedStatement pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
    try {
        pstmt.setString(1, requesterId);
        pstmt.setString(2, requesterPassword);
        pstmt.setString(3, email);
        pstmt.execute();
        ResultSet rs = pstmt.getGeneratedKeys();
        rs.next();
        return rs.getInt(1);
    } finally {
        pstmt.close();
    }
}
public int getSoftwareIdentificationKey(SoftwareIdentification softwareIdentification) throws SQLException,
        ClassNotFoundException, ApolloDatabaseKeyNotFoundException {
    // Return the primary key of the software_identification row matching all
    // four identifying fields; throws when no such row exists.
    String developer = softwareIdentification.getSoftwareDeveloper();
    String name = softwareIdentification.getSoftwareName();
    String version = softwareIdentification.getSoftwareVersion();
    String type = softwareIdentification.getSoftwareType().toString();
    String query = "SELECT id FROM software_identification where developer = ? and name = ? and version = ? and service_type = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    pstmt.setString(1, developer);
    pstmt.setString(2, name);
    pstmt.setString(3, version);
    pstmt.setString(4, type);
    ResultSet rs = pstmt.executeQuery();
    if (!rs.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No entry in the software_identification table where developer = "
                + developer + " and name = " + name + " and version = " + version + " and service_type = " + type);
    }
    return rs.getInt(1);
}
private String getMd5(Object object) {
    // JSON-encode the object and hash the encoding; identical messages thus
    // share a hash (used to de-duplicate runs).
    ByteArrayOutputStream json = getJsonBytes(object);
    if (json == null) {
        // BUG FIX: getJsonBytes() returns null on serialization failure; the
        // original then threw a bare NullPointerException here.
        throw new IllegalStateException("Unable to JSON-encode object for MD5 hashing");
    }
    return DigestUtils.md5Hex(json.toString());
}
public int getRunKey(RunSimulationMessage runSimulationMessage) throws SQLException, ClassNotFoundException,
        ApolloDatabaseException {
    // Return the id of the run whose message hashes to the same MD5, inserting
    // a new run row (owned by the authenticated user) when none exists yet.
    Authentication auth = runSimulationMessage.getAuthentication();
    int userKey = getUserKey(auth.getRequesterId(), auth.getRequesterPassword());
    int softwareKey = getSoftwareIdentificationKey(runSimulationMessage.getSimulatorIdentification());
    String hash = getMd5(runSimulationMessage);
    PreparedStatement select = getConn().prepareStatement("SELECT id FROM run WHERE md5_hash_of_run_message = ?");
    select.setString(1, hash);
    ResultSet existing = select.executeQuery();
    if (existing.next()) {
        return existing.getInt(1);
    }
    PreparedStatement insert = getConn().prepareStatement(
            "INSERT INTO run (requester_id, software_id, md5_hash_of_run_message) VALUES (?,?,?)",
            Statement.RETURN_GENERATED_KEYS);
    insert.setInt(1, userKey);
    insert.setInt(2, softwareKey);
    insert.setString(3, hash);
    insert.execute();
    ResultSet keys = insert.getGeneratedKeys();
    keys.next();
    return keys.getInt(1);
}
public int addRole(SoftwareIdentification softwareIdentification, boolean canRun, boolean canViewCache) throws SQLException,
        ClassNotFoundException, ApolloDatabaseException {
    // Return the key of the (software, can_run, can_view_cached_results) role,
    // inserting the role row first if it does not exist yet.
    int softwareIdKey;
    try {
        softwareIdKey = getSoftwareIdentificationKey(softwareIdentification);
    } catch (ApolloDatabaseKeyNotFoundException e) {
        throw new ApolloDatabaseKeyNotFoundException(
                "The softwareIdentifiation object provided to addRole() does not have an entry in the software_identification table. Error was: "
                        + e.getMessage());
    }
    try {
        // reuse an identical existing role if there is one
        return getRoleKey(softwareIdKey, canRun, canViewCache);
    } catch (ApolloDatabaseKeyNotFoundException e) {
        // no such role yet -- fall through and insert one
    }
    PreparedStatement pstmt = getConn().prepareStatement(
            "INSERT INTO ROLES (software_id, can_run, can_view_cached_results) values (?, ?, ?)",
            Statement.RETURN_GENERATED_KEYS);
    try {
        pstmt.setInt(1, softwareIdKey);
        pstmt.setBoolean(2, canRun);
        pstmt.setBoolean(3, canViewCache);
        pstmt.execute();
        ResultSet keys = pstmt.getGeneratedKeys();
        if (!keys.next()) {
            throw new ApolloDatabaseException("No primary key returned from addRole().");
        }
        return keys.getInt(1);
    } finally {
        pstmt.close();
    }
}
private int getRoleKey(int softwareIdKey, boolean canRun, boolean canViewCache) throws SQLException, ClassNotFoundException,
        ApolloDatabaseKeyNotFoundException {
    // Look up the id of the role row matching (software_id, can_run,
    // can_view_cached_results); throws ApolloDatabaseKeyNotFoundException when
    // absent (addRole() relies on that to decide whether to insert).
    if (softwareIdKey < 1) {
        throw new ApolloDatabaseKeyNotFoundException("getRoleKey() called with invalid softwareIdKey: " + softwareIdKey);
    }
    String query = "SELECT id FROM roles WHERE software_id = ? AND can_run = ? AND can_view_cached_results = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    try {
        pstmt.setInt(1, softwareIdKey);
        pstmt.setBoolean(2, canRun);
        pstmt.setBoolean(3, canViewCache);
        ResultSet rs = pstmt.executeQuery();
        // BUG FIX: the original branches were inverted -- it threw "not found"
        // when a row WAS found, and called rs.getInt(1) on an empty ResultSet
        // when nothing matched.
        if (rs.next()) {
            return rs.getInt(1);
        }
        throw new ApolloDatabaseKeyNotFoundException("No entry found in the roles table where software_id = "
                + softwareIdKey + " and can_run = " + canRun + " and can_view_cached_resuls = " + canViewCache);
    } finally {
        pstmt.close();
    }
}
public Map<Integer, ServiceRegistrationRecord> getRegisteredSoftware() throws SQLException, ClassNotFoundException {
    // Builds a map from software_identification.id to a ServiceRegistrationRecord
    // describing every registered service and the requester id of its admin user.
    Map<Integer, ServiceRegistrationRecord> result = new HashMap<Integer, ServiceRegistrationRecord>();
    // get all of the users that are an admin of a software
    String query = "SELECT u.id, u.requester_id FROM users u, software_identification s WHERE " + "s.admin_id = u.id";
    Map<Integer, String> userIdMap = new HashMap<Integer, String>();
    PreparedStatement pstmt = getConn().prepareStatement(query);
    try {
        ResultSet rs = pstmt.executeQuery();
        while (rs.next()) {
            int userId = rs.getInt(1);
            String requester_id = rs.getString(2);
            userIdMap.put(userId, requester_id);
        }
    } finally {
        pstmt.close();
    }
    query = "SELECT id, developer, name, version, service_type, wsdl_url, admin_id FROM software_identification";
    pstmt = getConn().prepareStatement(query);
    try {
        ResultSet rs = pstmt.executeQuery();
        while (rs.next()) {
            ServiceRegistrationRecord srr = new ServiceRegistrationRecord();
            srr.setSoftwareIdentification(new SoftwareIdentification());
            srr.setAuthentication(new Authentication());
            int id = rs.getInt(1);
            srr.getSoftwareIdentification().setSoftwareDeveloper(rs.getString(2));
            srr.getSoftwareIdentification().setSoftwareName(rs.getString(3));
            // BUG FIX: the original skipped the version column entirely and read
            // every later field from the wrong column index (type from 4 which is
            // version, url from 5 which is service_type, admin id from 6 which is
            // wsdl_url). Columns are: 1=id, 2=developer, 3=name, 4=version,
            // 5=service_type, 6=wsdl_url, 7=admin_id.
            srr.getSoftwareIdentification().setSoftwareVersion(rs.getString(4));
            srr.getSoftwareIdentification().setSoftwareType(ApolloSoftwareTypeEnum.valueOf(rs.getString(5)));
            srr.setUrl(rs.getString(6));
            srr.getAuthentication().setRequesterId(userIdMap.get(rs.getInt(7)));
            // passwords are never exposed through this listing
            srr.getAuthentication().setRequesterPassword("");
            result.put(id, srr);
        }
    } finally {
        pstmt.close();
    }
    return result;
}
// okay so user A can see...?
private List<Role> getUserRoles(int userId) throws SQLException, ClassNotFoundException {
// NOTE(review): unimplemented stub -- always returns null. The commented-out
// implementation below shows the intended query against roles/user_roles.
// If this is ever wired up, prefer returning an empty list over null so
// callers do not need a null check.
// List<Role> roles = new ArrayList<Role>();
//
// String query =
// "SELECT r.software_id, r.can_run, r.can_view_cached_results from roles r, user_roles ur where "
// + "r.id = ur.role_id AND ur.user_id = ?";
// PreparedStatement pstmt = getConn().prepareStatement(query);
// try {
// pstmt.setInt(1, userId);
// ResultSet rs = pstmt.executeQuery();
// while (rs.next()) {
// Role r = new Role();
// r.setSoftwareIdentification(getSoftwareIdentification(rs.getInt(1)));
// r.setCanRun(rs.getBoolean(2));
// r.setCanViewCachedResults(rs.getBoolean(3));
// roles.add(r);
// }
// } finally {
// pstmt.close();
// }
// return roles;
return null;
}
private int addUserRole(int userId, int roleId) {
// NOTE(review): unimplemented stub -- always returns 0 and never touches the
// user_roles table.
return 0;
}
public int addTextDataContent(InputStream content) throws SQLException, ClassNotFoundException, IOException {
// Convenience overload: drains the stream into a String and stores it.
// NOTE(review): IOUtils.toString(InputStream) uses the platform default
// charset -- confirm UTF-8 content is expected here.
return addTextDataContent(IOUtils.toString(content));
}
public int addTextDataContent(String content) throws SQLException, ClassNotFoundException {
    // Stores the text in run_data_content, deduplicating by MD5 hash, and
    // returns the primary key of the (existing or new) row.
    String md5 = DigestUtils.md5Hex(content);
    // is the data already in the table?
    String query = "SELECT id FROM run_data_content where md5_hash_of_content = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    try {
        pstmt.setString(1, md5);
        ResultSet rs = pstmt.executeQuery();
        if (rs.next()) {
            // no need to store the data twice
            // BUG FIX: the original returned here without closing the statement,
            // leaking it on every cache hit.
            return rs.getInt(1);
        }
    } finally {
        pstmt.close();
    }
    // store it
    query = "INSERT INTO run_data_content (text_content, md5_hash_of_content) values (?,?)";
    pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
    try {
        pstmt.setString(1, content);
        pstmt.setString(2, md5);
        pstmt.execute();
        ResultSet rs = pstmt.getGeneratedKeys();
        rs.next();
        return rs.getInt(1);
    } finally {
        pstmt.close();
    }
}
public int associateContentWithRunId(int runKey, int dataContentKey, DbContentDataFormatEnum dataFormat, String dataLabel,
        DbContentDataType dataType, String dataSourceSoftware, String dataDestinationSoftware) throws ClassNotFoundException,
        SQLException, ApolloDatabaseKeyNotFoundException {
    // Resolves the run-data description key for the given attributes, then
    // delegates to the key-based overload to create/fetch the association.
    final int descriptionKey;
    try {
        descriptionKey = getRunDataDescriptionId(dataFormat, dataLabel, dataType, dataSourceSoftware,
                dataDestinationSoftware);
    } catch (ApolloDatabaseKeyNotFoundException e) {
        throw new ApolloDatabaseKeyNotFoundException(
                "Unable to associate data content with run id. The runDataDescriptionId was not found. Error was: "
                        + e.getMessage());
    }
    return associateContentWithRunId(runKey, dataContentKey, descriptionKey);
}
public int associateContentWithRunId(int runKey, int dataContentKey, int runDataDescriptionId) throws ClassNotFoundException,
        SQLException, ApolloDatabaseKeyNotFoundException {
    // Links an existing run_data_content row to a run under the given
    // description, returning the run_data association key. If the association
    // already exists its key is returned without inserting a duplicate.
    if (runDataDescriptionId < 0) {
        // BUG FIX: the original message reported runKey even though the failed
        // validation is on runDataDescriptionId.
        throw new ApolloDatabaseKeyNotFoundException(
                "associateContentWithRunId() called with an invalid runDataDescriptionId: " + runDataDescriptionId);
    }
    String query = "SELECT id FROM run_data WHERE run_id = ? AND description_id = ? and content_id = ?";
    PreparedStatement pstmt = null;
    try {
        pstmt = getConn().prepareStatement(query);
        pstmt.setInt(1, runKey);
        pstmt.setInt(2, runDataDescriptionId);
        pstmt.setInt(3, dataContentKey);
        ResultSet rs = pstmt.executeQuery();
        if (rs.next()) {
            return rs.getInt(1);
        }
    } finally {
        // BUG FIX: guard against NPE when prepareStatement itself threw and
        // pstmt is still null (the NPE would mask the original SQLException).
        if (pstmt != null) {
            pstmt.close();
        }
    }
    query = "INSERT INTO run_data (run_id, description_id, content_id) values (?,?,?)";
    try {
        pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
        pstmt.setInt(1, runKey);
        pstmt.setInt(2, runDataDescriptionId);
        pstmt.setInt(3, dataContentKey);
        pstmt.execute();
        ResultSet rs = pstmt.getGeneratedKeys();
        rs.next();
        return rs.getInt(1);
    } finally {
        if (pstmt != null) {
            pstmt.close();
        }
    }
}
public int getRunDataDescriptionId(DbContentDataFormatEnum dataFormat, String dataLabel, DbContentDataType dataType,
        String dataSourceSoftware, String dataDestinationSoftware) throws SQLException, ClassNotFoundException,
        ApolloDatabaseKeyNotFoundException {
    // Looks up the surrogate key of a run-data description through the
    // flattened run_data_description_view.
    String query = "SELECT v.run_data_description_id FROM run_data_description_view v WHERE "
            + "v.format = ? AND v.label = ? and v.type = ? and v.source_software = ? and v.destination_software = ?";
    PreparedStatement lookup = getConn().prepareStatement(query);
    lookup.setString(1, dataFormat.toString());
    lookup.setString(2, dataLabel);
    lookup.setString(3, dataType.toString());
    lookup.setString(4, dataSourceSoftware);
    lookup.setString(5, dataDestinationSoftware);
    ResultSet match = lookup.executeQuery();
    if (!match.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No entry found in run_data_description_view where format = "
                + dataFormat.toString() + " and label = " + dataLabel + " and type = " + dataType.toString()
                + " and source_software = " + dataSourceSoftware + " and destination_software = " + dataDestinationSoftware);
    }
    return match.getInt(1);
}
public int addRunDataDescription(String description, String dataFormat, String dataLabel, String dataType,
String dataSourceSoftware, String dataDestinationSoftware) throws SQLException, ClassNotFoundException {
// NOTE(review): unfinished method (see "not done yet" below) -- it always
// returns -1 and only the dataFormat..dataDestinationSoftware parameters are
// unused so far.
int runDataDescriptionKey = -1;
String query = "INSERT INTO run_data_description SET label = ?";
// NOTE(review): prepared with NO_GENERATED_KEYS but getGeneratedKeys() is
// called below -- the generated key will likely never be visible; this should
// presumably be Statement.RETURN_GENERATED_KEYS. Confirm before finishing.
PreparedStatement pstmt = getConn().prepareStatement(query, Statement.NO_GENERATED_KEYS);
try {
pstmt.setString(1, description);
pstmt.execute();
ResultSet rs = pstmt.getGeneratedKeys();
if (rs.next()) {
runDataDescriptionKey = rs.getInt(1);
}
// The axis-value insert below is built but never prepared or executed.
query = "INSERT INTO run_data_description_axis_value (run_data_description_id, run_data_description_axis_id, value) values (?,?,?)";
pstmt.setInt(1, runDataDescriptionKey);
// pstmt.setIn
// not done yet
return -1;
} catch (Exception e) {
// NOTE(review): all exceptions (including SQLException) are swallowed here;
// callers cannot distinguish failure from the unfinished-path -1.
}
return -1;
}
public SoftwareIdentification getSoftwareIdentification(int i) throws ApolloDatabaseKeyNotFoundException, SQLException,
        ClassNotFoundException {
    // Loads the software_identification row with the given primary key and
    // materializes it as a SoftwareIdentification object.
    String query = "SELECT developer, name, version, service_type FROM software_identification WHERE " + "id = ?";
    PreparedStatement lookup = getConn().prepareStatement(query);
    lookup.setInt(1, i);
    ResultSet row = lookup.executeQuery();
    if (!row.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No entry found in software_identification where id = " + i);
    }
    SoftwareIdentification identification = new SoftwareIdentification();
    identification.setSoftwareDeveloper(row.getString(1));
    identification.setSoftwareName(row.getString(2));
    identification.setSoftwareVersion(row.getString(3));
    identification.setSoftwareType(ApolloSoftwareTypeEnum.fromValue(row.getString(4)));
    return identification;
}
public String getUrlForSoftwareIdentification(SoftwareIdentification softwareIdentification) throws SQLException,
        ApolloDatabaseKeyNotFoundException, ClassNotFoundException {
    // Resolves the WSDL URL registered for the given software identification.
    String query = "SELECT wsdl_url FROM software_identification WHERE developer = ? and name = ? and version = ? and service_type = ?";
    PreparedStatement lookup = getConn().prepareStatement(query);
    lookup.setString(1, softwareIdentification.getSoftwareDeveloper());
    lookup.setString(2, softwareIdentification.getSoftwareName());
    lookup.setString(3, softwareIdentification.getSoftwareVersion());
    lookup.setString(4, softwareIdentification.getSoftwareType().toString());
    ResultSet row = lookup.executeQuery();
    if (!row.next()) {
        throw new ApolloDatabaseKeyNotFoundException("Unable to get wsdl_url from software_identification where developer = "
                + softwareIdentification.getSoftwareDeveloper() + " and name = " + softwareIdentification.getSoftwareName()
                + " and version = " + softwareIdentification.getSoftwareVersion() + " and service_type = "
                + softwareIdentification.getSoftwareType().toString());
    }
    return row.getString(1);
}
public String getUrlForSoftwareIdentification(int softwareIdentificaitonKey) throws ApolloDatabaseKeyNotFoundException,
        SQLException, ClassNotFoundException {
    // Resolves the WSDL URL for a software_identification primary key.
    PreparedStatement lookup = getConn().prepareStatement("SELECT wsdl_url FROM software_identification WHERE id = ?");
    lookup.setInt(1, softwareIdentificaitonKey);
    ResultSet row = lookup.executeQuery();
    if (!row.next()) {
        throw new ApolloDatabaseKeyNotFoundException(
                "Unable to get wsdl_url from software_identification where software_identification.id = "
                        + softwareIdentificaitonKey);
    }
    return row.getString(1);
}
// get files
public int addSimulationRun(RunSimulationMessage runSimulationMessage) throws ApolloDatabaseRecordNotInsertedException,
SQLException, ClassNotFoundException, ApolloDatabaseKeyNotFoundException {
// Inserts a new run row for the given simulation message and returns the
// generated run id.
int softwareKey = getSoftwareIdentificationKey(runSimulationMessage.getSimulatorIdentification());
String query = "INSERT INTO run (md5_hash_of_run_message, software_id, requester_id, last_service_to_be_called) VALUES (?, ?, ?, ?)";
PreparedStatement pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
pstmt.setString(1, getMd5(runSimulationMessage));
pstmt.setInt(2, softwareKey);
// NOTE(review): requester_id is hard-coded to 1 -- confirm this placeholder
// is intentional rather than the authenticated user's key.
pstmt.setInt(3, 1);
pstmt.setInt(4, 4); // 4 is translator
pstmt.execute();
ResultSet rs = pstmt.getGeneratedKeys();
if (rs.next()) {
return rs.getInt(1);
} else {
throw new ApolloDatabaseRecordNotInsertedException("Record not inserted!");
}
}
public int getSimulationRunId(RunSimulationMessage runSimulationMessage) throws ApolloDatabaseKeyNotFoundException,
        SQLException, ClassNotFoundException {
    // Finds the run id previously stored for this exact simulation message
    // (matched by MD5 hash, simulator software key, and requester).
    int softwareKey = getSoftwareIdentificationKey(runSimulationMessage.getSimulatorIdentification());
    String md5Hash = getMd5(runSimulationMessage);
    String query = "SELECT id FROM run WHERE md5_hash_of_run_message = ? AND software_id = ? and requester_id = ?";
    PreparedStatement pstmt = getConn().prepareStatement(query);
    pstmt.setString(1, md5Hash);
    pstmt.setInt(2, softwareKey);
    // NOTE(review): requester_id hard-coded to 1, matching addSimulationRun().
    pstmt.setInt(3, 1);
    ResultSet rs = pstmt.executeQuery();
    if (rs.next()) {
        return rs.getInt(1);
    } else {
        // BUG FIX: corrected "softare_id" typo in the diagnostic message.
        throw new ApolloDatabaseKeyNotFoundException("No id found for simulation run where md5_hash_of_run_message = "
                + md5Hash + " and software_id = " + softwareKey + " and requester_id = 1");
    }
}
/**
 * Updates the last_service_to_be_called column of the given run row.
 *
 * @param runId the primary key of the run to update
 * @param softwareIdentificationKey the software_identification key of the service
 * @return The number of rows that were updated (either 1 or 0).
 * @throws SQLException
 * @throws ClassNotFoundException
 */
public int updateLastServiceToBeCalledForRun(Integer runId, Integer softwareIdentificationKey) throws SQLException,
        ClassNotFoundException {
    PreparedStatement update = getConn().prepareStatement("UPDATE run SET last_service_to_be_called = ? WHERE id = ?");
    update.setInt(1, softwareIdentificationKey);
    update.setInt(2, runId);
    return update.executeUpdate();
}
public int updateLastServiceToBeCalledForRun(Integer runId, SoftwareIdentification softwareIdentification)
throws ApolloDatabaseKeyNotFoundException, SQLException, ClassNotFoundException {
// Convenience overload: resolves the software identification to its database
// key, then delegates to the key-based update. Returns the updated row count.
int softwareIdentificationKey = getSoftwareIdentificationKey(softwareIdentification);
return updateLastServiceToBeCalledForRun(runId, softwareIdentificationKey);
}
public int getIdOfLastServiceToBeCalledForRun(Integer runId) throws ApolloDatabaseKeyNotFoundException, SQLException,
        ClassNotFoundException {
    // Reads the software key of the service most recently scheduled for a run.
    PreparedStatement lookup = getConn().prepareStatement("SELECT last_service_to_be_called FROM run WHERE id = ?");
    lookup.setInt(1, runId);
    ResultSet row = lookup.executeQuery();
    if (!row.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No last_service_to_be_called found for simulation run where id = "
                + runId);
    }
    return row.getInt(1);
}
public SoftwareIdentification getLastServiceToBeCalledForRun(Integer runId) throws ApolloDatabaseKeyNotFoundException,
        SQLException, ClassNotFoundException {
    // Resolves the last-service key for the run, then loads its full
    // software_identification record.
    int softwareId = getIdOfLastServiceToBeCalledForRun(runId);
    PreparedStatement lookup = getConn().prepareStatement(
            "SELECT developer, name, version, service_type FROM software_identification WHERE id = ?");
    lookup.setInt(1, softwareId);
    ResultSet row = lookup.executeQuery();
    if (!row.next()) {
        throw new ApolloDatabaseKeyNotFoundException("No software identification found for id = " + softwareId);
    }
    SoftwareIdentification identification = new SoftwareIdentification();
    identification.setSoftwareDeveloper(row.getString("developer"));
    identification.setSoftwareName(row.getString("name"));
    identification.setSoftwareVersion(row.getString("version"));
    identification.setSoftwareType(ApolloSoftwareTypeEnum.valueOf(row.getString("service_type")));
    return identification;
}
public int getNewSimulationGroupId() throws SQLException, ClassNotFoundException, ApolloDatabaseRecordNotInsertedException {
// Inserts an all-defaults row into simulation_groups and returns its key.
// NOTE(review): "INSERT INTO simulation_groups DEFAULT" is not standard SQL;
// MySQL expects "INSERT INTO simulation_groups () VALUES ()" and SQL standard
// is "INSERT INTO simulation_groups DEFAULT VALUES". Confirm this statement
// actually succeeds against the target database.
String query = "INSERT INTO simulation_groups DEFAULT";
PreparedStatement pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
pstmt.execute();
ResultSet rs = pstmt.getGeneratedKeys();
if (rs.next()) {
return rs.getInt(1);
} else {
throw new ApolloDatabaseRecordNotInsertedException("Unable to create new simulation group, insert failed.");
}
}
public void addRunIdsToSimulationGroup(int simulationGroupId, List<String> simulationRunIds) throws SQLException,
        ClassNotFoundException {
    // Records every given run id as a member of the simulation group.
    PreparedStatement insert = getConn().prepareStatement(
            "INSERT INTO simulation_groups_definition (simulation_group_id, run_id) VALUES (?,?)");
    for (int i = 0; i < simulationRunIds.size(); i++) {
        insert.setInt(1, simulationGroupId);
        insert.setInt(2, Integer.valueOf(simulationRunIds.get(i)));
        insert.execute();
    }
}
public int addVisualizationRun(RunVisualizationMessage runVisualizationMessage) throws ApolloDatabaseKeyNotFoundException,
        SQLException, ClassNotFoundException, ApolloDatabaseRecordNotInsertedException {
    // Creates a simulation group for the referenced runs, then inserts a new
    // run row for this visualization request and returns the generated run id.
    int softwareKey = getSoftwareIdentificationKey(runVisualizationMessage.getVisualizerIdentification());
    int simulationGroupId = getNewSimulationGroupId();
    addRunIdsToSimulationGroup(simulationGroupId, runVisualizationMessage.getSimulationRunIds());
    // BUG FIX: the original VALUES clause listed only four placeholders for
    // five columns while five parameters were bound, which would always fail
    // at the database.
    String query = "INSERT INTO run (md5_hash_of_run_message, software_id, requester_id, last_service_to_be_called, simulation_group_id) VALUES (?, ?, ?, ?, ?)";
    PreparedStatement pstmt = getConn().prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
    pstmt.setString(1, getMd5(runVisualizationMessage));
    pstmt.setInt(2, softwareKey);
    // NOTE(review): requester_id hard-coded to 1, matching addSimulationRun().
    pstmt.setInt(3, 1);
    pstmt.setInt(4, 4); // 4 is translator
    pstmt.setInt(5, simulationGroupId);
    pstmt.execute();
    ResultSet rs = pstmt.getGeneratedKeys();
    if (rs.next()) {
        return rs.getInt(1);
    } else {
        throw new ApolloDatabaseRecordNotInsertedException("Record not inserted!");
    }
}
}
|
Added a method to remove all data in the database for a run.
|
apollo-ws/service-skeletons/java/trunk/apollo-db-common/src/main/java/edu/pitt/apollo/db/ApolloDbUtils.java
|
Added a method to remove all data in the database for a run.
|
|
Java
|
apache-2.0
|
71b51c46e16c2fd89d2262f085ade8171d91fb6e
| 0
|
apache/jackrabbit,apache/jackrabbit,apache/jackrabbit
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.servlet.jackrabbit;
import java.io.IOException;
import java.io.Writer;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.jackrabbit.api.stats.RepositoryStatistics;
import org.apache.jackrabbit.api.stats.TimeSeries;
import org.apache.jackrabbit.core.RepositoryContext;
/**
* Servlet that makes Jackrabbit repository statistics available as
* a JSON object.
*
* @since Apache Jackrabbit 2.3.1
*/
public class StatisticsServlet extends HttpServlet {

    /** Serial version UID */
    private static final long serialVersionUID = -7494195499389135951L;

    /**
     * Renders the session read/write/login counters of the Jackrabbit
     * repository as a JSON object, or sends a 500 error when the repository
     * internals are not exposed in the servlet context.
     */
    @Override
    protected void doGet(
            HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // The context attribute defaults to the RepositoryContext class name,
        // but can be overridden by an init parameter keyed on that same name.
        String key = RepositoryContext.class.getName();
        String attributeName = getServletConfig().getInitParameter(key);
        if (attributeName == null) {
            attributeName = key;
        }
        RepositoryContext context =
                (RepositoryContext) getServletContext().getAttribute(attributeName);
        if (context == null) {
            response.sendError(
                    HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                    "Jackrabbit repository internals are not available");
            return;
        }
        RepositoryStatistics statistics = context.getRepositoryStatistics();
        response.setContentType("application/json");
        Writer out = response.getWriter();
        out.write('{');
        write(out, "read", statistics.getTimeSeries(
                RepositoryStatistics.Type.SESSION_READ_COUNTER));
        out.write(',');
        write(out, "write", statistics.getTimeSeries(
                RepositoryStatistics.Type.SESSION_WRITE_COUNTER));
        out.write(',');
        write(out, "login", statistics.getTimeSeries(
                RepositoryStatistics.Type.SESSION_LOGIN_COUNTER));
        out.write('}');
    }

    /** Writes one named time series as a JSON member with four resolutions. */
    private void write(Writer writer, String name, TimeSeries series)
            throws IOException {
        writer.write("\"" + name + "\":{");
        write(writer, "second", series.getValuePerSecond());
        writer.write(',');
        write(writer, "minute", series.getValuePerMinute());
        writer.write(',');
        write(writer, "hour", series.getValuePerHour());
        writer.write(',');
        write(writer, "week", series.getValuePerWeek());
        writer.write('}');
    }

    /** Writes one named array of counter values as a JSON member. */
    private void write(Writer writer, String name, long[] values)
            throws IOException {
        StringBuilder json = new StringBuilder();
        json.append('"').append(name).append("\":[");
        for (int i = 0; i < values.length; i++) {
            if (i > 0) {
                json.append(',');
            }
            json.append(values[i]);
        }
        json.append(']');
        writer.write(json.toString());
    }
}
|
jackrabbit-jcr-servlet/src/main/java/org/apache/jackrabbit/servlet/jackrabbit/StatisticsServlet.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.servlet.jackrabbit;
import java.io.IOException;
import java.io.Writer;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.jackrabbit.core.RepositoryContext;
import org.apache.jackrabbit.core.stats.RepositoryStatistics;
import org.apache.jackrabbit.core.stats.TimeSeries;
/**
* Servlet that makes Jackrabbit repository statistics available as
* a JSON object.
*
* @since Apache Jackrabbit 2.3.1
*/
public class StatisticsServlet extends HttpServlet {
/** Serial version UID */
private static final long serialVersionUID = -7494195499389135951L;
// Renders every time series exposed by RepositoryStatistics as one JSON
// object, or sends a 500 error when the repository internals are not
// available in the servlet context.
@Override
protected void doGet(
HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
// The context attribute name defaults to the RepositoryContext class name
// but can be overridden by an init parameter keyed on that same name.
String klass = RepositoryContext.class.getName();
String name = getServletConfig().getInitParameter(klass);
if (name == null) {
name = klass;
}
RepositoryContext context = (RepositoryContext)
getServletContext().getAttribute(name);
if (context != null) {
RepositoryStatistics statistics = context.getRepositoryStatistics();
response.setContentType("application/json");
Writer writer = response.getWriter();
writer.write('{');
// Emit one "name":{...} member per registered time series, comma-separated.
boolean first = true;
for (Map.Entry<String, TimeSeries> entry : statistics) {
if (first) {
first = false;
} else {
writer.write(',');
}
write(writer, entry.getKey(), entry.getValue());
}
writer.write('}');
} else {
response.sendError(
HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
"Jackrabbit repository internals are not available");
}
}
// Writes one named time series as a JSON member with per-second/minute/
// hour/week resolutions.
private void write(Writer writer, String name, TimeSeries series)
throws IOException {
writer.write('"');
writer.write(name);
writer.write('"');
writer.write(':');
writer.write('{');
write(writer, "second", series.getEventsPerSecond());
writer.write(',');
write(writer, "minute", series.getEventsPerMinute());
writer.write(',');
write(writer, "hour", series.getEventsPerHour());
writer.write(',');
write(writer, "week", series.getEventsPerWeek());
writer.write('}');
}
// Writes one named array of counter values as a JSON member.
private void write(Writer writer, String name, long[] values)
throws IOException {
writer.write('"');
writer.write(name);
writer.write('"');
writer.write(':');
writer.write('[');
for (int i = 0; i < values.length; i++) {
if (i > 0) {
writer.write(',');
}
writer.write(String.valueOf(values[i]));
}
writer.write(']');
}
}
|
JCR-3040 JMX Stats for the Session
Adjust StatisticsServlet to match the previous commit
git-svn-id: 02b679d096242155780e1604e997947d154ee04a@1182469 13f79535-47bb-0310-9956-ffa450edef68
|
jackrabbit-jcr-servlet/src/main/java/org/apache/jackrabbit/servlet/jackrabbit/StatisticsServlet.java
|
JCR-3040 JMX Stats for the Session
|
|
Java
|
apache-2.0
|
858cd58ae82733158155b765d590af49ec838b6f
| 0
|
datathings/greycat,Neoskai/greycat,electricalwind/greycat,datathings/greycat,datathings/greycat,Neoskai/greycat,electricalwind/greycat,datathings/greycat,electricalwind/greycat,Neoskai/greycat,electricalwind/greycat,Neoskai/greycat,electricalwind/greycat,electricalwind/greycat,Neoskai/greycat,datathings/greycat,datathings/greycat,Neoskai/greycat
|
package org.kevoree.modeling.memory.struct;
import org.kevoree.modeling.KConfig;
import org.kevoree.modeling.memory.KMemoryElement;
import org.kevoree.modeling.memory.KMemoryFactory;
import org.kevoree.modeling.memory.struct.cache.KCache;
import org.kevoree.modeling.memory.struct.cache.impl.OffHeapMemoryCache;
import org.kevoree.modeling.memory.struct.map.KUniverseOrderMap;
import org.kevoree.modeling.memory.struct.map.impl.OffHeapUniverseOrderMap;
import org.kevoree.modeling.memory.struct.segment.KMemorySegment;
import org.kevoree.modeling.memory.struct.segment.impl.OffHeapMemorySegment;
import org.kevoree.modeling.memory.struct.tree.KLongLongTree;
import org.kevoree.modeling.memory.struct.tree.KLongTree;
import org.kevoree.modeling.memory.struct.tree.impl.OffHeapLongLongTree;
import org.kevoree.modeling.memory.struct.tree.impl.OffHeapLongTree;
/**
* @ignore ts
*/
public class OffHeapMemoryFactory implements KMemoryFactory {
// Factory producing the off-heap implementations of the KMemory* interfaces.
@Override
public KMemorySegment newCacheSegment() {
return new OffHeapMemorySegment();
}
@Override
public KLongTree newLongTree() {
return new OffHeapLongTree();
}
@Override
public KLongLongTree newLongLongTree() {
return new OffHeapLongLongTree();
}
@Override
public KUniverseOrderMap newUniverseMap(int initSize, String p_className) {
// NOTE(review): the off-heap map construction is disabled (see the
// commented call) and this deliberately returns null; every newFromKey()
// branch that falls through to newUniverseMap() therefore yields null.
//return new OffHeapUniverseOrderMap(initSize, KConfig.CACHE_LOAD_FACTOR, p_className);
return null;
}
// Chooses the element implementation from which key components are set:
// all three set -> segment; universe+uuid only -> long tree;
// uuid == END_OF_TIME with a universe -> long-long tree; otherwise the
// universe-order map.
@Override
public KMemoryElement newFromKey(long universe, long time, long uuid) {
KMemoryElement result;
boolean isUniverseNotNull = universe != KConfig.NULL_LONG;
if (KConfig.END_OF_TIME == uuid) {
if (isUniverseNotNull) {
result = newLongLongTree();
} else {
result = newUniverseMap(0, null);
}
} else {
boolean isTimeNotNull = time != KConfig.NULL_LONG;
boolean isObjNotNull = uuid != KConfig.NULL_LONG;
if (isUniverseNotNull && isTimeNotNull && isObjNotNull) {
result = newCacheSegment();
} else if (isUniverseNotNull && !isTimeNotNull && isObjNotNull) {
result = newLongTree();
} else {
result = newUniverseMap(0, null);
}
}
return result;
}
@Override
public KCache newCache() {
return new OffHeapMemoryCache();
}
}
|
org.kevoree.modeling.microframework/src/main/java/org/kevoree/modeling/memory/struct/OffHeapMemoryFactory.java
|
package org.kevoree.modeling.memory.struct;
import org.kevoree.modeling.KConfig;
import org.kevoree.modeling.memory.KMemoryElement;
import org.kevoree.modeling.memory.KMemoryFactory;
import org.kevoree.modeling.memory.struct.cache.KCache;
import org.kevoree.modeling.memory.struct.cache.impl.OffHeapMemoryCache;
import org.kevoree.modeling.memory.struct.map.KUniverseOrderMap;
import org.kevoree.modeling.memory.struct.map.impl.OffHeapUniverseOrderMap;
import org.kevoree.modeling.memory.struct.segment.KMemorySegment;
import org.kevoree.modeling.memory.struct.segment.impl.OffHeapMemorySegment;
import org.kevoree.modeling.memory.struct.tree.KLongLongTree;
import org.kevoree.modeling.memory.struct.tree.KLongTree;
import org.kevoree.modeling.memory.struct.tree.impl.OffHeapLongLongTree;
import org.kevoree.modeling.memory.struct.tree.impl.OffHeapLongTree;
/**
* @ignore ts
*/
public class OffHeapMemoryFactory implements KMemoryFactory {

    /** Allocates a fresh off-heap memory segment. */
    @Override
    public KMemorySegment newCacheSegment() {
        return new OffHeapMemorySegment();
    }

    /** Allocates a fresh off-heap long tree. */
    @Override
    public KLongTree newLongTree() {
        return new OffHeapLongTree();
    }

    /** Allocates a fresh off-heap long-long tree. */
    @Override
    public KLongLongTree newLongLongTree() {
        return new OffHeapLongLongTree();
    }

    /** Allocates a fresh off-heap universe-order map. */
    @Override
    public KUniverseOrderMap newUniverseMap(int initSize, String p_className) {
        return new OffHeapUniverseOrderMap(initSize, KConfig.CACHE_LOAD_FACTOR, p_className);
    }

    /**
     * Chooses the element implementation from which key components are set:
     * uuid == END_OF_TIME with a universe yields a long-long tree; all three
     * components set yields a segment; universe and uuid without a time yields
     * a long tree; anything else yields a universe-order map.
     */
    @Override
    public KMemoryElement newFromKey(long universe, long time, long uuid) {
        boolean hasUniverse = universe != KConfig.NULL_LONG;
        if (uuid == KConfig.END_OF_TIME) {
            if (hasUniverse) {
                return newLongLongTree();
            }
            return newUniverseMap(0, null);
        }
        boolean hasTime = time != KConfig.NULL_LONG;
        boolean hasObj = uuid != KConfig.NULL_LONG;
        if (hasUniverse && hasTime && hasObj) {
            return newCacheSegment();
        }
        if (hasUniverse && !hasTime && hasObj) {
            return newLongTree();
        }
        return newUniverseMap(0, null);
    }

    /** Allocates a fresh off-heap cache. */
    @Override
    public KCache newCache() {
        return new OffHeapMemoryCache();
    }
}
|
* offheap stuff
|
org.kevoree.modeling.microframework/src/main/java/org/kevoree/modeling/memory/struct/OffHeapMemoryFactory.java
|
* offheap stuff
|
|
Java
|
apache-2.0
|
38cd9fc6e2b67eddbf75a2c8b954b7c9a5f96816
| 0
|
nssales/OG-Platform,McLeodMoores/starling,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,jerome79/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,jeorme/OG-Platform,codeaudit/OG-Platform,jeorme/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,ChinaQuants/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,DevStreet/FinanceAnalytics,McLeodMoores/starling,jerome79/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics
|
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.maven.scripts;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.SystemUtils;
import org.apache.maven.artifact.DependencyResolutionRequiredException;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.project.MavenProject;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.opengamma.maven.MojoUtils;
import com.opengamma.util.ClasspathUtils;
import com.opengamma.util.annotation.ClassNameAnnotationScanner;
import com.opengamma.util.generate.scripts.Scriptable;
import com.opengamma.util.generate.scripts.ScriptsGenerator;
import freemarker.cache.FileTemplateLoader;
import freemarker.template.Configuration;
import freemarker.template.Template;
/**
* Maven plugin for generating scripts to run tools annotated with {@link Scriptable}.
* <p>
* Variables available in the Freemarker template are:
* <ul>
* <li> className - the fully-qualified Java class name
* </ul>
*
* @goal scripts-tools
* @requiresDependencyResolution compile
* @threadSafe
*/
public class ScriptableScriptGeneratorMojo extends AbstractMojo {
/**
* @parameter alias="outputDir" default-value="${project.build.directory}/scripts-tools"
* @required
*/
private File _outputDir;
/**
* @parameter alias="template"
*/
private String _template;
/**
* @parameter alias="baseClassTemplateMap"
*/
private Map<String, String> _baseClassTemplateMap;
/**
* @parameter alias="resourceArtifact"
*/
private String _resourceArtifact;
/**
* @parameter alias="additionalScripts"
*/
private String[] _additionalScripts;
/**
* @parameter default-value="${project.basedir}"
* @required
*/
private File _baseDir;
/**
* @component
*/
private MavenProject _project;
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
boolean templateSet = !StringUtils.isBlank(_template);
boolean templateMapSet = _baseClassTemplateMap != null && _baseClassTemplateMap.isEmpty();
if ((templateSet && templateMapSet) || (!templateSet && !templateMapSet)) {
throw new MojoExecutionException("Exactly one of 'template' or 'baseClassTemplateMap' must be set");
}
if (!_outputDir.exists()) {
try {
getLog().debug("Creating output directory " + _outputDir);
if (!_outputDir.mkdirs()) {
throw new MojoExecutionException("Unable to create output directory " + _outputDir.getAbsolutePath());
}
} catch (Exception e) {
throw new MojoExecutionException("Error creating output directory " + _outputDir.getAbsolutePath());
}
}
List<String> classpathElementList;
try {
classpathElementList = _project.getCompileClasspathElements();
} catch (DependencyResolutionRequiredException e) {
throw new MojoExecutionException("Error obtaining dependencies", e);
}
URL[] classpathUrls = ClasspathUtils.getClasspathURLs(classpathElementList);
Set<String> annotationClasses = ClassNameAnnotationScanner.scan(classpathUrls, Scriptable.class.getName());
getLog().info("Generating " + annotationClasses.size() + " scripts");
ClassLoader classLoader = new URLClassLoader(classpathUrls, this.getClass().getClassLoader());
Map<Class<?>, Template> templateMap = resolveTemplateMap(classLoader);
for (String className : annotationClasses) {
Map<String, Object> templateData = new HashMap<String, Object>();
templateData.put("className", className);
Template template = getTemplateForClass(className, classLoader, templateMap);
if (template == null) {
getLog().warn("No template for scriptable class " + className);
continue;
}
ScriptsGenerator.generate(className, _outputDir, template, templateData, SystemUtils.IS_OS_WINDOWS);
}
if (_additionalScripts != null) {
getLog().info("Copying " + _additionalScripts.length + " additional script(s)");
for (String script : _additionalScripts) {
File scriptFile = new File(script);
if (scriptFile.exists()) {
if (!scriptFile.isFile()) {
throw new MojoExecutionException("Can only copy files, but additional script '" + scriptFile + "' is not a file");
}
try {
FileUtils.copyFileToDirectory(scriptFile, _outputDir);
File copiedFile = new File(_outputDir, scriptFile.getName());
copiedFile.setReadable(true, false);
copiedFile.setExecutable(true, false);
} catch (IOException e) {
throw new MojoExecutionException("Unable to copy additional script '" + scriptFile + "'", e);
}
}
}
}
}
//-------------------------------------------------------------------------
private Map<Class<?>, Template> resolveTemplateMap(ClassLoader classLoader) throws MojoExecutionException {
if (_baseClassTemplateMap == null || _baseClassTemplateMap.isEmpty()) {
return ImmutableMap.<Class<?>, Template>of(Object.class, getTemplate(_template));
}
Map<Class<?>, Template> templateMap = new HashMap<Class<?>, Template>();
for (Map.Entry<String, String> unresolvedEntry : _baseClassTemplateMap.entrySet()) {
String className = unresolvedEntry.getKey();
Class<?> clazz;
try {
clazz = classLoader.loadClass(className);
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Unable to resolve class " + className);
}
Template template = getTemplate(unresolvedEntry.getValue());
templateMap.put(clazz, template);
}
return templateMap;
}
private Template getTemplateForClass(String className, ClassLoader classLoader, Map<Class<?>, Template> templateMap) throws MojoExecutionException {
if (templateMap.size() == 1 && Object.class.equals(Iterables.getOnlyElement(templateMap.keySet()))) {
return Iterables.getOnlyElement(templateMap.values());
}
Class<?> clazz;
try {
clazz = classLoader.loadClass(className);
} catch (ClassNotFoundException e) {
throw new MojoExecutionException("Unable to resolve class " + className);
}
for (Map.Entry<Class<?>, Template> templateMapEntry : templateMap.entrySet()) {
if (templateMapEntry.getKey().isAssignableFrom(clazz)) {
return templateMapEntry.getValue();
}
}
return null;
}
private Template getTemplate(String templateName) throws MojoExecutionException {
try {
if (_resourceArtifact == null) {
return getTemplateFromFile(_baseDir, templateName);
} else {
ClassLoader resourceLoader = getResourceLoader(_resourceArtifact, _project);
InputStream resourceStream = resourceLoader.getResourceAsStream(templateName);
if (resourceStream == null) {
throw new MojoExecutionException("Resource '" + templateName + "' not found");
}
String templateStr;
try {
templateStr = IOUtils.toString(resourceStream);
} finally {
resourceStream.close();
}
return new Template(templateName, new StringReader(templateStr), new Configuration());
}
} catch (IOException e) {
throw new MojoExecutionException("Error loading Freemarker template from '" + templateName + "'", e);
}
}
private static ClassLoader getResourceLoader(String resourceArtifact, MavenProject project) throws MojoExecutionException, MalformedURLException {
if (StringUtils.isBlank(resourceArtifact)) {
return ScriptableScriptGeneratorMojo.class.getClassLoader();
}
File artifactFile = MojoUtils.getArtifactFile(resourceArtifact, project);
return new URLClassLoader(new URL[] { artifactFile.toURI().toURL() });
}
private static Template getTemplateFromFile(File baseDir, String templateFile) throws IOException {
Template template;
Configuration cfg = new Configuration();
cfg.setTemplateLoader(new FileTemplateLoader(baseDir));
template = cfg.getTemplate(templateFile);
return template;
}
}
|
projects/OG-MavenPlugin/src/main/java/com/opengamma/maven/scripts/ScriptableScriptGeneratorMojo.java
|
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.maven.scripts;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.artifact.DependencyResolutionRequiredException;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.project.MavenProject;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.opengamma.maven.MojoUtils;
import com.opengamma.util.ClasspathUtils;
import com.opengamma.util.annotation.ClassNameAnnotationScanner;
import com.opengamma.util.generate.scripts.Scriptable;
import com.opengamma.util.generate.scripts.ScriptsGenerator;
import freemarker.cache.FileTemplateLoader;
import freemarker.template.Configuration;
import freemarker.template.Template;
/**
* Maven plugin for generating scripts to run tools annotated with {@link Scriptable}.
* <p>
* Variables available in the Freemarker template are:
* <ul>
* <li> className - the fully-qualified Java class name
* </ul>
*
* @goal scripts-tools
* @requiresDependencyResolution compile
* @threadSafe
*/
public class ScriptableScriptGeneratorMojo extends AbstractMojo {
/**
* @parameter alias="outputDir" default-value="${project.build.directory}/scripts-tools"
* @required
*/
private File _outputDir;
/**
* @parameter alias="template"
*/
private String _template;
/**
* @parameter alias="baseClassTemplateMap"
*/
private Map<String, String> _baseClassTemplateMap;
/**
* @parameter alias="resourceArtifact"
*/
private String _resourceArtifact;
/**
* @parameter alias="additionalScripts"
*/
private String[] _additionalScripts;
/**
* @parameter default-value="${project.basedir}"
* @required
*/
private File _baseDir;
/**
* @component
*/
private MavenProject _project;
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
boolean templateSet = !StringUtils.isBlank(_template);
boolean templateMapSet = _baseClassTemplateMap != null && _baseClassTemplateMap.isEmpty();
if ((templateSet && templateMapSet) || (!templateSet && !templateMapSet)) {
throw new MojoExecutionException("Exactly one of 'template' or 'baseClassTemplateMap' must be set");
}
if (!_outputDir.exists()) {
try {
getLog().debug("Creating output directory " + _outputDir);
if (!_outputDir.mkdirs()) {
throw new MojoExecutionException("Unable to create output directory " + _outputDir.getAbsolutePath());
}
} catch (Exception e) {
throw new MojoExecutionException("Error creating output directory " + _outputDir.getAbsolutePath());
}
}
List<String> classpathElementList;
try {
classpathElementList = _project.getCompileClasspathElements();
} catch (DependencyResolutionRequiredException e) {
throw new MojoExecutionException("Error obtaining dependencies", e);
}
URL[] classpathUrls = ClasspathUtils.getClasspathURLs(classpathElementList);
Set<String> annotationClasses = ClassNameAnnotationScanner.scan(classpathUrls, Scriptable.class.getName());
getLog().info("Generating " + annotationClasses.size() + " scripts");
ClassLoader classLoader = new URLClassLoader(classpathUrls, this.getClass().getClassLoader());
Map<Class<?>, Template> templateMap = resolveTemplateMap(classLoader);
for (String className : annotationClasses) {
Map<String, Object> templateData = new HashMap<String, Object>();
templateData.put("className", className);
Template template = getTemplateForClass(className, classLoader, templateMap);
if (template == null) {
getLog().warn("No template for scriptable class " + className);
continue;
}
ScriptsGenerator.generate(className, _outputDir, template, templateData);
}
if (_additionalScripts != null) {
getLog().info("Copying " + _additionalScripts.length + " additional script(s)");
for (String script : _additionalScripts) {
File scriptFile = new File(script);
if (scriptFile.exists()) {
if (!scriptFile.isFile()) {
throw new MojoExecutionException("Can only copy files, but additional script '" + scriptFile + "' is not a file");
}
try {
FileUtils.copyFileToDirectory(scriptFile, _outputDir);
File copiedFile = new File(_outputDir, scriptFile.getName());
copiedFile.setReadable(true, false);
copiedFile.setExecutable(true, false);
} catch (IOException e) {
throw new MojoExecutionException("Unable to copy additional script '" + scriptFile + "'", e);
}
}
}
}
}
//-------------------------------------------------------------------------
private Map<Class<?>, Template> resolveTemplateMap(ClassLoader classLoader) throws MojoExecutionException {
if (_baseClassTemplateMap == null || _baseClassTemplateMap.isEmpty()) {
return ImmutableMap.<Class<?>, Template>of(Object.class, getTemplate(_template));
}
Map<Class<?>, Template> templateMap = new HashMap<Class<?>, Template>();
for (Map.Entry<String, String> unresolvedEntry : _baseClassTemplateMap.entrySet()) {
String className = unresolvedEntry.getKey();
Class<?> clazz;
try {
clazz = classLoader.loadClass(className);
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Unable to resolve class " + className);
}
Template template = getTemplate(unresolvedEntry.getValue());
templateMap.put(clazz, template);
}
return templateMap;
}
private Template getTemplateForClass(String className, ClassLoader classLoader, Map<Class<?>, Template> templateMap) throws MojoExecutionException {
if (templateMap.size() == 1 && Object.class.equals(Iterables.getOnlyElement(templateMap.keySet()))) {
return Iterables.getOnlyElement(templateMap.values());
}
Class<?> clazz;
try {
clazz = classLoader.loadClass(className);
} catch (ClassNotFoundException e) {
throw new MojoExecutionException("Unable to resolve class " + className);
}
for (Map.Entry<Class<?>, Template> templateMapEntry : templateMap.entrySet()) {
if (templateMapEntry.getKey().isAssignableFrom(clazz)) {
return templateMapEntry.getValue();
}
}
return null;
}
private Template getTemplate(String templateName) throws MojoExecutionException {
try {
if (_resourceArtifact == null) {
return getTemplateFromFile(_baseDir, templateName);
} else {
ClassLoader resourceLoader = getResourceLoader(_resourceArtifact, _project);
InputStream resourceStream = resourceLoader.getResourceAsStream(templateName);
if (resourceStream == null) {
throw new MojoExecutionException("Resource '" + templateName + "' not found");
}
String templateStr;
try {
templateStr = IOUtils.toString(resourceStream);
} finally {
resourceStream.close();
}
return new Template(templateName, new StringReader(templateStr), new Configuration());
}
} catch (IOException e) {
throw new MojoExecutionException("Error loading Freemarker template from '" + templateName + "'", e);
}
}
private static ClassLoader getResourceLoader(String resourceArtifact, MavenProject project) throws MojoExecutionException, MalformedURLException {
if (StringUtils.isBlank(resourceArtifact)) {
return ScriptableScriptGeneratorMojo.class.getClassLoader();
}
File artifactFile = MojoUtils.getArtifactFile(resourceArtifact, project);
return new URLClassLoader(new URL[] { artifactFile.toURI().toURL() });
}
private static Template getTemplateFromFile(File baseDir, String templateFile) throws IOException {
Template template;
Configuration cfg = new Configuration();
cfg.setTemplateLoader(new FileTemplateLoader(baseDir));
template = cfg.getTemplate(templateFile);
return template;
}
}
|
Fix compile error from recent change to script generator
|
projects/OG-MavenPlugin/src/main/java/com/opengamma/maven/scripts/ScriptableScriptGeneratorMojo.java
|
Fix compile error from recent change to script generator
|
|
Java
|
apache-2.0
|
bdc5da05b8adfdc288add36c169d950744d5a706
| 0
|
AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support,aosp-mirror/platform_frameworks_support,androidx/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,androidx/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,androidx/androidx,AndroidX/androidx
|
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v7.app;
import android.app.Dialog;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.v7.media.MediaControlIntent;
import android.support.v7.media.MediaRouteSelector;
import android.support.v7.media.MediaRouter;
import android.support.v7.mediarouter.R;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
/**
* This class implements the route chooser dialog for {@link MediaRouter}.
* <p>
* This dialog allows the user to choose a route that matches a given selector.
* </p>
*
* @see MediaRouteButton
* @see MediaRouteActionProvider
*/
public class MediaRouteChooserDialog extends Dialog {
    private final MediaRouter mRouter;
    private final MediaRouterCallback mCallback;

    private MediaRouteSelector mSelector = MediaRouteSelector.EMPTY;
    private ArrayList<MediaRouter.RouteInfo> mRoutes;
    private RouteAdapter mAdapter;
    private ListView mListView;
    // True between onAttachedToWindow and onDetachedFromWindow; router callbacks
    // are registered and routes refreshed only while attached.
    private boolean mAttachedToWindow;

    public MediaRouteChooserDialog(Context context) {
        this(context, 0);
    }

    public MediaRouteChooserDialog(Context context, int theme) {
        super(MediaRouterThemeHelper.createThemedContext(context), theme);
        context = getContext();
        mRouter = MediaRouter.getInstance(context);
        mCallback = new MediaRouterCallback();
    }

    /**
     * Gets the media route selector for filtering the routes that the user can select.
     *
     * @return The selector, never null.
     */
    @NonNull
    public MediaRouteSelector getRouteSelector() {
        return mSelector;
    }

    /**
     * Sets the media route selector for filtering the routes that the user can select.
     *
     * @param selector The selector, must not be null.
     */
    public void setRouteSelector(@NonNull MediaRouteSelector selector) {
        if (selector == null) {
            throw new IllegalArgumentException("selector must not be null");
        }
        if (!mSelector.equals(selector)) {
            mSelector = selector;
            if (mAttachedToWindow) {
                // Re-register so the router scans for routes matching the new selector.
                mRouter.removeCallback(mCallback);
                mRouter.addCallback(selector, mCallback,
                        MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
            }
            refreshRoutes();
        }
    }

    /**
     * Called to filter the set of routes that should be included in the list.
     * <p>
     * The default implementation iterates over all routes in the provided list and
     * removes those for which {@link #onFilterRoute} returns false.
     * </p>
     *
     * @param routes The list of routes to filter in-place, never null.
     */
    public void onFilterRoutes(@NonNull List<MediaRouter.RouteInfo> routes) {
        // Iterate backwards so in-place removal does not skip elements.
        for (int i = routes.size(); i-- > 0; ) {
            if (!onFilterRoute(routes.get(i))) {
                routes.remove(i);
            }
        }
    }

    /**
     * Returns true if the route should be included in the list.
     * <p>
     * The default implementation returns true for enabled non-default routes that
     * match the selector. Subclasses can override this method to filter routes
     * differently.
     * </p>
     *
     * @param route The route to consider, never null.
     * @return True if the route should be included in the chooser dialog.
     */
    public boolean onFilterRoute(@NonNull MediaRouter.RouteInfo route) {
        return !route.isDefault() && route.isEnabled() && route.matchesSelector(mSelector);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.mr_media_route_chooser_dialog);
        setTitle(R.string.mr_media_route_chooser_title);
        mRoutes = new ArrayList<MediaRouter.RouteInfo>();
        mAdapter = new RouteAdapter(getContext(), mRoutes);
        mListView = (ListView) findViewById(R.id.media_route_list);
        mListView.setAdapter(mAdapter);
        mListView.setOnItemClickListener(mAdapter);
        mListView.setEmptyView(findViewById(android.R.id.empty));
    }

    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();
        mAttachedToWindow = true;
        mRouter.addCallback(mSelector, mCallback, MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
        refreshRoutes();
    }

    @Override
    public void onDetachedFromWindow() {
        mAttachedToWindow = false;
        mRouter.removeCallback(mCallback);
        super.onDetachedFromWindow();
    }

    /**
     * Refreshes the list of routes that are shown in the chooser dialog.
     */
    public void refreshRoutes() {
        if (mAttachedToWindow) {
            mRoutes.clear();
            mRoutes.addAll(mRouter.getRoutes());
            onFilterRoutes(mRoutes);
            // Sort by stored usage score (most used first), then name; system
            // live-audio-only routes sort last.
            RouteComparator.loadRouteUsageScores(getContext(), mRoutes);
            Collections.sort(mRoutes, RouteComparator.sInstance);
            mAdapter.notifyDataSetChanged();
        }
    }

    /**
     * List adapter showing the filtered routes; selecting an item selects the
     * route, records its usage and dismisses the dialog.
     */
    private final class RouteAdapter extends ArrayAdapter<MediaRouter.RouteInfo>
            implements ListView.OnItemClickListener {
        private final LayoutInflater mInflater;
        private final Drawable mDefaultIcon;
        private final Drawable mSpeakerIcon;
        private final Drawable mSpeakerGroupIcon;

        public RouteAdapter(Context context, List<MediaRouter.RouteInfo> routes) {
            super(context, 0, routes);
            mInflater = LayoutInflater.from(context);
            int[] attrs = new int[] {
                    R.attr.mediaRouteDefaultIconDrawable,
                    R.attr.mediaRouteSpeakerIconDrawable,
                    R.attr.mediaRouteSpeakerGroupIconDrawable };
            TypedArray styledAttributes = getContext().obtainStyledAttributes(attrs);
            mDefaultIcon = styledAttributes.getDrawable(0);
            mSpeakerIcon = styledAttributes.getDrawable(1);
            mSpeakerGroupIcon = styledAttributes.getDrawable(2);
        }

        @Override
        public boolean areAllItemsEnabled() {
            return false;
        }

        @Override
        public boolean isEnabled(int position) {
            return getItem(position).isEnabled();
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            View view = convertView;
            if (view == null) {
                view = mInflater.inflate(R.layout.mr_media_route_list_item, parent, false);
            }
            MediaRouter.RouteInfo route = getItem(position);
            TextView text1 = (TextView) view.findViewById(android.R.id.text1);
            TextView text2 = (TextView) view.findViewById(android.R.id.text2);
            text1.setText(route.getName());
            String description = route.getDescription();
            if (TextUtils.isEmpty(description)) {
                text2.setVisibility(View.GONE);
                text2.setText("");
            } else {
                text2.setVisibility(View.VISIBLE);
                text2.setText(description);
            }
            view.setEnabled(route.isEnabled());
            ImageView iconView = (ImageView) view.findViewById(R.id.routeIcon);
            if (iconView != null) {
                iconView.setImageDrawable(getIconDrawable(route));
            }
            return view;
        }

        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            MediaRouter.RouteInfo route = getItem(position);
            if (route.isEnabled()) {
                route.select();
                RouteComparator.storeRouteUsageScores(getContext(), route.getId());
                dismiss();
            }
        }

        private Drawable getIconDrawable(MediaRouter.RouteInfo route) {
            if (route instanceof MediaRouter.RouteGroup) {
                // Only speakers can be grouped for now.
                return mSpeakerGroupIcon;
            }
            if (route.supportsControlCategory(MediaControlIntent.CATEGORY_LIVE_AUDIO)
                    && !route.supportsControlCategory(MediaControlIntent.CATEGORY_LIVE_VIDEO)) {
                return mSpeakerIcon;
            }
            return mDefaultIcon;
        }
    }

    /** Keeps the dialog in sync with router state while it is showing. */
    private final class MediaRouterCallback extends MediaRouter.Callback {
        @Override
        public void onRouteAdded(MediaRouter router, MediaRouter.RouteInfo info) {
            refreshRoutes();
        }

        @Override
        public void onRouteRemoved(MediaRouter router, MediaRouter.RouteInfo info) {
            refreshRoutes();
        }

        @Override
        public void onRouteChanged(MediaRouter router, MediaRouter.RouteInfo info) {
            refreshRoutes();
        }

        @Override
        public void onRouteSelected(MediaRouter router, MediaRouter.RouteInfo route) {
            dismiss();
        }
    }

    /**
     * Orders routes by usage score (higher first), with system live-audio-only
     * routes last and name as the tie-breaker. Usage scores are persisted in
     * default shared preferences and decayed on each selection.
     */
    private static final class RouteComparator implements Comparator<MediaRouter.RouteInfo> {
        private static final String PREF_ROUTE_IDS =
                "android.support.v7.app.MediaRouteChooserDialog_route_ids";
        private static final String PREF_USAGE_SCORE_PREFIX =
                "android.support.v7.app.MediaRouteChooserDialog_route_usage_score_";
        // Routes with the usage score less than MIN_USAGE_SCORE are decayed.
        private static final float MIN_USAGE_SCORE = 0.1f;
        private static final float USAGE_SCORE_DECAY_FACTOR = 0.95f;

        // Should match to SystemMediaRouteProvider.PACKAGE_NAME.
        static final String SYSTEM_MEDIA_ROUTE_PROVIDER_PACKAGE_NAME = "android";

        public static final RouteComparator sInstance = new RouteComparator();
        // Populated by loadRouteUsageScores before each sort.
        public static final HashMap<String, Float> sRouteUsageScoreMap =
                new HashMap<String, Float>();

        @Override
        public int compare(MediaRouter.RouteInfo lhs, MediaRouter.RouteInfo rhs) {
            if (isSystemLiveAudioOnlyRoute(lhs)) {
                if (!isSystemLiveAudioOnlyRoute(rhs)) {
                    return 1;
                }
            } else if (isSystemLiveAudioOnlyRoute(rhs)) {
                return -1;
            }
            Float lhsUsageScore = sRouteUsageScoreMap.get(lhs.getId());
            if (lhsUsageScore == null) {
                lhsUsageScore = 0f;
            }
            Float rhsUsageScore = sRouteUsageScoreMap.get(rhs.getId());
            if (rhsUsageScore == null) {
                rhsUsageScore = 0f;
            }
            if (!lhsUsageScore.equals(rhsUsageScore)) {
                return lhsUsageScore > rhsUsageScore ? -1 : 1;
            }
            return lhs.getName().compareTo(rhs.getName());
        }

        private boolean isSystemLiveAudioOnlyRoute(MediaRouter.RouteInfo route) {
            return isSystemMediaRouteProvider(route)
                    && route.supportsControlCategory(MediaControlIntent.CATEGORY_LIVE_AUDIO)
                    && !route.supportsControlCategory(MediaControlIntent.CATEGORY_LIVE_VIDEO);
        }

        private boolean isSystemMediaRouteProvider(MediaRouter.RouteInfo route) {
            return TextUtils.equals(route.getProviderInstance().getMetadata().getPackageName(),
                    SYSTEM_MEDIA_ROUTE_PROVIDER_PACKAGE_NAME);
        }

        private static void loadRouteUsageScores(
                Context context, List<MediaRouter.RouteInfo> routes) {
            sRouteUsageScoreMap.clear();
            SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(context);
            for (MediaRouter.RouteInfo route : routes) {
                sRouteUsageScoreMap.put(route.getId(),
                        preferences.getFloat(PREF_USAGE_SCORE_PREFIX + route.getId(), 0f));
            }
        }

        private static void storeRouteUsageScores(Context context, String selectedRouteId) {
            SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(context);
            SharedPreferences.Editor prefEditor = preferences.edit();
            List<String> routeIds = new ArrayList<String>(
                    Arrays.asList(preferences.getString(PREF_ROUTE_IDS, "").split(",")));
            if (!routeIds.contains(selectedRouteId)) {
                routeIds.add(selectedRouteId);
            }
            StringBuilder routeIdsBuilder = new StringBuilder();
            for (String routeId : routeIds) {
                // The new route usage score is calculated as follows:
                // 1) usageScore * USAGE_SCORE_DECAY_FACTOR + 1, if the route is selected,
                // 2) 0, if usageScore * USAGE_SCORE_DECAY_FACTOR < MIN_USAGE_SCORE, or
                // 3) usageScore * USAGE_SCORE_DECAY_FACTOR, otherwise,
                String routeUsageScoreKey = PREF_USAGE_SCORE_PREFIX + routeId;
                float newUsageScore = preferences.getFloat(routeUsageScoreKey, 0f)
                        * USAGE_SCORE_DECAY_FACTOR;
                if (selectedRouteId.equals(routeId)) {
                    newUsageScore += 1f;
                }
                if (newUsageScore < MIN_USAGE_SCORE) {
                    // Remove the prefixed preference entry. Previously the bare
                    // route id was removed here, which never matched the key the
                    // score was stored under, so decayed entries leaked forever.
                    prefEditor.remove(routeUsageScoreKey);
                } else {
                    prefEditor.putFloat(routeUsageScoreKey, newUsageScore);
                    if (routeIdsBuilder.length() > 0) {
                        routeIdsBuilder.append(',');
                    }
                    routeIdsBuilder.append(routeId);
                }
            }
            prefEditor.putString(PREF_ROUTE_IDS, routeIdsBuilder.toString());
            prefEditor.commit();
        }
    }
}
|
v7/mediarouter/src/android/support/v7/app/MediaRouteChooserDialog.java
|
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v7.app;
import android.app.Dialog;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.v7.media.MediaControlIntent;
import android.support.v7.media.MediaRouteSelector;
import android.support.v7.media.MediaRouter;
import android.support.v7.mediarouter.R;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* This class implements the route chooser dialog for {@link MediaRouter}.
* <p>
* This dialog allows the user to choose a route that matches a given selector.
* </p>
*
* @see MediaRouteButton
* @see MediaRouteActionProvider
*/
public class MediaRouteChooserDialog extends Dialog {
private static final String PREF_ROUTE_IDS = "android.support.v7.media.MediaRoute_route_ids";
private static final float MIN_USAGE_SCORE = 0.1f;
private static final float MEMORY_DECAY_FACTOR = 0.95f;
private final MediaRouter mRouter;
private final MediaRouterCallback mCallback;
private MediaRouteSelector mSelector = MediaRouteSelector.EMPTY;
private ArrayList<MediaRouter.RouteInfo> mRoutes;
private RouteAdapter mAdapter;
private ListView mListView;
private boolean mAttachedToWindow;
public MediaRouteChooserDialog(Context context) {
this(context, 0);
}
public MediaRouteChooserDialog(Context context, int theme) {
super(MediaRouterThemeHelper.createThemedContext(context), theme);
context = getContext();
mRouter = MediaRouter.getInstance(context);
mCallback = new MediaRouterCallback();
}
/**
* Gets the media route selector for filtering the routes that the user can select.
*
* @return The selector, never null.
*/
@NonNull
public MediaRouteSelector getRouteSelector() {
return mSelector;
}
/**
* Sets the media route selector for filtering the routes that the user can select.
*
* @param selector The selector, must not be null.
*/
public void setRouteSelector(@NonNull MediaRouteSelector selector) {
if (selector == null) {
throw new IllegalArgumentException("selector must not be null");
}
if (!mSelector.equals(selector)) {
mSelector = selector;
if (mAttachedToWindow) {
mRouter.removeCallback(mCallback);
mRouter.addCallback(selector, mCallback,
MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
}
refreshRoutes();
}
}
/**
* Called to filter the set of routes that should be included in the list.
* <p>
* The default implementation iterates over all routes in the provided list and
* removes those for which {@link #onFilterRoute} returns false.
* </p>
*
* @param routes The list of routes to filter in-place, never null.
*/
public void onFilterRoutes(@NonNull List<MediaRouter.RouteInfo> routes) {
for (int i = routes.size(); i-- > 0; ) {
if (!onFilterRoute(routes.get(i))) {
routes.remove(i);
}
}
}
/**
* Returns true if the route should be included in the list.
* <p>
* The default implementation returns true for enabled non-default routes that
* match the selector. Subclasses can override this method to filter routes
* differently.
* </p>
*
* @param route The route to consider, never null.
* @return True if the route should be included in the chooser dialog.
*/
public boolean onFilterRoute(@NonNull MediaRouter.RouteInfo route) {
return !route.isDefault() && route.isEnabled() && route.matchesSelector(mSelector);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.mr_media_route_chooser_dialog);
setTitle(R.string.mr_media_route_chooser_title);
mRoutes = new ArrayList<MediaRouter.RouteInfo>();
mAdapter = new RouteAdapter(getContext(), mRoutes);
mListView = (ListView)findViewById(R.id.media_route_list);
mListView.setAdapter(mAdapter);
mListView.setOnItemClickListener(mAdapter);
mListView.setEmptyView(findViewById(android.R.id.empty));
}
@Override
public void onAttachedToWindow() {
super.onAttachedToWindow();
mAttachedToWindow = true;
mRouter.addCallback(mSelector, mCallback, MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
refreshRoutes();
}
@Override
public void onDetachedFromWindow() {
mAttachedToWindow = false;
mRouter.removeCallback(mCallback);
super.onDetachedFromWindow();
}
/**
* Refreshes the list of routes that are shown in the chooser dialog.
*/
public void refreshRoutes() {
if (mAttachedToWindow) {
mRoutes.clear();
mRoutes.addAll(mRouter.getRoutes());
onFilterRoutes(mRoutes);
readRouteUsageScore(RouteComparator.sRouteUsageScoreMap);
Collections.sort(mRoutes, RouteComparator.sInstance);
mAdapter.notifyDataSetChanged();
}
}
/**
 * Rebuilds the given score map from persisted preferences, one entry per
 * route currently shown in the dialog (missing scores default to 0).
 */
private void readRouteUsageScore(HashMap<String, Float> usageScoreMap) {
    usageScoreMap.clear();
    SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(getContext());
    for (int i = 0; i < mRoutes.size(); i++) {
        String routeId = mRoutes.get(i).getId();
        usageScoreMap.put(routeId, preferences.getFloat(routeId, 0f));
    }
}
/**
 * Persists usage scores after a route is selected: every stored score is
 * decayed by MEMORY_DECAY_FACTOR, the selected route's score is bumped by 1,
 * and scores that decay below MIN_USAGE_SCORE are dropped.
 *
 * @param selectedRouteId id of the route the user just selected, never null.
 */
private void writeRouteUsageScore(String selectedRouteId) {
    SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(getContext());
    SharedPreferences.Editor prefEditor = preferences.edit();
    // Union of the previously stored route ids and the one just selected.
    Set<String> routeIdSet = new HashSet<String>(
            Arrays.asList(preferences.getString(PREF_ROUTE_IDS, "").split(",")));
    routeIdSet.add(selectedRouteId);
    StringBuilder routeIdsBuilder = new StringBuilder();
    for (String routeId : routeIdSet) {
        // The next route usage score will be calculated as follows:
        // 1) usageScore * MEMORY_DECAY_FACTOR + 1, if the route is the selected one,
        // 2) 0, if usageScore * MEMORY_DECAY_FACTOR < MIN_USAGE_SCORE, and
        // 3) usageScore * MEMORY_DECAY_FACTOR, otherwise.
        float newUsageScore = preferences.getFloat(routeId, 0f) * MEMORY_DECAY_FACTOR;
        if (selectedRouteId.equals(routeId)) {
            newUsageScore += 1f;
        }
        if (newUsageScore < MIN_USAGE_SCORE) {
            // We only keep usage scores that stay above MIN_USAGE_SCORE.
            prefEditor.remove(routeId);
        } else {
            prefEditor.putFloat(routeId, newUsageScore);
            if (routeIdsBuilder.length() > 0) {
                routeIdsBuilder.append(',');
            }
            routeIdsBuilder.append(routeId);
        }
    }
    // Save the route ids in order to apply MEMORY_DECAY_FACTOR for all the saved usage scores.
    prefEditor.putString(PREF_ROUTE_IDS, routeIdsBuilder.toString());
    // apply() writes asynchronously; commit() would block the UI thread on disk I/O.
    prefEditor.apply();
}
private final class RouteAdapter extends ArrayAdapter<MediaRouter.RouteInfo>
implements ListView.OnItemClickListener {
private final LayoutInflater mInflater;
private final Drawable mDefaultIcon;
private final Drawable mSpeakerIcon;
private final Drawable mSpeakerGroupIcon;
public RouteAdapter(Context context, List<MediaRouter.RouteInfo> routes) {
super(context, 0, routes);
mInflater = LayoutInflater.from(context);
int[] attrs = new int[] {
R.attr.mediaRouteDefaultIconDrawable,
R.attr.mediaRouteSpeakerIconDrawable,
R.attr.mediaRouteSpeakerGroupIconDrawable };
TypedArray styledAttributes = getContext().obtainStyledAttributes(attrs);
mDefaultIcon = styledAttributes.getDrawable(0);
mSpeakerIcon = styledAttributes.getDrawable(1);
mSpeakerGroupIcon = styledAttributes.getDrawable(2);
}
@Override
public boolean areAllItemsEnabled() {
return false;
}
@Override
public boolean isEnabled(int position) {
return getItem(position).isEnabled();
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View view = convertView;
if (view == null) {
view = mInflater.inflate(R.layout.mr_media_route_list_item, parent, false);
}
MediaRouter.RouteInfo route = getItem(position);
TextView text1 = (TextView) view.findViewById(android.R.id.text1);
TextView text2 = (TextView) view.findViewById(android.R.id.text2);
text1.setText(route.getName());
String description = route.getDescription();
if (TextUtils.isEmpty(description)) {
text2.setVisibility(View.GONE);
text2.setText("");
} else {
text2.setVisibility(View.VISIBLE);
text2.setText(description);
}
view.setEnabled(route.isEnabled());
ImageView iconView = (ImageView) view.findViewById(R.id.routeIcon);
if (iconView != null) {
iconView.setImageDrawable(getIconDrawable(route));
}
return view;
}
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
MediaRouter.RouteInfo route = getItem(position);
if (route.isEnabled()) {
route.select();
writeRouteUsageScore(route.getId());
dismiss();
}
}
private Drawable getIconDrawable(MediaRouter.RouteInfo route) {
if (route instanceof MediaRouter.RouteGroup) {
// Only speakers can be grouped for now.
return mSpeakerGroupIcon;
}
if (route.supportsControlCategory(MediaControlIntent.CATEGORY_LIVE_AUDIO)
&& !route.supportsControlCategory(MediaControlIntent.CATEGORY_LIVE_VIDEO)) {
return mSpeakerIcon;
}
return mDefaultIcon;
}
}
/**
 * Listens for media router events and keeps the dialog's route list in sync.
 */
private final class MediaRouterCallback extends MediaRouter.Callback {
    @Override
    public void onRouteAdded(MediaRouter router, MediaRouter.RouteInfo info) {
        refreshRoutes();
    }
    @Override
    public void onRouteRemoved(MediaRouter router, MediaRouter.RouteInfo info) {
        refreshRoutes();
    }
    @Override
    public void onRouteChanged(MediaRouter router, MediaRouter.RouteInfo info) {
        refreshRoutes();
    }
    @Override
    public void onRouteSelected(MediaRouter router, MediaRouter.RouteInfo route) {
        // A route was chosen (possibly from elsewhere); the dialog's job is done.
        dismiss();
    }
}
/**
 * Orders routes for display: system live-audio-only routes sink to the
 * bottom, higher usage scores come first, and ties fall back to
 * alphabetical name order.
 */
private static final class RouteComparator implements Comparator<MediaRouter.RouteInfo> {
    // Should match to SystemMediaRouteProvider.PACKAGE_NAME.
    static final String SYSTEM_MEDIA_ROUTE_PROVIDER_PACKAGE_NAME = "android";
    public static final RouteComparator sInstance = new RouteComparator();
    // Route id -> usage score; populated by readRouteUsageScore() before sorting.
    // Typed explicitly instead of a raw HashMap to avoid unchecked warnings.
    public static final HashMap<String, Float> sRouteUsageScoreMap =
            new HashMap<String, Float>();

    @Override
    public int compare(MediaRouter.RouteInfo lhs, MediaRouter.RouteInfo rhs) {
        // Push system live-audio-only routes (e.g. the phone speaker) below
        // all other routes.
        if (isSystemLiveAudioOnlyRoute(lhs)) {
            if (!isSystemLiveAudioOnlyRoute(rhs)) {
                return 1;
            }
        } else if (isSystemLiveAudioOnlyRoute(rhs)) {
            return -1;
        }
        Float lhsUsageScore = sRouteUsageScoreMap.get(lhs.getId());
        if (lhsUsageScore == null) {
            lhsUsageScore = 0f;
        }
        Float rhsUsageScore = sRouteUsageScoreMap.get(rhs.getId());
        if (rhsUsageScore == null) {
            rhsUsageScore = 0f;
        }
        if (lhsUsageScore.equals(rhsUsageScore)) {
            // Equal scores: fall back to a stable alphabetical ordering.
            return lhs.getName().compareTo(rhs.getName());
        }
        // Higher usage score sorts earlier.
        return lhsUsageScore > rhsUsageScore ? -1 : 1;
    }

    private boolean isSystemLiveAudioOnlyRoute(MediaRouter.RouteInfo route) {
        return isSystemMediaRouteProvider(route)
                && route.supportsControlCategory(MediaControlIntent.CATEGORY_LIVE_AUDIO)
                && !route.supportsControlCategory(MediaControlIntent.CATEGORY_LIVE_VIDEO);
    }

    private boolean isSystemMediaRouteProvider(MediaRouter.RouteInfo route) {
        return TextUtils.equals(route.getProviderInstance().getMetadata().getPackageName(),
                SYSTEM_MEDIA_ROUTE_PROVIDER_PACKAGE_NAME);
    }
}
}
|
Move usage scoring to RouteComparator
This CL applies the comments in ag/751814.
Change-Id: I9bfc43228bf9365d5908c8e8db4613cca7a06ef3
|
v7/mediarouter/src/android/support/v7/app/MediaRouteChooserDialog.java
|
Move usage scoring to RouteComparator
|
|
Java
|
apache-2.0
|
093c7060524a8a379a59a2f89fafad927c28f493
| 0
|
jamesfalkner/jboss-daytrader,jamesfalkner/jboss-daytrader,jamesfalkner/jboss-daytrader
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.daytrader.javaee6.web.prims.ejb3;
import java.io.IOException;
import javax.annotation.Resource;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.annotation.WebServlet;
import org.apache.geronimo.daytrader.javaee6.utils.Log;
import org.apache.geronimo.daytrader.javaee6.utils.TradeConfig;
/**
 * This primitive is designed to run inside the TradeApplication and relies upon
 * the {@link org.apache.geronimo.samples.daytrader.util.TradeConfig} class to
 * set config parameters. PingServlet2MDBQueue tests key functionality of a
 * servlet call that posts a message to an MDB queue. The TradeBrokerMDB
 * receives the message. This servlet makes use of the MDB EJB
 * {@link org.apache.geronimo.samples.daytrader.ejb.TradeBrokerMDB} by posting a
 * message to the MDB Queue.
 */
@WebServlet("/ejb3/PingServlet2MDBQueue")
public class PingServlet2MDBQueue extends HttpServlet {

    /** Timestamp of servlet initialization, reported in the page output. */
    private static String initTime;
    // NOTE(review): shared counter incremented without synchronization, so
    // counts may be lost under concurrent requests; tolerable for a ping page.
    private static int hitCount;

    /** Container-injected JMS connection factory. */
    @Resource(name = "jms/QueueConnectionFactory")
    private ConnectionFactory queueConnectionFactory;

    /** Container-injected destination for TradeBroker messages. */
    @Resource(name = "jms/TradeBrokerQueue")
    private Queue tradeBrokerQueue;

    /** POST is handled identically to GET. */
    public void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
        doGet(req, res);
    }

    /**
     * Posts {@code TradeConfig.getPrimIterations()} text messages to the
     * TradeBroker queue and writes a small HTML status page.
     *
     * @throws IOException if writing the response fails
     * @throws ServletException on servlet-level failure
     */
    public void doGet(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException {
        res.setContentType("text/html");
        java.io.PrintWriter out = res.getWriter();
        // StringBuilder suffices: the buffer is request-local, so the
        // synchronization StringBuffer provides is unnecessary overhead.
        StringBuilder output = new StringBuilder(100);
        output.append("<html><head><title>PingServlet2MDBQueue</title></head>" + "<body><HR><FONT size=\"+2\" color=\"#000066\">PingServlet2MDBQueue<BR></FONT>" + "<FONT size=\"-1\" color=\"#000066\">" + "Tests the basic operation of a servlet posting a message to an EJB MDB through a JMS Queue.<BR>"
                + "<FONT color=\"red\"><B>Note:</B> Not intended for performance testing.</FONT>");
        try {
            Connection conn = queueConnectionFactory.createConnection();
            try {
                TextMessage message = null;
                int iter = TradeConfig.getPrimIterations();
                for (int ii = 0; ii < iter; ii++) {
                    Session sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
                    try {
                        MessageProducer producer = sess.createProducer(tradeBrokerQueue);
                        message = sess.createTextMessage();
                        String command = "ping";
                        message.setStringProperty("command", command);
                        message.setLongProperty("publishTime", System.currentTimeMillis());
                        message.setText("Ping message for queue java:comp/env/jms/TradeBrokerQueue sent from PingServlet2MDBQueue at " + new java.util.Date());
                        producer.send(message);
                    } finally {
                        // Close each per-iteration session even if the send fails.
                        sess.close();
                    }
                }
                // write out the output
                output.append("<HR>initTime: ").append(initTime);
                output.append("<BR>Hit Count: ").append(hitCount++);
                output.append("<HR>Posted Text message to java:comp/env/jms/TradeBrokerQueue destination");
                output.append("<BR>Message: ").append(message);
                output.append("<BR><BR>Message text: ").append(message.getText());
                output.append("<BR><HR></FONT></BODY></HTML>");
                out.println(output.toString());
            } catch (Exception e) {
                Log.error("PingServlet2MDBQueue.doGet(...):exception posting message to TradeBrokerQueue destination ");
                throw e;
            } finally {
                conn.close();
            }
        } // this is where I actually handle the exceptions
        catch (Exception e) {
            Log.error(e, "PingServlet2MDBQueue.doGet(...): error");
            res.sendError(500, "PingServlet2MDBQueue.doGet(...): error, " + e.toString());
        }
    }

    /** @return a short human-readable description of this primitive */
    public String getServletInfo() {
        return "web primitive, configured with trade runtime configs, tests Servlet to Session EJB path";
    }

    public void init(ServletConfig config) throws ServletException {
        super.init(config);
        hitCount = 0;
        initTime = new java.util.Date().toString();
    }
}
|
javaee6/modules/web/src/main/java/org/apache/geronimo/daytrader/javaee6/web/prims/ejb3/PingServlet2MDBQueue.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.daytrader.javaee6.web.prims.ejb3;
import java.io.IOException;
import javax.annotation.Resource;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.annotation.WebServlet;
import org.apache.geronimo.daytrader.javaee6.utils.Log;
import org.apache.geronimo.daytrader.javaee6.utils.TradeConfig;
/**
 * This primitive is designed to run inside the TradeApplication and relies upon
 * the {@link org.apache.geronimo.samples.daytrader.util.TradeConfig} class to
 * set config parameters. PingServlet2MDBQueue tests key functionality of a
 * servlet call that posts a message to an MDB queue. The TradeBrokerMDB
 * receives the message. This servlet makes use of the MDB EJB
 * {@link org.apache.geronimo.samples.daytrader.ejb.TradeBrokerMDB} by posting a
 * message to the MDB Queue.
 */
@WebServlet("/ejb3/PingServlet2MDBQueue")
public class PingServlet2MDBQueue extends HttpServlet {

    /** Timestamp of servlet initialization, reported in the page output. */
    private static String initTime;
    // NOTE(review): shared counter incremented without synchronization, so
    // counts may be lost under concurrent requests; tolerable for a ping page.
    private static int hitCount;

    // Use the component-scoped ENC name rather than a server-specific global
    // JNDI binding ("/ConnectionFactory"), matching the naming convention of
    // the other resource injections in this application.
    @Resource(name = "jms/QueueConnectionFactory")
    private ConnectionFactory queueConnectionFactory;

    /** Container-injected destination for TradeBroker messages. */
    @Resource(name = "jms/TradeBrokerQueue")
    private Queue tradeBrokerQueue;

    /** POST is handled identically to GET. */
    public void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
        doGet(req, res);
    }

    /**
     * Posts {@code TradeConfig.getPrimIterations()} text messages to the
     * TradeBroker queue and writes a small HTML status page.
     *
     * @throws IOException if writing the response fails
     * @throws ServletException on servlet-level failure
     */
    public void doGet(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException {
        res.setContentType("text/html");
        java.io.PrintWriter out = res.getWriter();
        // StringBuilder suffices: the buffer is request-local, so the
        // synchronization StringBuffer provides is unnecessary overhead.
        StringBuilder output = new StringBuilder(100);
        output.append("<html><head><title>PingServlet2MDBQueue</title></head>" + "<body><HR><FONT size=\"+2\" color=\"#000066\">PingServlet2MDBQueue<BR></FONT>" + "<FONT size=\"-1\" color=\"#000066\">" + "Tests the basic operation of a servlet posting a message to an EJB MDB through a JMS Queue.<BR>"
                + "<FONT color=\"red\"><B>Note:</B> Not intended for performance testing.</FONT>");
        try {
            Connection conn = queueConnectionFactory.createConnection();
            try {
                TextMessage message = null;
                int iter = TradeConfig.getPrimIterations();
                for (int ii = 0; ii < iter; ii++) {
                    Session sess = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
                    try {
                        MessageProducer producer = sess.createProducer(tradeBrokerQueue);
                        message = sess.createTextMessage();
                        String command = "ping";
                        message.setStringProperty("command", command);
                        message.setLongProperty("publishTime", System.currentTimeMillis());
                        message.setText("Ping message for queue java:comp/env/jms/TradeBrokerQueue sent from PingServlet2MDBQueue at " + new java.util.Date());
                        producer.send(message);
                    } finally {
                        // Close each per-iteration session even if the send fails.
                        sess.close();
                    }
                }
                // write out the output
                output.append("<HR>initTime: ").append(initTime);
                output.append("<BR>Hit Count: ").append(hitCount++);
                output.append("<HR>Posted Text message to java:comp/env/jms/TradeBrokerQueue destination");
                output.append("<BR>Message: ").append(message);
                output.append("<BR><BR>Message text: ").append(message.getText());
                output.append("<BR><HR></FONT></BODY></HTML>");
                out.println(output.toString());
            } catch (Exception e) {
                Log.error("PingServlet2MDBQueue.doGet(...):exception posting message to TradeBrokerQueue destination ");
                throw e;
            } finally {
                conn.close();
            }
        } // this is where I actually handle the exceptions
        catch (Exception e) {
            Log.error(e, "PingServlet2MDBQueue.doGet(...): error");
            res.sendError(500, "PingServlet2MDBQueue.doGet(...): error, " + e.toString());
        }
    }

    /** @return a short human-readable description of this primitive */
    public String getServletInfo() {
        return "web primitive, configured with trade runtime configs, tests Servlet to Session EJB path";
    }

    public void init(ServletConfig config) throws ServletException {
        super.init(config);
        hitCount = 0;
        initTime = new java.util.Date().toString();
    }
}
|
Update PingServlet2MDBQueue.java
|
javaee6/modules/web/src/main/java/org/apache/geronimo/daytrader/javaee6/web/prims/ejb3/PingServlet2MDBQueue.java
|
Update PingServlet2MDBQueue.java
|
|
Java
|
apache-2.0
|
727c9bc53b97d9c566d367165778ecf4a23e5947
| 0
|
apache/jmeter,apache/jmeter,apache/jmeter,ham1/jmeter,apache/jmeter,etnetera/jmeter,etnetera/jmeter,benbenw/jmeter,ham1/jmeter,ham1/jmeter,benbenw/jmeter,ham1/jmeter,benbenw/jmeter,apache/jmeter,etnetera/jmeter,benbenw/jmeter,etnetera/jmeter,etnetera/jmeter,ham1/jmeter
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.gui.action;
import java.awt.event.ActionEvent;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Menu action that opens the JMeter nightly-build download page in the
 * user's default browser.
 */
public class LinkNightlyBuild extends AbstractAction {
    private static final Logger log = LoggerFactory.getLogger(LinkNightlyBuild.class);
    private static final Set<String> commands = new HashSet<>();
    static {
        commands.add(ActionNames.LINK_NIGHTLY_BUILD);
    }

    /**
     * @see org.apache.jmeter.gui.action.Command#doAction(ActionEvent)
     */
    @Override
    public void doAction(ActionEvent e) {
        String url = "http://jmeter.apache.org/nightly.html";
        try {
            java.awt.Desktop.getDesktop().browse(java.net.URI.create(url));
        } catch (IOException err) {
            // SLF4J logs a trailing Throwable as a stack trace; the previous
            // "on {}" placeholder was never substituted and printed literally.
            log.error("LinkNightlyBuild: User default browser is not found, or it fails to be launched,"
                    + " or the default handler application failed to be launched", err);
        } catch (UnsupportedOperationException err) {
            log.error("LinkNightlyBuild: Current platform does not support the Desktop.Action.BROWSE action", err);
        } catch (SecurityException err) {
            log.error("LinkNightlyBuild: Security problem", err);
        } catch (Exception err) {
            log.error("LinkNightlyBuild: Unexpected error", err);
        }
    }

    @Override
    public Set<String> getActionNames() {
        return commands;
    }
}
|
src/core/org/apache/jmeter/gui/action/LinkNightlyBuild.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.gui.action;
import java.awt.event.ActionEvent;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Menu action that opens the JMeter nightly-build download page in the
 * user's default browser.
 */
public class LinkNightlyBuild extends AbstractAction {
    private static final Logger log = LoggerFactory.getLogger(LinkNightlyBuild.class);
    private static final Set<String> commands = new HashSet<>();
    static {
        commands.add(ActionNames.LINK_NIGHTLY_BUILD);
    }

    /**
     * @see org.apache.jmeter.gui.action.Command#doAction(ActionEvent)
     */
    @Override
    public void doAction(ActionEvent e) {
        String url = "http://jmeter.apache.org/nightly.html";
        try {
            java.awt.Desktop.getDesktop().browse(java.net.URI.create(url));
        } catch (IOException err) {
            // SLF4J logs a trailing Throwable as a stack trace; the previous
            // "on {}" placeholder was never substituted and printed literally.
            log.error("LinkNightlyBuild: User default browser is not found, or it fails to be launched,"
                    + " or the default handler application failed to be launched", err);
        } catch (UnsupportedOperationException err) {
            log.error("LinkNightlyBuild: Current platform does not support the Desktop.Action.BROWSE action", err);
        } catch (SecurityException err) {
            log.error("LinkNightlyBuild: Security problem", err);
        } catch (Exception err) {
            log.error("LinkNightlyBuild: Unexpected error", err);
        }
    }

    @Override
    public Set<String> getActionNames() {
        return commands;
    }
}
|
[Bug 61775] Add a link to help menu to download nightly builds (it opens the browser with the correct link) - add missing new line
git-svn-id: https://svn.apache.org/repos/asf/jmeter/trunk@1815693 13f79535-47bb-0310-9956-ffa450edef68
Former-commit-id: ae987c352bda2f6d8624681e0508e9f3f98a4f71
|
src/core/org/apache/jmeter/gui/action/LinkNightlyBuild.java
|
[Bug 61775] Add a link to help menu to download nightly builds (it opens the browser with the correct link)
|
|
Java
|
apache-2.0
|
7a3f0edf7451344039c4f4d9dc58797052aa3885
| 0
|
xzel23/meja,xzel23/meja
|
/*
* Copyright 2015 Axel Howind (axel@dua3.com).
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.dua3.meja.ui.swing;
import java.awt.Component;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.util.List;
import java.util.Optional;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.event.TableModelListener;
import javax.swing.filechooser.FileFilter;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.dua3.meja.io.FileType;
import com.dua3.meja.io.OpenMode;
import com.dua3.meja.model.Cell;
import com.dua3.meja.model.Row;
import com.dua3.meja.model.Sheet;
import com.dua3.meja.model.Sheet.RowInfo;
import com.dua3.meja.model.Workbook;
import com.dua3.meja.model.WorkbookFactory;
import com.dua3.meja.util.MejaHelper;
import com.dua3.meja.util.Option;
import com.dua3.meja.util.Options;
/**
* Helper class.
*
* @author Axel Howind (axel@dua3.com)
*/
/**
 * Helper class with Swing-related utilities for meja workbooks: a
 * {@link TableModel} bridge for sheets and open/save file dialogs.
 *
 * @author Axel Howind (axel@dua3.com)
 */
public class MejaSwingHelper {
    /**
     * A Swing {@link TableModel} backed by a {@link Sheet}. Sheet changes are
     * forwarded to table listeners via a {@link SheetListener}, which is
     * attached to the sheet only while at least one
     * {@link TableModelListener} is registered (see the add/remove
     * listener overrides below).
     */
    private static final class SheetTableModel extends AbstractTableModel {
        // The sheet whose data this model exposes.
        private final Sheet sheet;
        // When true, sheet row 0 supplies the column headers and is hidden
        // from the table body (see getRowNumber/getRowCount).
        private final boolean firstRowIsHeader;
        private static final long serialVersionUID = 1L;
        // Forwards sheet events; attached/detached lazily, not in the constructor.
        private SheetListener sl;
        private SheetTableModel(Sheet sheet, boolean firstRowIsHeader) {
            this.sheet = sheet;
            this.firstRowIsHeader = firstRowIsHeader;
            sl = new SheetListener();
        }
        /** Translates sheet property-change events into table-model events. */
        class SheetListener implements PropertyChangeListener {
            private final Logger LOG = LoggerFactory.getLogger(SheetListener.class);
            public SheetListener() {
            }
            @Override
            public void propertyChange(PropertyChangeEvent evt) {
                switch (evt.getPropertyName()) {
                case Sheet.PROPERTY_ROWS_ADDED: {
                    RowInfo ri = (RowInfo) evt.getNewValue();
                    fireTableRowsInserted(getRowNumber(ri.getFirstRow()), getRowNumber(ri.getLastRow()));
                    LOG.debug("forwarded event {}", evt);
                }
                    break;
                case Sheet.PROPERTY_CELL_CONTENT:
                    // A change in the header row alters column names, which
                    // requires a structure change rather than a data change.
                    if (firstRowIsHeader && ((Cell)evt.getSource()).getRowNumber()==0) {
                        fireTableStructureChanged();
                    } else {
                        fireTableDataChanged();
                    }
                    break;
                case Sheet.PROPERTY_COLUMNS_ADDED:
                    fireTableStructureChanged();
                    break;
                }
            }
            /** Stops forwarding: removes this listener from the sheet. */
            public void detach() {
                sheet.removePropertyChangeListener(this);
            }
            /** Starts forwarding: registers this listener on the sheet. */
            public void attach() {
                sheet.addPropertyChangeListener(this);
            }
        }
        @Override
        public Class<?> getColumnClass(int columnIndex) {
            return Cell.class;
        }
        @Override
        public int getColumnCount() {
            return sheet.getColumnCount();
        }
        @Override
        public String getColumnName(int columnIndex) {
            if (firstRowIsHeader) {
                return sheet.getRow(0).getCell(columnIndex).toString();
            } else {
                return Sheet.getColumnName(columnIndex);
            }
        }
        @Override
        public int getRowCount() {
            int n = sheet.getRowCount();
            // The header row is not counted as a data row.
            return firstRowIsHeader && n>0 ? n -1 : n;
        }
        @Override
        public Object getValueAt(int i, int j) {
            Row row = sheet.getRow(getRowNumber(i));
            return row == null ? null : row.getCell(j);
        }
        @Override
        public boolean isCellEditable(int rowIndex, int columnIndex) {
            return false;
        }
        @Override
        public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
            throw new UnsupportedOperationException("Not supported yet.");
        }
        // Maps a table row index to the corresponding sheet row number.
        private int getRowNumber(int i) {
            return firstRowIsHeader ? i+1 : i;
        }
        @Override
        public void removeTableModelListener(TableModelListener l) {
            super.removeTableModelListener(l);
            // Detach from the sheet once nobody is listening to this model.
            if (listenerList.getListenerCount()==0) {
                sl.detach();
                LOG.debug("last TableModelListener was removed, detaching sheet listener from sheet");
            }
        }
        @Override
        public void addTableModelListener(TableModelListener l) {
            super.addTableModelListener(l);
            // Attach to the sheet when the first listener registers.
            if (listenerList.getListenerCount()==1) {
                sl.attach();
                LOG.debug("first TableModelListener was added, attaching sheet listener to sheet");
            }
        }
    }
    private static final Logger LOG = LoggerFactory.getLogger(MejaSwingHelper.class);
    /**
     * Create a TableModel to be used with JTable.
     *
     * @param sheet
     *            the sheet to create a model for
     * @param firstRowIsHeader
     *            if set to true, the first row of the sheet will be displayed as the table header
     * @return table model instance of {@code JTableModel} for the sheet
     */
    public static TableModel getTableModel(final Sheet sheet, boolean firstRowIsHeader) {
        return new SheetTableModel(sheet, firstRowIsHeader);
    }
    /**
     * Show a file open dialog and load the selected workbook.
     *
     * @param parent
     *            the parent component to use for the dialog
     * @param path
     *            the directory to set in the open dialog or the default path
     * @return the workbook the user chose or empty if dialog was canceled
     * @throws IOException
     *             if a workbook was selected but could not be loaded
     */
    public static Optional<Workbook> showDialogAndOpenWorkbook(Component parent, Path path) throws IOException {
        // Paths from non-default file systems (e.g. inside an archive) cannot
        // be converted to File; fall back to the current directory then.
        boolean defaultFS = path.getFileSystem().equals(FileSystems.getDefault());
        File file = defaultFS ? path.toFile() : new File(".");
        JFileChooser jfc = new JFileChooser(file == null || file.isDirectory() ? file : file.getParentFile());
        for (FileFilter filter : SwingFileFilter.getFilters(OpenMode.READ)) {
            jfc.addChoosableFileFilter(filter);
        }
        int rc = jfc.showOpenDialog(parent);
        if (rc != JFileChooser.APPROVE_OPTION) {
            return Optional.empty();
        }
        path = jfc.getSelectedFile().toPath();
        FileFilter filter = jfc.getFileFilter();
        if (filter instanceof SwingFileFilter) {
            // get factory from the used filter definition
            final SwingFileFilter swingFileFilter = (SwingFileFilter) filter;
            final FileType fileType = swingFileFilter.getFileType();
            return openWorkbook(parent, path, fileType);
        } else {
            // another filter was used (ie. "all files")
            return Optional.of(MejaHelper.openWorkbook(path));
        }
    }
    /**
     * Opens the workbook at {@code path}, inferring the file type from the
     * path and falling back to CSV when the type cannot be determined.
     */
    public static Optional<Workbook> openWorkbook(Component parent, Path path) throws IOException {
        return openWorkbook(parent, path, FileType.forPath(path).orElse(FileType.CSV));
    }
    /**
     * Opens a workbook of a known file type, prompting the user for
     * type-specific import settings when the type declares any.
     */
    public static Optional<Workbook> openWorkbook(Component parent, Path path, final FileType fileType) throws IOException {
        if (fileType==null) {
            return Optional.empty();
        }
        final WorkbookFactory<?> factory = fileType.getFactory();
        // ask user for file type specific settings
        List<Option<?>> settings = fileType.getSettings();
        Options importSettings = Options.empty(); // default is empty
        if (!settings.isEmpty()) {
            SettingsDialog dialog = new SettingsDialog(parent, fileType.name() + " - Settings",
                    "Please verify the import settings:", settings);
            dialog.setVisible(true);
            importSettings = dialog.getResult();
        }
        // load
        return Optional.of(factory.open(path, importSettings));
    }
    /**
     * Show file selection dialog and save workbook.
     * <p>
     * A file selection dialog is shown and the workbook is saved to the
     * selected file. If the file already exists, a confirmation dialog is
     * shown, asking the user whether to overwrite the file.
     * </p>
     *
     * @param parent
     *            the parent component for the dialog
     * @param workbook
     *            the workbook to save
     * @param path
     *            the path to set the default path in the dialog
     * @return the path the file was saved to or empty if the user
     *         canceled the dialog
     * @throws IOException
     *             if an exception occurs while saving
     */
    public static Optional<Path> showDialogAndSaveWorkbook(Component parent, Workbook workbook, Path path)
            throws IOException {
        boolean defaultFS = path.getFileSystem().equals(FileSystems.getDefault());
        File file = defaultFS ? path.toFile() : new File(".");
        JFileChooser jfc = new JFileChooser(file == null || file.isDirectory() ? file : file.getParentFile());
        int rc = jfc.showSaveDialog(parent);
        if (rc != JFileChooser.APPROVE_OPTION) {
            return Optional.empty();
        }
        file = jfc.getSelectedFile();
        if (file.exists()) {
            // Ask before clobbering an existing file.
            rc = JOptionPane.showConfirmDialog(
                    parent,
                    "File '" + file.getAbsolutePath() + "' already exists. Overwrite?",
                    "File exists",
                    JOptionPane.YES_NO_OPTION);
            if (rc != JOptionPane.YES_OPTION) {
                LOG.info("User chose not to overwrite file.");
                return Optional.empty();
            }
        }
        // Use a type-specific writer when the file type is recognized;
        // otherwise let the workbook pick its own output format.
        Optional<FileType> type = FileType.forFile(file);
        if (type.isPresent()) {
            type.get().getWriter().write(workbook, file.toPath());
        } else {
            workbook.write(file.toPath());
        }
        return Optional.of(file.toPath());
    }
    // Utility class: not instantiable.
    private MejaSwingHelper() {
    }
}
|
meja-ui-swing/src/main/java/com/dua3/meja/ui/swing/MejaSwingHelper.java
|
/*
* Copyright 2015 Axel Howind (axel@dua3.com).
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.dua3.meja.ui.swing;
import java.awt.Component;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.util.List;
import java.util.Optional;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.filechooser.FileFilter;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableModel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.dua3.meja.io.FileType;
import com.dua3.meja.io.OpenMode;
import com.dua3.meja.model.Cell;
import com.dua3.meja.model.Row;
import com.dua3.meja.model.Sheet;
import com.dua3.meja.model.Sheet.RowInfo;
import com.dua3.meja.model.Workbook;
import com.dua3.meja.model.WorkbookFactory;
import com.dua3.meja.util.MejaHelper;
import com.dua3.meja.util.Option;
import com.dua3.meja.util.Options;
/**
 * Helper class with static utility methods for using Meja workbooks in Swing
 * applications: a {@link TableModel} adapter for {@link Sheet} instances and
 * file dialogs for opening and saving {@link Workbook}s.
 *
 * @author Axel Howind (axel@dua3.com)
 */
public class MejaSwingHelper {

    /**
     * A read-only {@link TableModel} implementation backed by a {@link Sheet}.
     * Registers a listener on the sheet so that sheet changes are forwarded as
     * Swing table model events.
     */
    private static final class SheetTableModel extends AbstractTableModel {
        private final Sheet sheet;
        // if true, row 0 of the sheet provides the column headers and is
        // excluded from the data rows
        private final boolean firstRowIsHeader;
        private static final long serialVersionUID = 1L;
        // forwards sheet property changes to Swing table model events.
        // NOTE(review): the listener is registered on the sheet in the
        // constructor but never detached, so the sheet keeps a strong
        // reference to this model for its whole lifetime — confirm whether a
        // detach mechanism is needed.
        private SheetListener sl;

        private SheetTableModel(Sheet sheet, boolean firstRowIsHeader) {
            this.sheet = sheet;
            this.firstRowIsHeader = firstRowIsHeader;
            sl = new SheetListener();
        }

        /**
         * Listener translating {@link Sheet} property change events into the
         * corresponding Swing table model notifications.
         */
        class SheetListener implements PropertyChangeListener {
            private final Logger LOG = LoggerFactory.getLogger(SheetListener.class);

            public SheetListener() {
                // attaches itself to the enclosing model's sheet
                sheet.addPropertyChangeListener(this);
            }

            @Override
            public void propertyChange(PropertyChangeEvent evt) {
                switch (evt.getPropertyName()) {
                case Sheet.PROPERTY_ROWS_ADDED: {
                    RowInfo ri = (RowInfo) evt.getNewValue();
                    // translate sheet row numbers to table row numbers (the
                    // header row, if any, is not part of the table data)
                    fireTableRowsInserted(getRowNumber(ri.getFirstRow()), getRowNumber(ri.getLastRow()));
                    LOG.debug("forwarded event {}", evt);
                }
                    break;
                case Sheet.PROPERTY_CELL_CONTENT:
                    // a change in the header row alters column names, which
                    // requires a structure change notification
                    if (firstRowIsHeader && ((Cell)evt.getSource()).getRowNumber()==0) {
                        fireTableStructureChanged();
                    } else {
                        fireTableDataChanged();
                    }
                    break;
                case Sheet.PROPERTY_COLUMNS_ADDED:
                    fireTableStructureChanged();
                    break;
                }
            }
        }

        /** All cells are exposed as {@link Cell} instances. */
        @Override
        public Class<?> getColumnClass(int columnIndex) {
            return Cell.class;
        }

        @Override
        public int getColumnCount() {
            return sheet.getColumnCount();
        }

        /**
         * Returns the column name: either the text of the header cell in sheet
         * row 0, or the default spreadsheet column name otherwise.
         */
        @Override
        public String getColumnName(int columnIndex) {
            if (firstRowIsHeader) {
                return sheet.getRow(0).getCell(columnIndex).toString();
            } else {
                return Sheet.getColumnName(columnIndex);
            }
        }

        @Override
        public int getRowCount() {
            int n = sheet.getRowCount();
            // the header row (if present) is not counted as a data row
            return firstRowIsHeader && n>0 ? n -1 : n;
        }

        @Override
        public Object getValueAt(int i, int j) {
            Row row = sheet.getRow(getRowNumber(i));
            return row == null ? null : row.getCell(j);
        }

        /** This model is read-only. */
        @Override
        public boolean isCellEditable(int rowIndex, int columnIndex) {
            return false;
        }

        /** Unsupported: this model is read-only. */
        @Override
        public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        // translate a table row index to the corresponding sheet row number
        private int getRowNumber(int i) {
            return firstRowIsHeader ? i+1 : i;
        }
    }

    private static final Logger LOG = LoggerFactory.getLogger(MejaSwingHelper.class);

    /**
     * Create a TableModel to be used with JTable.
     *
     * @param sheet
     *            the sheet to create a model for
     * @param firstRowIsHeader
     *            if set to true, the first row of the sheet will be displayed as the table header
     * @return table model instance of {@code JTableModel} for the sheet
     */
    public static TableModel getTableModel(final Sheet sheet, boolean firstRowIsHeader) {
        return new SheetTableModel(sheet, firstRowIsHeader);
    }

    /**
     * Show a file open dialog and load the selected workbook.
     *
     * @param parent
     *            the parent component to use for the dialog
     * @param path
     *            the directory to set in the open dialog or the default path
     * @return the workbook the user chose, or an empty Optional if the dialog
     *         was canceled
     * @throws IOException
     *             if a workbook was selected but could not be loaded
     */
    public static Optional<Workbook> showDialogAndOpenWorkbook(Component parent, Path path) throws IOException {
        // JFileChooser only understands the default file system; fall back to
        // the current directory for paths from other file systems
        boolean defaultFS = path.getFileSystem().equals(FileSystems.getDefault());
        File file = defaultFS ? path.toFile() : new File(".");
        // NOTE(review): file can never be null at this point; the null check
        // below is purely defensive
        JFileChooser jfc = new JFileChooser(file == null || file.isDirectory() ? file : file.getParentFile());
        for (FileFilter filter : SwingFileFilter.getFilters(OpenMode.READ)) {
            jfc.addChoosableFileFilter(filter);
        }
        int rc = jfc.showOpenDialog(parent);
        if (rc != JFileChooser.APPROVE_OPTION) {
            // user canceled the dialog
            return Optional.empty();
        }
        path = jfc.getSelectedFile().toPath();
        FileFilter filter = jfc.getFileFilter();
        if (filter instanceof SwingFileFilter) {
            // get factory from the used filter definition
            final SwingFileFilter swingFileFilter = (SwingFileFilter) filter;
            final FileType fileType = swingFileFilter.getFileType();
            return openWorkbook(parent, path, fileType);
        } else {
            // another filter was used (ie. "all files")
            return Optional.of(MejaHelper.openWorkbook(path));
        }
    }

    /**
     * Open a workbook, determining the file type from the path and falling
     * back to CSV if the type cannot be determined.
     *
     * @param parent
     *            the parent component to use for dialogs
     * @param path
     *            the path of the workbook to open
     * @return the loaded workbook, or an empty Optional
     * @throws IOException
     *             if the workbook could not be loaded
     */
    public static Optional<Workbook> openWorkbook(Component parent, Path path) throws IOException {
        return openWorkbook(parent, path, FileType.forPath(path).orElse(FileType.CSV));
    }

    /**
     * Open a workbook of the given file type, asking the user to confirm
     * type-specific import settings if the type defines any.
     *
     * @param parent
     *            the parent component to use for the settings dialog
     * @param path
     *            the path of the workbook to open
     * @param fileType
     *            the file type to use for loading; if {@code null}, an empty
     *            Optional is returned
     * @return the loaded workbook, or an empty Optional
     * @throws IOException
     *             if the workbook could not be loaded
     */
    public static Optional<Workbook> openWorkbook(Component parent, Path path, final FileType fileType) throws IOException {
        if (fileType==null) {
            return Optional.empty();
        }
        final WorkbookFactory<?> factory = fileType.getFactory();
        // ask user for file type specific settings
        List<Option<?>> settings = fileType.getSettings();
        Options importSettings = Options.empty(); // default is empty
        if (!settings.isEmpty()) {
            SettingsDialog dialog = new SettingsDialog(parent, fileType.name() + " - Settings",
                    "Please verify the import settings:", settings);
            dialog.setVisible(true);
            importSettings = dialog.getResult();
        }
        // load
        return Optional.of(factory.open(path, importSettings));
    }

    /**
     * Show file selection dialog and save workbook.
     * <p>
     * A file selection dialog is shown and the workbook is saved to the
     * selected file. If the file already exists, a confirmation dialog is
     * shown, asking the user whether to overwrite the file.
     * </p>
     *
     * @param parent
     *            the parent component for the dialog
     * @param workbook
     *            the workbook to save
     * @param path
     *            the path to set the default path in the dialog
     * @return the path the file was saved to, or an empty Optional if the user
     *         canceled the dialog
     * @throws IOException
     *             if an exception occurs while saving
     */
    public static Optional<Path> showDialogAndSaveWorkbook(Component parent, Workbook workbook, Path path)
            throws IOException {
        // JFileChooser only understands the default file system; fall back to
        // the current directory for paths from other file systems
        boolean defaultFS = path.getFileSystem().equals(FileSystems.getDefault());
        File file = defaultFS ? path.toFile() : new File(".");
        JFileChooser jfc = new JFileChooser(file == null || file.isDirectory() ? file : file.getParentFile());
        int rc = jfc.showSaveDialog(parent);
        if (rc != JFileChooser.APPROVE_OPTION) {
            return Optional.empty();
        }
        file = jfc.getSelectedFile();
        if (file.exists()) {
            // ask before overwriting an existing file
            rc = JOptionPane.showConfirmDialog(
                    parent,
                    "File '" + file.getAbsolutePath() + "' already exists. Overwrite?",
                    "File exists",
                    JOptionPane.YES_NO_OPTION);
            if (rc != JOptionPane.YES_OPTION) {
                LOG.info("User chose not to overwrite file.");
                return Optional.empty();
            }
        }
        // use a type-specific writer if the file type is known; otherwise
        // delegate to the workbook's own write method
        Optional<FileType> type = FileType.forFile(file);
        if (type.isPresent()) {
            type.get().getWriter().write(workbook, file.toPath());
        } else {
            workbook.write(file.toPath());
        }
        return Optional.of(file.toPath());
    }

    // utility class: no instances
    private MejaSwingHelper() {
    }
}
|
automatically attach/detach SheetListener to Sheet as needed
|
meja-ui-swing/src/main/java/com/dua3/meja/ui/swing/MejaSwingHelper.java
|
automatically attach/detach SheetListener to Sheet as needed
|
|
Java
|
apache-2.0
|
26f1b13ad82f9729636f671948dd98e42fa989fa
| 0
|
firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk,firebase/firebase-android-sdk
|
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.firebase.firestore.remote;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.firebase.FirebaseOptions;
import com.google.firebase.heartbeatinfo.HeartBeatInfo;
import com.google.firebase.inject.Provider;
import com.google.firebase.platforminfo.UserAgentPublisher;
import io.grpc.Metadata;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Tests for {@code FirebaseClientGrpcMetadataProvider}.
 *
 * <p>Verifies that the heartbeat ({@code x-firebase-client-log-type}),
 * user-agent ({@code x-firebase-client}) and GMP app id
 * ({@code x-firebase-gmpid}) headers are only attached to the gRPC metadata
 * when the corresponding providers and options are available.
 */
@RunWith(AndroidJUnit4.class)
public class FirebaseClientGrpcMetadataProviderTest {

  private Provider<UserAgentPublisher> mockUserAgentProvider = mock(Provider.class);
  private Provider<HeartBeatInfo> mockHeartBeatProvider = mock(Provider.class);
  private UserAgentPublisher mockUserAgent = mock(UserAgentPublisher.class);
  private HeartBeatInfo mockHeartBeat = mock(HeartBeatInfo.class);

  private FirebaseOptions options =
      new FirebaseOptions.Builder()
          .setApplicationId("app_id")
          .setApiKey("apikey")
          .setProjectId("projectid")
          .build();

  private static final Metadata.Key<String> HEART_BEAT_HEADER =
      Metadata.Key.of("x-firebase-client-log-type", Metadata.ASCII_STRING_MARSHALLER);
  private static final Metadata.Key<String> USER_AGENT_HEADER =
      Metadata.Key.of("x-firebase-client", Metadata.ASCII_STRING_MARSHALLER);
  private static final Metadata.Key<String> GMP_APP_ID_HEADER =
      Metadata.Key.of("x-firebase-gmpid", Metadata.ASCII_STRING_MARSHALLER);

  /** Stubs what the two shared mock providers return (either value may be null). */
  private void stubProviders(UserAgentPublisher userAgent, HeartBeatInfo heartBeat) {
    when(mockUserAgentProvider.get()).thenReturn(userAgent);
    when(mockHeartBeatProvider.get()).thenReturn(heartBeat);
  }

  /**
   * Builds the provider under test with the shared mock providers and the
   * given options, applies it to a fresh {@link Metadata} instance and
   * returns that instance for verification.
   */
  private Metadata applyMetadata(FirebaseOptions firebaseOptions) {
    Metadata metadata = new Metadata();
    GrpcMetadataProvider metadataProvider =
        new FirebaseClientGrpcMetadataProvider(
            mockUserAgentProvider, mockHeartBeatProvider, firebaseOptions);
    metadataProvider.updateMetadata(metadata);
    return metadata;
  }

  @Test
  public void noUpdateWhenBothNullProvider() {
    stubProviders(null, null);
    Metadata metadata = applyMetadata(options);
    assertThat(metadata.keys().size()).isEqualTo(0);
  }

  @Test
  public void noUpdateWhenHeartbeatNullProvider() {
    stubProviders(mockUserAgent, null);
    Metadata metadata = applyMetadata(options);
    assertThat(metadata.keys().size()).isEqualTo(0);
  }

  @Test
  public void noUpdateWhenUserAgentNullProvider() {
    stubProviders(null, mockHeartBeat);
    Metadata metadata = applyMetadata(options);
    assertThat(metadata.keys().size()).isEqualTo(0);
  }

  @Test
  public void updateHeaderWhenHBCodeisGlobalHeartBeat() {
    stubProviders(mockUserAgent, mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.GLOBAL);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    Metadata metadata = applyMetadata(options);
    // GLOBAL heartbeat is reported with code "2"
    assertThat(metadata.keys().size()).isEqualTo(3);
    assertThat(metadata.get(HEART_BEAT_HEADER)).isEqualTo("2");
    assertThat(metadata.get(USER_AGENT_HEADER)).isEqualTo("foo:1.2.1");
    assertThat(metadata.get(GMP_APP_ID_HEADER)).isEqualTo("app_id");
  }

  @Test
  public void updateHeaderWhenHBCodeisSDKHeartBeat() {
    stubProviders(mockUserAgent, mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.SDK);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    Metadata metadata = applyMetadata(options);
    // SDK heartbeat is reported with code "1"
    assertThat(metadata.keys().size()).isEqualTo(3);
    assertThat(metadata.get(HEART_BEAT_HEADER)).isEqualTo("1");
    assertThat(metadata.get(USER_AGENT_HEADER)).isEqualTo("foo:1.2.1");
    assertThat(metadata.get(GMP_APP_ID_HEADER)).isEqualTo("app_id");
  }

  @Test
  public void updateHeaderWhenHBCodeisCombinedHeartBeat() {
    stubProviders(mockUserAgent, mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.COMBINED);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    Metadata metadata = applyMetadata(options);
    // COMBINED heartbeat is reported with code "3"
    assertThat(metadata.keys().size()).isEqualTo(3);
    assertThat(metadata.get(HEART_BEAT_HEADER)).isEqualTo("3");
    assertThat(metadata.get(USER_AGENT_HEADER)).isEqualTo("foo:1.2.1");
    assertThat(metadata.get(GMP_APP_ID_HEADER)).isEqualTo("app_id");
  }

  @Test
  public void headerIsUpdatedEvenWhenHeartBeatIsZero() {
    stubProviders(mockUserAgent, mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.NONE);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    Metadata metadata = applyMetadata(options);
    // no heartbeat header, but user agent and app id are still attached
    assertThat(metadata.keys().size()).isEqualTo(2);
    assertThat(metadata.get(USER_AGENT_HEADER)).isEqualTo("foo:1.2.1");
    assertThat(metadata.get(GMP_APP_ID_HEADER)).isEqualTo("app_id");
  }

  @Test
  public void noGmpAppIdWhenOptionsAreNull() {
    stubProviders(mockUserAgent, mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.SDK);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    // null options: the x-firebase-gmpid header must be omitted
    Metadata metadata = applyMetadata(null);
    assertThat(metadata.keys().size()).isEqualTo(2);
    assertThat(metadata.get(HEART_BEAT_HEADER)).isEqualTo("1");
    assertThat(metadata.get(USER_AGENT_HEADER)).isEqualTo("foo:1.2.1");
  }
}
|
firebase-firestore/src/androidTest/java/com/google/firebase/firestore/remote/FirebaseClientGrpcMetadataProviderTest.java
|
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.firebase.firestore.remote;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.firebase.FirebaseOptions;
import com.google.firebase.heartbeatinfo.HeartBeatInfo;
import com.google.firebase.inject.Provider;
import com.google.firebase.platforminfo.UserAgentPublisher;
import io.grpc.Metadata;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Tests for {@code FirebaseClientGrpcMetadataProvider}.
 *
 * <p>Verifies that the heartbeat ({@code x-firebase-client-log-type}),
 * user-agent ({@code x-firebase-client}) and GMP app id
 * ({@code x-firebase-gmpid}) headers are only attached to the gRPC metadata
 * when the corresponding providers and options are available.
 */
@RunWith(AndroidJUnit4.class)
public class FirebaseClientGrpcMetadataProviderTest {

  // shared mocks, re-stubbed by each test
  private Provider<UserAgentPublisher> mockUserAgentProvider = mock(Provider.class);
  private Provider<HeartBeatInfo> mockHeartBeatProvider = mock(Provider.class);
  private UserAgentPublisher mockUserAgent = mock(UserAgentPublisher.class);
  private HeartBeatInfo mockHeartBeat = mock(HeartBeatInfo.class);

  private FirebaseOptions options =
      new FirebaseOptions.Builder()
          .setApplicationId("app_id")
          .setApiKey("apikey")
          .setProjectId("projectid")
          .build();

  private static final Metadata.Key<String> HEART_BEAT_HEADER =
      Metadata.Key.of("x-firebase-client-log-type", Metadata.ASCII_STRING_MARSHALLER);
  private static final Metadata.Key<String> USER_AGENT_HEADER =
      Metadata.Key.of("x-firebase-client", Metadata.ASCII_STRING_MARSHALLER);
  private static final Metadata.Key<String> GMP_APP_ID_HEADER =
      Metadata.Key.of("x-firebase-gmpid", Metadata.ASCII_STRING_MARSHALLER);

  @Test
  public void noUpdateWhenBothNullProvider() {
    Metadata metadata = new Metadata();
    when(mockUserAgentProvider.get()).thenReturn(null);
    when(mockHeartBeatProvider.get()).thenReturn(null);
    GrpcMetadataProvider metadataProvider =
        new FirebaseClientGrpcMetadataProvider(
            mockUserAgentProvider, mockHeartBeatProvider, options);
    metadataProvider.updateMetadata(metadata);
    // nothing attached when both providers return null
    assertThat(metadata.keys().size()).isEqualTo(0);
  }

  @Test
  public void noUpdateWhenHeartbeatNullProvider() {
    Metadata metadata = new Metadata();
    when(mockUserAgentProvider.get()).thenReturn(mockUserAgent);
    when(mockHeartBeatProvider.get()).thenReturn(null);
    GrpcMetadataProvider metadataProvider =
        new FirebaseClientGrpcMetadataProvider(
            mockUserAgentProvider, mockHeartBeatProvider, options);
    metadataProvider.updateMetadata(metadata);
    // nothing attached when the heartbeat provider returns null
    assertThat(metadata.keys().size()).isEqualTo(0);
  }

  @Test
  public void noUpdateWhenUserAgentNullProvider() {
    Metadata metadata = new Metadata();
    when(mockUserAgentProvider.get()).thenReturn(null);
    when(mockHeartBeatProvider.get()).thenReturn(mockHeartBeat);
    GrpcMetadataProvider metadataProvider =
        new FirebaseClientGrpcMetadataProvider(
            mockUserAgentProvider, mockHeartBeatProvider, options);
    metadataProvider.updateMetadata(metadata);
    // nothing attached when the user-agent provider returns null
    assertThat(metadata.keys().size()).isEqualTo(0);
  }

  @Test
  public void updateHeaderWhenHBCodeisGlobalHeartBeat() {
    Metadata metadata = new Metadata();
    when(mockUserAgentProvider.get()).thenReturn(mockUserAgent);
    when(mockHeartBeatProvider.get()).thenReturn(mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.GLOBAL);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    GrpcMetadataProvider metadataProvider =
        new FirebaseClientGrpcMetadataProvider(
            mockUserAgentProvider, mockHeartBeatProvider, options);
    metadataProvider.updateMetadata(metadata);
    // GLOBAL heartbeat is reported with code "2"
    assertThat(metadata.keys().size()).isEqualTo(3);
    assertThat(metadata.get(HEART_BEAT_HEADER)).isEqualTo("2");
    assertThat(metadata.get(USER_AGENT_HEADER)).isEqualTo("foo:1.2.1");
    assertThat(metadata.get(GMP_APP_ID_HEADER)).isEqualTo("app_id");
  }

  @Test
  public void updateHeaderWhenHBCodeisSDKHeartBeat() {
    Metadata metadata = new Metadata();
    when(mockUserAgentProvider.get()).thenReturn(mockUserAgent);
    when(mockHeartBeatProvider.get()).thenReturn(mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.SDK);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    GrpcMetadataProvider metadataProvider =
        new FirebaseClientGrpcMetadataProvider(
            mockUserAgentProvider, mockHeartBeatProvider, options);
    metadataProvider.updateMetadata(metadata);
    // SDK heartbeat is reported with code "1"
    assertThat(metadata.keys().size()).isEqualTo(3);
    assertThat(metadata.get(HEART_BEAT_HEADER)).isEqualTo("1");
    assertThat(metadata.get(USER_AGENT_HEADER)).isEqualTo("foo:1.2.1");
    assertThat(metadata.get(GMP_APP_ID_HEADER)).isEqualTo("app_id");
  }

  @Test
  public void updateHeaderWhenHBCodeisCombinedHeartBeat() {
    Metadata metadata = new Metadata();
    when(mockUserAgentProvider.get()).thenReturn(mockUserAgent);
    when(mockHeartBeatProvider.get()).thenReturn(mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.COMBINED);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    GrpcMetadataProvider metadataProvider =
        new FirebaseClientGrpcMetadataProvider(
            mockUserAgentProvider, mockHeartBeatProvider, options);
    metadataProvider.updateMetadata(metadata);
    // COMBINED heartbeat is reported with code "3"
    assertThat(metadata.keys().size()).isEqualTo(3);
    assertThat(metadata.get(HEART_BEAT_HEADER)).isEqualTo("3");
    assertThat(metadata.get(USER_AGENT_HEADER)).isEqualTo("foo:1.2.1");
    assertThat(metadata.get(GMP_APP_ID_HEADER)).isEqualTo("app_id");
  }

  @Test
  public void noUpdateHeaderWhenHBCodeisZero() {
    Metadata metadata = new Metadata();
    when(mockUserAgentProvider.get()).thenReturn(mockUserAgent);
    when(mockHeartBeatProvider.get()).thenReturn(mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.NONE);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    GrpcMetadataProvider metadataProvider =
        new FirebaseClientGrpcMetadataProvider(
            mockUserAgentProvider, mockHeartBeatProvider, options);
    metadataProvider.updateMetadata(metadata);
    // NOTE(review): this expects NO headers at all for heartbeat NONE, even
    // though a user agent is available — confirm whether the user-agent and
    // app id headers should still be attached in this case.
    assertThat(metadata.keys().size()).isEqualTo(0);
  }

  @Test
  public void noGmpAppIdWhenOptionsAreNull() {
    Metadata metadata = new Metadata();
    when(mockUserAgentProvider.get()).thenReturn(mockUserAgent);
    when(mockHeartBeatProvider.get()).thenReturn(mockHeartBeat);
    when(mockHeartBeat.getHeartBeatCode(any())).thenReturn(HeartBeatInfo.HeartBeat.SDK);
    when(mockUserAgent.getUserAgent()).thenReturn("foo:1.2.1");
    // null options: the x-firebase-gmpid header must be omitted
    GrpcMetadataProvider metadataProvider =
        new FirebaseClientGrpcMetadataProvider(mockUserAgentProvider, mockHeartBeatProvider, null);
    metadataProvider.updateMetadata(metadata);
    assertThat(metadata.keys().size()).isEqualTo(2);
    assertThat(metadata.get(HEART_BEAT_HEADER)).isEqualTo("1");
    assertThat(metadata.get(USER_AGENT_HEADER)).isEqualTo("foo:1.2.1");
  }
}
|
Fix the heartbeat integration test in Firestore (#2024)
|
firebase-firestore/src/androidTest/java/com/google/firebase/firestore/remote/FirebaseClientGrpcMetadataProviderTest.java
|
Fix the heartbeat integration test in Firestore (#2024)
|
|
Java
|
apache-2.0
|
d156a5a78b8b82376c04ab20ba85bf4d683b9509
| 0
|
quadrama/DramaNLP,quadrama/DramaNLP,quadrama/DramaNLP
|
package de.unistuttgart.quadrama.core;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.fit.component.JCasAnnotator_ImplBase;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.descriptor.TypeCapability;
import org.apache.uima.fit.factory.AnnotationFactory;
import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.util.Level;
import de.unistuttgart.quadrama.api.Drama;
import de.unistuttgart.quadrama.api.Figure;
import de.unistuttgart.quadrama.api.Speaker;
import de.unistuttgart.quadrama.api.SpeakerFigure;
/**
 * UIMA annotator that links {@link Speaker} annotations to the {@link Figure}
 * they refer to.
 *
 * <p>Matching proceeds in three stages of decreasing strictness:
 * <ol>
 * <li>exact (case-insensitive, punctuation-stripped) name match,</li>
 * <li>match against a single token of the figure name,</li>
 * <li>Levenshtein distance up to {@link #threshold}.</li>
 * </ol>
 * Optionally, a {@link SpeakerFigure} is created for speakers that remain
 * unmatched.
 */
@TypeCapability(inputs = { "de.unistuttgart.quadrama.api.Figure",
        "de.unistuttgart.quadrama.api.Speaker" }, outputs = {
        "de.unistuttgart.quadrama.api.Speaker:Figure",
        "de.unistuttgart.quadrama.api.SpeakerFigure",
        "de.unistuttgart.quadrama.api.Figure:Id" })
public class SpeakerIdentifier extends JCasAnnotator_ImplBase {

    public static final String PARAM_CREATE_SPEAKER_FIGURE =
            "Create speaker figure";

    // if true, create a SpeakerFigure annotation for each distinct speaker
    // text that could not be matched to any Figure
    @ConfigurationParameter(name = PARAM_CREATE_SPEAKER_FIGURE,
            mandatory = false)
    boolean createSpeakerFigure = false;

    // maximum Levenshtein distance for the fuzzy matching stage
    int threshold = 2;

    @Override
    public void process(JCas jcas) throws AnalysisEngineProcessException {
        getLogger().log(
                Level.INFO,
                "Running "
                        + getClass().getName()
                        + " on "
                        + JCasUtil.selectSingle(jcas, Drama.class)
                        .getDocumentId());

        // index figures by normalized name and assign sequential ids
        Map<String, Figure> map = new HashMap<String, Figure>();
        int figureId = 0;
        for (Figure figure : JCasUtil.select(jcas, Figure.class)) {
            figure.setId(figureId++);
            map.put(figure.getCoveredText().trim().toLowerCase(), figure);
        }

        // stage 1: exact match on the normalized speaker name
        Set<Speaker> unassigned = new HashSet<Speaker>();
        for (Speaker cm : JCasUtil.select(jcas, Speaker.class)) {
            String sName =
                    cm.getCoveredText().trim().replaceAll("[.,;]", "")
                    .toLowerCase();
            if (map.containsKey(sName))
                cm.setFigure(map.get(sName));
            else {
                unassigned.add(cm);
            }
        }

        // stages 2 and 3 on whatever is still unassigned
        unassigned = assignLevel2(map.values(), unassigned);
        // unassigned = assignLevel3(map.values(), unassigned);
        unassigned = assignLevDistance(map.values(), unassigned, threshold);

        if (!unassigned.isEmpty())
            getLogger().log(
                    Level.WARNING,
                    unassigned.size() + " unassigned speakers: "
                            + JCasUtil.toText(unassigned));

        if (createSpeakerFigure) {
            // group unmatched speakers by their trimmed surface text
            Map<String, TreeSet<Speaker>> unassignedMap =
                    new HashMap<String, TreeSet<Speaker>>();
            for (Speaker speaker : unassigned) {
                String key = speaker.getCoveredText().trim();
                if (!unassignedMap.containsKey(key))
                    unassignedMap.put(key,
                            new TreeSet<Speaker>(new AnnotationComparator()));
                unassignedMap.get(key).add(speaker);
            }
            // create one SpeakerFigure per distinct speaker text, anchored at
            // the first occurrence, and assign it to all members of the group
            for (TreeSet<Speaker> group : unassignedMap.values()) {
                Speaker speaker = group.first();
                getLogger().log(
                        Level.WARNING,
                        "Creating SpeakerFigure for "
                                + speaker.getCoveredText());
                Figure fig =
                        AnnotationFactory.createAnnotation(jcas,
                                speaker.getBegin(), speaker.getEnd(),
                                SpeakerFigure.class);
                fig.setId(figureId++);
                for (Speaker sp : group) {
                    sp.setFigure(fig);
                }
            }
        }
    }

    /**
     * Stage 2: assign a figure whose name contains the speaker text as a
     * whitespace-separated token. If several figures match, the last one wins
     * (iteration order of {@code figures}).
     *
     * @param figures
     *            candidate figures
     * @param speakers
     *            speakers to assign
     * @return the speakers that could not be assigned
     */
    protected Set<Speaker> assignLevel2(Collection<Figure> figures,
            Collection<Speaker> speakers) {
        Set<Speaker> unassigned = new HashSet<Speaker>();
        for (Speaker speaker : speakers) {
            // hoisted out of the inner loop: invariant per speaker
            String sName = speaker.getCoveredText().toLowerCase().trim();
            for (Figure figure : figures) {
                String[] nameParts =
                        figure.getCoveredText().toLowerCase().split(" +");
                if (ArrayUtils.contains(nameParts, sName)) {
                    speaker.setFigure(figure);
                }
            }
            if (speaker.getFigure() == null) unassigned.add(speaker);
        }
        return unassigned;
    }

    /**
     * Alternative stage: match the speaker text against tokens of the figure
     * description (currently not called from {@link #process(JCas)}).
     *
     * @param figures
     *            candidate figures
     * @param speakers
     *            speakers to assign
     * @return the speakers that could not be assigned
     */
    protected Set<Speaker> assignLevel3(Collection<Figure> figures,
            Collection<Speaker> speakers) {
        Set<Speaker> unassigned = new HashSet<Speaker>();
        for (Speaker speaker : speakers) {
            for (Figure figure : figures) {
                if (figure.getDescription() != null) {
                    String[] nameParts =
                            figure.getDescription().getCoveredText()
                            .toLowerCase().split(" +");
                    if (ArrayUtils
                            .contains(nameParts, speaker.getCoveredText())) {
                        speaker.setFigure(figure);
                    }
                }
            }
            if (speaker.getFigure() == null) unassigned.add(speaker);
        }
        return unassigned;
    }

    /**
     * Stage 3: fuzzy matching by Levenshtein distance between normalized
     * speaker and figure names. NOTE: if several figures fall within the
     * distance threshold, the last one iterated wins.
     *
     * @param figures
     *            candidate figures
     * @param speakers
     *            speakers to assign
     * @param maxDistance
     *            maximum allowed edit distance (inclusive)
     * @return the speakers that could not be assigned
     */
    protected Set<Speaker> assignLevDistance(Collection<Figure> figures,
            Collection<Speaker> speakers, int maxDistance) {
        Set<Speaker> unassigned = new HashSet<Speaker>();
        for (Speaker speaker : speakers) {
            String sName = speaker.getCoveredText().trim().toLowerCase();
            for (Figure figure : figures) {
                String fName = figure.getCoveredText().trim().toLowerCase();
                int lev = StringUtils.getLevenshteinDistance(sName, fName);
                if (lev <= maxDistance) {
                    speaker.setFigure(figure);
                }
            }
            if (speaker.getFigure() == null) unassigned.add(speaker);
        }
        return unassigned;
    }
}
|
de.unistuttgart.quadrama/de.unistuttgart.quadrama.core/src/main/java/de/unistuttgart/quadrama/core/SpeakerIdentifier.java
|
package de.unistuttgart.quadrama.core;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.fit.component.JCasAnnotator_ImplBase;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.fit.descriptor.TypeCapability;
import org.apache.uima.fit.factory.AnnotationFactory;
import org.apache.uima.fit.util.JCasUtil;
import org.apache.uima.jcas.JCas;
import org.apache.uima.util.Level;
import de.unistuttgart.quadrama.api.Drama;
import de.unistuttgart.quadrama.api.Figure;
import de.unistuttgart.quadrama.api.Speaker;
import de.unistuttgart.quadrama.api.SpeakerFigure;
/**
 * UIMA annotator that links {@link Speaker} annotations to the {@link Figure}
 * they refer to, using several matching stages of decreasing strictness:
 * exact name match, token match against the figure name, and Levenshtein
 * distance up to {@link #threshold}. Optionally creates a
 * {@link SpeakerFigure} for speakers that remain unmatched.
 */
@TypeCapability(inputs = { "de.unistuttgart.quadrama.api.Figure",
        "de.unistuttgart.quadrama.api.Speaker" }, outputs = {
        "de.unistuttgart.quadrama.api.Speaker:Figure",
        "de.unistuttgart.quadrama.api.SpeakerFigure",
        "de.unistuttgart.quadrama.api.Figure:Id" })
public class SpeakerIdentifier extends JCasAnnotator_ImplBase {

    public static final String PARAM_CREATE_SPEAKER_FIGURE =
            "Create speaker figure";

    // if true, create a SpeakerFigure annotation for each distinct speaker
    // text that could not be matched to any Figure
    @ConfigurationParameter(name = PARAM_CREATE_SPEAKER_FIGURE,
            mandatory = false)
    boolean createSpeakerFigure = false;

    // maximum Levenshtein distance for the fuzzy matching stage
    int threshold = 2;

    @Override
    public void process(JCas jcas) throws AnalysisEngineProcessException {
        getLogger().log(
                Level.FINE,
                "Now processing "
                        + JCasUtil.selectSingle(jcas, Drama.class)
                        .getDocumentId());

        // index figures by normalized name and assign sequential ids
        Map<String, Figure> map = new HashMap<String, Figure>();
        int figureId = 0;
        for (Figure figure : JCasUtil.select(jcas, Figure.class)) {
            figure.setId(figureId++);
            map.put(figure.getCoveredText().trim().toLowerCase(), figure);
        }

        // stage 1: exact match on the normalized speaker name
        Set<Speaker> unassigned = new HashSet<Speaker>();
        for (Speaker cm : JCasUtil.select(jcas, Speaker.class)) {
            String sName =
                    cm.getCoveredText().trim().replaceAll("[.,;]", "")
                    .toLowerCase();
            if (map.containsKey(sName))
                cm.setFigure(map.get(sName));
            else {
                unassigned.add(cm);
            }
        }

        // further stages on whatever is still unassigned
        unassigned = assignLevel2(map.values(), unassigned);
        // unassigned = assignLevel3(map.values(), unassigned);
        unassigned = assignLevDistance(map.values(), unassigned, threshold);

        if (!unassigned.isEmpty())
            getLogger().log(
                    Level.WARNING,
                    unassigned.size() + " unassigned speakers: "
                            + JCasUtil.toText(unassigned));

        if (createSpeakerFigure) {
            // group unmatched speakers by their trimmed surface text
            Map<String, TreeSet<Speaker>> unassignedMap =
                    new HashMap<String, TreeSet<Speaker>>();
            for (Speaker speaker : unassigned) {
                if (!unassignedMap.containsKey(speaker.getCoveredText().trim()))
                    unassignedMap.put(speaker.getCoveredText().trim(),
                            new TreeSet<Speaker>(new AnnotationComparator()));
                unassignedMap.get(speaker.getCoveredText().trim()).add(speaker);
            }
            // create one SpeakerFigure per distinct speaker text, anchored at
            // the first occurrence, and assign it to all members of the group
            for (String s : unassignedMap.keySet()) {
                Speaker speaker = unassignedMap.get(s).first();
                getLogger().log(
                        Level.WARNING,
                        "Creating SpeakerFigure for "
                                + speaker.getCoveredText());
                Figure fig =
                        AnnotationFactory.createAnnotation(jcas,
                                speaker.getBegin(), speaker.getEnd(),
                                SpeakerFigure.class);
                fig.setId(figureId++);
                for (Speaker sp : unassignedMap.get(s)) {
                    sp.setFigure(fig);
                }
            }
        }
    }

    /**
     * Assign a figure whose name contains the speaker text as a
     * whitespace-separated token. If several figures match, the last one
     * iterated wins.
     *
     * @param figures
     *            candidate figures
     * @param speakers
     *            speakers to assign
     * @return the speakers that could not be assigned
     */
    protected Set<Speaker> assignLevel2(Collection<Figure> figures,
            Collection<Speaker> speakers) {
        Set<Speaker> unassigned = new HashSet<Speaker>();
        for (Speaker speaker : speakers) {
            for (Figure figure : figures) {
                String[] nameParts =
                        figure.getCoveredText().toLowerCase().split(" +");
                if (ArrayUtils.contains(nameParts, speaker.getCoveredText()
                        .toLowerCase().trim())) {
                    speaker.setFigure(figure);
                } else {}
            }
            if (speaker.getFigure() == null) unassigned.add(speaker);
        }
        return unassigned;
    }

    /**
     * Alternative stage: match the speaker text against tokens of the figure
     * description (currently not called from {@link #process(JCas)}).
     *
     * @param figures
     *            candidate figures
     * @param speakers
     *            speakers to assign
     * @return the speakers that could not be assigned
     */
    protected Set<Speaker> assignLevel3(Collection<Figure> figures,
            Collection<Speaker> speakers) {
        Set<Speaker> unassigned = new HashSet<Speaker>();
        for (Speaker speaker : speakers) {
            for (Figure figure : figures) {
                if (figure.getDescription() != null) {
                    String[] nameParts =
                            figure.getDescription().getCoveredText()
                            .toLowerCase().split(" +");
                    if (ArrayUtils
                            .contains(nameParts, speaker.getCoveredText())) {
                        speaker.setFigure(figure);
                    }
                }
            }
            if (speaker.getFigure() == null) unassigned.add(speaker);
        }
        return unassigned;
    }

    /**
     * Fuzzy matching by Levenshtein distance between normalized speaker and
     * figure names. NOTE: if several figures fall within the distance
     * threshold, the last one iterated wins.
     *
     * @param figures
     *            candidate figures
     * @param speakers
     *            speakers to assign
     * @param maxDistance
     *            maximum allowed edit distance (inclusive)
     * @return the speakers that could not be assigned
     */
    protected Set<Speaker> assignLevDistance(Collection<Figure> figures,
            Collection<Speaker> speakers, int maxDistance) {
        Set<Speaker> unassigned = new HashSet<Speaker>();
        for (Speaker speaker : speakers) {
            String sName = speaker.getCoveredText().trim().toLowerCase();
            for (Figure figure : figures) {
                String fName = figure.getCoveredText().trim().toLowerCase();
                int lev = StringUtils.getLevenshteinDistance(sName, fName);
                if (lev <= maxDistance) {
                    speaker.setFigure(figure);
                }
            }
            if (speaker.getFigure() == null) unassigned.add(speaker);
        }
        return unassigned;
    }
}
|
updated logging
|
de.unistuttgart.quadrama/de.unistuttgart.quadrama.core/src/main/java/de/unistuttgart/quadrama/core/SpeakerIdentifier.java
|
updated logging
|
|
Java
|
apache-2.0
|
833b5ed1e01909a4341876056c7cb8258ce8c9fd
| 0
|
mini2Dx/mini2Dx,mini2Dx/mini2Dx
|
/*******************************************************************************
* Copyright 2019 See AUTHORS file
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.mini2Dx.ui;
import org.mini2Dx.core.Graphics;
import org.mini2Dx.core.Mdx;
import org.mini2Dx.core.assets.AssetManager;
import org.mini2Dx.core.game.GameContainer;
import org.mini2Dx.core.graphics.viewport.Viewport;
import org.mini2Dx.core.input.GamePadType;
import org.mini2Dx.core.input.button.GamePadButton;
import org.mini2Dx.gdx.Input;
import org.mini2Dx.gdx.InputProcessor;
import org.mini2Dx.gdx.math.MathUtils;
import org.mini2Dx.gdx.math.Vector2;
import org.mini2Dx.gdx.utils.Array;
import org.mini2Dx.gdx.utils.IntArray;
import org.mini2Dx.gdx.utils.IntSet;
import org.mini2Dx.gdx.utils.ObjectSet;
import org.mini2Dx.ui.gamepad.GamePadUiInput;
import org.mini2Dx.ui.element.*;
import org.mini2Dx.ui.event.EventTrigger;
import org.mini2Dx.ui.event.params.EventTriggerParamsPool;
import org.mini2Dx.ui.event.params.GamePadEventTriggerParams;
import org.mini2Dx.ui.event.params.KeyboardEventTriggerParams;
import org.mini2Dx.ui.event.params.MouseEventTriggerParams;
import org.mini2Dx.ui.layout.PixelLayoutUtils;
import org.mini2Dx.ui.layout.ScreenSize;
import org.mini2Dx.ui.listener.ScreenSizeListener;
import org.mini2Dx.ui.listener.UiContainerListener;
import org.mini2Dx.ui.listener.UiInputSourceListener;
import org.mini2Dx.ui.navigation.UiNavigation;
import org.mini2Dx.ui.render.*;
import org.mini2Dx.ui.style.StyleRule;
import org.mini2Dx.ui.style.UiTheme;
import org.mini2Dx.ui.util.IdAllocator;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* The container for all UI elements. {@link #update(float)} and
* {@link #render(Graphics)} must be called by your {@link GameContainer}
*/
public class UiContainer extends ParentUiElement implements InputProcessor {
private static final String LOGGING_TAG = UiContainer.class.getSimpleName();
private static final Vector2 SHARED_VECTOR = new Vector2();
private static final Array<UiContainer> uiContainerInstances = new Array<UiContainer>(true, 2, UiContainer.class);
private static Visibility defaultVisibility = Visibility.HIDDEN;
private static UiTheme UI_THEME;
private static UiContainerState STATE = UiContainerState.NOOP;
private final Array<GamePadUiInput<?>> controllerInputs = new Array<GamePadUiInput<?>>(true,1, GamePadUiInput.class);
private final Array<UiInputSourceListener> inputSourceListeners = new Array<UiInputSourceListener>(true,1, UiInputSourceListener.class);
private final Array<UiContainerListener> containerListeners = new Array<UiContainerListener>(true,1, UiContainerListener.class);
private final IntSet receivedKeyDowns = new IntSet();
private final ObjectSet<String> receivedButtonDowns = new ObjectSet<String>();
private final AtomicBoolean forceRenderTreeLayout = new AtomicBoolean(false);
private final UiContainerRenderTree renderTree;
private InputSource lastInputSource, nextInputSource;
private GamePadType lastGamePadType = GamePadType.UNKNOWN, nextGamePadType = GamePadType.UNKNOWN;
private int lastMouseX, lastMouseY;
private float scaleX = 1f;
private float scaleY = 1f;
private String lastThemeId;
private boolean themeWarningIssued, initialThemeLayoutComplete;
private float inputSourceChangeTimer = 0f, inputSourceChangeThreshold = 0.1f;
private final IntArray actionKeys = new IntArray();
private Navigatable activeNavigation;
private ActionableRenderNode activeAction;
private TextInputableRenderNode activeTextInput;
private NavigationMode navigationMode = NavigationMode.BUTTON_OR_POINTER;
private boolean textInputIgnoredFirstEnter = false;
private ScreenSizeScaleMode screenSizeScaleMode = ScreenSizeScaleMode.NO_SCALING;
private boolean passThroughMouseMovement = false;
private Viewport viewport;
/**
* Constructor
*
* @param gc
* Your game's {@link GameContainer}
* @param assetManager
* The {@link AssetManager} for the game
*/
public UiContainer(GameContainer gc, AssetManager assetManager) {
this(gc.getWidth(), gc.getHeight(), assetManager);
}
public UiContainer(int width, int height, AssetManager assetManager) {
super(IdAllocator.getNextId("ui-container-root"));
this.width = width;
this.height = height;
actionKeys.add(Input.Keys.ENTER);
switch (Mdx.platform) {
case ANDROID:
case IOS:
lastInputSource = InputSource.TOUCHSCREEN;
break;
case MAC:
case LINUX:
case WINDOWS:
lastInputSource = InputSource.KEYBOARD_MOUSE;
break;
default:
break;
}
renderTree = new UiContainerRenderTree(this, assetManager);
super.renderNode = renderTree;
setVisibility(Visibility.VISIBLE);
uiContainerInstances.add(this);
}
public static void relayoutAllUiContainers() {
Mdx.log.info(LOGGING_TAG, "Triggering re-layout for all UiContainer instances");
for(int i = uiContainerInstances.size - 1; i >= 0; i--) {
uiContainerInstances.get(i).forceRenderTreeLayout();
}
}
private void forceRenderTreeLayout() {
forceRenderTreeLayout.set(true);
}
public void dispose() {
uiContainerInstances.removeValue(this, false);
}
@Override
protected ParentRenderNode<?, ?> createRenderNode(ParentRenderNode<?, ?> parent) {
return renderTree;
}
/**
 * Updates all {@link UiElement}s. Must be called once per frame, before
 * {@link #render(Graphics)}.
 *
 * @param delta
 *            The time since the last frame (in seconds)
 */
public void update(float delta) {
	updateLastInputSource(delta);
	updateLastGamePadType();
	if (!isThemeApplied()) {
		// Warn once rather than logging every frame
		if (!themeWarningIssued) {
			if (Mdx.log != null) {
				Mdx.log.error(LOGGING_TAG, "No theme applied to UI - cannot update or render UI.");
			}
			themeWarningIssued = true;
		}
		return;
	}
	// Mark the tree dirty on the very first update or whenever the applied
	// theme id changes. (Simplified: the former "lastThemeId != null &&"
	// inside the || was redundant after the null check on the left.)
	if (lastThemeId == null || !lastThemeId.equals(UI_THEME.getId())) {
		renderTree.setDirty();
		initialThemeLayoutComplete = false;
		Mdx.log.info(LOGGING_TAG, "Applied theme - " + UI_THEME.getId());
	}
	lastThemeId = UI_THEME.getId();
	// Honor a pending cross-container re-layout request (see relayoutAllUiContainers)
	if (forceRenderTreeLayout.get()) {
		renderTree.onResize(width, height);
		forceRenderTreeLayout.set(false);
	}
	notifyPreUpdate(delta);
	for (int i = controllerInputs.size - 1; i >= 0; i--) {
		controllerInputs.get(i).update(delta);
	}
	if (renderTree.isDirty()) {
		STATE = UiContainerState.LAYOUT;
		renderTree.layout();
		STATE = UiContainerState.NOOP;
		renderTree.processLayoutDeferred();
		initialThemeLayoutComplete = true;
	}
	STATE = UiContainerState.UPDATE;
	renderTree.update(delta);
	notifyPostUpdate(delta);
	STATE = UiContainerState.NOOP;
	renderTree.processUpdateDeferred();
	PixelLayoutUtils.update(delta);
}
/**
 * Renders all visible {@link UiElement}s
 *
 * @param g
 *            The {@link Graphics} context
 */
public void render(Graphics g) {
	if (!isThemeApplied()) {
		return;
	}
	if (!initialThemeLayoutComplete) {
		return;
	}
	STATE = UiContainerState.RENDER;
	notifyPreRender(g);
	switch (visibility) {
	case HIDDEN:
	case NO_RENDER:
		// Fix: this case previously returned here, which left the static
		// STATE stuck at RENDER and skipped notifyPostRender() /
		// processRenderDeferred() after preRender had already fired.
		// Breaking instead keeps pre/post listener notifications symmetric
		// and restores STATE to NOOP below.
		break;
	default:
		float previousScaleX = g.getScaleX();
		float previousScaleY = g.getScaleY();
		if (scaleX != 1f || scaleY != 1f) {
			g.setScale(scaleX, scaleY);
		}
		renderTree.render(g);
		if (scaleX != 1f || scaleY != 1f) {
			// Restore the caller's scale so this container does not leak
			// its scaling into subsequent rendering
			g.setScale(previousScaleX, previousScaleY);
		}
		break;
	}
	notifyPostRender(g);
	STATE = UiContainerState.NOOP;
	renderTree.processRenderDeferred();
}
@Override
public void attach(ParentRenderNode<?, ?> parentRenderNode) {
}
@Override
public void detach(ParentRenderNode<?, ?> parentRenderNode) {
}
@Override
public void setVisibility(Visibility visibility) {
this.visibility = visibility;
}
/**
* Adds a {@link ScreenSizeListener} to listen for {@link ScreenSize} change
*
* @param listener
* The {@link ScreenSizeListener} to add
*/
public void addScreenSizeListener(ScreenSizeListener listener) {
renderTree.addScreenSizeListener(listener);
}
/**
* Removes a {@link ScreenSizeListener} from this {@link UiContainer}
*
* @param listener
* The {@link ScreenSizeListener} to remove
*/
public void removeScreenSizeListener(ScreenSizeListener listener) {
renderTree.removeScreenSizeListener(listener);
}
/**
* Returns if a {@link UiTheme} has been applied to thi {@link UiContainer}
*
* @return True if the {@link UiTheme} has been applied
*/
public static boolean isThemeApplied() {
return UI_THEME != null;
}
/**
* Returns the {@link UiTheme} currently applied to this {@link UiContainer}
*
* @return Null if no {@link UiTheme} has been applied
*/
public static UiTheme getTheme() {
return UI_THEME;
}
/**
 * Sets the current {@link UiTheme} for this {@link UiContainer}.
 * A {@code null} theme, or a theme whose id matches the currently applied
 * theme, is ignored.
 *
 * @param theme
 *            The {@link UiTheme} to apply
 */
public static void setTheme(UiTheme theme) {
	if (theme == null) {
		return;
	}
	final boolean alreadyApplied = UI_THEME != null && UI_THEME.getId().equals(theme.getId());
	if (alreadyApplied) {
		return;
	}
	UI_THEME = theme;
}
/**
* Returns the current {@link UiContainerState}
* @return
*/
public static UiContainerState getState() {
return STATE;
}
@Override
public void setStyleId(String styleId) {
}
@Override
public boolean touchDown(int screenX, int screenY, int pointer, int button) {
if (!pointerNavigationAllowed()) {
return false;
}
if (viewport != null) {
SHARED_VECTOR.x = screenX;
SHARED_VECTOR.y = screenY;
viewport.toWorldCoordinates(SHARED_VECTOR);
screenX = MathUtils.round(SHARED_VECTOR.x);
screenY = MathUtils.round(SHARED_VECTOR.y);
} else {
screenX = MathUtils.round(screenX / scaleX);
screenY = MathUtils.round(screenY / scaleY);
}
updateLastInputSource(screenX, screenY);
lastMouseX = screenX;
lastMouseY = screenY;
if (activeTextInput != null && activeTextInput.mouseDown(screenX, screenY, pointer, button) == null) {
// Release textbox control
activeTextInput = null;
activeAction = null;
switch (Mdx.platform) {
case ANDROID:
case IOS:
Mdx.input.setOnScreenKeyboardVisible(false);
break;
default:
break;
}
}
ActionableRenderNode result = renderTree.mouseDown(screenX, screenY, pointer, button);
if (result != null) {
MouseEventTriggerParams params = EventTriggerParamsPool.allocateMouseParams();
params.setMouseX(screenX);
params.setMouseY(screenY);
result.beginAction(EventTrigger.getTriggerForMouseClick(button), params);
EventTriggerParamsPool.release(params);
setActiveAction(result);
return true;
}
return false;
}
@Override
public boolean touchUp(int screenX, int screenY, int pointer, int button) {
if (!pointerNavigationAllowed()) {
return false;
}
if (viewport != null) {
SHARED_VECTOR.x = screenX;
SHARED_VECTOR.y = screenY;
viewport.toWorldCoordinates(SHARED_VECTOR);
screenX = MathUtils.round(SHARED_VECTOR.x);
screenY = MathUtils.round(SHARED_VECTOR.y);
} else {
screenX = MathUtils.round(screenX / scaleX);
screenY = MathUtils.round(screenY / scaleY);
}
updateLastInputSource(screenX, screenY);
lastMouseX = screenX;
lastMouseY = screenY;
if (activeAction == null) {
return false;
}
activeAction.mouseUp(screenX, screenY, pointer, button);
activeAction = null;
return true;
}
@Override
public boolean touchDragged(int screenX, int screenY, int pointer) {
if (!pointerNavigationAllowed()) {
return false;
}
if (viewport != null) {
SHARED_VECTOR.x = screenX;
SHARED_VECTOR.y = screenY;
viewport.toWorldCoordinates(SHARED_VECTOR);
screenX = MathUtils.round(SHARED_VECTOR.x);
screenY = MathUtils.round(SHARED_VECTOR.y);
} else {
screenX = MathUtils.round(screenX / scaleX);
screenY = MathUtils.round(screenY / scaleY);
}
updateLastInputSource(screenX, screenY);
lastMouseX = screenX;
lastMouseY = screenY;
if(passThroughMouseMovement) {
renderTree.mouseMoved(screenX, screenY);
return false;
}
return renderTree.mouseMoved(screenX, screenY);
}
@Override
public boolean mouseMoved(int screenX, int screenY) {
	// Translate raw screen coordinates into UI space: either through the
	// configured viewport, or by dividing out the render scale.
	if (viewport != null) {
		SHARED_VECTOR.x = screenX;
		SHARED_VECTOR.y = screenY;
		viewport.toWorldCoordinates(SHARED_VECTOR);
		screenX = MathUtils.round(SHARED_VECTOR.x);
		screenY = MathUtils.round(SHARED_VECTOR.y);
	} else {
		screenX = MathUtils.round(screenX / scaleX);
		screenY = MathUtils.round(screenY / scaleY);
	}
	// Record pointer activity/position BEFORE the navigation check so the
	// input-source tracking stays current even in button-only mode
	updateLastInputSource(screenX, screenY);
	lastMouseX = screenX;
	lastMouseY = screenY;
	if (!pointerNavigationAllowed()) {
		return false;
	}
	// Pass-through mode: still notify the render tree, but never consume the
	// event so other InputProcessors also receive it
	if(passThroughMouseMovement) {
		renderTree.mouseMoved(screenX, screenY);
		return false;
	}
	return renderTree.mouseMoved(screenX, screenY);
}
@Override
public boolean scrolled(float amountX, float amountY) {
if (!pointerNavigationAllowed()) {
return false;
}
return renderTree.mouseScrolled(lastMouseX, lastMouseY, amountX, amountY);
}
@Override
public boolean keyTyped(char character) {
	final TextInputableRenderNode textInput = activeTextInput;
	if (textInput == null) {
		// No active text field - let the event propagate
		return false;
	}
	if (textInput.isReceivingInput()) {
		textInput.characterReceived(character);
	}
	// Consume the event whenever a text field is active
	return true;
}
@Override
public boolean keyDown(int keycode) {
	// Active text field gets first claim on all key presses
	if (activeTextInput != null && activeTextInput.isReceivingInput()) {
		receivedKeyDowns.add(keycode);
		return true;
	}
	// Action keys (e.g. ENTER) trigger the currently focused actionable
	if (actionKeys.contains(keycode) && activeAction != null) {
		receivedKeyDowns.add(keycode);
		KeyboardEventTriggerParams params = EventTriggerParamsPool.allocateKeyboardParams();
		params.setKey(keycode);
		activeAction.setState(NodeState.ACTION);
		activeAction.beginAction(EventTrigger.KEYBOARD, params);
		EventTriggerParamsPool.release(params);
		if (activeTextInput != null) {
			// Reset so the matching keyUp's first ENTER is not swallowed
			textInputIgnoredFirstEnter = false;
		}
		return true;
	}
	// Fall back to modal navigation / hotkeys
	if (handleModalKeyDown(keycode)) {
		receivedKeyDowns.add(keycode);
		return true;
	}
	// Not consumed: ensure no stale key-down is remembered, so the matching
	// keyUp is also ignored (see keyUp)
	receivedKeyDowns.remove(keycode);
	return false;
}
@Override
public boolean keyUp(int keycode) {
	// Key down was sent before this UI Container accepted input
	if (!receivedKeyDowns.remove(keycode)) {
		return false;
	}
	// Text input (backspace/enter/cursor keys) takes priority
	if (handleTextInputKeyUp(keycode)) {
		return true;
	}
	// End an action begun by the matching keyDown on the focused actionable
	if (actionKeys.contains(keycode) && activeAction != null) {
		KeyboardEventTriggerParams params = EventTriggerParamsPool.allocateKeyboardParams();
		params.setKey(keycode);
		activeAction.setState(NodeState.NORMAL);
		activeAction.endAction(EventTrigger.KEYBOARD, params);
		EventTriggerParamsPool.release(params);
		switch (Mdx.platform) {
		case ANDROID:
		case IOS:
			// Completing an action dismisses the on-screen keyboard on mobile
			Mdx.input.setOnScreenKeyboardVisible(false);
			break;
		default:
			break;
		}
		return true;
	}
	// Finally, release any modal hotkey action
	if (handleModalKeyUp(keycode)) {
		return true;
	}
	return false;
}
public boolean buttonDown(GamePadUiInput<?> controllerUiInput, GamePadButton button) {
if (activeNavigation == null) {
return false;
}
receivedButtonDowns.add(button.getInternalName());
ActionableRenderNode hotkeyAction = activeNavigation.hotkey(button);
if (hotkeyAction != null) {
if(!hotkeyAction.isEnabled()) {
return true;
}
GamePadEventTriggerParams params = EventTriggerParamsPool.allocateGamePadParams();
params.setGamePadButton(button);
hotkeyAction.setState(NodeState.ACTION);
hotkeyAction.beginAction(EventTrigger.CONTROLLER, params);
EventTriggerParamsPool.release(params);
} else if (activeAction != null) {
if (button.equals(controllerUiInput.getActionButton())) {
if (activeTextInput != null) {
if (!textInputIgnoredFirstEnter) {
GamePadEventTriggerParams params = EventTriggerParamsPool.allocateGamePadParams();
params.setGamePadButton(button);
activeAction.setState(NodeState.ACTION);
activeAction.beginAction(EventTrigger.CONTROLLER, params);
EventTriggerParamsPool.release(params);
}
} else {
GamePadEventTriggerParams params = EventTriggerParamsPool.allocateGamePadParams();
params.setGamePadButton(button);
activeAction.setState(NodeState.ACTION);
activeAction.beginAction(EventTrigger.CONTROLLER, params);
EventTriggerParamsPool.release(params);
}
}
}
return true;
}
public boolean buttonUp(GamePadUiInput<?> controllerUiInput, GamePadButton button) {
// Button down was sent before this UI Container accepted input
if (!receivedButtonDowns.remove(button.getInternalName())) {
return false;
}
if (activeNavigation == null) {
return false;
}
ActionableRenderNode hotkeyAction = activeNavigation.hotkey(button);
if (hotkeyAction != null) {
if(!hotkeyAction.isEnabled()) {
return true;
}
GamePadEventTriggerParams params = EventTriggerParamsPool.allocateGamePadParams();
params.setGamePadButton(button);
hotkeyAction.setState(NodeState.NORMAL);
hotkeyAction.endAction(EventTrigger.CONTROLLER, params);
EventTriggerParamsPool.release(params);
} else if (activeAction != null) {
if (activeTextInput != null && !textInputIgnoredFirstEnter) {
textInputIgnoredFirstEnter = true;
return true;
}
if (button.equals(controllerUiInput.getActionButton())) {
GamePadEventTriggerParams params = EventTriggerParamsPool.allocateGamePadParams();
params.setGamePadButton(button);
activeAction.setState(NodeState.NORMAL);
activeAction.endAction(EventTrigger.CONTROLLER, params);
EventTriggerParamsPool.release(params);
textInputIgnoredFirstEnter = false;
}
}
return true;
}
private boolean handleModalKeyDown(int keycode) {
if (activeNavigation == null) {
return false;
}
ActionableRenderNode hotkeyAction = activeNavigation.hotkey(keycode);
if (hotkeyAction == null) {
if (keyNavigationAllowed()) {
if (activeAction != null) {
activeAction.setState(NodeState.NORMAL);
}
ActionableRenderNode result = activeNavigation.navigate(keycode);
if (result != null) {
result.setState(NodeState.HOVER);
setActiveAction(result);
}
}
} else {
if(!hotkeyAction.isEnabled()) {
return true;
}
KeyboardEventTriggerParams params = EventTriggerParamsPool.allocateKeyboardParams();
params.setKey(keycode);
hotkeyAction.setState(NodeState.ACTION);
hotkeyAction.beginAction(EventTrigger.KEYBOARD, params);
EventTriggerParamsPool.release(params);
}
return true;
}
private boolean handleModalKeyUp(int keycode) {
if (activeNavigation == null) {
return false;
}
ActionableRenderNode hotkeyAction = activeNavigation.hotkey(keycode);
if (hotkeyAction != null) {
if(!hotkeyAction.isEnabled()) {
return true;
}
KeyboardEventTriggerParams params = EventTriggerParamsPool.allocateKeyboardParams();
params.setKey(keycode);
hotkeyAction.setState(NodeState.NORMAL);
hotkeyAction.endAction(EventTrigger.KEYBOARD, params);
EventTriggerParamsPool.release(params);
}
return true;
}
// Routes editing keys (backspace, enter, arrows) to the active text field.
// Returns true when the key was consumed by text input handling.
private boolean handleTextInputKeyUp(int keycode) {
	if (activeTextInput == null) {
		return false;
	}
	if (!activeTextInput.isReceivingInput()) {
		return false;
	}
	switch (keycode) {
	case Input.Keys.BACKSPACE:
		activeTextInput.backspace();
		break;
	case Input.Keys.ENTER:
		// The ENTER that focused the field is ignored once, so it does not
		// immediately commit/close the field it just opened
		if (!textInputIgnoredFirstEnter) {
			textInputIgnoredFirstEnter = true;
			return true;
		}
		// enter() returning true means the field commits and releases focus
		if (activeTextInput.enter()) {
			activeTextInput = null;
			activeAction = null;
			switch (Mdx.platform) {
			case ANDROID:
			case IOS:
				// Hide the on-screen keyboard once editing finishes on mobile
				Mdx.input.setOnScreenKeyboardVisible(false);
				break;
			default:
				break;
			}
		}
		break;
	case Input.Keys.RIGHT:
		activeTextInput.moveCursorRight();
		break;
	case Input.Keys.LEFT:
		activeTextInput.moveCursorLeft();
		break;
	}
	return true;
}
// Makes the given actionable the focused action target. Expects a non-null
// actionable (all current callers pass a non-null render node).
public void setActiveAction(ActionableRenderNode actionable) {
	// Reset the previously focused node's visual state when focus moves
	if (activeAction != null && !activeAction.getId().equals(actionable.getId())) {
		activeAction.setState(NodeState.NORMAL);
	}
	// Focusing a text field additionally starts text input; on mobile this
	// brings up the on-screen keyboard
	if (actionable instanceof TextInputableRenderNode) {
		activeTextInput = (TextInputableRenderNode) actionable;
		switch (Mdx.platform) {
		case ANDROID:
		case IOS:
			Mdx.input.setOnScreenKeyboardVisible(true);
			break;
		default:
			break;
		}
	}
	activeAction = actionable;
	notifyElementActivated(actionable);
}
/**
* Sets the current {@link Navigatable} for UI navigation
*
* @param activeNavigation
* The current {@link Navigatable} being navigated
*/
public void setActiveNavigation(Navigatable activeNavigation) {
if (this.activeNavigation != null && activeNavigation != null
&& this.activeNavigation.getId().equals(activeNavigation.getId())) {
return;
}
unsetExistingNavigationHover();
this.activeNavigation = activeNavigation;
if (renderTree == null) {
return;
}
if (!keyNavigationAllowed()) {
return;
}
if (activeAction != null) {
activeAction.setState(NodeState.NORMAL);
}
UiNavigation navigation = activeNavigation.getNavigation();
if (navigation == null) {
return;
}
Actionable firstActionable = navigation.resetCursor();
if (firstActionable == null) {
return;
}
RenderNode<?, ?> renderNode = renderTree.getElementById(firstActionable.getId());
if (renderNode == null) {
return;
}
setActiveAction(((ActionableRenderNode) renderNode));
((ActionableRenderNode) renderNode).setState(NodeState.HOVER);
}
private void unsetExistingNavigationHover() {
if (activeNavigation == null) {
return;
}
if (renderTree == null) {
return;
}
if (activeAction == null) {
return;
}
if (activeAction.getState() != NodeState.HOVER) {
return;
}
activeAction.setState(NodeState.NORMAL);
}
public void clearActiveAction() {
unsetExistingNavigationHover();
this.activeAction = null;
}
/**
* Clears the current {@link Navigatable} being navigated
*/
public void clearActiveNavigation() {
unsetExistingNavigationHover();
this.activeTextInput = null;
this.activeAction = null;
this.activeNavigation = null;
}
/**
* Returns the currently active {@link Navigatable}
*
* @return null if there is nothing active
*/
public Navigatable getActiveNavigation() {
return activeNavigation;
}
/**
* Returns the currently hovered {@link ActionableRenderNode}
* @return Null if nothing is hovered
*/
public ActionableRenderNode getActiveAction() {
return activeAction;
}
/**
* Returns the width of the {@link UiContainer}
*
* @return The width in pixels
*/
public float getWidth() {
return width;
}
/**
* Returns the height of the {@link UiContainer}
*
* @return The height in pixels
*/
public float getHeight() {
return height;
}
@Override
public StyleRule getStyleRule() {
return StyleRule.NOOP;
}
/**
* Sets the width and height of the {@link UiContainer}
*
* @param width
* The width in pixels
* @param height
* The height in pixels
*/
public void set(int width, int height) {
this.width = width;
this.height = height;
renderTree.onResize(width, height);
}
/**
* Returns the configured {@link Graphics} scaling during rendering
*
* @return 1f by default
*/
public float getScaleX() {
return scaleX;
}
/**
* Returns the configured {@link Graphics} scaling during rendering
*
* @return 1f by default
*/
public float getScaleY() {
return scaleY;
}
/**
* Sets the {@link Graphics} scaling during rendering. Mouse/touch
* coordinates will be scaled accordingly.
*
* @param scaleX
* Scaling along the X axis
* @param scaleY
* Scaling along the Y axis
*/
public void setScale(float scaleX, float scaleY) {
if(scaleX == this.scaleX && scaleY == this.scaleY) {
return;
}
this.scaleX = scaleX;
this.scaleY = scaleY;
renderTree.onResize(width, height);
}
/**
* Returns the last {@link InputSource} used on the {@link UiContainer}
*
* @return
*/
public InputSource getLastInputSource() {
if(lastInputSource == null) {
if(Mdx.platform.isConsole()) {
lastInputSource = InputSource.CONTROLLER;
} else if(Mdx.platform.isMobile()) {
lastInputSource = InputSource.TOUCHSCREEN;
}
}
return lastInputSource;
}
// Flags keyboard/mouse (or touchscreen on mobile) as the pending input source
// once the pointer moves noticeably from its last recorded position.
private void updateLastInputSource(int screenX, int screenY) {
	// Ignore sub-3px movement to filter out pointer jitter
	final boolean pointerMoved = Math.abs(screenX - lastMouseX) > 2
			|| Math.abs(screenY - lastMouseY) > 2;
	if (!pointerMoved) {
		return;
	}
	switch (Mdx.platform) {
	case ANDROID:
	case IOS:
		setLastInputSource(InputSource.TOUCHSCREEN);
		break;
	default:
		// Desktop platforms (and anything unknown) report mouse movement
		setLastInputSource(InputSource.KEYBOARD_MOUSE);
		break;
	}
}
// Debounces input-source switches: the new source must persist for
// inputSourceChangeThreshold seconds before listeners are notified.
private void updateLastInputSource(float delta) {
	if (nextInputSource == null) {
		return;
	}
	if (this.lastInputSource.equals(nextInputSource)) {
		// Same source as before - reset the debounce timer
		inputSourceChangeTimer = 0f;
		return;
	}
	inputSourceChangeTimer += delta;
	if(inputSourceChangeTimer < inputSourceChangeThreshold) {
		// Change has not persisted long enough yet
		return;
	}
	inputSourceChangeTimer = 0f;
	InputSource oldInputSource = this.lastInputSource;
	this.lastInputSource = nextInputSource;
	notifyInputSourceChange(oldInputSource, lastInputSource);
}
/**
* Sets the last {@link InputSource} used on the {@link UiContainer}
*
* @param lastInputSource
* The {@link InputSource} last used
*/
public void setLastInputSource(InputSource lastInputSource) {
this.nextInputSource = lastInputSource;
}
/**
* Returns the last {@link GamePadType} used on the {@link UiContainer}
*
* @return
*/
public GamePadType getLastGamePadType() {
return lastGamePadType;
}
private void updateLastGamePadType() {
if (nextGamePadType == null) {
return;
}
if (this.lastGamePadType.equals(nextGamePadType)) {
return;
}
GamePadType oldGamePadType = this.lastGamePadType;
this.lastGamePadType = nextGamePadType;
notifyGamePadTypeChange(oldGamePadType, lastGamePadType);
}
/**
* Sets the last {@link GamePadType} used on the {@link UiContainer}
*
* @param lastGamePadType
* The {@link GamePadType} last used
*/
public void setLastGamePadType(GamePadType lastGamePadType) {
this.nextGamePadType = lastGamePadType;
}
/**
* Adds a {@link GamePadUiInput} instance to this {@link UiContainer}
*
* @param input
* The instance to add
*/
public void addGamePadInput(GamePadUiInput<?> input) {
controllerInputs.add(input);
}
/**
* Removes a {@link GamePadUiInput} instance from this
* {@link UiContainer}
*
* @param input
* The instance to remove
*/
public void removeGamePadInput(GamePadUiInput<?> input) {
controllerInputs.removeValue(input, false);
}
@Override
public void setZIndex(int zIndex) {
}
/**
* Add the key used for triggering actions (i.e. selecting a menu option)
*
* @param keycode
* The {@link Input.Keys} value
*/
public void addActionKey(int keycode) {
actionKeys.add(keycode);
}
/**
* Removes a key used for triggering actions (i.e. selecting a menu option)
*
* @param keycode
* The {@link Input.Keys} value
*/
public void removeActionKey(int keycode) {
actionKeys.removeValue(keycode);
}
/**
* Clears the keys used for triggering actions (i.e. selecting a menu option)
*/
public void clearActionKeys() {
actionKeys.clear();
}
/**
* Set to true if mouseMoved() events to should pass through this input handler regardless
* @param passThroughMouseMovement
*/
public void setPassThroughMouseMovement(boolean passThroughMouseMovement) {
this.passThroughMouseMovement = passThroughMouseMovement;
}
/**
* Sets the duration of new input that needs to occur before the input source is considered changed
* @param inputSourceChangeThreshold The time in seconds
*/
public void setInputSourceChangeThreshold(float inputSourceChangeThreshold) {
this.inputSourceChangeThreshold = inputSourceChangeThreshold;
}
/**
 * Returns if this {@link UiContainer} can be navigated by keyboard/gamepad
 * @return True by default
 */
public boolean keyNavigationAllowed() {
	switch (Mdx.platform) {
	case ANDROID:
	case IOS:
		// Touch platforms never use key/button navigation
		return false;
	default:
		// Desktop (and unknown) platforms: allowed in any button-capable mode
		return navigationMode == NavigationMode.BUTTON_ONLY
				|| navigationMode == NavigationMode.BUTTON_OR_POINTER;
	}
}
/**
 * Returns if this {@link UiContainer} can be navigated by touch/mouse
 * @return True by default
 */
public boolean pointerNavigationAllowed() {
	switch (Mdx.platform) {
	case ANDROID:
	case IOS:
		// Touch platforms always navigate by pointer
		return true;
	default:
		// Desktop (and unknown) platforms: allowed in any pointer-capable mode
		return navigationMode == NavigationMode.BUTTON_OR_POINTER
				|| navigationMode == NavigationMode.POINTER_ONLY;
	}
}
/**
* Sets the {@link NavigationMode} on this {@link UiContainer}
* @param navigationMode The {@link NavigationMode}
*/
public void setNavigationMode(NavigationMode navigationMode) {
if(navigationMode == null) {
return;
}
this.navigationMode = navigationMode;
}
/**
* Returns the scaling mode used for {@link ScreenSize} values
* @return {@link ScreenSizeScaleMode#NO_SCALING} by default
*/
public ScreenSizeScaleMode getScreenSizeScaleMode() {
return screenSizeScaleMode;
}
/**
* Sets the scaling mode used for {@link ScreenSize} values
* @param screenSizeScaleMode The {@link ScreenSizeScaleMode} to set
*/
public void setScreenSizeScaleMode(ScreenSizeScaleMode screenSizeScaleMode) {
if(screenSizeScaleMode == null) {
return;
}
if(this.screenSizeScaleMode == screenSizeScaleMode) {
return;
}
this.screenSizeScaleMode = screenSizeScaleMode;
renderTree.onResize(width, height);
}
/**
* Adds a {@link UiContainerListener} to this {@link UiContainer}
*
* @param listener
* The {@link UiContainerListener} to be notified of events
*/
public void addUiContainerListener(UiContainerListener listener) {
containerListeners.add(listener);
inputSourceListeners.add(listener);
addScreenSizeListener(listener);
}
/**
* Removes a {@link UiContainerListener} from this {@link UiContainer}
*
* @param listener
* The {@link UiContainerListener} to stop receiving events
*/
public void removeUiContainerListener(UiContainerListener listener) {
containerListeners.removeValue(listener, false);
inputSourceListeners.removeValue(listener, false);
removeScreenSizeListener(listener);
}
/**
* Adds a {@link UiInputSourceListener} to this {@link UiContainer}
*
* @param listener
* The {@link UiInputSourceListener} to be notified of events
*/
public void addInputSourceListener(UiInputSourceListener listener) {
inputSourceListeners.add(listener);
}
/**
 * Removes a {@link UiInputSourceListener} from this {@link UiContainer}
 *
 * @param listener
 *            The {@link UiInputSourceListener} to stop receiving events
 */
public void removeInputSourceListener(UiInputSourceListener listener) {
	inputSourceListeners.removeValue(listener, false);
}

/**
 * Removes a {@link UiInputSourceListener} from this {@link UiContainer}
 *
 * @param listener
 *            The {@link UiInputSourceListener} to stop receiving events
 * @deprecated Misnamed counterpart of {@link #addInputSourceListener(UiInputSourceListener)};
 *             use {@link #removeInputSourceListener(UiInputSourceListener)} instead
 */
@Deprecated
public void removeUiContainerListener(UiInputSourceListener listener) {
	removeInputSourceListener(listener);
}
private void notifyPreUpdate(float delta) {
for (int i = containerListeners.size - 1; i >= 0; i--) {
containerListeners.get(i).preUpdate(this, delta);
}
}
private void notifyPostUpdate(float delta) {
for (int i = containerListeners.size - 1; i >= 0; i--) {
containerListeners.get(i).postUpdate(this, delta);
}
}
private void notifyPreRender(Graphics g) {
for (int i = containerListeners.size - 1; i >= 0; i--) {
containerListeners.get(i).preRender(this, g);
}
}
private void notifyPostRender(Graphics g) {
for (int i = containerListeners.size - 1; i >= 0; i--) {
containerListeners.get(i).postRender(this, g);
}
}
/**
* Sends input source change event to all containers with the current input source as both the old and new
*/
public void notifyInputSource() {
notifyInputSourceChange(lastInputSource, lastInputSource);
}
/**
* Sends game pad change event to all containers with the current game pad type as both the old and new
*/
public void notifyGamePadType() {
notifyGamePadTypeChange(lastGamePadType, lastGamePadType);
}
private void notifyInputSourceChange(InputSource oldSource, InputSource newSource) {
for(int i = uiContainerInstances.size - 1; i >= 0; i--) {
uiContainerInstances.get(i).notifyInputSourceChange(uiContainerInstances.get(i), oldSource, newSource);
}
}
private void notifyInputSourceChange(UiContainer uiContainer, InputSource oldSource, InputSource newSource) {
for (int i = uiContainer.inputSourceListeners.size - 1; i >= 0; i--) {
uiContainer.inputSourceListeners.get(i).inputSourceChanged(uiContainer, oldSource, newSource);
}
}
private void notifyGamePadTypeChange(GamePadType oldGamePadType, GamePadType newGamePadType) {
for(int i = uiContainerInstances.size - 1; i >= 0; i--) {
uiContainerInstances.get(i).notifyGamePadTypeChange(uiContainerInstances.get(i), oldGamePadType, newGamePadType);
}
}
private void notifyGamePadTypeChange(UiContainer uiContainer, GamePadType oldGamePadType, GamePadType newGamePadType) {
for (int i = uiContainer.inputSourceListeners.size - 1; i >= 0; i--) {
uiContainer.inputSourceListeners.get(i).gamePadTypeChanged(uiContainer, oldGamePadType, newGamePadType);
}
}
private void notifyElementActivated(ActionableRenderNode actionable) {
for (int i = containerListeners.size - 1; i >= 0; i--) {
containerListeners.get(i).onElementAction(this, actionable.getElement());
}
}
/**
* Returns the default {@link Visibility} for newly created
* {@link UiElement} objects
*
* @return A non-null {@link Visibility} value. {@link Visibility#HIDDEN} by
* default
*/
public static Visibility getDefaultVisibility() {
return defaultVisibility;
}
/**
* Sets the default {@link Visibility} for newly created {@link UiElement}
* objects
*
* @param defaultVisibility
* The {@link Visibility} to set as default
*/
public static void setDefaultVisibility(Visibility defaultVisibility) {
if (defaultVisibility == null) {
return;
}
UiContainer.defaultVisibility = defaultVisibility;
}
public void setViewport(Viewport viewport) {
this.viewport = viewport;
}
}
|
ui/src/main/java/org/mini2Dx/ui/UiContainer.java
|
/*******************************************************************************
* Copyright 2019 See AUTHORS file
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.mini2Dx.ui;
import org.mini2Dx.core.Graphics;
import org.mini2Dx.core.Mdx;
import org.mini2Dx.core.assets.AssetManager;
import org.mini2Dx.core.game.GameContainer;
import org.mini2Dx.core.graphics.viewport.Viewport;
import org.mini2Dx.core.input.GamePadType;
import org.mini2Dx.core.input.button.GamePadButton;
import org.mini2Dx.gdx.Input;
import org.mini2Dx.gdx.InputProcessor;
import org.mini2Dx.gdx.math.MathUtils;
import org.mini2Dx.gdx.math.Vector2;
import org.mini2Dx.gdx.utils.Array;
import org.mini2Dx.gdx.utils.IntArray;
import org.mini2Dx.gdx.utils.IntSet;
import org.mini2Dx.gdx.utils.ObjectSet;
import org.mini2Dx.ui.gamepad.GamePadUiInput;
import org.mini2Dx.ui.element.*;
import org.mini2Dx.ui.event.EventTrigger;
import org.mini2Dx.ui.event.params.EventTriggerParamsPool;
import org.mini2Dx.ui.event.params.GamePadEventTriggerParams;
import org.mini2Dx.ui.event.params.KeyboardEventTriggerParams;
import org.mini2Dx.ui.event.params.MouseEventTriggerParams;
import org.mini2Dx.ui.layout.PixelLayoutUtils;
import org.mini2Dx.ui.layout.ScreenSize;
import org.mini2Dx.ui.listener.ScreenSizeListener;
import org.mini2Dx.ui.listener.UiContainerListener;
import org.mini2Dx.ui.listener.UiInputSourceListener;
import org.mini2Dx.ui.navigation.UiNavigation;
import org.mini2Dx.ui.render.*;
import org.mini2Dx.ui.style.StyleRule;
import org.mini2Dx.ui.style.UiTheme;
import org.mini2Dx.ui.util.IdAllocator;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* The container for all UI elements. {@link #update(float)} and
* {@link #render(Graphics)} must be called by your {@link GameContainer}
*/
public class UiContainer extends ParentUiElement implements InputProcessor {
private static final String LOGGING_TAG = UiContainer.class.getSimpleName();
private static final Vector2 SHARED_VECTOR = new Vector2();
private static final Array<UiContainer> uiContainerInstances = new Array<UiContainer>(true, 2, UiContainer.class);
private static Visibility defaultVisibility = Visibility.HIDDEN;
private static UiTheme UI_THEME;
private static UiContainerState STATE = UiContainerState.NOOP;
private final Array<GamePadUiInput<?>> controllerInputs = new Array<GamePadUiInput<?>>(true,1, GamePadUiInput.class);
private final Array<UiInputSourceListener> inputSourceListeners = new Array<UiInputSourceListener>(true,1, UiInputSourceListener.class);
private final Array<UiContainerListener> containerListeners = new Array<UiContainerListener>(true,1, UiContainerListener.class);
private final IntSet receivedKeyDowns = new IntSet();
private final ObjectSet<String> receivedButtonDowns = new ObjectSet<String>();
private final AtomicBoolean forceRenderTreeLayout = new AtomicBoolean(false);
private final UiContainerRenderTree renderTree;
private InputSource lastInputSource, nextInputSource;
private GamePadType lastGamePadType = GamePadType.UNKNOWN, nextGamePadType = GamePadType.UNKNOWN;
private int lastMouseX, lastMouseY;
private float scaleX = 1f;
private float scaleY = 1f;
private String lastThemeId;
private boolean themeWarningIssued, initialThemeLayoutComplete;
private float inputSourceChangeTimer = 0f, inputSourceChangeThreshold = 0.1f;
private final IntArray actionKeys = new IntArray();
private Navigatable activeNavigation;
private ActionableRenderNode activeAction;
private TextInputableRenderNode activeTextInput;
private NavigationMode navigationMode = NavigationMode.BUTTON_OR_POINTER;
private boolean textInputIgnoredFirstEnter = false;
private ScreenSizeScaleMode screenSizeScaleMode = ScreenSizeScaleMode.NO_SCALING;
private boolean passThroughMouseMovement = false;
private Viewport viewport;
/**
* Constructor
*
* @param gc
* Your game's {@link GameContainer}
* @param assetManager
* The {@link AssetManager} for the game
*/
public UiContainer(GameContainer gc, AssetManager assetManager) {
    // Convenience overload: sizes the container to the game window
    this(gc.getWidth(), gc.getHeight(), assetManager);
}
/**
 * Constructor
 *
 * @param width The container width in pixels
 * @param height The container height in pixels
 * @param assetManager The {@link AssetManager} for the game
 */
public UiContainer(int width, int height, AssetManager assetManager) {
    super(IdAllocator.getNextId("ui-container-root"));
    this.width = width;
    this.height = height;
    // ENTER triggers actions by default; more keys can be added via addActionKey()
    actionKeys.add(Input.Keys.ENTER);
    // Seed the initial input source from the platform.
    // NOTE(review): platforms not listed here (e.g. consoles) fall through and leave
    // lastInputSource null until setLastInputSource() is called - callers of
    // getLastInputSource() may observe null on those platforms.
    switch (Mdx.platform) {
    case ANDROID:
    case IOS:
        lastInputSource = InputSource.TOUCHSCREEN;
        break;
    case MAC:
    case LINUX:
    case WINDOWS:
        lastInputSource = InputSource.KEYBOARD_MOUSE;
        break;
    default:
        break;
    }
    renderTree = new UiContainerRenderTree(this, assetManager);
    // The container is its own render node root
    super.renderNode = renderTree;
    setVisibility(Visibility.VISIBLE);
    // Track the instance so static notifications/relayouts reach every container
    uiContainerInstances.add(this);
}
// Flags every live UiContainer to re-run layout on its next update() call
public static void relayoutAllUiContainers() {
    Mdx.log.info(LOGGING_TAG, "Triggering re-layout for all UiContainer instances");
    for(int i = uiContainerInstances.size - 1; i >= 0; i--) {
        uiContainerInstances.get(i).forceRenderTreeLayout();
    }
}
// Atomic flag consumed by update(); safe to set from a non-game thread
private void forceRenderTreeLayout() {
    forceRenderTreeLayout.set(true);
}
// Unregisters this container from the static instance list (identity=false -> equals()-based removal)
public void dispose() {
    uiContainerInstances.removeValue(this, false);
}
@Override
protected ParentRenderNode<?, ?> createRenderNode(ParentRenderNode<?, ?> parent) {
    // The container's render node is always the render tree root created in the constructor
    return renderTree;
}
/**
 * Updates all {@link UiElement}s. Must be called once per frame before
 * {@link #render(Graphics)}.
 *
 * @param delta
 *            The time since the last frame (in seconds)
 */
public void update(float delta) {
    updateLastInputSource(delta);
    updateLastGamePadType();
    if (!isThemeApplied()) {
        // Warn only once rather than logging every frame
        if (!themeWarningIssued) {
            if (Mdx.log != null) {
                Mdx.log.error(LOGGING_TAG, "No theme applied to UI - cannot update or render UI.");
            }
            themeWarningIssued = true;
        }
        return;
    }
    // Simplified: the previous condition repeated a redundant 'lastThemeId != null' test
    // inside the || branch. First-time or changed theme invalidates the layout.
    if (lastThemeId == null || !lastThemeId.equals(UI_THEME.getId())) {
        renderTree.setDirty();
        initialThemeLayoutComplete = false;
        Mdx.log.info(LOGGING_TAG, "Applied theme - " + UI_THEME.getId());
    }
    lastThemeId = UI_THEME.getId();
    if(forceRenderTreeLayout.get()) {
        // Requested via relayoutAllUiContainers()/forceRenderTreeLayout() (AtomicBoolean -> thread-safe)
        renderTree.onResize(width, height);
        forceRenderTreeLayout.set(false);
    }
    notifyPreUpdate(delta);
    for (int i = controllerInputs.size - 1; i >= 0; i--) {
        controllerInputs.get(i).update(delta);
    }
    if (renderTree.isDirty()) {
        STATE = UiContainerState.LAYOUT;
        renderTree.layout();
        STATE = UiContainerState.NOOP;
        renderTree.processLayoutDeferred();
        initialThemeLayoutComplete = true;
    }
    STATE = UiContainerState.UPDATE;
    renderTree.update(delta);
    notifyPostUpdate(delta);
    STATE = UiContainerState.NOOP;
    renderTree.processUpdateDeferred();
    PixelLayoutUtils.update(delta);
}
/**
 * Renders all visible {@link UiElement}s
 *
 * @param g
 *            The {@link Graphics} context
 */
public void render(Graphics g) {
    if (!isThemeApplied()) {
        return;
    }
    // Skip rendering until the first layout pass after a theme change has completed
    if (!initialThemeLayoutComplete) {
        return;
    }
    STATE = UiContainerState.RENDER;
    notifyPreRender(g);
    switch (visibility) {
    case HIDDEN:
    case NO_RENDER:
        // Fix: the early return previously left the static STATE stuck at RENDER
        // until the next update() call
        STATE = UiContainerState.NOOP;
        return;
    default:
        float previousScaleX = g.getScaleX();
        float previousScaleY = g.getScaleY();
        if (scaleX != 1f || scaleY != 1f) {
            g.setScale(scaleX, scaleY);
        }
        renderTree.render(g);
        if (scaleX != 1f || scaleY != 1f) {
            // Restore the caller's scale so UI scaling does not leak into later rendering
            g.setScale(previousScaleX, previousScaleY);
        }
        break;
    }
    notifyPostRender(g);
    STATE = UiContainerState.NOOP;
    renderTree.processRenderDeferred();
}
@Override
public void attach(ParentRenderNode<?, ?> parentRenderNode) {
    // Intentional no-op: the container is the render tree root and has no parent node
}
@Override
public void detach(ParentRenderNode<?, ?> parentRenderNode) {
    // Intentional no-op: the container is never detached from a parent node
}
@Override
public void setVisibility(Visibility visibility) {
    // Direct field assignment; unlike other elements no render-tree dirtying is performed here
    this.visibility = visibility;
}
/**
* Adds a {@link ScreenSizeListener} to listen for {@link ScreenSize} change
*
* @param listener
* The {@link ScreenSizeListener} to add
*/
public void addScreenSizeListener(ScreenSizeListener listener) {
    // Screen size events originate from the render tree, so registration is delegated
    renderTree.addScreenSizeListener(listener);
}
/**
* Removes a {@link ScreenSizeListener} from this {@link UiContainer}
*
* @param listener
* The {@link ScreenSizeListener} to remove
*/
public void removeScreenSizeListener(ScreenSizeListener listener) {
    // Delegated to the render tree, mirroring addScreenSizeListener()
    renderTree.removeScreenSizeListener(listener);
}
/**
 * Returns if a {@link UiTheme} has been applied to this {@link UiContainer}
 *
 * @return True if the {@link UiTheme} has been applied
 */
public static boolean isThemeApplied() {
    // Theme is static - shared by all UiContainer instances
    return UI_THEME != null;
}
/**
* Returns the {@link UiTheme} currently applied to this {@link UiContainer}
*
* @return Null if no {@link UiTheme} has been applied
*/
public static UiTheme getTheme() {
    // Null until setTheme() has been called with a non-null theme
    return UI_THEME;
}
/**
 * Sets the current {@link UiTheme} for all {@link UiContainer} instances.
 * A null theme, or a theme whose id matches the already-applied theme, is ignored.
 *
 * @param theme
 *            The {@link UiTheme} to apply
 */
public static void setTheme(UiTheme theme) {
    if (theme == null) {
        return;
    }
    final boolean sameThemeAlreadyApplied = UI_THEME != null && UI_THEME.getId().equals(theme.getId());
    if (!sameThemeAlreadyApplied) {
        UI_THEME = theme;
    }
}
/**
 * Returns the current {@link UiContainerState}
 * @return {@link UiContainerState#NOOP} outside of layout/update/render phases
 */
public static UiContainerState getState() {
    return STATE;
}
@Override
public void setStyleId(String styleId) {
    // Intentional no-op: the container root always uses StyleRule.NOOP (see getStyleRule())
}
@Override
public boolean touchDown(int screenX, int screenY, int pointer, int button) {
    if (!pointerNavigationAllowed()) {
        return false;
    }
    // Convert screen coordinates to UI coordinates: via the viewport when set,
    // otherwise by dividing out the render scale
    if (viewport != null) {
        SHARED_VECTOR.x = screenX;
        SHARED_VECTOR.y = screenY;
        viewport.toWorldCoordinates(SHARED_VECTOR);
        screenX = MathUtils.round(SHARED_VECTOR.x);
        screenY = MathUtils.round(SHARED_VECTOR.y);
    } else {
        screenX = MathUtils.round(screenX / scaleX);
        screenY = MathUtils.round(screenY / scaleY);
    }
    updateLastInputSource(screenX, screenY);
    lastMouseX = screenX;
    lastMouseY = screenY;
    // Clicking outside the active text input releases it (and hides the on-screen keyboard on mobile)
    if (activeTextInput != null && activeTextInput.mouseDown(screenX, screenY, pointer, button) == null) {
        // Release textbox control
        activeTextInput = null;
        activeAction = null;
        switch (Mdx.platform) {
        case ANDROID:
        case IOS:
            Mdx.input.setOnScreenKeyboardVisible(false);
            break;
        default:
            break;
        }
    }
    // Ask the render tree which actionable node (if any) was hit
    ActionableRenderNode result = renderTree.mouseDown(screenX, screenY, pointer, button);
    if (result != null) {
        MouseEventTriggerParams params = EventTriggerParamsPool.allocateMouseParams();
        params.setMouseX(screenX);
        params.setMouseY(screenY);
        result.beginAction(EventTrigger.getTriggerForMouseClick(button), params);
        EventTriggerParamsPool.release(params);
        setActiveAction(result);
        return true;
    }
    return false;
}
@Override
public boolean touchUp(int screenX, int screenY, int pointer, int button) {
    if (!pointerNavigationAllowed()) {
        return false;
    }
    // Same screen-to-UI coordinate conversion as touchDown()
    if (viewport != null) {
        SHARED_VECTOR.x = screenX;
        SHARED_VECTOR.y = screenY;
        viewport.toWorldCoordinates(SHARED_VECTOR);
        screenX = MathUtils.round(SHARED_VECTOR.x);
        screenY = MathUtils.round(SHARED_VECTOR.y);
    } else {
        screenX = MathUtils.round(screenX / scaleX);
        screenY = MathUtils.round(screenY / scaleY);
    }
    updateLastInputSource(screenX, screenY);
    lastMouseX = screenX;
    lastMouseY = screenY;
    // Only consumed when a touchDown previously began an action
    if (activeAction == null) {
        return false;
    }
    activeAction.mouseUp(screenX, screenY, pointer, button);
    activeAction = null;
    return true;
}
/** Handles pointer drags; coordinates are converted to UI space before dispatch. */
@Override
public boolean touchDragged(int screenX, int screenY, int pointer) {
    if (!pointerNavigationAllowed()) {
        return false;
    }
    if (viewport == null) {
        // No viewport configured: undo the render scale instead
        screenX = MathUtils.round(screenX / scaleX);
        screenY = MathUtils.round(screenY / scaleY);
    } else {
        SHARED_VECTOR.x = screenX;
        SHARED_VECTOR.y = screenY;
        viewport.toWorldCoordinates(SHARED_VECTOR);
        screenX = MathUtils.round(SHARED_VECTOR.x);
        screenY = MathUtils.round(SHARED_VECTOR.y);
    }
    updateLastInputSource(screenX, screenY);
    lastMouseX = screenX;
    lastMouseY = screenY;
    // Always dispatch to the render tree; report "unhandled" when pass-through is enabled
    final boolean handledByTree = renderTree.mouseMoved(screenX, screenY);
    return passThroughMouseMovement ? false : handledByTree;
}
/** Handles pointer movement; last mouse position is tracked even when navigation is disabled. */
@Override
public boolean mouseMoved(int screenX, int screenY) {
    if (viewport == null) {
        screenX = MathUtils.round(screenX / scaleX);
        screenY = MathUtils.round(screenY / scaleY);
    } else {
        SHARED_VECTOR.x = screenX;
        SHARED_VECTOR.y = screenY;
        viewport.toWorldCoordinates(SHARED_VECTOR);
        screenX = MathUtils.round(SHARED_VECTOR.x);
        screenY = MathUtils.round(SHARED_VECTOR.y);
    }
    updateLastInputSource(screenX, screenY);
    lastMouseX = screenX;
    lastMouseY = screenY;
    // Unlike the coordinate bookkeeping above, dispatch only happens when pointer navigation is on
    if (!pointerNavigationAllowed()) {
        return false;
    }
    final boolean handledByTree = renderTree.mouseMoved(screenX, screenY);
    return passThroughMouseMovement ? false : handledByTree;
}
/** Dispatches scroll events at the last known pointer position. */
@Override
public boolean scrolled(float amountX, float amountY) {
    return pointerNavigationAllowed()
            && renderTree.mouseScrolled(lastMouseX, lastMouseY, amountX, amountY);
}
/** Forwards typed characters to the active text input, if any. */
@Override
public boolean keyTyped(char character) {
    final TextInputableRenderNode target = activeTextInput;
    if (target == null) {
        return false;
    }
    if (target.isReceivingInput()) {
        target.characterReceived(character);
    }
    // Consume the event whenever a text input is active, even if not currently receiving
    return true;
}
@Override
public boolean keyDown(int keycode) {
    // Track which key-downs this container consumed so keyUp() can ignore
    // releases for presses that happened before input was accepted
    if (activeTextInput != null && activeTextInput.isReceivingInput()) {
        receivedKeyDowns.add(keycode);
        return true;
    }
    // Action keys (ENTER by default) trigger the currently active actionable
    if (actionKeys.contains(keycode) && activeAction != null) {
        receivedKeyDowns.add(keycode);
        KeyboardEventTriggerParams params = EventTriggerParamsPool.allocateKeyboardParams();
        params.setKey(keycode);
        activeAction.setState(NodeState.ACTION);
        activeAction.beginAction(EventTrigger.KEYBOARD, params);
        EventTriggerParamsPool.release(params);
        if (activeTextInput != null) {
            // Reset so the first ENTER is swallowed by the text input (see handleTextInputKeyUp)
            textInputIgnoredFirstEnter = false;
        }
        return true;
    }
    if (handleModalKeyDown(keycode)) {
        receivedKeyDowns.add(keycode);
        return true;
    }
    receivedKeyDowns.remove(keycode);
    return false;
}
@Override
public boolean keyUp(int keycode) {
    // Key down was sent before this UI Container accepted input
    if (!receivedKeyDowns.remove(keycode)) {
        return false;
    }
    // Text input handling takes precedence over action keys
    if (handleTextInputKeyUp(keycode)) {
        return true;
    }
    if (actionKeys.contains(keycode) && activeAction != null) {
        KeyboardEventTriggerParams params = EventTriggerParamsPool.allocateKeyboardParams();
        params.setKey(keycode);
        activeAction.setState(NodeState.NORMAL);
        activeAction.endAction(EventTrigger.KEYBOARD, params);
        EventTriggerParamsPool.release(params);
        // Mobile: dismiss the on-screen keyboard once the action completes
        switch (Mdx.platform) {
        case ANDROID:
        case IOS:
            Mdx.input.setOnScreenKeyboardVisible(false);
            break;
        default:
            break;
        }
        return true;
    }
    if (handleModalKeyUp(keycode)) {
        return true;
    }
    return false;
}
/**
 * Handles a game pad button press. Hotkeys registered on the active navigation
 * take precedence; otherwise the action button triggers the active actionable.
 *
 * @param controllerUiInput The input mapping the event came from
 * @param button The pressed {@link GamePadButton}
 * @return False when no navigation is active (event not consumed)
 */
public boolean buttonDown(GamePadUiInput<?> controllerUiInput, GamePadButton button) {
    if (activeNavigation == null) {
        return false;
    }
    // Track presses so buttonUp() can ignore releases that began before input was accepted
    receivedButtonDowns.add(button.getInternalName());
    ActionableRenderNode hotkeyAction = activeNavigation.hotkey(button);
    if (hotkeyAction != null) {
        if(!hotkeyAction.isEnabled()) {
            return true;
        }
        GamePadEventTriggerParams params = EventTriggerParamsPool.allocateGamePadParams();
        params.setGamePadButton(button);
        hotkeyAction.setState(NodeState.ACTION);
        hotkeyAction.beginAction(EventTrigger.CONTROLLER, params);
        EventTriggerParamsPool.release(params);
    } else if (activeAction != null && button.equals(controllerUiInput.getActionButton())
            && (activeTextInput == null || !textInputIgnoredFirstEnter)) {
        // Deduplicated: the two previous branches began the action identically; the only
        // difference was skipping when the active text input already consumed its first enter
        GamePadEventTriggerParams params = EventTriggerParamsPool.allocateGamePadParams();
        params.setGamePadButton(button);
        activeAction.setState(NodeState.ACTION);
        activeAction.beginAction(EventTrigger.CONTROLLER, params);
        EventTriggerParamsPool.release(params);
    }
    return true;
}
public boolean buttonUp(GamePadUiInput<?> controllerUiInput, GamePadButton button) {
    // Button down was sent before this UI Container accepted input
    if (!receivedButtonDowns.remove(button.getInternalName())) {
        return false;
    }
    if (activeNavigation == null) {
        return false;
    }
    // Hotkeys take precedence, mirroring buttonDown()
    ActionableRenderNode hotkeyAction = activeNavigation.hotkey(button);
    if (hotkeyAction != null) {
        if(!hotkeyAction.isEnabled()) {
            return true;
        }
        GamePadEventTriggerParams params = EventTriggerParamsPool.allocateGamePadParams();
        params.setGamePadButton(button);
        hotkeyAction.setState(NodeState.NORMAL);
        hotkeyAction.endAction(EventTrigger.CONTROLLER, params);
        EventTriggerParamsPool.release(params);
    } else if (activeAction != null) {
        // The first release while a text input is active only arms the input;
        // the action itself fires on a subsequent press/release cycle
        if (activeTextInput != null && !textInputIgnoredFirstEnter) {
            textInputIgnoredFirstEnter = true;
            return true;
        }
        if (button.equals(controllerUiInput.getActionButton())) {
            GamePadEventTriggerParams params = EventTriggerParamsPool.allocateGamePadParams();
            params.setGamePadButton(button);
            activeAction.setState(NodeState.NORMAL);
            activeAction.endAction(EventTrigger.CONTROLLER, params);
            EventTriggerParamsPool.release(params);
            textInputIgnoredFirstEnter = false;
        }
    }
    return true;
}
// Handles a key press while a navigation is active: either fires a registered
// hotkey or (when key navigation is enabled) moves the navigation cursor.
// Returns false only when there is no active navigation.
private boolean handleModalKeyDown(int keycode) {
    if (activeNavigation == null) {
        return false;
    }
    ActionableRenderNode hotkeyAction = activeNavigation.hotkey(keycode);
    if (hotkeyAction == null) {
        if (keyNavigationAllowed()) {
            // No hotkey: treat the key as a navigation move (e.g. arrow keys)
            if (activeAction != null) {
                activeAction.setState(NodeState.NORMAL);
            }
            ActionableRenderNode result = activeNavigation.navigate(keycode);
            if (result != null) {
                result.setState(NodeState.HOVER);
                setActiveAction(result);
            }
        }
    } else {
        // Disabled hotkey targets still consume the event
        if(!hotkeyAction.isEnabled()) {
            return true;
        }
        KeyboardEventTriggerParams params = EventTriggerParamsPool.allocateKeyboardParams();
        params.setKey(keycode);
        hotkeyAction.setState(NodeState.ACTION);
        hotkeyAction.beginAction(EventTrigger.KEYBOARD, params);
        EventTriggerParamsPool.release(params);
    }
    return true;
}
// Completes a hotkey action (if one is mapped and enabled) for a key release.
// Any release is consumed while a navigation is active.
private boolean handleModalKeyUp(int keycode) {
    if (activeNavigation == null) {
        return false;
    }
    final ActionableRenderNode hotkeyAction = activeNavigation.hotkey(keycode);
    if (hotkeyAction != null && hotkeyAction.isEnabled()) {
        final KeyboardEventTriggerParams params = EventTriggerParamsPool.allocateKeyboardParams();
        params.setKey(keycode);
        hotkeyAction.setState(NodeState.NORMAL);
        hotkeyAction.endAction(EventTrigger.KEYBOARD, params);
        EventTriggerParamsPool.release(params);
    }
    return true;
}
// Routes key releases to the active text input (backspace, enter, cursor keys).
// Returns false when no text input is active/receiving; otherwise consumes the event.
private boolean handleTextInputKeyUp(int keycode) {
    if (activeTextInput == null) {
        return false;
    }
    if (!activeTextInput.isReceivingInput()) {
        return false;
    }
    switch (keycode) {
    case Input.Keys.BACKSPACE:
        activeTextInput.backspace();
        break;
    case Input.Keys.ENTER:
        // The ENTER that focused the input is swallowed once before ENTER can commit
        if (!textInputIgnoredFirstEnter) {
            textInputIgnoredFirstEnter = true;
            return true;
        }
        if (activeTextInput.enter()) {
            // Input committed: release focus and hide the on-screen keyboard on mobile
            activeTextInput = null;
            activeAction = null;
            switch (Mdx.platform) {
            case ANDROID:
            case IOS:
                Mdx.input.setOnScreenKeyboardVisible(false);
                break;
            default:
                break;
            }
        }
        break;
    case Input.Keys.RIGHT:
        activeTextInput.moveCursorRight();
        break;
    case Input.Keys.LEFT:
        activeTextInput.moveCursorLeft();
        break;
    }
    return true;
}
// Makes the given node the active actionable; resets the previous one, and if the
// node accepts text input, focuses it (showing the on-screen keyboard on mobile).
public void setActiveAction(ActionableRenderNode actionable) {
    if (activeAction != null && !activeAction.getId().equals(actionable.getId())) {
        activeAction.setState(NodeState.NORMAL);
    }
    if (actionable instanceof TextInputableRenderNode) {
        activeTextInput = (TextInputableRenderNode) actionable;
        switch (Mdx.platform) {
        case ANDROID:
        case IOS:
            Mdx.input.setOnScreenKeyboardVisible(true);
            break;
        default:
            break;
        }
    }
    activeAction = actionable;
    notifyElementActivated(actionable);
}
/**
 * Sets the current {@link Navigatable} for UI navigation. When key navigation is
 * allowed, the navigation's cursor is reset and its first actionable becomes the
 * active (hovered) action.
 *
 * @param activeNavigation
 *            The current {@link Navigatable} being navigated
 */
public void setActiveNavigation(Navigatable activeNavigation) {
    // No-op when the same navigation (by id) is already active
    if (this.activeNavigation != null && activeNavigation != null
            && this.activeNavigation.getId().equals(activeNavigation.getId())) {
        return;
    }
    unsetExistingNavigationHover();
    this.activeNavigation = activeNavigation;
    if (renderTree == null) {
        return;
    }
    // Cursor initialisation below only applies to keyboard/gamepad navigation
    if (!keyNavigationAllowed()) {
        return;
    }
    if (activeAction != null) {
        activeAction.setState(NodeState.NORMAL);
    }
    UiNavigation navigation = activeNavigation.getNavigation();
    if (navigation == null) {
        return;
    }
    Actionable firstActionable = navigation.resetCursor();
    if (firstActionable == null) {
        return;
    }
    RenderNode<?, ?> renderNode = renderTree.getElementById(firstActionable.getId());
    if (renderNode == null) {
        return;
    }
    setActiveAction(((ActionableRenderNode) renderNode));
    ((ActionableRenderNode) renderNode).setState(NodeState.HOVER);
}
// Clears the HOVER state on the active action (if any) before navigation changes.
private void unsetExistingNavigationHover() {
    // Collapsed guard chain: nothing to do without a navigation, render tree and action
    if (activeNavigation == null || renderTree == null || activeAction == null) {
        return;
    }
    if (activeAction.getState() == NodeState.HOVER) {
        activeAction.setState(NodeState.NORMAL);
    }
}
// Clears the active actionable (removing its hover state) without changing navigation
public void clearActiveAction() {
    unsetExistingNavigationHover();
    this.activeAction = null;
}
/**
 * Clears the current {@link Navigatable} being navigated, along with the active
 * action and any focused text input
 */
public void clearActiveNavigation() {
    unsetExistingNavigationHover();
    this.activeTextInput = null;
    this.activeAction = null;
    this.activeNavigation = null;
}
/**
* Returns the currently active {@link Navigatable}
*
* @return null if there is nothing active
*/
public Navigatable getActiveNavigation() {
    // Null when no navigation has been set or it has been cleared
    return activeNavigation;
}
/**
* Returns the currently hovered {@link ActionableRenderNode}
* @return Null if nothing is hovered
*/
public ActionableRenderNode getActiveAction() {
    // Null when nothing is hovered/being actioned
    return activeAction;
}
/**
* Returns the width of the {@link UiContainer}
*
* @return The width in pixels
*/
public float getWidth() {
    // Width set via constructor or set(int, int)
    return width;
}
/**
* Returns the height of the {@link UiContainer}
*
* @return The height in pixels
*/
public float getHeight() {
    // Height set via constructor or set(int, int)
    return height;
}
@Override
public StyleRule getStyleRule() {
    // The root container is never styled by the theme
    return StyleRule.NOOP;
}
/**
* Sets the width and height of the {@link UiContainer}
*
* @param width
* The width in pixels
* @param height
* The height in pixels
*/
public void set(int width, int height) {
    this.width = width;
    this.height = height;
    // Resizing invalidates the layout of the whole render tree
    renderTree.onResize(width, height);
}
/**
* Returns the configured {@link Graphics} scaling during rendering
*
* @return 1f by default
*/
public float getScaleX() {
    // Render-time scale on the X axis; 1f unless setScale() was called
    return scaleX;
}
/**
* Returns the configured {@link Graphics} scaling during rendering
*
* @return 1f by default
*/
public float getScaleY() {
    // Render-time scale on the Y axis; 1f unless setScale() was called
    return scaleY;
}
/**
 * Sets the {@link Graphics} scaling during rendering. Mouse/touch
 * coordinates will be scaled accordingly. No-op when the scale is unchanged.
 *
 * @param scaleX
 *            Scaling along the X axis
 * @param scaleY
 *            Scaling along the Y axis
 */
public void setScale(float scaleX, float scaleY) {
    final boolean unchanged = scaleX == this.scaleX && scaleY == this.scaleY;
    if (unchanged) {
        return;
    }
    this.scaleX = scaleX;
    this.scaleY = scaleY;
    // A scale change alters the effective screen size, so the render tree must re-layout
    renderTree.onResize(width, height);
}
/**
 * Returns the last {@link InputSource} used on the {@link UiContainer}
 *
 * @return May be null on platforms not matched by the constructor's platform
 *         switch until input has been received
 */
public InputSource getLastInputSource() {
    return lastInputSource;
}
// Pointer-movement variant: when the pointer moved more than 2px in either axis,
// mark the platform's pointer-style source (mouse on desktop, touch on mobile) as next
private void updateLastInputSource(int screenX, int screenY) {
    if(Math.abs(screenX - lastMouseX) > 2 || Math.abs(screenY - lastMouseY) > 2) {
        switch(Mdx.platform) {
        case WINDOWS:
        case MAC:
        case LINUX:
        default:
            setLastInputSource(InputSource.KEYBOARD_MOUSE);
            break;
        case ANDROID:
        case IOS:
            setLastInputSource(InputSource.TOUCHSCREEN);
            break;
        }
    }
}
// Time-based variant called from update(): promotes nextInputSource to lastInputSource
// once it has been stable for inputSourceChangeThreshold seconds, then notifies listeners.
private void updateLastInputSource(float delta) {
    if (nextInputSource == null) {
        return;
    }
    // Null-safe comparison (nextInputSource is known non-null here): lastInputSource can
    // still be null on platforms not matched by the constructor's platform switch
    // (e.g. consoles), which previously threw a NullPointerException on this line
    if (nextInputSource.equals(this.lastInputSource)) {
        inputSourceChangeTimer = 0f;
        return;
    }
    inputSourceChangeTimer += delta;
    if(inputSourceChangeTimer < inputSourceChangeThreshold) {
        return;
    }
    inputSourceChangeTimer = 0f;
    InputSource oldInputSource = this.lastInputSource;
    this.lastInputSource = nextInputSource;
    notifyInputSourceChange(oldInputSource, lastInputSource);
}
/**
* Sets the last {@link InputSource} used on the {@link UiContainer}
*
* @param lastInputSource
* The {@link InputSource} last used
*/
public void setLastInputSource(InputSource lastInputSource) {
    // Staged as 'next'; promoted by updateLastInputSource(float) after the change threshold
    this.nextInputSource = lastInputSource;
}
/**
 * Returns the last {@link GamePadType} used on the {@link UiContainer}
 *
 * @return {@link GamePadType#UNKNOWN} until a game pad has been used
 */
public GamePadType getLastGamePadType() {
    return lastGamePadType;
}
// Promotes nextGamePadType to lastGamePadType and notifies listeners on change.
// Unlike input source changes, game pad type changes take effect immediately.
private void updateLastGamePadType() {
    if (nextGamePadType == null) {
        return;
    }
    if (this.lastGamePadType.equals(nextGamePadType)) {
        return;
    }
    GamePadType oldGamePadType = this.lastGamePadType;
    this.lastGamePadType = nextGamePadType;
    notifyGamePadTypeChange(oldGamePadType, lastGamePadType);
}
/**
* Sets the last {@link GamePadType} used on the {@link UiContainer}
*
* @param lastGamePadType
* The {@link GamePadType} last used
*/
public void setLastGamePadType(GamePadType lastGamePadType) {
    // Staged as 'next'; promoted by updateLastGamePadType() on the next update()
    this.nextGamePadType = lastGamePadType;
}
/**
* Adds a {@link GamePadUiInput} instance to this {@link UiContainer}
*
* @param input
* The instance to add
*/
public void addGamePadInput(GamePadUiInput<?> input) {
    // Registered inputs are updated every frame in update()
    controllerInputs.add(input);
}
/**
* Removes a {@link GamePadUiInput} instance from this
* {@link UiContainer}
*
* @param input
* The instance to remove
*/
public void removeGamePadInput(GamePadUiInput<?> input) {
    // identity=false -> equals()-based removal
    controllerInputs.removeValue(input, false);
}
@Override
public void setZIndex(int zIndex) {
    // Intentional no-op: the root container has no siblings to be ordered against
}
/**
* Add the key used for triggering actions (i.e. selecting a menu option)
*
* @param keycode
* The {@link Input.Keys} value
*/
public void addActionKey(int keycode) {
    // Action keys trigger the active actionable in keyDown()/keyUp(); ENTER is added by default
    actionKeys.add(keycode);
}
/**
* Removes a key used for triggering actions (i.e. selecting a menu option)
*
* @param keycode
* The {@link Input.Keys} value
*/
public void removeActionKey(int keycode) {
    // Removes by value (not index)
    actionKeys.removeValue(keycode);
}
/**
* Clears the keys used for triggering actions (i.e. selecting a menu option)
*/
public void clearActionKeys() {
    // After clearing, no key will trigger actions until addActionKey() is called again
    actionKeys.clear();
}
/**
* Set to true if mouseMoved() events to should pass through this input handler regardless
* @param passThroughMouseMovement
*/
public void setPassThroughMouseMovement(boolean passThroughMouseMovement) {
    // When true, mouseMoved()/touchDragged() still dispatch to the UI but report unhandled
    this.passThroughMouseMovement = passThroughMouseMovement;
}
/**
* Sets the duration of new input that needs to occur before the input source is considered changed
* @param inputSourceChangeThreshold The time in seconds
*/
public void setInputSourceChangeThreshold(float inputSourceChangeThreshold) {
    // Seconds a new source must persist before updateLastInputSource(float) promotes it (default 0.1)
    this.inputSourceChangeThreshold = inputSourceChangeThreshold;
}
/**
 * Returns if this {@link UiContainer} can be navigated by keyboard/gamepad.
 * Always false on touch platforms; otherwise determined by the {@link NavigationMode}.
 *
 * @return True by default
 */
public boolean keyNavigationAllowed() {
    switch (Mdx.platform) {
    case ANDROID:
    case IOS:
        return false;
    default:
        return navigationMode == NavigationMode.BUTTON_ONLY
                || navigationMode == NavigationMode.BUTTON_OR_POINTER;
    }
}
/**
 * Returns if this {@link UiContainer} can be navigated by touch/mouse.
 * Always true on touch platforms; otherwise determined by the {@link NavigationMode}.
 *
 * @return True by default
 */
public boolean pointerNavigationAllowed() {
    switch (Mdx.platform) {
    case ANDROID:
    case IOS:
        return true;
    default:
        return navigationMode == NavigationMode.BUTTON_OR_POINTER
                || navigationMode == NavigationMode.POINTER_ONLY;
    }
}
/**
* Sets the {@link NavigationMode} on this {@link UiContainer}
* @param navigationMode The {@link NavigationMode}
*/
public void setNavigationMode(NavigationMode navigationMode) {
    // Null is rejected so the mode can never become undefined
    if(navigationMode == null) {
        return;
    }
    this.navigationMode = navigationMode;
}
/**
* Returns the scaling mode used for {@link ScreenSize} values
* @return {@link ScreenSizeScaleMode#NO_SCALING} by default
*/
public ScreenSizeScaleMode getScreenSizeScaleMode() {
    // NO_SCALING unless changed via setScreenSizeScaleMode()
    return screenSizeScaleMode;
}
/**
 * Sets the scaling mode used for {@link ScreenSize} values. Null values and
 * no-op changes are ignored; a real change triggers a render tree re-layout.
 *
 * @param screenSizeScaleMode The {@link ScreenSizeScaleMode} to set
 */
public void setScreenSizeScaleMode(ScreenSizeScaleMode screenSizeScaleMode) {
    if (screenSizeScaleMode == null || this.screenSizeScaleMode == screenSizeScaleMode) {
        return;
    }
    this.screenSizeScaleMode = screenSizeScaleMode;
    renderTree.onResize(width, height);
}
/**
* Adds a {@link UiContainerListener} to this {@link UiContainer}
*
* @param listener
* The {@link UiContainerListener} to be notified of events
*/
public void addUiContainerListener(UiContainerListener listener) {
    // UiContainerListener extends several listener roles, so it is registered in each list
    containerListeners.add(listener);
    inputSourceListeners.add(listener);
    addScreenSizeListener(listener);
}
/**
* Removes a {@link UiContainerListener} from this {@link UiContainer}
*
* @param listener
* The {@link UiContainerListener} to stop receiving events
*/
public void removeUiContainerListener(UiContainerListener listener) {
    // Mirror of addUiContainerListener(): remove from every list it was registered in
    containerListeners.removeValue(listener, false);
    inputSourceListeners.removeValue(listener, false);
    removeScreenSizeListener(listener);
}
/**
* Adds a {@link UiInputSourceListener} to this {@link UiContainer}
*
* @param listener
* The {@link UiInputSourceListener} to be notified of events
*/
public void addInputSourceListener(UiInputSourceListener listener) {
    // Receives input source and game pad type change events
    inputSourceListeners.add(listener);
}
/**
 * Removes a {@link UiInputSourceListener} from this {@link UiContainer}
 *
 * @param listener
 *            The {@link UiInputSourceListener} to stop receiving events
 * @deprecated Misnamed counterpart of {@link #addInputSourceListener(UiInputSourceListener)};
 *             use {@link #removeInputSourceListener(UiInputSourceListener)} instead
 */
@Deprecated
public void removeUiContainerListener(UiInputSourceListener listener) {
    removeInputSourceListener(listener);
}

/**
 * Removes a {@link UiInputSourceListener} from this {@link UiContainer}
 *
 * @param listener
 *            The {@link UiInputSourceListener} to stop receiving events
 */
public void removeInputSourceListener(UiInputSourceListener listener) {
    inputSourceListeners.removeValue(listener, false);
}
// Fires before the render tree is updated; reverse iteration tolerates self-removal
private void notifyPreUpdate(float delta) {
    for (int i = containerListeners.size - 1; i >= 0; i--) {
        containerListeners.get(i).preUpdate(this, delta);
    }
}
// Fires after the render tree has been updated
private void notifyPostUpdate(float delta) {
    for (int i = containerListeners.size - 1; i >= 0; i--) {
        containerListeners.get(i).postUpdate(this, delta);
    }
}
// Fires before the render tree is rendered (also fires when the container is hidden)
private void notifyPreRender(Graphics g) {
    for (int i = containerListeners.size - 1; i >= 0; i--) {
        containerListeners.get(i).preRender(this, g);
    }
}
// Fires after the render tree has been rendered
private void notifyPostRender(Graphics g) {
    for (int i = containerListeners.size - 1; i >= 0; i--) {
        containerListeners.get(i).postRender(this, g);
    }
}
/**
 * Sends input source change event to all containers with the current input source as both the old and new
 */
public void notifyInputSource() {
    // Useful for pushing the current state to freshly-registered listeners
    notifyInputSourceChange(lastInputSource, lastInputSource);
}
/**
 * Sends game pad change event to all containers with the current game pad type as both the old and new
 */
public void notifyGamePadType() {
    // Useful for pushing the current state to freshly-registered listeners
    notifyGamePadTypeChange(lastGamePadType, lastGamePadType);
}
// Broadcasts the input source change to EVERY live UiContainer instance, not just this one
private void notifyInputSourceChange(InputSource oldSource, InputSource newSource) {
    for(int i = uiContainerInstances.size - 1; i >= 0; i--) {
        uiContainerInstances.get(i).notifyInputSourceChange(uiContainerInstances.get(i), oldSource, newSource);
    }
}
// Dispatches the change to one container's own input source listeners
private void notifyInputSourceChange(UiContainer uiContainer, InputSource oldSource, InputSource newSource) {
    for (int i = uiContainer.inputSourceListeners.size - 1; i >= 0; i--) {
        uiContainer.inputSourceListeners.get(i).inputSourceChanged(uiContainer, oldSource, newSource);
    }
}
// Broadcasts the game pad type change to EVERY live UiContainer instance
private void notifyGamePadTypeChange(GamePadType oldGamePadType, GamePadType newGamePadType) {
    for(int i = uiContainerInstances.size - 1; i >= 0; i--) {
        uiContainerInstances.get(i).notifyGamePadTypeChange(uiContainerInstances.get(i), oldGamePadType, newGamePadType);
    }
}
// Dispatches the change to one container's own input source listeners
private void notifyGamePadTypeChange(UiContainer uiContainer, GamePadType oldGamePadType, GamePadType newGamePadType) {
    for (int i = uiContainer.inputSourceListeners.size - 1; i >= 0; i--) {
        uiContainer.inputSourceListeners.get(i).gamePadTypeChanged(uiContainer, oldGamePadType, newGamePadType);
    }
}
// Notifies container listeners that an actionable element became the active action
private void notifyElementActivated(ActionableRenderNode actionable) {
    for (int i = containerListeners.size - 1; i >= 0; i--) {
        containerListeners.get(i).onElementAction(this, actionable.getElement());
    }
}
/**
* Returns the default {@link Visibility} for newly created
* {@link UiElement} objects
*
* @return A non-null {@link Visibility} value. {@link Visibility#HIDDEN} by
* default
*/
public static Visibility getDefaultVisibility() {
    // Static default shared by all UiContainer instances; never null (setter rejects null)
    return defaultVisibility;
}
/**
 * Sets the default {@link Visibility} applied to newly created {@link UiElement}
 * objects. A null argument is silently ignored so the default can never become null.
 *
 * @param defaultVisibility
 *            The {@link Visibility} to use as the new default
 */
public static void setDefaultVisibility(Visibility defaultVisibility) {
    if (defaultVisibility != null) {
        UiContainer.defaultVisibility = defaultVisibility;
    }
}
/**
 * Sets the {@link Viewport} used to convert screen to world coordinates for
 * pointer events. When null, coordinates are divided by scaleX/scaleY instead.
 *
 * @param viewport The {@link Viewport} to use (may be null)
 */
public void setViewport(Viewport viewport) {
    this.viewport = viewport;
}
}
|
Fix getLastInputSource being null on console and mobile platforms
|
ui/src/main/java/org/mini2Dx/ui/UiContainer.java
|
Fix getLastInputSource being null on console and mobile platforms
|
|
Java
|
apache-2.0
|
0671661928e27dd6201f28642edbcd89b278ae9a
| 0
|
nhl/bootique-jdbc
|
package io.bootique.jdbc.instrumented.hikaricp.healthcheck;
import com.zaxxer.hikari.HikariPoolMXBean;
import com.zaxxer.hikari.pool.HikariPool;
import io.bootique.metrics.health.HealthCheck;
import io.bootique.metrics.health.HealthCheckOutcome;
import io.bootique.metrics.health.check.Threshold;
import io.bootique.metrics.health.check.ThresholdType;
import io.bootique.metrics.health.check.ValueRange;
import io.bootique.value.Duration;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.function.Function;
/**
* HikariCP "aliveness" standard check
*/
public class ConnectivityCheck implements HealthCheck {
    // Exposed as the MXBean interface, but cast to HikariPool in checkThreshold()
    // to reach getConnection(long timeout)
    private final HikariPoolMXBean pool;
    // Configured warning/critical timeout thresholds for obtaining a connection
    private final ValueRange<Duration> timeoutThresholds;
    public ConnectivityCheck(HikariPoolMXBean pool, ValueRange<Duration> timeoutThresholds) {
        this.pool = pool;
        this.timeoutThresholds = timeoutThresholds;
    }
    /**
     * Generates a stable qualified name for the {@link ConnectivityCheck}
     *
     * @param dataSourceName Bootique configuration name of the data source being checked.
     * @return qualified name bq.jdbc.[dataSourceName].connectivity
     */
    public static String healthCheckName(String dataSourceName) {
        return "bq.jdbc." + dataSourceName + ".connectivity";
    }
    /**
     * The health check obtains a {@link Connection} from the pool and immediately return it.
     * The standard HikariCP internal "aliveness" check will be run.
     *
     * @return {@link HealthCheckOutcome#ok()} if an "alive" {@link Connection} can be obtained,
     * otherwise {@link HealthCheckOutcome#critical()} if the connection fails or times out
     */
    @Override
    public HealthCheckOutcome check() {
        // First probe with the WARNING timeout threshold
        HealthCheckOutcome warningOutcome = checkThreshold(ThresholdType.WARNING, e -> HealthCheckOutcome.warning(e));
        switch (warningOutcome.getStatus()) {
            case WARNING:
                return warningOutcome;
            case UNKNOWN:
                // UNKNOWN means no WARNING threshold is configured - fall back to CRITICAL.
                // NOTE(review): when the WARNING probe succeeds (OK), the CRITICAL probe is
                // skipped - presumably the critical timeout >= warning timeout; verify config
                return checkThreshold(ThresholdType.CRITICAL, e -> HealthCheckOutcome.critical(e));
        }
        return HealthCheckOutcome.ok();
    }
    // Probes the pool with the timeout configured for the given threshold type.
    // Returns unknown() when that threshold is not configured; the connection is
    // returned to the pool immediately via try-with-resources.
    protected HealthCheckOutcome checkThreshold(ThresholdType type, Function<SQLException, HealthCheckOutcome> onFailure) {
        Threshold<Duration> threshold = timeoutThresholds.getThreshold(type);
        if (threshold == null) {
            return HealthCheckOutcome.unknown();
        }
        long timeout = threshold.getValue().getDuration().toMillis();
        try (Connection connection = ((HikariPool) pool).getConnection(timeout)) {
            return HealthCheckOutcome.ok();
        } catch (SQLException e) {
            return onFailure.apply(e);
        }
    }
}
|
bootique-jdbc-hikaricp-instrumented/src/main/java/io/bootique/jdbc/instrumented/hikaricp/healthcheck/ConnectivityCheck.java
|
package io.bootique.jdbc.instrumented.hikaricp.healthcheck;
import com.zaxxer.hikari.HikariPoolMXBean;
import com.zaxxer.hikari.pool.HikariPool;
import io.bootique.metrics.health.HealthCheck;
import io.bootique.metrics.health.HealthCheckOutcome;
import io.bootique.metrics.health.check.Threshold;
import io.bootique.metrics.health.check.ThresholdType;
import io.bootique.metrics.health.check.ValueRange;
import io.bootique.value.Duration;
import java.sql.Connection;
import java.sql.SQLException;
/**
* HikariCP "aliveness" standard check
*/
public class ConnectivityCheck implements HealthCheck {
private final HikariPoolMXBean pool;
private final ValueRange<Duration> timeoutThresholds;
public ConnectivityCheck(HikariPoolMXBean pool, ValueRange<Duration> timeoutThresholds) {
this.pool = pool;
this.timeoutThresholds = timeoutThresholds;
}
/**
* Generates a stable qualified name for the {@link ConnectivityCheck}
*
* @param dataSourceName Bootique configuration name of the data source being checked.
* @return qualified name bq.jdbc.[dataSourceName].connectivity
*/
public static String healthCheckName(String dataSourceName) {
return "bq.jdbc." + dataSourceName + ".connectivity";
}
/**
* The health check obtains a {@link Connection} from the pool and immediately return it.
* The standard HikariCP internal "aliveness" check will be run.
*
* @return {@link HealthCheckOutcome#ok()} if an "alive" {@link Connection} can be obtained,
* otherwise {@link HealthCheckOutcome#critical()} if the connection fails or times out
*/
@Override
public HealthCheckOutcome check() {
HealthCheckOutcome warningOutcome = checkThreshold(ThresholdType.WARNING);
switch (warningOutcome.getStatus()) {
case WARNING:
return warningOutcome;
// this is a case of WARNING threshold messing...
case UNKNOWN:
return checkThreshold(ThresholdType.CRITICAL);
}
return HealthCheckOutcome.ok();
}
protected HealthCheckOutcome checkThreshold(ThresholdType type) {
Threshold<Duration> threshold = timeoutThresholds.getThreshold(type);
if (threshold == null) {
return HealthCheckOutcome.unknown();
}
long timeout = threshold.getValue().getDuration().toMillis();
try (Connection connection = ((HikariPool) pool).getConnection(timeout)) {
return HealthCheckOutcome.ok();
} catch (SQLException e) {
return HealthCheckOutcome.critical(e);
}
}
}
|
Revisit the names and formats of Hikari health check configuration #81
* fixing cascading health checks
|
bootique-jdbc-hikaricp-instrumented/src/main/java/io/bootique/jdbc/instrumented/hikaricp/healthcheck/ConnectivityCheck.java
|
Revisit the names and formats of Hikari health check configuration #81
|
|
Java
|
bsd-2-clause
|
11786ad5407beb6002c8f03d915e876a82e9ed31
| 0
|
opf-labs/jhove2,opf-labs/jhove2
|
/**
* JHOVE2 - Next-generation architecture for format-aware characterization
* <p>
* Copyright (c) 2009 by The Regents of the University of California, Ithaka
* Harbors, Inc., and The Board of Trustees of the Leland Stanford Junior
* University. All rights reserved.
* </p>
* <p>
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* </p>
* <ul>
* <li>Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.</li>
* <li>Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.</li>
* <li>Neither the name of the University of California/California Digital
* Library, Ithaka Harbors/Portico, or Stanford University, nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.</li>
* </ul>
* <p>
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* </p>
*/
package org.jhove2.module.format.xml;
import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.jhove2.annotation.ReportableProperty;
import org.jhove2.core.JHOVE2;
import org.jhove2.core.JHOVE2Exception;
import org.jhove2.core.Message;
import org.jhove2.core.Message.Context;
import org.jhove2.core.Message.Severity;
import org.jhove2.core.format.Format;
import org.jhove2.core.io.Input;
import org.jhove2.core.source.Source;
import org.jhove2.module.format.BaseFormatModule;
import org.jhove2.module.format.Validator;
/**
* JHOVE2 XML module. This module parses and XML instance and captures selected
* characterization information
*
* @author rnanders
*/
public class XmlModule
extends BaseFormatModule
implements Validator
{
/** Module version identifier. */
public static final String VERSION = "2.0.0";
/** Module release date. */
public static final String RELEASE = "2010-09-10";
/** Module rights statement. */
public static final String RIGHTS = "Copyright 2010 by The Board of Trustees of the Leland Stanford Junior University. "
+ "Available under the terms of the BSD license.";
/** Module validation coverage. */
public static final Coverage COVERAGE = Coverage.Inclusive;
/** The JHOVE2 object passed in by the parse method */
protected JHOVE2 jhove2;
/** XML validation status. */
protected Validity validity;
/** Unresolved schema reference in XML document */
protected boolean unresolvedSchemaReference;
/**
* Get module validation coverage.
*
* @return the coverage
*/
@Override
public Coverage getCoverage() {
return COVERAGE;
}
/**
* Get source unit's validation status.
*
* @return the validity
*/
@Override
public Validity isValid() {
return validity;
}
/**
* Validate the XML parse results.
*
* @param jhove2
* JHOVE2 framework
* @param source
* XML source unit
* @param input XML source input
* @return the validity
*
* @throws JHOVE2Exception
* the JHOVE2 exception
*/
@Override
public Validity validate(JHOVE2 jhove2, Source source, Input input)
throws JHOVE2Exception
{
/* See if validity has been previously set to False, e.g. by parse exception trap */
if (validity == Validity.False) {
return validity;
}
/* Check to see if there were SAX parser errors of any sort */
if (validationResults.fatalParserErrors.getValidationMessageCount() > 0) {
Object[]messageArgs = new Object[]{"Fatal Parser Errors found"};
saxParserMessages.add(new Message(Severity.ERROR,
Context.OBJECT,
"org.jhove2.module.format.xml.XmlModule.validationErrorsFound",
messageArgs, jhove2.getConfigInfo()));
return (validity = Validity.False);
}
else if (validationResults.parserErrors.getValidationMessageCount() > 0) {
Object[]messageArgs = new Object[]{"Parser Errors found"};
saxParserMessages.add(new Message(Severity.ERROR,
Context.OBJECT,
"org.jhove2.module.format.xml.XmlModule.validationErrorsFound",
messageArgs, jhove2.getConfigInfo()));
return (validity = Validity.False);
}
/* Did SAX parser find a referenced schema file that could not be resolved? */
if (this.unresolvedSchemaReference) {
return (validity = Validity.Undetermined);
}
/* No validation errors found, but make sure schema validation was enabled, if appropriate */
if (namespaceInformation.hasSchemaLocations) {
if (! saxParser.hasFeature("http://apache.org/xml/features/validation/schema")) {
Object[]messageArgs = new Object[]{"Schema location(s) specified, but schema validation is disabled by SAX feature setting."};
saxParserMessages.add(new Message(Severity.WARNING,
Context.OBJECT,
"org.jhove2.module.format.xml.XmlModule.validationDisabled",
messageArgs, jhove2.getConfigInfo()));
return (validity = Validity.Undetermined);
}
}
/* No validation errors found, but make sure validation was enabled */
if (saxParser.hasFeature("http://apache.org/xml/features/validation/dynamic")) {
return (validity = Validity.True);
}
else if (saxParser.hasFeature("http://xml.org/sax/features/validation")) {
return (validity = Validity.True);
}
else {
Object[]messageArgs = new Object[]{"XML validation is disabled by SAX feature setting."};
saxParserMessages.add(new Message(Severity.WARNING,
Context.OBJECT,
"org.jhove2.module.format.xml.XmlModule.validationDisabled",
messageArgs, jhove2.getConfigInfo()));
return (validity = Validity.Undetermined);
}
}
/**
* Instantiates a new XmlModule instance.
*
* @param format
* the Format object
*/
public XmlModule(Format format) {
super(VERSION, RELEASE, RIGHTS, format);
this.validity = Validity.Undetermined;
}
public XmlModule(){
this(null);
}
/** The instance of a SAX2 XMLReader class used to parse XML instances. */
protected SaxParser saxParser;
/** If true, run a separate parse to extract numeric character references */
protected boolean ncrParser = false;
/** Data store for XML declaration information captured during the parse. */
protected XmlDeclaration xmlDeclaration = new XmlDeclaration();
/** The XML document's root element. */
protected RootElement rootElement;
/** A list of the documents document scope declarations. */
protected List<DTD> dtds = new ArrayList<DTD>();
/** Data store for XML namespace information captured during the parse. */
protected NamespaceInformation namespaceInformation = new NamespaceInformation();
/** Data store for XML notation information captured during the parse. */
protected List<Notation> notations = new ArrayList<Notation>();
/** Data store for XML entity declarations captured during the parse. */
protected List<Entity> entities = new ArrayList<Entity>();
/** Data store for XML entity references captured during the parse. */
protected EntityReferences entityReferences = new EntityReferences();
/**
* Data store for XML numeric character references captured during the
* parse.
*/
protected NumericCharacterReferenceInformation numericCharacterReferenceInformation = new NumericCharacterReferenceInformation();
/**
* Data store for XML processing instruction information captured during the
* parse.
*/
protected List<ProcessingInstruction> processingInstructions = new ArrayList<ProcessingInstruction>();
/** Data store for XML comment information captured during the parse. */
protected CommentInformation commentInformation = new CommentInformation();
/** The validation results. */
protected ValidationResults validationResults = new ValidationResults();
/** Fail fast message. */
protected Message failFastMessage;
/** SAX Parser messages. */
protected ArrayList<Message> saxParserMessages = new ArrayList<Message>();
/** The well formed status of the source unit. */
protected Validity wellFormed = Validity.Undetermined;
/**
* Sets the SaxParser object to be used for parsing the source unit.
*
* @param saxParser
* the saxParser object
*/
public void setSaxParser(SaxParser saxParser) {
this.saxParser = saxParser;
this.saxParser.setXmlModule(this);
}
/**
* Gets the SaxParser object used for parsing the source unit.
*
* @return SaxParser object
*/
@ReportableProperty(order = 1, value = "XML Parser name, features, properties")
public SaxParser getSaxParser() {
return saxParser;
}
/** Sets the flag that specifies whether or not to collect comment text */
public void setCollectCommentText(boolean collectCommentText) {
this.commentInformation.collectCommentText = collectCommentText;
}
/**
* Sets the class name of the parser to be used for extracting numeric
* character references
*/
public void setNcrParser(boolean ncrParser) {
this.ncrParser = ncrParser;
}
/**
* Gets the XML Declaration data.
*
* @return XML Declaration data
*/
@ReportableProperty(order = 2, value = "XML Declaration data")
public XmlDeclaration getXmlDeclaration() {
return xmlDeclaration;
}
/**
* Gets the root element name.
*
* @return root element name
*/
@ReportableProperty(order = 3, value = "The document's root element")
public RootElement getRootElement() {
return rootElement;
}
/**
* Gets the list of documents document scope declarations (DTDs).
*
* @return list of documents document scope declarations (DTDs)
*/
@ReportableProperty(order = 4, value = "List of Document Scope Definitions (DTDs)")
public List<DTD> getDTDs() {
return dtds;
}
/**
* Gets the list of XML namespaces.
*
* @return list of XML namespaces
*/
@ReportableProperty(order = 5, value = "XML Namespace Information")
public NamespaceInformation getNamespaceInformation() {
return namespaceInformation;
}
/**
* Gets the list of XML entity declarations.
*
* @return list of XML entity declarations
*/
@ReportableProperty(order = 6, value = "List of Entity Declarations")
public List<Entity> getEntities() {
return entities;
}
/**
* Gets the list of XML entity references.
*
* @return list of XML entity references
*/
@ReportableProperty(order = 7, value = "List of Entity References")
public ArrayList<EntityReference> getEntityReferences() {
return entityReferences.getEntityReferenceList();
}
/**
* Gets the list of XML notations.
*
* @return list of XML notations
*/
@ReportableProperty(order = 8, value = "List of Notations found in the XML document")
public List<Notation> getNotations() {
return notations;
}
/**
* Gets the list of XML Numeric Character References.
*
* @return list of XML Numeric Character References
*/
@ReportableProperty(order = 9, value = "Numeric Character Reference Information")
public NumericCharacterReferenceInformation getNumericCharacterReferenceInformation() {
return numericCharacterReferenceInformation;
}
/**
* Gets the list of XML processing instructions.
*
* @return list of XML processing instructions
*/
@ReportableProperty(order = 10, value = "List of Processing Instructions")
public List<ProcessingInstruction> getProcessingInstructions() {
return processingInstructions;
}
/**
* Gets the list of XML comments.
*
* @return list of XML comments
*/
@ReportableProperty(order = 11, value = "List of Comments")
public CommentInformation getCommentInformation() {
return commentInformation;
}
/**
* Gets the validation results.
*
* @return validation results
*/
@ReportableProperty(order = 12, value = "Warning or error messages generated during XML Validation")
public ValidationResults getValidationResults() {
return validationResults;
}
/**
* Gets the well-formedness status.
*
* @return well-formedness status
*/
@ReportableProperty(order = 13, value = "XML well-formed status")
public Validity isWellFormed() {
return wellFormed;
}
/**
* Get fail fast message.
*
* @return Fail fast message
*/
@ReportableProperty(order = 14, value = "Fail fast message.")
public Message getFailFast() {
return this.failFastMessage;
}
/**
* Get SAX Parser messages.
*
* @return SAX Parser messages
*/
@ReportableProperty(order = 15, value = "SAX Parser Messages.")
public List<Message> getSaxParserMessages() {
return this.saxParserMessages;
}
/**
* Parse a source unit.
*
* @param jhove2
* JHOVE2 framework
* @param source
* XML source unit
* @param input
* XML source input
* @return Number of bytes consumed
*
* @throws EOFException
* the EOF exception
* @throws IOException
* Signals that an I/O exception has occurred.
* @throws JHOVE2Exception
* the JHOVE2 exception
*/
@Override
public long parse(JHOVE2 jhove2, Source source, Input input)
throws IOException, JHOVE2Exception
{
this.jhove2 = jhove2;
/* Use SAX2 to get what information is available from that mechanism */
saxParser.parse(source, jhove2);
/*
* Do a separate parse of the XML Declaration at the start of the
* document
*/
input.setPosition(source.getStartingOffset());
xmlDeclaration.parse(input);
/* Do a separate parse to inventory numeric character references */
if (this.ncrParser) {
input.setPosition(source.getStartingOffset());
numericCharacterReferenceInformation.parse(input,
xmlDeclaration.encodingFromSAX2, jhove2);
}
validate(jhove2, source, input);
return 0;
}
}
|
src/main/java/org/jhove2/module/format/xml/XmlModule.java
|
/**
* JHOVE2 - Next-generation architecture for format-aware characterization
* <p>
* Copyright (c) 2009 by The Regents of the University of California, Ithaka
* Harbors, Inc., and The Board of Trustees of the Leland Stanford Junior
* University. All rights reserved.
* </p>
* <p>
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* </p>
* <ul>
* <li>Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.</li>
* <li>Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.</li>
* <li>Neither the name of the University of California/California Digital
* Library, Ithaka Harbors/Portico, or Stanford University, nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.</li>
* </ul>
* <p>
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* </p>
*/
package org.jhove2.module.format.xml;
import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.jhove2.annotation.ReportableProperty;
import org.jhove2.core.JHOVE2;
import org.jhove2.core.JHOVE2Exception;
import org.jhove2.core.Message;
import org.jhove2.core.Message.Context;
import org.jhove2.core.Message.Severity;
import org.jhove2.core.format.Format;
import org.jhove2.core.io.Input;
import org.jhove2.core.source.Source;
import org.jhove2.module.format.BaseFormatModule;
import org.jhove2.module.format.Validator;
/**
* JHOVE2 XML module. This module parses and XML instance and captures selected
* characterization information
*
* @author rnanders
*/
public class XmlModule
extends BaseFormatModule
implements Validator
{
/** Module version identifier. */
public static final String VERSION = "2.0.0";
/** Module release date. */
public static final String RELEASE = "2010-09-10";
/** Module rights statement. */
public static final String RIGHTS = "Copyright 2010 by The Board of Trustees of the Leland Stanford Junior University. "
+ "Available under the terms of the BSD license.";
/** Module validation coverage. */
public static final Coverage COVERAGE = Coverage.Inclusive;
/** The JHOVE2 object passed in by the parse method */
protected JHOVE2 jhove2;
/** XML validation status. */
protected Validity validity;
/** Unresolved schema reference in XML document */
protected boolean unresolvedSchemaReference;
/**
* Get module validation coverage.
*
* @return the coverage
*/
@Override
public Coverage getCoverage() {
return COVERAGE;
}
/**
* Get source unit's validation status.
*
* @return the validity
*/
@Override
public Validity isValid() {
return validity;
}
/**
* Validate the XML parse results.
*
* @param jhove2
* JHOVE2 framework
* @param source
* XML source unit
* @param input XML source input
* @return the validity
*
* @throws JHOVE2Exception
* the JHOVE2 exception
*/
@Override
public Validity validate(JHOVE2 jhove2, Source source, Input input)
throws JHOVE2Exception
{
/* See if validity has been previously set to False, e.g. by parse exception trap */
if (validity == Validity.False) {
return validity;
}
/* Check to see if there were SAX parser errors of any sort */
if (validationResults.fatalParserErrors.getValidationMessageCount() > 0) {
Object[]messageArgs = new Object[]{"Fatal Parser Errors found"};
saxParserMessages.add(new Message(Severity.ERROR,
Context.OBJECT,
"org.jhove2.module.format.xml.XmlModule.validationErrorsFound",
messageArgs, jhove2.getConfigInfo()));
return (validity = Validity.False);
}
else if (validationResults.parserErrors.getValidationMessageCount() > 0) {
Object[]messageArgs = new Object[]{"Parser Errors found"};
saxParserMessages.add(new Message(Severity.ERROR,
Context.OBJECT,
"org.jhove2.module.format.xml.XmlModule.validationErrorsFound",
messageArgs, jhove2.getConfigInfo()));
return (validity = Validity.False);
}
/* Did SAX parser find a referenced schema file that could not be resolved? */
if (this.unresolvedSchemaReference) {
return (validity = Validity.Undetermined);
}
/* No validation errors found, but make sure schema validation was enabled, if appropriate */
if (namespaceInformation.hasSchemaLocations) {
if (! saxParser.hasFeature("http://apache.org/xml/features/validation/schema")) {
Object[]messageArgs = new Object[]{"Schema location(s) specified, but schema validation is disabled by SAX feature setting."};
saxParserMessages.add(new Message(Severity.WARNING,
Context.OBJECT,
"org.jhove2.module.format.xml.XmlModule.validationDisabled",
messageArgs, jhove2.getConfigInfo()));
return (validity = Validity.Undetermined);
}
}
/* No validation errors found, but make sure validation was enabled */
if (saxParser.hasFeature("http://apache.org/xml/features/validation/dynamic")) {
return (validity = Validity.True);
}
else if (saxParser.hasFeature("http://xml.org/sax/features/validation")) {
return (validity = Validity.True);
}
else {
Object[]messageArgs = new Object[]{"XML validation is disabled by SAX feature setting."};
saxParserMessages.add(new Message(Severity.WARNING,
Context.OBJECT,
"org.jhove2.module.format.xml.XmlModule.validationDisabled",
messageArgs, jhove2.getConfigInfo()));
return (validity = Validity.Undetermined);
}
}
/**
* Instantiates a new XmlModule instance.
*
* @param format
* the Format object
*/
public XmlModule(Format format) {
super(VERSION, RELEASE, RIGHTS, format);
this.validity = Validity.Undetermined;
}
public XmlModule(){
this(null);
}
/** The instance of a SAX2 XMLReader class used to parse XML instances. */
protected SaxParser saxParser;
/** If true, run a separate parse to extract numeric character references */
protected boolean ncrParser = false;
/** Data store for XML declaration information captured during the parse. */
protected XmlDeclaration xmlDeclaration = new XmlDeclaration();
/** The XML document's root element. */
protected RootElement rootElement;
/** A list of the documents document scope declarations. */
protected List<DTD> dtds = new ArrayList<DTD>();
/** Data store for XML namespace information captured during the parse. */
protected NamespaceInformation namespaceInformation = new NamespaceInformation();
/** Data store for XML notation information captured during the parse. */
protected List<Notation> notations = new ArrayList<Notation>();
/** Data store for XML entity declarations captured during the parse. */
protected List<Entity> entities = new ArrayList<Entity>();
/** Data store for XML entity references captured during the parse. */
protected EntityReferences entityReferences = new EntityReferences();
/**
* Data store for XML numeric character references captured during the
* parse.
*/
protected NumericCharacterReferenceInformation numericCharacterReferenceInformation = new NumericCharacterReferenceInformation();
/**
* Data store for XML processing instruction information captured during the
* parse.
*/
protected List<ProcessingInstruction> processingInstructions = new ArrayList<ProcessingInstruction>();
/** Data store for XML comment information captured during the parse. */
protected CommentInformation commentInformation = new CommentInformation();
/** The validation results. */
protected ValidationResults validationResults = new ValidationResults();
/** Fail fast message. */
protected Message failFastMessage;
/** SAX Parser messages. */
protected ArrayList<Message> saxParserMessages = new ArrayList<Message>();
/** The well formed status of the source unit. */
protected Validity wellFormed = Validity.Undetermined;
/**
* Sets the SaxParser object to be used for parsing the source unit.
*
* @param saxParser
* the saxParser object
*/
public void setSaxParser(SaxParser saxParser) {
this.saxParser = saxParser;
this.saxParser.setXmlModule(this);
}
/**
* Gets the SaxParser object used for parsing the source unit.
*
* @return SaxParser object
*/
@ReportableProperty(order = 1, value = "XML Parser name, features, properties")
public SaxParser getSaxParser() {
return saxParser;
}
/** Sets the flag that specifies whether or not to collect comment text */
public void setCollectCommentText(boolean collectCommentText) {
this.commentInformation.collectCommentText = collectCommentText;
}
/**
* Sets the class name of the parser to be used for extracting numeric
* character references
*/
public void setNcrParser(boolean ncrParser) {
this.ncrParser = ncrParser;
}
/**
* Gets the XML Declaration data.
*
* @return XML Declaration data
*/
@ReportableProperty(order = 2, value = "XML Declaration data")
public XmlDeclaration getXmlDeclaration() {
return xmlDeclaration;
}
/**
* Gets the root element name.
*
* @return root element name
*/
@ReportableProperty(order = 3, value = "The document's root element")
public RootElement getRootElement() {
return rootElement;
}
/**
* Gets the list of documents document scope declarations (DTDs).
*
* @return list of documents document scope declarations (DTDs)
*/
@ReportableProperty(order = 4, value = "List of Document Scope Definitions (DTDs)")
public List<DTD> getDTDs() {
return dtds;
}
/**
* Gets the list of XML namespaces.
*
* @return list of XML namespaces
*/
@ReportableProperty(order = 5, value = "XML Namespace Information")
public NamespaceInformation getNamespaceInformation() {
return namespaceInformation;
}
/**
* Gets the list of XML entity declarations.
*
* @return list of XML entity declarations
*/
@ReportableProperty(order = 6, value = "List of Entity Declarations")
public List<Entity> getEntities() {
return entities;
}
/**
* Gets the list of XML entity references.
*
* @return list of XML entity references
*/
@ReportableProperty(order = 7, value = "List of Entity References")
public ArrayList<EntityReference> getEntityReferences() {
return entityReferences.getEntityReferenceList();
}
/**
* Gets the list of XML notations.
*
* @return list of XML notations
*/
@ReportableProperty(order = 8, value = "List of Notations found in the XML document")
public List<Notation> getNotations() {
return notations;
}
/**
* Gets the list of XML Numeric Character References.
*
* @return list of XML Numeric Character References
*/
@ReportableProperty(order = 9, value = "Numeric Character Reference Information")
public NumericCharacterReferenceInformation getNumericCharacterReferenceInformation() {
return numericCharacterReferenceInformation;
}
/**
* Gets the list of XML processing instructions.
*
* @return list of XML processing instructions
*/
@ReportableProperty(order = 10, value = "List of Processing Instructions")
public List<ProcessingInstruction> getProcessingInstructions() {
return processingInstructions;
}
/**
* Gets the list of XML comments.
*
* @return list of XML comments
*/
@ReportableProperty(order = 11, value = "List of Comments")
public CommentInformation getCommentInformation() {
return commentInformation;
}
/**
* Gets the validation results.
*
* @return validation results
*/
@ReportableProperty(order = 12, value = "Warning or error messages generated during XML Validation")
public ValidationResults getValidationResults() {
return validationResults;
}
/**
* Gets the well-formedness status.
*
* @return well-formedness status
*/
@ReportableProperty(order = 13, value = "XML well-formed status")
public Validity isWellFormed() {
return wellFormed;
}
/**
* Get fail fast message.
*
* @return Fail fast message
*/
@ReportableProperty(order = 14, value = "Fail fast message.")
public Message getFailFast() {
return this.failFastMessage;
}
/**
* Get SAX Parser messages.
*
* @return SAX Parser messages
*/
@ReportableProperty(order = 15, value = "SAX Parser Messages.")
public List<Message> getSaxParserMessages() {
return this.saxParserMessages;
}
/**
* Parse a source unit.
*
* @param jhove2
* JHOVE2 framework
* @param source
* XML source unit
* @param input
* XML source input
* @return Number of bytes consumed
*
* @throws EOFException
* the EOF exception
* @throws IOException
* Signals that an I/O exception has occurred.
* @throws JHOVE2Exception
* the JHOVE2 exception
*/
@Override
public long parse(JHOVE2 jhove2, Source source, Input input)
throws IOException, JHOVE2Exception
{
this.jhove2 = jhove2;
/* Use SAX2 to get what information is available from that mechanism */
saxParser.parse(source, jhove2);
/*
* Do a separate parse of the XML Declaration at the start of the
* document
*/
input.setPosition(source.getStartingOffset());
xmlDeclaration.parse(input);
/* Do a separate parse to inventory numeric character references */
if (this.ncrParser) {
input.setPosition(0L);
numericCharacterReferenceInformation.parse(input,
xmlDeclaration.encodingFromSAX2, jhove2);
}
validate(jhove2, source, input);
return 0;
}
}
|
Modified XmlModule's parse() method to use source.getStartingOffset() to set the input's position
|
src/main/java/org/jhove2/module/format/xml/XmlModule.java
|
Modified XmlModule's parse() method to use source.getStartingOffset() to set the input's position
|
|
Java
|
bsd-3-clause
|
6abe9a46258fd8171afb0db41e99cb32650c0d8e
| 0
|
sirixdb/sirix,sirixdb/sirix,sirixdb/sirix,sirixdb/sirix
|
package org.sirix.xquery.function.jn.io;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.file.Path;
import org.brackit.xquery.XQuery;
import org.brackit.xquery.util.io.IOUtils;
import org.brackit.xquery.util.serialize.StringSerializer;
import org.brackit.xquery.xdm.Sequence;
import org.junit.Test;
import org.sirix.JsonTestHelper;
import org.sirix.JsonTestHelper.PATHS;
import org.sirix.service.json.shredder.JsonShredder;
import org.sirix.utils.JsonDocumentCreator;
import org.sirix.xquery.SirixCompileChain;
import org.sirix.xquery.SirixQueryContext;
import org.sirix.xquery.json.BasicJsonDBStore;
import org.sirix.xquery.json.JsonDBObject;
import junit.framework.TestCase;
/**
 * Integration tests for JSON XQuery support (jn:* functions) against a Sirix
 * store: storing/opening documents, dereferencing object fields, array
 * indexing, and time-travel axes over multiple revisions. Expected values are
 * exact serialized strings produced by {@code StringSerializer}.
 */
public final class SimpleQueryIntegrationTest extends TestCase {
  // Minimal single-object fixture.
  private static final String mSimpleJson = "{\"sirix\":{\"revisionNumber\":1}}";
  // Larger fixture: "sirix" maps to an array of two revision objects.
  private static final String mJson =
      "{\"sirix\":[{\"revisionNumber\":1,\"revision\":{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}},{\"revisionNumber\":2,\"revision\":{\"tadaaa\":\"todooo\",\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}}]}";
  // Expected serialization of the "sirix" array from mJson.
  private static final String mExpectedJson =
      "[{\"revisionNumber\":1,\"revision\":{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}},{\"revisionNumber\":2,\"revision\":{\"tadaaa\":\"todooo\",\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}}]";
  // Expected concatenated serializations for the time-travel axes below; the
  // revisions themselves are produced by setupRevisions().
  private static final String mExpectedAllTimesTimeTravelQueryResult =
      "{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\"},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
  private static final String mExpectedLastTimeTravelQueryResult =
      "{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\"},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
  private static final String mExpectedNextTimeTravelQueryResult =
      "{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
  private static final String mExpectedFutureTimeTravelQueryResult =
      "{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\"},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
  private static final String mExpectedPastTimeTravelQueryResult =
      "{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
  private static final String mExpectedPastOrSelfTimeTravelQueryResult =
      "{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\"},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
  // Database location under the test helper's first path.
  private Path sirixPath = PATHS.PATH1.getFile();
  // JUnit 3 lifecycle: wipe all test data before each test.
  @Override
  protected void setUp() throws Exception {
    JsonTestHelper.deleteEverything();
  }
  // JUnit 3 lifecycle: close open resources after each test.
  @Override
  protected void tearDown() {
    JsonTestHelper.closeEverything();
  }
  // Store a two-element array and read back element 0 by index.
  @Test
  public void testSimple() {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      final var storeQuery = "jn:store('mycol.jn','mydoc.jn','[\"bla\", \"blubb\"]')";
      new XQuery(chain, storeQuery).evaluate(ctx);
      final var openQuery = "jn:doc('mycol.jn','mydoc.jn')[[0]]";
      final var seq = new XQuery(chain, openQuery).evaluate(ctx);
      assertNotNull(seq);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.serialize(seq);
      }
      // Note: string values serialize with surrounding quotes.
      assertEquals("\"bla\"", buf.toString());
    }
  }
  // Dereference the top-level "sirix" field and compare full serialization.
  @Test
  public void testSimpleSecond() {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      final var storeQuery = "jn:store('mycol.jn','mydoc.jn','" + mJson + "')";
      new XQuery(chain, storeQuery).evaluate(ctx);
      final var openQuery = "jn:doc('mycol.jn','mydoc.jn')=>sirix";
      final var seq = new XQuery(chain, openQuery).evaluate(ctx);
      assertNotNull(seq);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.serialize(seq);
      }
      assertEquals(mExpectedJson, buf.toString());
    }
  }
  // Chained field dereference: =>sirix=>revisionNumber.
  @Test
  public void testSimpleDeref() {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      final var storeQuery = "jn:store('mycol.jn','mydoc.jn','" + mSimpleJson + "')";
      new XQuery(chain, storeQuery).evaluate(ctx);
      final var openQuery = "jn:doc('mycol.jn','mydoc.jn')=>sirix=>revisionNumber";
      final var seq = new XQuery(chain, openQuery).evaluate(ctx);
      assertNotNull(seq);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.serialize(seq);
      }
      assertEquals("1", buf.toString());
    }
  }
  // Mixed dereference and array index: =>sirix[[0]]=>revisionNumber.
  @Test
  public void testComplexSecond() {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      final var storeQuery = "jn:store('mycol.jn','mydoc.jn','" + mJson + "')";
      new XQuery(chain, storeQuery).evaluate(ctx);
      final var openQuery = "jn:doc('mycol.jn','mydoc.jn')=>sirix[[0]]=>revisionNumber";
      final var seq = new XQuery(chain, openQuery).evaluate(ctx);
      assertNotNull(seq);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.serialize(seq);
      }
      assertEquals("1", buf.toString());
    }
  }
  // Nested array indexing: [[1]][[0]][[0]] drills into [["foo",[["bar"]]]].
  @Test
  public void testArrays() {
    // Initialize query context and store.
    try (final BasicJsonDBStore store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final SirixQueryContext ctx = SirixQueryContext.createWithJsonStore(store);
        final SirixCompileChain chain = SirixCompileChain.createWithJsonStore(store)) {
      final String storeQuery = "jn:store('mycol.jn','mydoc.jn','[\"foo\",[[\"bar\"]]]')";
      new XQuery(chain, storeQuery).evaluate(ctx);
      final String openQuery = "jn:doc('mycol.jn','mydoc.jn')[[1]][[0]][[0]]";
      final Sequence seq = new XQuery(chain, openQuery).evaluate(ctx);
      assertNotNull(seq);
      final PrintStream buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.serialize(seq);
      }
      assertEquals("\"bar\"", buf.toString());
      // // Use XQuery to load a JSON database/resource.
      // System.out.println("Opening document again:");
      // final String query = "jn:doc('mycol.jn','mydoc.jn')[[0:1]]";
      // System.out.println(openQuery);
      // final Sequence seqSlice = new XQuery(chain, query).evaluate(ctx);
      //
      // assertNotNull(seqSlice);
      //
      // final PrintStream bufSlice = IOUtils.createBuffer();
      // new StringSerializer(bufSlice).serialize(seq);
      // assertEquals("[\"foo\",[[\"bar\"]]]", buf.toString());
    }
  }
  // jn:all-times yields the document in every revision (1..3 here).
  @Test
  public void testTimeTravelAllTimes() throws IOException {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      setupRevisions(ctx, chain);
      final var allTimesQuery = "jn:all-times(jn:doc('mycol.jn','mydoc.jn'))";
      final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.setFormat(true).serialize(allTimesSeq);
      }
      assertEquals(mExpectedAllTimesTimeTravelQueryResult, buf.toString());
    }
  }
  // jn:first yields the document in its first revision (the original JSON).
  @Test
  public void testTimeTravelFirst() throws IOException {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      setupRevisions(ctx, chain);
      final var allTimesQuery = "jn:first(jn:doc('mycol.jn','mydoc.jn'))";
      final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.setFormat(true).serialize(allTimesSeq);
      }
      assertEquals(JsonDocumentCreator.JSON, buf.toString());
    }
  }
  // jn:last yields the document in its most recent revision.
  @Test
  public void testTimeTravelLast() throws IOException {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      setupRevisions(ctx, chain);
      final var allTimesQuery = "jn:last(jn:doc('mycol.jn','mydoc.jn'))";
      final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.setFormat(true).serialize(allTimesSeq);
      }
      assertEquals(mExpectedLastTimeTravelQueryResult, buf.toString());
    }
  }
  // jn:next from revision 1 yields revision 2.
  @Test
  public void testTimeTravelNext() throws IOException {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      setupRevisions(ctx, chain);
      final var allTimesQuery = "jn:next(jn:doc('mycol.jn','mydoc.jn',1))";
      final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.setFormat(true).serialize(allTimesSeq);
      }
      assertEquals(mExpectedNextTimeTravelQueryResult, buf.toString());
    }
  }
  // jn:previous from revision 2 yields revision 1 (the original JSON).
  @Test
  public void testTimeTravelPrevious() throws IOException {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      setupRevisions(ctx, chain);
      final var allTimesQuery = "jn:previous(jn:doc('mycol.jn','mydoc.jn',2))";
      final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.setFormat(true).serialize(allTimesSeq);
      }
      assertEquals(JsonDocumentCreator.JSON, buf.toString());
    }
  }
  // jn:future from revision 1 (exclusive) yields revisions 2 and 3.
  @Test
  public void testTimeTravelFuture() throws IOException {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      setupRevisions(ctx, chain);
      final var allTimesQuery = "jn:future(jn:doc('mycol.jn','mydoc.jn',1))";
      final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.setFormat(true).serialize(allTimesSeq);
      }
      assertEquals(mExpectedFutureTimeTravelQueryResult, buf.toString());
    }
  }
  // jn:future with self-inclusion from revision 1 yields all revisions.
  @Test
  public void testTimeTravelFutureOrSelf() throws IOException {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      setupRevisions(ctx, chain);
      final var allTimesQuery = "jn:future(jn:doc('mycol.jn','mydoc.jn',1),true())";
      final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.setFormat(true).serialize(allTimesSeq);
      }
      assertEquals(mExpectedAllTimesTimeTravelQueryResult, buf.toString());
    }
  }
  // jn:past from revision 3 (exclusive) yields revisions 2 and 1.
  @Test
  public void testTimeTravelPast() throws IOException {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      setupRevisions(ctx, chain);
      final var allTimesQuery = "jn:past(jn:doc('mycol.jn','mydoc.jn',3))";
      final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.setFormat(true).serialize(allTimesSeq);
      }
      assertEquals(mExpectedPastTimeTravelQueryResult, buf.toString());
    }
  }
  // jn:past with self-inclusion from revision 3 yields revisions 3, 2, 1.
  @Test
  public void testTimeTravelPastOrSelf() throws IOException {
    try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
        final var ctx = SirixQueryContext.createWithJsonStore(store);
        final var chain = SirixCompileChain.createWithJsonStore(store)) {
      setupRevisions(ctx, chain);
      final var allTimesQuery = "jn:past(jn:doc('mycol.jn','mydoc.jn',3),true())";
      final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
      final var buf = IOUtils.createBuffer();
      try (final var serializer = new StringSerializer(buf)) {
        serializer.setFormat(true).serialize(allTimesSeq);
      }
      assertEquals(mExpectedPastOrSelfTimeTravelQueryResult, buf.toString());
    }
  }
  /**
   * Stores the base document and then applies two edits through a write
   * transaction (insert a subtree at node 3, then remove node 11 and commit),
   * after which the transaction asserts revision number 4.
   */
  private void setupRevisions(final SirixQueryContext ctx, final SirixCompileChain chain) throws IOException {
    final var storeQuery = "jn:store('mycol.jn','mydoc.jn','" + JsonDocumentCreator.JSON + "')";
    new XQuery(chain, storeQuery).evaluate(ctx);
    final var openDocQuery = "jn:doc('mycol.jn','mydoc.jn')";
    final var object = (JsonDBObject) new XQuery(chain, openDocQuery).evaluate(ctx);
    try (final var wtx = object.getTrx().getResourceManager().beginNodeTrx()) {
      wtx.moveTo(3);
      try (final var reader = JsonShredder.createStringReader("{\"foo\":\"bar\"}")) {
        wtx.insertSubtreeAsFirstChild(reader);
        wtx.moveTo(11);
        wtx.remove();
        wtx.commit();
      }
      // NOTE(review): the insert apparently commits implicitly, since the
      // single explicit commit above still results in revision 4 — confirm
      // against insertSubtreeAsFirstChild's commit semantics.
      assert wtx.getRevisionNumber() == 4;
    }
  }
  // @Test
  // public void testVersionedTwitter1() throws IOException, InterruptedException {
  // try (final var store = BasicJsonDBStore.newBuilder().build();
  // final var ctx = SirixQueryContext.createWithJsonStore(store);
  // final var chain = SirixCompileChain.createWithJsonStore(store)) {
  // final var twitterFilePath = JSON.resolve("twitter.json").toString();
  // final var storeQuery = "jn:load('mycol.jn','mydoc.jn','" + twitterFilePath + "')=>statuses";
  // final var sequence = (JsonDBArray) new XQuery(chain, storeQuery).execute(ctx);
  //
  // TimeUnit.SECONDS.sleep(5);
  //
  // final var rtx = sequence.getTrx();
  // final Instant now;
  //
  // try (final var wtx = rtx.getResourceManager().beginNodeTrx()) {
  // wtx.moveTo(rtx.getNodeKey());
  // wtx.moveToFirstChild();
  // while (wtx.hasRightSibling())
  // wtx.moveToRightSibling();
  // wtx.insertSubtreeAsRightSibling(JsonShredder.createFileReader(JSON.resolve("twitterTweet1.json")));
  // TimeUnit.SECONDS.sleep(5);
  // wtx.insertSubtreeAsRightSibling(JsonShredder.createFileReader(JSON.resolve("twitterTweet2.json")));
  // TimeUnit.SECONDS.sleep(2);
  // now = Instant.now().truncatedTo(ChronoUnit.SECONDS);
  // TimeUnit.SECONDS.sleep(5);
  //
  // while (wtx.hasLeftSibling())
  // wtx.moveToLeftSibling();
  //
  // wtx.remove();
  // wtx.commit();
  // }
  //
  // final var retrieveQuery =
  // Files.readString(JSON.resolve("query.xq")).replace("$$$$",
  // DateTimeFormatter.ISO_INSTANT.format(now));
  //
  // final var buf = IOUtils.createBuffer();
  // final var serializer = new StringSerializer(buf);
  // new XQuery(chain, retrieveQuery).serialize(ctx, serializer);
  //
  // assertEquals("2018-02-25T19:31:07 2018-02-26T06:42:50 2018-08-16T21:10:50:557000",
  // buf.toString());
  // }
  // }
  //
  // @Test
  // public void testVersionedTwitter2() throws IOException, InterruptedException {
  // try (final var store = BasicJsonDBStore.newBuilder().build();
  // final var ctx = SirixQueryContext.createWithJsonStore(store);
  // final var chain = SirixCompileChain.createWithJsonStore(store)) {
  // final var twitterFilePath = JSON.resolve("twitter.json").toString();
  // final var storeQuery = "jn:load('mycol.jn','mydoc.jn','" + twitterFilePath + "')=>statuses";
  // final var sequence = (JsonDBArray) new XQuery(chain, storeQuery).execute(ctx);
  //
  // TimeUnit.SECONDS.sleep(5);
  //
  // final var rtx = sequence.getTrx();
  // final Instant now;
  //
  // try (final var wtx = rtx.getResourceManager().beginNodeTrx()) {
  // wtx.moveTo(rtx.getNodeKey());
  // wtx.moveToFirstChild();
  // while (wtx.hasRightSibling())
  // wtx.moveToRightSibling();
  // wtx.insertSubtreeAsRightSibling(JsonShredder.createFileReader(JSON.resolve("twitterTweet1.json")));
  // TimeUnit.SECONDS.sleep(5);
  // wtx.insertSubtreeAsRightSibling(JsonShredder.createFileReader(JSON.resolve("twitterTweet2.json")));
  // TimeUnit.SECONDS.sleep(2);
  // now = Instant.now().truncatedTo(ChronoUnit.SECONDS);
  // TimeUnit.SECONDS.sleep(5);
  //
  // while (wtx.hasLeftSibling())
  // wtx.moveToLeftSibling();
  //
  // wtx.remove();
  // wtx.commit();
  // }
  //
  // final var retrieveQuery = Files.readString(JSON.resolve("temporal-query.xq"))
  // .replace("$$$$", DateTimeFormatter.ISO_INSTANT.format(now));
  //
  // final var buf = IOUtils.createBuffer();
  // final var serializer = new StringSerializer(buf);
  // new XQuery(chain, retrieveQuery).serialize(ctx, serializer);
  //
  // System.out.println(buf.toString());
  // // assertEquals("2018-02-25T19:31:07 2018-02-26T06:42:50 2018-08-16T21:10:50:557000",
  // // buf.toString());
  // }
  // }
  //
  // @Test
  // public void testTwitter() throws IOException {
  // try (final var store = BasicJsonDBStore.newBuilder().build();
  // final var ctx = SirixQueryContext.createWithJsonStore(store);
  // final var chain = SirixCompileChain.createWithJsonStore(store)) {
  // final var twitterFilePath = JSON.resolve("twitter.json").toString();
  // final var storeQuery = "let $created := jn:load('mycol.jn','mydoc.jn','" + twitterFilePath
  // + "')=>statuses[[0]]=>created_at return xs:dateTime($created)";
  //
  // final var buf = IOUtils.createBuffer();
  // final var serializer = new StringSerializer(buf);
  // new XQuery(chain, storeQuery).serialize(ctx, serializer);
  //
  // assertEquals("2018-08-16T21:10:50:557000", buf.toString());
  // }
  // }
}
|
bundles/sirix-xquery/src/test/java/org/sirix/xquery/function/jn/io/SimpleQueryIntegrationTest.java
|
package org.sirix.xquery.function.jn.io;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.file.Path;
import org.brackit.xquery.XQuery;
import org.brackit.xquery.util.io.IOUtils;
import org.brackit.xquery.util.serialize.StringSerializer;
import org.brackit.xquery.xdm.Sequence;
import org.junit.Test;
import org.sirix.JsonTestHelper;
import org.sirix.JsonTestHelper.PATHS;
import org.sirix.service.json.shredder.JsonShredder;
import org.sirix.utils.JsonDocumentCreator;
import org.sirix.xquery.SirixCompileChain;
import org.sirix.xquery.SirixQueryContext;
import org.sirix.xquery.json.BasicJsonDBStore;
import org.sirix.xquery.json.JsonDBObject;
import junit.framework.TestCase;
public final class SimpleQueryIntegrationTest extends TestCase {
private static final String mSimpleJson = "{\"sirix\":{\"revisionNumber\":1}}";
private static final String mJson =
"{\"sirix\":[{\"revisionNumber\":1,\"revision\":{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}},{\"revisionNumber\":2,\"revision\":{\"tadaaa\":\"todooo\",\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}}]}";
private static final String mExpectedJson =
"[{\"revisionNumber\":1,\"revision\":{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}},{\"revisionNumber\":2,\"revision\":{\"tadaaa\":\"todooo\",\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}}]";
private static final String mExpectedAllTimesTimeTravelQueryResult =
"{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\"},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
private static final String mExpectedLastTimeTravelQueryResult =
"{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\"},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
private static final String mExpectedNextTimeTravelQueryResult =
"{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
private static final String mExpectedFutureTimeTravelQueryResult =
"{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\"},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
private static final String mExpectedPastTimeTravelQueryResult =
"{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
private static final String mExpectedPastOrSelfTimeTravelQueryResult =
"{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\"},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[{\"foo\":\"bar\"},\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}{\"foo\":[\"bar\",null,2.33],\"bar\":{\"hello\":\"world\",\"helloo\":true},\"baz\":\"hello\",\"tada\":[{\"foo\":\"bar\"},{\"baz\":false},\"boo\",{},[]]}";
private Path sirixPath = PATHS.PATH1.getFile();
@Override
protected void setUp() throws Exception {
JsonTestHelper.deleteEverything();
}
@Override
protected void tearDown() {
JsonTestHelper.closeEverything();
}
@Test
public void testSimple() {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
final var storeQuery = "jn:store('mycol.jn','mydoc.jn','[\"bla\", \"blubb\"]')";
new XQuery(chain, storeQuery).evaluate(ctx);
final var openQuery = "jn:doc('mycol.jn','mydoc.jn')[[0]]";
final var seq = new XQuery(chain, openQuery).evaluate(ctx);
assertNotNull(seq);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.serialize(seq);
}
assertEquals("bla", buf.toString());
}
}
@Test
public void testSimpleSecond() {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
final var storeQuery = "jn:store('mycol.jn','mydoc.jn','" + mJson + "')";
new XQuery(chain, storeQuery).evaluate(ctx);
final var openQuery = "jn:doc('mycol.jn','mydoc.jn')=>sirix";
final var seq = new XQuery(chain, openQuery).evaluate(ctx);
assertNotNull(seq);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.serialize(seq);
}
assertEquals(mExpectedJson, buf.toString());
}
}
@Test
public void testSimpleDeref() {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
final var storeQuery = "jn:store('mycol.jn','mydoc.jn','" + mSimpleJson + "')";
new XQuery(chain, storeQuery).evaluate(ctx);
final var openQuery = "jn:doc('mycol.jn','mydoc.jn')=>sirix=>revisionNumber";
final var seq = new XQuery(chain, openQuery).evaluate(ctx);
assertNotNull(seq);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.serialize(seq);
}
assertEquals("1", buf.toString());
}
}
@Test
public void testComplexSecond() {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
final var storeQuery = "jn:store('mycol.jn','mydoc.jn','" + mJson + "')";
new XQuery(chain, storeQuery).evaluate(ctx);
final var openQuery = "jn:doc('mycol.jn','mydoc.jn')=>sirix[[0]]=>revisionNumber";
final var seq = new XQuery(chain, openQuery).evaluate(ctx);
assertNotNull(seq);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.serialize(seq);
}
assertEquals("1", buf.toString());
}
}
@Test
public void testArrays() {
// Initialize query context and store.
try (final BasicJsonDBStore store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final SirixQueryContext ctx = SirixQueryContext.createWithJsonStore(store);
final SirixCompileChain chain = SirixCompileChain.createWithJsonStore(store)) {
final String storeQuery = "jn:store('mycol.jn','mydoc.jn','[\"foo\",[[\"bar\"]]]')";
new XQuery(chain, storeQuery).evaluate(ctx);
final String openQuery = "jn:doc('mycol.jn','mydoc.jn')[[1]][[0]][[0]]";
final Sequence seq = new XQuery(chain, openQuery).evaluate(ctx);
assertNotNull(seq);
final PrintStream buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.serialize(seq);
}
assertEquals("bar", buf.toString());
// // Use XQuery to load a JSON database/resource.
// System.out.println("Opening document again:");
// final String query = "jn:doc('mycol.jn','mydoc.jn')[[0:1]]";
// System.out.println(openQuery);
// final Sequence seqSlice = new XQuery(chain, query).evaluate(ctx);
//
// assertNotNull(seqSlice);
//
// final PrintStream bufSlice = IOUtils.createBuffer();
// new StringSerializer(bufSlice).serialize(seq);
// assertEquals("[\"foo\",[[\"bar\"]]]", buf.toString());
}
}
@Test
public void testTimeTravelAllTimes() throws IOException {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
setupRevisions(ctx, chain);
final var allTimesQuery = "jn:all-times(jn:doc('mycol.jn','mydoc.jn'))";
final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.setFormat(true).serialize(allTimesSeq);
}
assertEquals(mExpectedAllTimesTimeTravelQueryResult, buf.toString());
}
}
@Test
public void testTimeTravelFirst() throws IOException {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
setupRevisions(ctx, chain);
final var allTimesQuery = "jn:first(jn:doc('mycol.jn','mydoc.jn'))";
final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.setFormat(true).serialize(allTimesSeq);
}
assertEquals(JsonDocumentCreator.JSON, buf.toString());
}
}
@Test
public void testTimeTravelLast() throws IOException {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
setupRevisions(ctx, chain);
final var allTimesQuery = "jn:last(jn:doc('mycol.jn','mydoc.jn'))";
final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.setFormat(true).serialize(allTimesSeq);
}
assertEquals(mExpectedLastTimeTravelQueryResult, buf.toString());
}
}
@Test
public void testTimeTravelNext() throws IOException {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
setupRevisions(ctx, chain);
final var allTimesQuery = "jn:next(jn:doc('mycol.jn','mydoc.jn',1))";
final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.setFormat(true).serialize(allTimesSeq);
}
assertEquals(mExpectedNextTimeTravelQueryResult, buf.toString());
}
}
@Test
public void testTimeTravelPrevious() throws IOException {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
setupRevisions(ctx, chain);
final var allTimesQuery = "jn:previous(jn:doc('mycol.jn','mydoc.jn',2))";
final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.setFormat(true).serialize(allTimesSeq);
}
assertEquals(JsonDocumentCreator.JSON, buf.toString());
}
}
@Test
public void testTimeTravelFuture() throws IOException {
try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
final var ctx = SirixQueryContext.createWithJsonStore(store);
final var chain = SirixCompileChain.createWithJsonStore(store)) {
setupRevisions(ctx, chain);
final var allTimesQuery = "jn:future(jn:doc('mycol.jn','mydoc.jn',1))";
final var allTimesSeq = new XQuery(chain, allTimesQuery).execute(ctx);
final var buf = IOUtils.createBuffer();
try (final var serializer = new StringSerializer(buf)) {
serializer.setFormat(true).serialize(allTimesSeq);
}
assertEquals(mExpectedFutureTimeTravelQueryResult, buf.toString());
}
}
@Test
public void testTimeTravelFutureOrSelf() throws IOException {
  // jn:future with true() includes revision 1 itself, so starting at the first
  // revision the result matches the all-revisions expectation.
  try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
      final var ctx = SirixQueryContext.createWithJsonStore(store);
      final var chain = SirixCompileChain.createWithJsonStore(store)) {
    setupRevisions(ctx, chain);

    final String query = "jn:future(jn:doc('mycol.jn','mydoc.jn',1),true())";
    final var result = new XQuery(chain, query).execute(ctx);

    // Serialize with formatting enabled, then compare against the expected snapshot.
    final var out = IOUtils.createBuffer();
    try (final var writer = new StringSerializer(out)) {
      writer.setFormat(true).serialize(result);
    }
    assertEquals(mExpectedAllTimesTimeTravelQueryResult, out.toString());
  }
}
@Test
public void testTimeTravelPast() throws IOException {
  // jn:past (without the "or self" flag) yields the revisions strictly before revision 3.
  try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
      final var ctx = SirixQueryContext.createWithJsonStore(store);
      final var chain = SirixCompileChain.createWithJsonStore(store)) {
    setupRevisions(ctx, chain);

    final String query = "jn:past(jn:doc('mycol.jn','mydoc.jn',3))";
    final var result = new XQuery(chain, query).execute(ctx);

    // Serialize with formatting enabled, then compare against the expected snapshot.
    final var out = IOUtils.createBuffer();
    try (final var writer = new StringSerializer(out)) {
      writer.setFormat(true).serialize(result);
    }
    assertEquals(mExpectedPastTimeTravelQueryResult, out.toString());
  }
}
@Test
public void testTimeTravelPastOrSelf() throws IOException {
  // jn:past with true() includes revision 3 itself in addition to the earlier revisions.
  try (final var store = BasicJsonDBStore.newBuilder().location(sirixPath).build();
      final var ctx = SirixQueryContext.createWithJsonStore(store);
      final var chain = SirixCompileChain.createWithJsonStore(store)) {
    setupRevisions(ctx, chain);

    final String query = "jn:past(jn:doc('mycol.jn','mydoc.jn',3),true())";
    final var result = new XQuery(chain, query).execute(ctx);

    // Serialize with formatting enabled, then compare against the expected snapshot.
    final var out = IOUtils.createBuffer();
    try (final var writer = new StringSerializer(out)) {
      writer.setFormat(true).serialize(result);
    }
    assertEquals(mExpectedPastOrSelfTimeTravelQueryResult, out.toString());
  }
}
/**
 * Stores the initial document and then mutates it so that several revisions exist
 * for the time-travel queries exercised by the tests above.
 */
private void setupRevisions(final SirixQueryContext ctx, final SirixCompileChain chain) throws IOException {
  // Revision 1: store the initial document.
  new XQuery(chain, "jn:store('mycol.jn','mydoc.jn','" + JsonDocumentCreator.JSON + "')").evaluate(ctx);

  // Re-open the stored document to obtain a transaction handle for further edits.
  final var object = (JsonDBObject) new XQuery(chain, "jn:doc('mycol.jn','mydoc.jn')").evaluate(ctx);

  try (final var wtx = object.getTrx().getResourceManager().beginNodeTrx()) {
    wtx.moveTo(3);
    try (final var reader = JsonShredder.createStringReader("{\"foo\":\"bar\"}")) {
      // Insert a subtree, remove node 11, then commit.
      wtx.insertSubtreeAsFirstChild(reader);
      wtx.moveTo(11);
      wtx.remove();
      wtx.commit();
    }
    // NOTE(review): after one explicit commit the revision number is already 4, which
    // suggests insertSubtreeAsFirstChild/remove commit implicitly — confirm against the API.
    assert wtx.getRevisionNumber() == 4;
  }
}
// @Test
// public void testVersionedTwitter1() throws IOException, InterruptedException {
// try (final var store = BasicJsonDBStore.newBuilder().build();
// final var ctx = SirixQueryContext.createWithJsonStore(store);
// final var chain = SirixCompileChain.createWithJsonStore(store)) {
// final var twitterFilePath = JSON.resolve("twitter.json").toString();
// final var storeQuery = "jn:load('mycol.jn','mydoc.jn','" + twitterFilePath + "')=>statuses";
// final var sequence = (JsonDBArray) new XQuery(chain, storeQuery).execute(ctx);
//
// TimeUnit.SECONDS.sleep(5);
//
// final var rtx = sequence.getTrx();
// final Instant now;
//
// try (final var wtx = rtx.getResourceManager().beginNodeTrx()) {
// wtx.moveTo(rtx.getNodeKey());
// wtx.moveToFirstChild();
// while (wtx.hasRightSibling())
// wtx.moveToRightSibling();
// wtx.insertSubtreeAsRightSibling(JsonShredder.createFileReader(JSON.resolve("twitterTweet1.json")));
// TimeUnit.SECONDS.sleep(5);
// wtx.insertSubtreeAsRightSibling(JsonShredder.createFileReader(JSON.resolve("twitterTweet2.json")));
// TimeUnit.SECONDS.sleep(2);
// now = Instant.now().truncatedTo(ChronoUnit.SECONDS);
// TimeUnit.SECONDS.sleep(5);
//
// while (wtx.hasLeftSibling())
// wtx.moveToLeftSibling();
//
// wtx.remove();
// wtx.commit();
// }
//
// final var retrieveQuery =
// Files.readString(JSON.resolve("query.xq")).replace("$$$$",
// DateTimeFormatter.ISO_INSTANT.format(now));
//
// final var buf = IOUtils.createBuffer();
// final var serializer = new StringSerializer(buf);
// new XQuery(chain, retrieveQuery).serialize(ctx, serializer);
//
// assertEquals("2018-02-25T19:31:07 2018-02-26T06:42:50 2018-08-16T21:10:50:557000",
// buf.toString());
// }
// }
//
// @Test
// public void testVersionedTwitter2() throws IOException, InterruptedException {
// try (final var store = BasicJsonDBStore.newBuilder().build();
// final var ctx = SirixQueryContext.createWithJsonStore(store);
// final var chain = SirixCompileChain.createWithJsonStore(store)) {
// final var twitterFilePath = JSON.resolve("twitter.json").toString();
// final var storeQuery = "jn:load('mycol.jn','mydoc.jn','" + twitterFilePath + "')=>statuses";
// final var sequence = (JsonDBArray) new XQuery(chain, storeQuery).execute(ctx);
//
// TimeUnit.SECONDS.sleep(5);
//
// final var rtx = sequence.getTrx();
// final Instant now;
//
// try (final var wtx = rtx.getResourceManager().beginNodeTrx()) {
// wtx.moveTo(rtx.getNodeKey());
// wtx.moveToFirstChild();
// while (wtx.hasRightSibling())
// wtx.moveToRightSibling();
// wtx.insertSubtreeAsRightSibling(JsonShredder.createFileReader(JSON.resolve("twitterTweet1.json")));
// TimeUnit.SECONDS.sleep(5);
// wtx.insertSubtreeAsRightSibling(JsonShredder.createFileReader(JSON.resolve("twitterTweet2.json")));
// TimeUnit.SECONDS.sleep(2);
// now = Instant.now().truncatedTo(ChronoUnit.SECONDS);
// TimeUnit.SECONDS.sleep(5);
//
// while (wtx.hasLeftSibling())
// wtx.moveToLeftSibling();
//
// wtx.remove();
// wtx.commit();
// }
//
// final var retrieveQuery = Files.readString(JSON.resolve("temporal-query.xq"))
// .replace("$$$$", DateTimeFormatter.ISO_INSTANT.format(now));
//
// final var buf = IOUtils.createBuffer();
// final var serializer = new StringSerializer(buf);
// new XQuery(chain, retrieveQuery).serialize(ctx, serializer);
//
// System.out.println(buf.toString());
// // assertEquals("2018-02-25T19:31:07 2018-02-26T06:42:50 2018-08-16T21:10:50:557000",
// // buf.toString());
// }
// }
//
// @Test
// public void testTwitter() throws IOException {
// try (final var store = BasicJsonDBStore.newBuilder().build();
// final var ctx = SirixQueryContext.createWithJsonStore(store);
// final var chain = SirixCompileChain.createWithJsonStore(store)) {
// final var twitterFilePath = JSON.resolve("twitter.json").toString();
// final var storeQuery = "let $created := jn:load('mycol.jn','mydoc.jn','" + twitterFilePath
// + "')=>statuses[[0]]=>created_at return xs:dateTime($created)";
//
// final var buf = IOUtils.createBuffer();
// final var serializer = new StringSerializer(buf);
// new XQuery(chain, storeQuery).serialize(ctx, serializer);
//
// assertEquals("2018-08-16T21:10:50:557000", buf.toString());
// }
// }
}
|
Fix test cases
|
bundles/sirix-xquery/src/test/java/org/sirix/xquery/function/jn/io/SimpleQueryIntegrationTest.java
|
Fix test cases
|
|
Java
|
bsd-3-clause
|
6dec052b960d44af97f5d648f2730afbbd3a674a
| 0
|
NCIP/caaers,CBIIT/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,NCIP/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers
|
package gov.nih.nci.cabig.caaers.dao;
import gov.nih.nci.cabig.caaers.dao.query.AbstractQuery;
import gov.nih.nci.cabig.caaers.domain.DateValue;
import gov.nih.nci.cabig.caaers.domain.Identifier;
import gov.nih.nci.cabig.caaers.domain.Study;
import gov.nih.nci.cabig.caaers.domain.ReportFormatType;
import gov.nih.nci.cabig.caaers.domain.StudyAgent;
import gov.nih.nci.cabig.caaers.domain.StudyOrganization;
import gov.nih.nci.cabig.caaers.domain.StudyTherapyType;
import gov.nih.nci.cabig.caaers.service.DomainObjectImportOutcome;
import gov.nih.nci.cabig.ctms.dao.MutableDomainObjectDao;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.criterion.Example;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Restrictions;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.HibernateTemplate;
import org.springframework.transaction.annotation.Transactional;
/**
* @author Sujith Vellat Thayyilthodi
* @author Rhett Sutphin
* @author Priyatam
* @author <a href="mailto:biju.joseph@semanticbits.com">Biju Joseph</a>
*/
@Transactional(readOnly = true)
public class StudyDao extends GridIdentifiableDao<Study> implements MutableDomainObjectDao<Study> {

    /** Properties matched by case-insensitive substring in {@link #getBySubnames(String[])}. */
    private static final List<String> SUBSTRING_MATCH_PROPERTIES = Arrays.asList("shortTitle", "longTitle");

    /** No exact-match properties are used by the default sub-name search. */
    private static final List<String> EXACT_MATCH_PROPERTIES = Collections.emptyList();

    /** Properties matched exactly by {@link #getByUniqueIdentifiers(String[])}. */
    private static final List<String> EXACT_MATCH_UNIQUE_PROPERTIES = Arrays.asList("longTitle");

    private static final List<String> EMPTY_PROPERTIES = Collections.emptyList();

    /** Properties matched exactly by {@link #getByShortTitle(String)}. */
    private static final List<String> EXACT_MATCH_TITLE_PROPERTIES = Arrays.asList("shortTitle");

    /**
     * HQL join fragment connecting a study ("o") to its identifiers, organizations,
     * participant assignments, participants and participant identifiers. The aliases
     * (identifier, ss, spa, p, pIdentifier) are referenced by {@link #searchStudy(Map)}.
     */
    private static final String JOINS = "join o.identifiers as identifier "
            + "join o.studyOrganizations as ss join ss.studyParticipantAssignments as spa join spa.participant as p join p.identifiers as pIdentifier";

    // NOTE(review): not referenced anywhere in this class — confirm there is no external
    // usage and remove if truly dead.
    private static final String QUERY_BY_SHORT_TITLE = "select s from " + Study.class.getName()
            + " s where shortTitle = :st";

    @Override
    public Class<Study> domainClass() {
        return Study.class;
    }

    /** @return every persisted {@link Study}; use with care on large datasets. */
    @SuppressWarnings("unchecked")
    public List<Study> getAllStudies() {
        return getHibernateTemplate().find("from Study");
    }

    /**
     * //TODO - Refactor this code with Hibernate Detached objects !!!
     *
     * This is a hack to load all collection objects in memory. Useful for editing a Study when you
     * know you will be needing all collections. To avoid LazyInitializationException, every
     * collection is explicitly initialized, and the transient therapy/report-format flags are
     * derived from the persisted associations.
     *
     * @param id the database id of the study
     * @return fully loaded Study
     */
    public Study getStudyDesignById(final int id) {
        Study study = (Study) getHibernateTemplate().get(domainClass(), id);
        initialize(study);
        populateTherapyFlags(study);
        populateReportFormatFlags(study);
        return study;
    }

    /** Mirrors the study's persisted therapies into the transient boolean flags. */
    private static void populateTherapyFlags(final Study study) {
        if (study.getStudyTherapy(StudyTherapyType.DRUG_ADMINISTRATION) != null) {
            study.setDrugAdministrationTherapyType(Boolean.TRUE);
        }
        if (study.getStudyTherapy(StudyTherapyType.DEVICE) != null) {
            study.setDeviceTherapyType(Boolean.TRUE);
        }
        if (study.getStudyTherapy(StudyTherapyType.RADIATION) != null) {
            study.setRadiationTherapyType(Boolean.TRUE);
        }
        if (study.getStudyTherapy(StudyTherapyType.SURGERY) != null) {
            study.setSurgeryTherapyType(Boolean.TRUE);
        }
        if (study.getStudyTherapy(StudyTherapyType.BEHAVIORAL) != null) {
            study.setBehavioralTherapyType(Boolean.TRUE);
        }
    }

    /** Mirrors the study's persisted report formats into the transient boolean flags. */
    private static void populateReportFormatFlags(final Study study) {
        if (study.getReportFormat(ReportFormatType.ADEERSPDF) != null) {
            study.setAdeersPDFType(Boolean.TRUE);
        }
        if (study.getReportFormat(ReportFormatType.CAAERSXML) != null) {
            study.setCaaersXMLType(Boolean.TRUE);
        }
        if (study.getReportFormat(ReportFormatType.CIOMSFORM) != null) {
            study.setCiomsPDFType(Boolean.TRUE);
        }
        if (study.getReportFormat(ReportFormatType.CIOMSSAEFORM) != null) {
            study.setCiomsSaePDFType(Boolean.TRUE);
        }
        if (study.getReportFormat(ReportFormatType.DCPSAEFORM) != null) {
            study.setDcpSAEPDFType(Boolean.TRUE);
        }
        if (study.getReportFormat(ReportFormatType.MEDWATCHPDF) != null) {
            study.setMedwatchPDFType(Boolean.TRUE);
        }
    }

    /**
     * Eagerly initializes every lazily loaded collection reachable from the study so it can be
     * used outside an open Hibernate session.
     *
     * @param study the study to initialize (must be attached to the current session)
     * @return the same study instance, fully initialized
     */
    public Study initialize(final Study study) {
        HibernateTemplate ht = getHibernateTemplate();
        ht.initialize(study.getIdentifiers());
        ht.initialize(study.getStudyOrganizations());
        for (StudyOrganization studyOrg : study.getStudyOrganizations()) {
            if (studyOrg == null) {
                continue;
            }
            ht.initialize(studyOrg.getStudyInvestigatorsInternal());
            ht.initialize(studyOrg.getStudyPersonnelsInternal());
        }
        ht.initialize(study.getMeddraStudyDiseases());
        ht.initialize(study.getCtepStudyDiseases());
        ht.initialize(study.getStudyAgentsInternal());
        ht.initialize(study.getStudyTherapies());
        ht.initialize(study.getTreatmentAssignmentsInternal());
        ht.initialize(study.getReportFormats());
        for (StudyAgent sa : study.getStudyAgents()) {
            ht.initialize(sa.getStudyAgentINDAssociationsInternal());
        }
        return study;
    }

    /** Creates or updates the given study. */
    @Transactional(readOnly = false)
    public void save(final Study study) {
        getHibernateTemplate().saveOrUpdate(study);
    }

    /**
     * Merges every imported study of the given outcomes into the current session in one batch.
     *
     * @param domainObjectImportOutcome outcomes whose imported studies are persisted
     */
    @Transactional(readOnly = false)
    public void batchSave(final List<DomainObjectImportOutcome<Study>> domainObjectImportOutcome) {
        log.debug("Batch saving start time : " + new java.util.Date());
        Session session = getHibernateTemplate().getSessionFactory().getCurrentSession();
        for (DomainObjectImportOutcome<Study> outcome : domainObjectImportOutcome) {
            final Study study = outcome.getImportedDomainObject();
            session.merge(study);
        }
    }

    /**
     * @param subnames substrings to match (case-insensitive) against short and long title
     * @return studies whose titles match every given substring
     */
    public List<Study> getBySubnames(final String[] subnames) {
        return findBySubname(subnames, null, null, SUBSTRING_MATCH_PROPERTIES, EXACT_MATCH_PROPERTIES);
    }

    /*
     * Added extraCondition parameter, as a fix for bug 9514
     */
    public List<Study> getBySubnamesJoinOnIdentifier(final String[] subnames, String extraConditions) {
        String joins = " join o.identifiers as identifier ";
        List<String> subStringMatchProperties = Arrays.asList("o.shortTitle", "o.longTitle", "identifier.type",
                "identifier.value");
        return findBySubname(subnames, extraConditions, null, subStringMatchProperties, EXACT_MATCH_PROPERTIES, joins);
    }

    /**
     * Finds studies associated with the given participant whose short/long title or any
     * identifier value/type contains {@code text} (case-insensitive). Returns at most 30 rows.
     *
     * @param participantId  id of the participant the studies must be linked to
     * @param text           substring to look for (case-insensitive)
     * @param extraCondition optional extra HQL condition prepended to the WHERE clause; may be null
     */
    public List<Study> matchStudyByParticipant(final Integer participantId, final String text, final String extraCondition) {
        String joins = " join o.identifiers as identifier join o.studyOrganizations as ss join ss.studyParticipantAssignments as spa join spa.participant as p ";
        List<Object> params = new ArrayList<Object>();
        StringBuilder queryBuf = new StringBuilder(" select distinct o from ").append(domainClass().getName()).append(
                " o ").append(joins);
        // Hoisted: the identical case-insensitive pattern is bound four times below.
        final String pattern = '%' + text.toLowerCase() + '%';
        queryBuf.append(" where ");
        queryBuf.append((extraCondition == null) ? "" : extraCondition + " and ");
        queryBuf.append("p.id = ?");
        params.add(participantId);
        queryBuf.append(" and ( ");
        queryBuf.append("LOWER(").append("o.shortTitle").append(") LIKE ?");
        params.add(pattern);
        queryBuf.append(" or ");
        queryBuf.append("LOWER(").append("identifier.value").append(") LIKE ? ");
        params.add(pattern);
        queryBuf.append(" or ");
        queryBuf.append("LOWER(").append("identifier.type").append(") LIKE ? ");
        params.add(pattern);
        queryBuf.append(" or ");
        queryBuf.append("LOWER(").append("o.longTitle").append(") LIKE ? ) ");
        params.add(pattern);
        log.debug("matchStudyByParticipant : " + queryBuf.toString());
        // NOTE(review): setMaxResults mutates the shared HibernateTemplate — confirm this
        // is the intended project-wide pattern.
        getHibernateTemplate().setMaxResults(30);
        return getHibernateTemplate().find(queryBuf.toString(), params.toArray());
    }

    /**
     * Appends one case-insensitive {@code LOWER(property) LIKE ?} condition to the query and
     * registers the corresponding (lower-cased) bind value.
     *
     * @param substring when true, the value is wrapped in '%' wildcards for substring matching
     * @return always {@code false}, so callers can assign the result to their firstClause flag
     */
    private static boolean appendLikeClause(final StringBuilder queryBuf, final List<Object> params,
            final boolean firstClause, final String property, final String value, final boolean substring) {
        queryBuf.append(firstClause ? " where " : " and ");
        queryBuf.append("LOWER(").append(property).append(") LIKE ?");
        params.add(substring ? '%' + value.toLowerCase() + '%' : value.toLowerCase());
        return false;
    }

    /**
     * Ad-hoc study search over study and participant attributes. Every non-null entry of
     * {@code props} contributes one AND-ed condition; string properties are matched
     * case-insensitively, the date of birth by its year/month/day components.
     *
     * @param props map of search criteria (studyIdentifier, studyShortTitle,
     *              participantIdentifier, participantFirstName, participantLastName,
     *              participantEthnicity, participantGender, participantDateOfBirth)
     * @throws ParseException if participantDateOfBirth cannot be parsed
     */
    public List<Study> searchStudy(final Map props) throws ParseException {
        List<Object> params = new ArrayList<Object>();
        boolean firstClause = true;
        StringBuilder queryBuf = new StringBuilder(" select distinct o from ").append(domainClass().getName()).append(
                " o ").append(JOINS);
        if (props.get("studyIdentifier") != null) {
            firstClause = appendLikeClause(queryBuf, params, firstClause, "identifier.value",
                    (String) props.get("studyIdentifier"), true);
        }
        if (props.get("studyShortTitle") != null) {
            firstClause = appendLikeClause(queryBuf, params, firstClause, "o.shortTitle",
                    (String) props.get("studyShortTitle"), true);
        }
        if (props.get("participantIdentifier") != null) {
            firstClause = appendLikeClause(queryBuf, params, firstClause, "pIdentifier.value",
                    (String) props.get("participantIdentifier"), true);
        }
        if (props.get("participantFirstName") != null) {
            firstClause = appendLikeClause(queryBuf, params, firstClause, "p.firstName",
                    (String) props.get("participantFirstName"), true);
        }
        if (props.get("participantLastName") != null) {
            firstClause = appendLikeClause(queryBuf, params, firstClause, "p.lastName",
                    (String) props.get("participantLastName"), true);
        }
        if (props.get("participantEthnicity") != null) {
            // Exact (lower-cased) value — no wildcard wrapping, matching the original behavior.
            firstClause = appendLikeClause(queryBuf, params, firstClause, "p.ethnicity",
                    (String) props.get("participantEthnicity"), false);
        }
        if (props.get("participantGender") != null) {
            firstClause = appendLikeClause(queryBuf, params, firstClause, "p.gender",
                    (String) props.get("participantGender"), false);
        }
        if (props.get("participantDateOfBirth") != null) {
            queryBuf.append(firstClause ? " where " : " and ");
            String p = (String) props.get("participantDateOfBirth");
            DateValue dob = stringToDateValue(p);
            // Year is always matched; month/day only when present (> 0) in the parsed value.
            queryBuf.append(" p.dateOfBirth.year").append(" = ? ");
            params.add(dob.getYear());
            if (dob.getMonth() > 0) {
                queryBuf.append(" and p.dateOfBirth.month").append(" = ? ");
                params.add(dob.getMonth());
            }
            if (dob.getDay() > 0) {
                queryBuf.append(" and p.dateOfBirth.day").append(" = ? ");
                params.add(dob.getDay());
            }
            firstClause = false;
        }
        log.debug("::: " + queryBuf.toString());
        // NOTE(review): setMaxResults mutates the shared HibernateTemplate — confirm this
        // is the intended project-wide pattern.
        getHibernateTemplate().setMaxResults(CaaersDao.DEFAULT_MAX_RESULTS_SIZE);
        return getHibernateTemplate().find(queryBuf.toString(), params.toArray());
    }

    /**
     * @param subnames a set of substrings to match
     * @return a list of studies such that each entry in <code>subnames</code> is an exact match of
     *         the study's long title
     */
    @SuppressWarnings("unchecked")
    public List<Study> getByUniqueIdentifiers(final String[] subnames) {
        return findBySubname(subnames, null, null, EMPTY_PROPERTIES, EXACT_MATCH_UNIQUE_PROPERTIES);
    }

    /** @return the study carrying the given identifier, or null when none matches. */
    public Study getByIdentifier(final Identifier identifier) {
        return findByIdentifier(identifier);
    }

    /**
     * This will do an exact match on the <code>shortTitle</code>, and will return the first
     * available Study. Note:- Biz rule should be made that short title is unique.
     */
    public Study getByShortTitle(final String shortTitle) {
        List<Study> studies = findBySubname(new String[] { shortTitle }, null, null, null, EXACT_MATCH_TITLE_PROPERTIES);
        if (studies != null && studies.size() > 0) {
            return studies.get(0);
        }
        return null;
    }

    /**
     * Query-by-example search. In wildcard mode, title properties match anywhere
     * (case-insensitive) and the first identifier value is matched by a case-insensitive prefix.
     */
    @Override
    // TODO - Need to refactor the below into CaaersDao along with identifiers
    public List<Study> searchByExample(final Study study, final boolean isWildCard) {
        Example example = Example.create(study).excludeZeroes().ignoreCase();
        Criteria studyCriteria = getSession().createCriteria(Study.class);
        studyCriteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
        if (isWildCard) {
            // multiInstitutionIndicator is excluded because the example would otherwise
            // always constrain on it.
            example.excludeProperty("multiInstitutionIndicator").enableLike(MatchMode.ANYWHERE);
            studyCriteria.add(example);
            if (study.getIdentifiers().size() > 0) {
                studyCriteria.createCriteria("identifiers").add(
                        Restrictions.ilike("value", study.getIdentifiers().get(0).getValue() + "%"));
            }
            if (study.getStudyOrganizations().size() > 0) {
                studyCriteria.createCriteria("studyOrganizations").add(
                        Restrictions.eq("organization.id", study.getStudyOrganizations().get(0).getOrganization()
                                .getId()));
            }
            return studyCriteria.list();
        }
        return studyCriteria.add(example).list();
    }

    /**
     * Executes the HQL produced by the given query object with its named parameters bound.
     *
     * @param query supplies the HQL string and its named-parameter map
     */
    @SuppressWarnings({ "unchecked" })
    public List<Study> find(final AbstractQuery query) {
        log.debug("::: " + query.getQueryString());
        return (List<Study>) getHibernateTemplate().execute(new HibernateCallback() {
            public Object doInHibernate(final Session session) throws HibernateException, SQLException {
                org.hibernate.Query hibernateQuery = session.createQuery(query.getQueryString());
                // Bind every named parameter declared by the query object.
                for (Map.Entry<String, Object> entry : query.getParameterMap().entrySet()) {
                    hibernateQuery.setParameter(entry.getKey(), entry.getValue());
                }
                return hibernateQuery.list();
            }
        });
    }

    /** Deletes the given study. */
    @Transactional(readOnly = false)
    public void delete(Study study) {
        getHibernateTemplate().delete(study);
    }
}
|
projects/core/src/main/java/gov/nih/nci/cabig/caaers/dao/StudyDao.java
|
package gov.nih.nci.cabig.caaers.dao;
import gov.nih.nci.cabig.caaers.dao.query.AbstractQuery;
import gov.nih.nci.cabig.caaers.domain.DateValue;
import gov.nih.nci.cabig.caaers.domain.Identifier;
import gov.nih.nci.cabig.caaers.domain.Study;
import gov.nih.nci.cabig.caaers.domain.ReportFormatType;
import gov.nih.nci.cabig.caaers.domain.StudyAgent;
import gov.nih.nci.cabig.caaers.domain.StudyOrganization;
import gov.nih.nci.cabig.caaers.domain.StudyTherapyType;
import gov.nih.nci.cabig.caaers.service.DomainObjectImportOutcome;
import gov.nih.nci.cabig.ctms.dao.MutableDomainObjectDao;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.criterion.Example;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Restrictions;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.HibernateTemplate;
import org.springframework.transaction.annotation.Transactional;
/**
* @author Sujith Vellat Thayyilthodi
* @author Rhett Sutphin
* @author Priyatam
* @author <a href="mailto:biju.joseph@semanticbits.com">Biju Joseph</a>
*/
@Transactional(readOnly = true)
public class StudyDao extends GridIdentifiableDao<Study> implements MutableDomainObjectDao<Study> {
private static final List<String> SUBSTRING_MATCH_PROPERTIES = Arrays.asList("shortTitle", "longTitle");
private static final List<String> EXACT_MATCH_PROPERTIES = Collections.emptyList();
private static final List<String> EXACT_MATCH_UNIQUE_PROPERTIES = Arrays.asList("longTitle");
private static final List<String> EMPTY_PROPERTIES = Collections.emptyList();
private static final List<String> EXACT_MATCH_TITLE_PROPERTIES = Arrays.asList("shortTitle");
private static final String JOINS = "join o.identifiers as identifier "
+ "join o.studyOrganizations as ss join ss.studyParticipantAssignments as spa join spa.participant as p join p.identifiers as pIdentifier";
private static final String QUERY_BY_SHORT_TITLE = "select s from " + Study.class.getName()
+ " s where shortTitle = :st";
@Override
public Class<Study> domainClass() {
return Study.class;
}
@SuppressWarnings("unchecked")
public List<Study> getAllStudies() {
return getHibernateTemplate().find("from Study");
}
/**
* //TODO - Refactor this code with Hibernate Detached objects !!!
*
* This is a hack to load all collection objects in memory. Useful for editing a Study when you know you will be needing all collections
* To avoid Lazy loading Exception by Hibernate, a call to .size() is done for each collection
* @param id
* @return Fully loaded Study
*/
public Study getStudyDesignById(final int id) {
Study study = (Study) getHibernateTemplate().get(domainClass(), id);
initialize(study);
// now select the therapies types
if (study.getStudyTherapy(StudyTherapyType.DRUG_ADMINISTRATION) != null) {
study.setDrugAdministrationTherapyType(Boolean.TRUE);
}
if (study.getStudyTherapy(StudyTherapyType.DEVICE) != null) {
study.setDeviceTherapyType(Boolean.TRUE);
}
if (study.getStudyTherapy(StudyTherapyType.RADIATION) != null) {
study.setRadiationTherapyType(Boolean.TRUE);
}
if (study.getStudyTherapy(StudyTherapyType.SURGERY) != null) {
study.setSurgeryTherapyType(Boolean.TRUE);
}
if (study.getStudyTherapy(StudyTherapyType.BEHAVIORAL) != null) {
study.setBehavioralTherapyType(Boolean.TRUE);
}
//select report formats
if (study.getReportFormat(ReportFormatType.ADEERSPDF) != null) {
study.setAdeersPDFType(Boolean.TRUE);
}
if (study.getReportFormat(ReportFormatType.CAAERSXML) != null) {
study.setCaaersXMLType(Boolean.TRUE);
}
if (study.getReportFormat(ReportFormatType.CIOMSFORM) != null) {
study.setCiomsPDFType(Boolean.TRUE);
}
if (study.getReportFormat(ReportFormatType.CIOMSSAEFORM) != null) {
study.setCiomsSaePDFType(Boolean.TRUE);
}
if (study.getReportFormat(ReportFormatType.DCPSAEFORM) != null) {
study.setDcpSAEPDFType(Boolean.TRUE);
}
if (study.getReportFormat(ReportFormatType.MEDWATCHPDF) != null) {
study.setMedwatchPDFType(Boolean.TRUE);
}
return study;
}
public Study initialize(final Study study) {
HibernateTemplate ht = getHibernateTemplate();
ht.initialize(study.getIdentifiers());
ht.initialize(study.getStudyOrganizations());
for (StudyOrganization studyOrg : study.getStudyOrganizations()) {
if (studyOrg == null) {
continue;
}
ht.initialize(studyOrg.getStudyInvestigatorsInternal());
ht.initialize(studyOrg.getStudyPersonnelsInternal());
}
ht.initialize(study.getMeddraStudyDiseases());
ht.initialize(study.getCtepStudyDiseases());
ht.initialize(study.getStudyAgentsInternal());
ht.initialize(study.getStudyTherapies());
ht.initialize(study.getTreatmentAssignmentsInternal());
ht.initialize(study.getReportFormats());
for (StudyAgent sa : study.getStudyAgents()) {
ht.initialize(sa.getStudyAgentINDAssociationsInternal());
}
return study;
}
@Transactional(readOnly = false)
public void save(final Study study) {
getHibernateTemplate().saveOrUpdate(study);
}
@Transactional(readOnly = false)
public void batchSave(final List<DomainObjectImportOutcome<Study>> domainObjectImportOutcome){
log.debug("Batch saving start time : " + new java.util.Date());
Session session = getHibernateTemplate().getSessionFactory().getCurrentSession();
for (DomainObjectImportOutcome<Study> outcome : domainObjectImportOutcome) {
final Study study = outcome.getImportedDomainObject();
session.merge(study);
}
}
public List<Study> getBySubnames(final String[] subnames) {
return findBySubname(subnames, null, null, SUBSTRING_MATCH_PROPERTIES, EXACT_MATCH_PROPERTIES);
}
/*
* Added extraCondition parameter, as a fix for bug 9514
*/
public List<Study> getBySubnamesJoinOnIdentifier(final String[] subnames, String extraConditions) {
String joins = " join o.identifiers as identifier ";
List<String> subStringMatchProperties = Arrays.asList("o.shortTitle", "o.longTitle", "identifier.type",
"identifier.value");
return findBySubname(subnames, extraConditions, null, subStringMatchProperties, EXACT_MATCH_PROPERTIES, joins);
}
/*
* public List<Study> getByCriteria(final String[] subnames, final List<String> subStringMatchProperties) { return
* findBySubname(subnames, null, null, subStringMatchProperties, null, JOINS); }
*/
public List<Study> matchStudyByParticipant(final Integer participantId, final String text, final String extraCondition) {
String joins = " join o.identifiers as identifier join o.studyOrganizations as ss join ss.studyParticipantAssignments as spa join spa.participant as p ";
List<Object> params = new ArrayList<Object>();
StringBuilder queryBuf = new StringBuilder(" select distinct o from ").append(domainClass().getName()).append(
" o ").append(joins);
queryBuf.append(" where ");
queryBuf.append((extraCondition == null)? "" : extraCondition + " and " );
queryBuf.append("p.id = ?");
params.add(participantId);
queryBuf.append(" and ( ");
queryBuf.append("LOWER(").append("o.shortTitle").append(") LIKE ?");
params.add('%' + text.toLowerCase() + '%');
queryBuf.append(" or ");
queryBuf.append("LOWER(").append("identifier.value").append(") LIKE ? ");
params.add('%' + text.toLowerCase() + '%');
queryBuf.append(" or ");
queryBuf.append("LOWER(").append("identifier.type").append(") LIKE ? ");
params.add('%' + text.toLowerCase() + '%');
queryBuf.append(" or ");
queryBuf.append("LOWER(").append("o.longTitle").append(") LIKE ? ) ");
params.add('%' + text.toLowerCase() + '%');
log.debug("matchStudyByParticipant : " + queryBuf.toString());
getHibernateTemplate().setMaxResults(30);
return getHibernateTemplate().find(queryBuf.toString(), params.toArray());
}
public List<Study> searchStudy(final Map props) throws ParseException {
List<Object> params = new ArrayList<Object>();
boolean firstClause = true;
StringBuilder queryBuf = new StringBuilder(" select distinct o from ").append(domainClass().getName()).append(
" o ").append(JOINS);
if (props.get("studyIdentifier") != null) {
queryBuf.append(firstClause ? " where " : " and ");
queryBuf.append("LOWER(").append("identifier.value").append(") LIKE ?");
String p = (String) props.get("studyIdentifier");
params.add('%' + p.toLowerCase() + '%');
firstClause = false;
}
if (props.get("studyShortTitle") != null) {
queryBuf.append(firstClause ? " where " : " and ");
queryBuf.append("LOWER(").append("o.shortTitle").append(") LIKE ?");
String p = (String) props.get("studyShortTitle");
params.add('%' + p.toLowerCase() + '%');
firstClause = false;
}
if (props.get("participantIdentifier") != null) {
queryBuf.append(firstClause ? " where " : " and ");
queryBuf.append("LOWER(").append("pIdentifier.value").append(") LIKE ?");
String p = (String) props.get("participantIdentifier");
params.add('%' + p.toLowerCase() + '%');
firstClause = false;
}
if (props.get("participantFirstName") != null) {
queryBuf.append(firstClause ? " where " : " and ");
queryBuf.append("LOWER(").append("p.firstName").append(") LIKE ?");
String p = (String) props.get("participantFirstName");
params.add('%' + p.toLowerCase() + '%');
firstClause = false;
}
if (props.get("participantLastName") != null) {
queryBuf.append(firstClause ? " where " : " and ");
queryBuf.append("LOWER(").append("p.lastName").append(") LIKE ?");
String p = (String) props.get("participantLastName");
params.add('%' + p.toLowerCase() + '%');
firstClause = false;
}
if (props.get("participantEthnicity") != null) {
queryBuf.append(firstClause ? " where " : " and ");
queryBuf.append("LOWER(").append("p.ethnicity").append(") LIKE ?");
String p = (String) props.get("participantEthnicity");
params.add(p.toLowerCase());
firstClause = false;
}
if (props.get("participantGender") != null) {
queryBuf.append(firstClause ? " where " : " and ");
queryBuf.append("LOWER(").append("p.gender").append(") LIKE ?");
String p = (String) props.get("participantGender");
params.add(p.toLowerCase());
firstClause = false;
}
if (props.get("participantDateOfBirth") != null) {
queryBuf.append(firstClause ? " where " : " and ");
String p = (String) props.get("participantDateOfBirth");
DateValue dob = stringToDateValue(p);
queryBuf.append(" p.dateOfBirth.year").append(" = ? ");
params.add(dob.getYear());
if(dob.getMonth() > 0){
queryBuf.append(" and p.dateOfBirth.month").append(" = ? ");
params.add(dob.getMonth());
}
if(dob.getDay() > 0){
queryBuf.append(" and p.dateOfBirth.day").append(" = ? ");
params.add(dob.getDay());
}
firstClause = false;
}
log.debug("::: " + queryBuf.toString());
getHibernateTemplate().setMaxResults(CaaersDao.DEFAULT_MAX_RESULTS_SIZE);
return getHibernateTemplate().find(queryBuf.toString(), params.toArray());
}
/**
* @param subnames a set of substrings to match
* @return a list of participants such that each entry in <code>subnames</code> is a case-insensitive substring match of the
* participant's name or other identifier
*/
@SuppressWarnings("unchecked")
public List<Study> getByUniqueIdentifiers(final String[] subnames) {
return findBySubname(subnames, null, null, EMPTY_PROPERTIES, EXACT_MATCH_UNIQUE_PROPERTIES);
}
public Study getByIdentifier(final Identifier identifier) {
return findByIdentifier(identifier);
}
/**
 * Performs an exact match on {@code shortTitle} and returns the first study
 * found, or {@code null} when nothing matches. Note:- Biz rule should be
 * made that short title is unique.
 * @param shortTitle the exact short title to search for
 * @return the first matching study, or {@code null}
 */
public Study getByShortTitle(final String shortTitle) {
    final List<Study> matches =
        findBySubname(new String[] { shortTitle }, null, null, null, EXACT_MATCH_TITLE_PROPERTIES);
    if (matches == null || matches.isEmpty()) {
        return null;
    }
    return matches.get(0);
}
@Override
// TODO - Need to refactor the below into CaaersDao along with identifiers
/**
 * Searches for studies matching the given example instance.
 * Zero-valued properties are excluded and comparisons are case-insensitive.
 * When {@code isWildCard} is set, string properties use ANYWHERE like-matching
 * (excluding {@code multiInstitutionIndicator}), the first identifier value is
 * matched as a prefix, and the first study organization is matched by its
 * organization id.
 * @param study example instance whose populated properties drive the search
 * @param isWildCard whether to apply wildcard/like semantics
 * @return distinct matching studies
 */
public List<Study> searchByExample(final Study study, final boolean isWildCard) {
    final Example prototype = Example.create(study).excludeZeroes().ignoreCase();
    final Criteria criteria = getSession().createCriteria(Study.class);
    criteria.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
    if (!isWildCard) {
        // Exact-style search: just the example, no extra restrictions.
        return criteria.add(prototype).list();
    }
    prototype.excludeProperty("multiInstitutionIndicator").enableLike(MatchMode.ANYWHERE);
    criteria.add(prototype);
    if (!study.getIdentifiers().isEmpty()) {
        // Prefix match on the first identifier's value.
        criteria.createCriteria("identifiers").add(
            Restrictions.like("value", study.getIdentifiers().get(0).getValue() + "%"));
    }
    if (!study.getStudyOrganizations().isEmpty()) {
        // Restrict to the organization of the first study organization.
        criteria.createCriteria("studyOrganizations").add(
            Restrictions.eq("organization.id",
                study.getStudyOrganizations().get(0).getOrganization().getId()));
    }
    return criteria.list();
}
@SuppressWarnings( { "unchecked" })
/**
 * Executes the given abstract query against the current Hibernate session,
 * binding every named parameter from the query's parameter map.
 * @param query query object supplying the HQL string and named parameters
 * @return the matching studies
 */
public List<Study> find(final AbstractQuery query) {
    log.debug("::: " + query.getQueryString());
    return (List<Study>) getHibernateTemplate().execute(new HibernateCallback() {
        public Object doInHibernate(final Session session) throws HibernateException, SQLException {
            final org.hibernate.Query hibernateQuery = session.createQuery(query.getQueryString());
            // Bind each named parameter before execution.
            for (final Map.Entry<String, Object> entry : query.getParameterMap().entrySet()) {
                hibernateQuery.setParameter(entry.getKey(), entry.getValue());
            }
            return hibernateQuery.list();
        }
    });
}
/**
 * Deletes the given study through the Hibernate template.
 * Runs in a read-write transaction.
 * @param study the study to remove
 */
@Transactional(readOnly=false)
public void delete(Study study){
getHibernateTemplate().delete(study);
}
/* *//**
* This will remove a study from the database
* @param study
*//*
@Transactional(readOnly=false)
public void deleteInprogressStudy(final String ccIdentifier){
final Object objStudyId = fetchStudyIdByCoordinatingCenterIdentifier(ccIdentifier);
if(objStudyId == null) throw new CaaersSystemException("No study exist with Coordinating Center Identifier :" + ccIdentifier);
getHibernateTemplate().execute(new HibernateCallback(){
public Object doInHibernate(Session session)throws HibernateException, SQLException {
//delete study invs
session.createSQLQuery("delete from study_investigators where study_sites_id in (" +
" select id from study_organizations where study_id = " + objStudyId.toString() + ")").executeUpdate();
//delete study orgs
session.createSQLQuery("delete from study_organizations where study_id = " +
objStudyId.toString() ).executeUpdate();
//delete identifiers
session.createSQLQuery("delete from identifiers where stu_id = " +
objStudyId.toString() ).executeUpdate();
//delete study
session.createSQLQuery("delete from studies where id = " +
objStudyId.toString() ).executeUpdate();
return null;
}
});
}
@Transactional(readOnly=false)
public void commitInprogressStudy(final String ccIdentifier) {
final Object objStudyId = fetchStudyIdByCoordinatingCenterIdentifier(ccIdentifier);
if(objStudyId == null) throw new CaaersSystemException("No study exist with Coordinating Center Identifier :" + ccIdentifier);
getHibernateTemplate().execute(new HibernateCallback(){
public Object doInHibernate(Session session)throws HibernateException, SQLException {
//update load status
session.createSQLQuery("update studies set load_status = 1 where id = " +
objStudyId.toString() ).executeUpdate();
return null;
}
});
}
public boolean isInprogressStudyExist(String ccIdentifier){
return fetchStudyIdByCoordinatingCenterIdentifier(ccIdentifier) != null;
}
private Object fetchStudyIdByCoordinatingCenterIdentifier(final String ccIdentifier){
return getHibernateTemplate().execute(new HibernateCallback(){
public Object doInHibernate(Session session)throws HibernateException, SQLException {
return session.createSQLQuery("select s.id from studies s " +
" join identifiers i on s.id = i.stu_id " +
" where i.type = '" + OrganizationAssignedIdentifier.COORDINATING_CENTER_IDENTIFIER_TYPE + "'" +
" and i.value = '" + ccIdentifier + "' " ).uniqueResult();
}
});
}
*/
}
|
Gforge bug # 9759
SVN-Revision: 4297
|
projects/core/src/main/java/gov/nih/nci/cabig/caaers/dao/StudyDao.java
|
Gforge bug # 9759
|
|
Java
|
bsd-3-clause
|
428963914ec07f4ab4f70a20125f931e3d1b1a62
| 0
|
pecko/thindeck,pecko/thindeck,pecko/thindeck,pecko/thindeck,pecko/thindeck,pecko/thindeck
|
/**
* Copyright (c) 2014-2015, Thindeck.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met: 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer. 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution. 3) Neither the name of the thindeck.com nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.thindeck.dynamo;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.jcabi.aspects.Immutable;
import com.jcabi.dynamo.Attributes;
import com.jcabi.dynamo.Item;
import com.jcabi.dynamo.QueryValve;
import com.jcabi.dynamo.Region;
import com.jcabi.urn.URN;
import com.thindeck.api.Repo;
import com.thindeck.api.Repos;
import java.io.IOException;
import java.util.Iterator;
import lombok.EqualsAndHashCode;
import lombok.ToString;
/**
 * Dynamo implementation of {@link Repos}.
 *
 * <p>Each repo is one item in the {@code DyRepo.TBL} Dynamo table, keyed
 * by the owner's URN (hash key) and the repo name (range key).
 *
 * @author Krzysztof Krason (Krzysztof.Krason@gmail.com)
 * @author Yegor Bugayenko (yegor@teamed.io)
 * @version $Id$
 */
@ToString
@Immutable
@EqualsAndHashCode(of = { "region", "user" })
final class DyRepos implements Repos {
    /**
     * Dynamo region holding the repos table.
     */
    private final transient Region region;
    /**
     * URN of the owner of the repos in this collection.
     */
    private final transient URN user;
    /**
     * Ctor.
     * @param reg Region
     * @param urn URN of the owner
     */
    DyRepos(final Region reg, final URN urn) {
        this.region = reg;
        this.user = urn;
    }
    @Override
    public Repo get(final String name) {
        return new DyRepo(this.region, this.user, name);
    }
    @Override
    public void add(final String name) throws IOException {
        if (!name.matches("[a-z]{3,12}")) {
            throw new IllegalStateException(
                "invalid repository name, must be 3-12 English letters"
            );
        }
        this.region.table(DyRepo.TBL).put(
            new Attributes()
                .with(DyRepo.HASH, this.user.toString())
                .with(DyRepo.RANGE, name)
                .with(DyRepo.ATTR_UPDATED, System.currentTimeMillis())
                .with(DyRepo.ATTR_MEMO, "<memo/>")
        );
    }
    @Override
    public void delete(final String name) {
        final Iterator<Item> items = this.region.table(DyRepo.TBL)
            .frame()
            .through(new QueryValve().withLimit(1))
            .where(DyRepo.HASH, this.user.toString())
            .where(DyRepo.RANGE, name)
            .iterator();
        // Guard against a missing repo: without this, items.next() would
        // fail with a cryptic NoSuchElementException.
        if (!items.hasNext()) {
            throw new IllegalStateException(
                String.format("repo \"%s\" not found, cannot delete", name)
            );
        }
        items.next();
        items.remove();
    }
    @Override
    public Iterable<Repo> iterate() {
        return Iterables.transform(
            this.region.table(DyRepo.TBL)
                .frame()
                .through(new QueryValve())
                .where(DyRepo.HASH, this.user.toString()),
            new Function<Item, Repo>() {
                @Override
                public Repo apply(final Item input) {
                    try {
                        return DyRepos.this.get(input.get(DyRepo.RANGE).getS());
                    } catch (final IOException ex) {
                        throw new IllegalStateException(ex);
                    }
                }
            }
        );
    }
}
|
src/main/java/com/thindeck/dynamo/DyRepos.java
|
/**
* Copyright (c) 2014-2015, Thindeck.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met: 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer. 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution. 3) Neither the name of the thindeck.com nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.thindeck.dynamo;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.jcabi.aspects.Immutable;
import com.jcabi.dynamo.Attributes;
import com.jcabi.dynamo.Item;
import com.jcabi.dynamo.QueryValve;
import com.jcabi.dynamo.Region;
import com.jcabi.urn.URN;
import com.thindeck.api.Repo;
import com.thindeck.api.Repos;
import java.io.IOException;
import java.util.Iterator;
import lombok.EqualsAndHashCode;
import lombok.ToString;
/**
 * Dynamo implementation of {@link Repos}.
 *
 * <p>A repo is stored as a single item in the {@code DyRepo.TBL} table,
 * keyed by the owner URN and the repo name.
 *
 * @author Krzysztof Krason (Krzysztof.Krason@gmail.com)
 * @author Yegor Bugayenko (yegor@teamed.io)
 * @version $Id$
 */
@ToString
@Immutable
@EqualsAndHashCode(of = { "region", "user" })
final class DyRepos implements Repos {
    /**
     * Dynamo region with the repos table.
     */
    private final transient Region region;
    /**
     * URN of the owner.
     */
    private final transient URN user;
    /**
     * Ctor.
     * @param reg Region
     * @param urn URN
     */
    DyRepos(final Region reg, final URN urn) {
        this.region = reg;
        this.user = urn;
    }
    @Override
    public Repo get(final String name) {
        return new DyRepo(this.region, this.user, name);
    }
    @Override
    public void add(final String name) throws IOException {
        final boolean valid = name.matches("[a-z]{3,32}");
        if (!valid) {
            throw new IllegalStateException(
                "invalid repository name, must be 3-32 English letters"
            );
        }
        final Attributes attrs = new Attributes()
            .with(DyRepo.HASH, this.user.toString())
            .with(DyRepo.RANGE, name)
            .with(DyRepo.ATTR_UPDATED, System.currentTimeMillis())
            .with(DyRepo.ATTR_MEMO, "<memo/>");
        this.region.table(DyRepo.TBL).put(attrs);
    }
    @Override
    public void delete(final String name) {
        final Iterator<Item> found = this.region.table(DyRepo.TBL)
            .frame()
            .through(new QueryValve().withLimit(1))
            .where(DyRepo.HASH, this.user.toString())
            .where(DyRepo.RANGE, name)
            .iterator();
        found.next();
        found.remove();
    }
    @Override
    public Iterable<Repo> iterate() {
        final Iterable<Item> items = this.region.table(DyRepo.TBL)
            .frame()
            .through(new QueryValve())
            .where(DyRepo.HASH, this.user.toString());
        return Iterables.transform(
            items,
            new Function<Item, Repo>() {
                @Override
                public Repo apply(final Item item) {
                    try {
                        return DyRepos.this.get(item.get(DyRepo.RANGE).getS());
                    } catch (final IOException ex) {
                        throw new IllegalStateException(ex);
                    }
                }
            }
        );
    }
}
|
#541 3-12 letters per repo name
|
src/main/java/com/thindeck/dynamo/DyRepos.java
|
#541 3-12 letters per repo name
|
|
Java
|
mit
|
a0285136268bd02f7f3361f86d594325955c8420
| 0
|
KiiPlatform/thing-if-AndroidSDK,KiiPlatform/thing-if-AndroidSDK
|
package com.kii.iotcloud;
import android.support.test.runner.AndroidJUnit4;
import com.google.gson.JsonObject;
import com.kii.iotcloud.exception.BadRequestException;
import com.kii.iotcloud.exception.NotFoundException;
import com.kii.iotcloud.exception.UnauthorizedException;
import com.squareup.okhttp.mockwebserver.RecordedRequest;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
/**
 * Tests for push installation and uninstallation.
 * http://docs.kii.com/rest/#notification_management-manage_the_user_thing_device_installation-register_a_new_device
 * http://docs.kii.com/rest/#notification_management-manage_the_user_thing_device_installation-delete_the_installation
 */
@RunWith(AndroidJUnit4.class)
public class IoTCloudAPI_PushTest extends IoTCloudAPITestBase {

    /**
     * Builds the headers every install-push request is expected to carry.
     * @param api the API instance whose owner's access token is used
     * @return expected request headers
     */
    private Map<String, String> expectedInstallHeaders(IoTCloudAPI api) {
        Map<String, String> expectedRequestHeaders = new HashMap<String, String>();
        expectedRequestHeaders.put("X-Kii-AppID", APP_ID);
        expectedRequestHeaders.put("X-Kii-AppKey", APP_KEY);
        expectedRequestHeaders.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
        expectedRequestHeaders.put("Content-Type", "application/vnd.kii.InstallationCreationRequest+json");
        return expectedRequestHeaders;
    }

    /**
     * Verifies the recorded install-push request: path, method, headers and body.
     * @param api the API instance the request was made through
     * @param deviceToken token expected as installationRegistrationID
     * @param backend backend whose deviceType is expected in the body
     * @param development expected "development" flag, or null when absent
     */
    private void assertInstallPushRequest(IoTCloudAPI api, String deviceToken,
            PushBackend backend, Boolean development) throws Exception {
        RecordedRequest request = this.server.takeRequest(1, TimeUnit.SECONDS);
        Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations", request.getPath());
        Assert.assertEquals("POST", request.getMethod());
        this.assertRequestHeader(this.expectedInstallHeaders(api), request);
        JsonObject expectedRequestBody = new JsonObject();
        expectedRequestBody.addProperty("installationRegistrationID", deviceToken);
        expectedRequestBody.addProperty("deviceType", backend.getDeviceType());
        if (development != null) {
            expectedRequestBody.addProperty("development", development);
        }
        this.assertRequestBody(expectedRequestBody, request);
    }

    /**
     * Verifies the recorded uninstall-push request: path, method and headers.
     * @param api the API instance the request was made through
     * @param installationID installation id expected in the request path
     */
    private void assertUninstallPushRequest(IoTCloudAPI api, String installationID) throws Exception {
        RecordedRequest request = this.server.takeRequest(1, TimeUnit.SECONDS);
        Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations/" + installationID, request.getPath());
        Assert.assertEquals("DELETE", request.getMethod());
        Map<String, String> expectedRequestHeaders = new HashMap<String, String>();
        expectedRequestHeaders.put("X-Kii-AppID", APP_ID);
        expectedRequestHeaders.put("X-Kii-AppKey", APP_KEY);
        expectedRequestHeaders.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
        this.assertRequestHeader(expectedRequestHeaders, request);
    }

    @Test
    public void installPushGCMTest() throws Exception {
        String deviceToken = UUID.randomUUID().toString();
        String installationID = UUID.randomUUID().toString();
        this.addMockResponseForInstallPush(201, installationID);
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        Assert.assertNull(api.getInstallationID());
        String result = api.installPush(deviceToken, PushBackend.GCM);
        Assert.assertNotNull(api.getInstallationID());
        // verify the result
        Assert.assertEquals(installationID, result);
        // verify the request
        this.assertInstallPushRequest(api, deviceToken, PushBackend.GCM, null);
    }

    @Test
    public void installPushGCMDevelopmentTest() throws Exception {
        String deviceToken = UUID.randomUUID().toString();
        String installationID = UUID.randomUUID().toString();
        this.addMockResponseForInstallPush(201, installationID);
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        Assert.assertNull(api.getInstallationID());
        String result = api.installPush(deviceToken, PushBackend.GCM, true);
        Assert.assertNotNull(api.getInstallationID());
        // verify the result
        Assert.assertEquals(installationID, result);
        // verify the request, including the "development" flag
        this.assertInstallPushRequest(api, deviceToken, PushBackend.GCM, true);
    }

    @Test
    public void installPushJPushTest() throws Exception {
        String deviceToken = UUID.randomUUID().toString();
        String installationID = UUID.randomUUID().toString();
        this.addMockResponseForInstallPush(201, installationID);
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        Assert.assertNull(api.getInstallationID());
        String result = api.installPush(deviceToken, PushBackend.JPUSH);
        Assert.assertNotNull(api.getInstallationID());
        // verify the result
        Assert.assertEquals(installationID, result);
        // verify the request
        this.assertInstallPushRequest(api, deviceToken, PushBackend.JPUSH, null);
    }

    @Test
    public void installPush400ErrorTest() throws Exception {
        String deviceToken = UUID.randomUUID().toString();
        this.addEmptyMockResponse(400);
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        try {
            api.installPush(deviceToken, PushBackend.GCM);
            Assert.fail("IoTCloudRestException should be thrown");
        } catch (BadRequestException e) {
            Assert.assertEquals(400, e.getStatusCode());
        }
        Assert.assertNull(api.getInstallationID());
        // verify the request
        this.assertInstallPushRequest(api, deviceToken, PushBackend.GCM, null);
    }

    @Test
    public void installPush401ErrorTest() throws Exception {
        String deviceToken = UUID.randomUUID().toString();
        this.addEmptyMockResponse(401);
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        try {
            api.installPush(deviceToken, PushBackend.GCM);
            Assert.fail("IoTCloudRestException should be thrown");
        } catch (UnauthorizedException e) {
            Assert.assertEquals(401, e.getStatusCode());
        }
        Assert.assertNull(api.getInstallationID());
        // verify the request
        this.assertInstallPushRequest(api, deviceToken, PushBackend.GCM, null);
    }

    @Test
    public void installPush404ErrorTest() throws Exception {
        String deviceToken = UUID.randomUUID().toString();
        this.addEmptyMockResponse(404);
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        try {
            api.installPush(deviceToken, PushBackend.GCM);
            Assert.fail("IoTCloudRestException should be thrown");
        } catch (NotFoundException e) {
        }
        Assert.assertNull(api.getInstallationID());
        // verify the request
        this.assertInstallPushRequest(api, deviceToken, PushBackend.GCM, null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void installPushWithNullPushBackendTest() throws Exception {
        String deviceToken = UUID.randomUUID().toString();
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        api.installPush(deviceToken, null);
    }

    @Test
    public void uninstallPushTest() throws Exception {
        String installationID = UUID.randomUUID().toString();
        this.addEmptyMockResponse(204);
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        api.uninstallPush(installationID);
        // verify the request
        this.assertUninstallPushRequest(api, installationID);
    }

    @Test
    public void uninstallPush401ErrorTest() throws Exception {
        String installationID = UUID.randomUUID().toString();
        this.addEmptyMockResponse(401);
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        try {
            api.uninstallPush(installationID);
            Assert.fail("IoTCloudRestException should be thrown");
        } catch (UnauthorizedException e) {
        }
        // verify the request
        this.assertUninstallPushRequest(api, installationID);
    }

    @Test
    public void uninstallPush404ErrorTest() throws Exception {
        String installationID = UUID.randomUUID().toString();
        this.addEmptyMockResponse(404);
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        try {
            api.uninstallPush(installationID);
            Assert.fail("IoTCloudRestException should be thrown");
        } catch (NotFoundException e) {
        }
        // verify the request
        this.assertUninstallPushRequest(api, installationID);
    }

    @Test(expected = IllegalArgumentException.class)
    public void uninstallPushWithNullInstallationIDTest() throws Exception {
        IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
        api.uninstallPush(null);
    }
}
|
iotcloudsdk/src/androidTest/java/com/kii/iotcloud/IoTCloudAPI_PushTest.java
|
package com.kii.iotcloud;
import android.support.test.runner.AndroidJUnit4;
import com.google.gson.JsonObject;
import com.kii.iotcloud.exception.BadRequestException;
import com.kii.iotcloud.exception.NotFoundException;
import com.kii.iotcloud.exception.UnauthorizedException;
import com.squareup.okhttp.mockwebserver.RecordedRequest;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
/**
* http://docs.kii.com/rest/#notification_management-manage_the_user_thing_device_installation-register_a_new_device
* http://docs.kii.com/rest/#notification_management-manage_the_user_thing_device_installation-delete_the_installation
*/
@RunWith(AndroidJUnit4.class)
public class IoTCloudAPI_PushTest extends IoTCloudAPITestBase {
@Test
public void installPushGCMTest() throws Exception {
String deviceToken = UUID.randomUUID().toString();
String installationID = UUID.randomUUID().toString();
this.addMockResponseForInstallPush(201, installationID);
IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
Assert.assertNull(api.getInstallationID());
String result = api.installPush(deviceToken, PushBackend.GCM);
Assert.assertNotNull(api.getInstallationID());
// verify the result
Assert.assertEquals(installationID, result);
// verify the request
RecordedRequest request = this.server.takeRequest(1, TimeUnit.SECONDS);
Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations", request.getPath());
Assert.assertEquals("POST", request.getMethod());
Map<String, String> expectedRequestHeaders = new HashMap<String, String>();
expectedRequestHeaders.put("X-Kii-AppID", APP_ID);
expectedRequestHeaders.put("X-Kii-AppKey", APP_KEY);
expectedRequestHeaders.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
expectedRequestHeaders.put("Content-Type", "application/vnd.kii.InstallationCreationRequest+json");
this.assertRequestHeader(expectedRequestHeaders, request);
JsonObject expectedRequestBody = new JsonObject();
expectedRequestBody.addProperty("installationRegistrationID", deviceToken);
expectedRequestBody.addProperty("deviceType", PushBackend.GCM.getDeviceType());
this.assertRequestBody(expectedRequestBody, request);
}
@Test
public void installPushJPushTest() throws Exception {
String deviceToken = UUID.randomUUID().toString();
String installationID = UUID.randomUUID().toString();
this.addMockResponseForInstallPush(201, installationID);
IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
Assert.assertNull(api.getInstallationID());
String result = api.installPush(deviceToken, PushBackend.JPUSH);
Assert.assertNotNull(api.getInstallationID());
// verify the result
Assert.assertEquals(installationID, result);
// verify the request
RecordedRequest request = this.server.takeRequest(1, TimeUnit.SECONDS);
Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations", request.getPath());
Assert.assertEquals("POST", request.getMethod());
Map<String, String> expectedRequestHeaders = new HashMap<String, String>();
expectedRequestHeaders.put("X-Kii-AppID", APP_ID);
expectedRequestHeaders.put("X-Kii-AppKey", APP_KEY);
expectedRequestHeaders.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
expectedRequestHeaders.put("Content-Type", "application/vnd.kii.InstallationCreationRequest+json");
this.assertRequestHeader(expectedRequestHeaders, request);
JsonObject expectedRequestBody = new JsonObject();
expectedRequestBody.addProperty("installationRegistrationID", deviceToken);
expectedRequestBody.addProperty("deviceType", PushBackend.JPUSH.getDeviceType());
this.assertRequestBody(expectedRequestBody, request);
}
@Test
public void installPush400ErrorTest() throws Exception {
String deviceToken = UUID.randomUUID().toString();
this.addEmptyMockResponse(400);
IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
try {
api.installPush(deviceToken, PushBackend.GCM);
Assert.fail("IoTCloudRestException should be thrown");
} catch (BadRequestException e) {
Assert.assertEquals(400, e.getStatusCode());
}
Assert.assertNull(api.getInstallationID());
// verify the request
RecordedRequest request = this.server.takeRequest(1, TimeUnit.SECONDS);
Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations", request.getPath());
Assert.assertEquals("POST", request.getMethod());
Map<String, String> expectedRequestHeaders = new HashMap<String, String>();
expectedRequestHeaders.put("X-Kii-AppID", APP_ID);
expectedRequestHeaders.put("X-Kii-AppKey", APP_KEY);
expectedRequestHeaders.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
expectedRequestHeaders.put("Content-Type", "application/vnd.kii.InstallationCreationRequest+json");
this.assertRequestHeader(expectedRequestHeaders, request);
JsonObject expectedRequestBody = new JsonObject();
expectedRequestBody.addProperty("installationRegistrationID", deviceToken);
expectedRequestBody.addProperty("deviceType", PushBackend.GCM.getDeviceType());
this.assertRequestBody(expectedRequestBody, request);
}
@Test
public void installPush401ErrorTest() throws Exception {
String deviceToken = UUID.randomUUID().toString();
this.addEmptyMockResponse(401);
IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
try {
api.installPush(deviceToken, PushBackend.GCM);
Assert.fail("IoTCloudRestException should be thrown");
} catch (UnauthorizedException e) {
Assert.assertEquals(401, e.getStatusCode());
}
Assert.assertNull(api.getInstallationID());
// verify the request
RecordedRequest request = this.server.takeRequest(1, TimeUnit.SECONDS);
Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations", request.getPath());
Assert.assertEquals("POST", request.getMethod());
Map<String, String> expectedRequestHeaders = new HashMap<String, String>();
expectedRequestHeaders.put("X-Kii-AppID", APP_ID);
expectedRequestHeaders.put("X-Kii-AppKey", APP_KEY);
expectedRequestHeaders.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
expectedRequestHeaders.put("Content-Type", "application/vnd.kii.InstallationCreationRequest+json");
this.assertRequestHeader(expectedRequestHeaders, request);
JsonObject expectedRequestBody = new JsonObject();
expectedRequestBody.addProperty("installationRegistrationID", deviceToken);
expectedRequestBody.addProperty("deviceType", PushBackend.GCM.getDeviceType());
this.assertRequestBody(expectedRequestBody, request);
}
@Test
public void installPush404ErrorTest() throws Exception {
    // A 404 response from POST /installations must surface as NotFoundException
    // and must leave no installation ID recorded.
    String deviceToken = UUID.randomUUID().toString();
    this.addEmptyMockResponse(404);
    IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
    try {
        api.installPush(deviceToken, PushBackend.GCM);
        Assert.fail("IoTCloudRestException should be thrown");
    } catch (NotFoundException e) {
        // NOTE(review): unlike the 401 test, no status code is asserted here.
        // If NotFoundException exposes getStatusCode(), tighten this — confirm.
    }
    Assert.assertNull(api.getInstallationID());
    // verify the request sent to the mock server (same contract as success path)
    RecordedRequest request = this.server.takeRequest(1, TimeUnit.SECONDS);
    Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations", request.getPath());
    Assert.assertEquals("POST", request.getMethod());
    Map<String, String> expectedRequestHeaders = new HashMap<String, String>();
    expectedRequestHeaders.put("X-Kii-AppID", APP_ID);
    expectedRequestHeaders.put("X-Kii-AppKey", APP_KEY);
    expectedRequestHeaders.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
    expectedRequestHeaders.put("Content-Type", "application/vnd.kii.InstallationCreationRequest+json");
    this.assertRequestHeader(expectedRequestHeaders, request);
    // body carries the device token and backend type
    JsonObject expectedRequestBody = new JsonObject();
    expectedRequestBody.addProperty("installationRegistrationID", deviceToken);
    expectedRequestBody.addProperty("deviceType", PushBackend.GCM.getDeviceType());
    this.assertRequestBody(expectedRequestBody, request);
}
@Test(expected = IllegalArgumentException.class)
public void installPushWithNullPushBackendTest() throws Exception {
    // Passing a null PushBackend must be rejected up front with
    // IllegalArgumentException before any request is made.
    String token = UUID.randomUUID().toString();
    IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
    api.installPush(token, null);
}
@Test
public void uninstallPushTest() throws Exception {
    // Happy path: uninstallPush must issue DELETE /installations/{id}
    // with the app credentials and the owner's bearer token.
    String id = UUID.randomUUID().toString();
    this.addEmptyMockResponse(204);
    IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
    api.uninstallPush(id);
    // verify the request captured by the mock server
    RecordedRequest recorded = this.server.takeRequest(1, TimeUnit.SECONDS);
    Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations/" + id, recorded.getPath());
    Assert.assertEquals("DELETE", recorded.getMethod());
    Map<String, String> headers = new HashMap<String, String>();
    headers.put("X-Kii-AppID", APP_ID);
    headers.put("X-Kii-AppKey", APP_KEY);
    headers.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
    this.assertRequestHeader(headers, recorded);
}
@Test
public void uninstallPush401ErrorTest() throws Exception {
    // A 401 response from DELETE /installations/{id} must surface as
    // UnauthorizedException; the request itself must still be well-formed.
    String installationID = UUID.randomUUID().toString();
    this.addEmptyMockResponse(401);
    IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
    try {
        api.uninstallPush(installationID);
        Assert.fail("IoTCloudRestException should be thrown");
    } catch (UnauthorizedException e) {
        // Consistent with installPush401ErrorTest: pin the HTTP status carried
        // by the exception instead of silently swallowing it.
        Assert.assertEquals(401, e.getStatusCode());
    }
    // verify the request sent to the mock server
    RecordedRequest request = this.server.takeRequest(1, TimeUnit.SECONDS);
    Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations/" + installationID, request.getPath());
    Assert.assertEquals("DELETE", request.getMethod());
    Map<String, String> expectedRequestHeaders = new HashMap<String, String>();
    expectedRequestHeaders.put("X-Kii-AppID", APP_ID);
    expectedRequestHeaders.put("X-Kii-AppKey", APP_KEY);
    expectedRequestHeaders.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
    this.assertRequestHeader(expectedRequestHeaders, request);
}
@Test
public void uninstallPush404ErrorTest() throws Exception {
    // A 404 response from DELETE /installations/{id} must surface as
    // NotFoundException; the outgoing request must still be well-formed.
    String installationID = UUID.randomUUID().toString();
    this.addEmptyMockResponse(404);
    IoTCloudAPI api = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
    try {
        api.uninstallPush(installationID);
        Assert.fail("IoTCloudRestException should be thrown");
    } catch (NotFoundException e) {
        // NOTE(review): the exception is intentionally only type-checked here;
        // assert the status code too if NotFoundException exposes it — confirm.
    }
    // verify the request sent to the mock server
    RecordedRequest request = this.server.takeRequest(1, TimeUnit.SECONDS);
    Assert.assertEquals(KII_CLOUD_BASE_PATH + "/installations/" + installationID, request.getPath());
    Assert.assertEquals("DELETE", request.getMethod());
    Map<String, String> expectedRequestHeaders = new HashMap<String, String>();
    expectedRequestHeaders.put("X-Kii-AppID", APP_ID);
    expectedRequestHeaders.put("X-Kii-AppKey", APP_KEY);
    expectedRequestHeaders.put("Authorization", "Bearer " + api.getOwner().getAccessToken());
    this.assertRequestHeader(expectedRequestHeaders, request);
}
@Test(expected = IllegalArgumentException.class)
public void uninstallPushWithNullInstallationIDTest() throws Exception {
    // A null installation ID must be rejected with IllegalArgumentException
    // before any request is made.
    IoTCloudAPI target = this.craeteIoTCloudAPIWithDemoSchema(APP_ID, APP_KEY);
    target.uninstallPush(null);
}
}
|
Add test case for installPush with development flag.
|
iotcloudsdk/src/androidTest/java/com/kii/iotcloud/IoTCloudAPI_PushTest.java
|
Add test case for installPush with development flag.
|
|
Java
|
mit
|
2f8bf5054c0e39841c2c128a136af3dd9d5993c4
| 0
|
WOLFI3654/Minopoly
|
package de.wolfi.minopoly.components;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.metadata.FixedMetadataValue;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import de.wolfi.minopoly.Main;
import de.wolfi.minopoly.components.fields.Field;
import de.wolfi.minopoly.utils.DisguiseManager;
import de.wolfi.minopoly.utils.FigureType;
import de.wolfi.minopoly.utils.MapFactory;
import de.wolfi.minopoly.utils.Messages;
import de.wolfi.minopoly.utils.TeleportCause;
/**
 * A Minopoly participant: wraps the underlying Bukkit player ("hook") and
 * tracks per-game state — the board field the player stands on, the bank
 * account, and the figure entity representing the player on the board.
 */
public class Player {

    private final Minopoly game;
    private final org.bukkit.entity.Player hook;
    // Current board field; advanced field-by-field in move().
    private Field location;
    // NOTE(review): never assigned anywhere in this class — addMoney/removeMoney
    // would NPE unless it is injected externally. Confirm initialization.
    private Bank money;
    // Entity currently spawned as this player's board figure (null until spawned).
    private Entity tmp;
    private final FigureType type;

    public Player(org.bukkit.entity.Player hook, FigureType t, Minopoly game) {
        this.hook = hook;
        this.type = t;
        this.game = game;
    }

    /** Adds money with the default reason "Grundlos". */
    public void addMoney(int amount) {
        this.addMoney(amount, "Grundlos");
    }

    /** Credits the account and notifies the player with the given reason. */
    public void addMoney(int amount, String reason) {
        this.money.addMoney(this, amount);
        Messages.MONEY_GAIN.send(this.hook, String.valueOf(amount), reason);
    }

    /** Two players are equal iff they play the same figure. */
    @Override
    public boolean equals(Object compare) {
        if (!(compare instanceof Player))
            return false;
        return this.getFigure() == ((Player) compare).getFigure();
    }

    /**
     * Added to keep the equals/hashCode contract (equals was overridden
     * without hashCode): hash solely on the figure, matching equals().
     */
    @Override
    public int hashCode() {
        return this.type == null ? 0 : this.type.hashCode();
    }

    /** Player name plus figure name, e.g. {@code "Alice(Dog)"}. */
    public String getDisplay() {
        return this.getName() + "(" + this.getFigure().getName() + ")";
    }

    public FigureType getFigure() {
        return this.type;
    }

    protected org.bukkit.entity.Player getHook() {
        return this.hook;
    }

    public String getName() {
        return this.hook.getName();
    }

    /**
     * Moves the figure {@code amount} fields forward, one field per second.
     * The pacing loop runs asynchronously; every board mutation is hopped back
     * onto the main server thread via runTask because the Bukkit API is not
     * safe to call from other threads. Intermediate fields receive byPass(),
     * and the final field receives playerStand() after a short delay.
     */
    public void move(int amount) {
        Messages.MOVE_STARTED.broadcast(this.getDisplay(), Integer.toString(amount));
        Bukkit.getScheduler().runTaskAsynchronously(Main.getMain(), () -> {
            for (int i = 0; i < amount; i++) {
                final int currentNumber = i;
                Bukkit.getScheduler().runTask(Main.getMain(), () -> {
                    Player.this.location = Player.this.game.getFieldManager().getNextField(Player.this.location);
                    // Only intermediate fields are "passed by"; the last one is
                    // handled with playerStand() below.
                    if (currentNumber < amount - 1) Player.this.location.byPass(Player.this);
                    Player.this.spawnFigure();
                });
                try {
                    Thread.sleep(1000);
                } catch (final InterruptedException e) {
                    e.printStackTrace();
                }
            }
            Messages.MOVE_FINISHED.broadcast(Player.this.getDisplay());
            try {
                Thread.sleep(700);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            Player.this.location.playerStand(Player.this);
        });
    }

    /** Debits the account and notifies the player with the given reason. */
    public void removeMoney(int amount, String reason) {
        this.money.removeMoney(this, amount);
        Messages.MONEY_PAYD.send(this.hook, String.valueOf(amount), reason);
    }

    /** Clears the inventory and hands the player the game's map item. */
    public void setInventory() {
        this.hook.getInventory().clear();
        this.hook.sendMap(MapFactory.getMap(this.game, this.hook));
    }

    /**
     * (Re)spawns the figure entity at the current field: removes any previous
     * figure, names the new entity after the player, tags it with PlayerNPC
     * metadata and slows living entities so they stay put.
     */
    private void spawnFigure() {
        if (this.tmp != null)
            this.tmp.remove();
        final Entity e = this.location.getWorld().spawnEntity(this.location.getLocation(), this.type.getEntityType());
        e.setCustomName(this.hook.getName());
        e.setCustomNameVisible(true);
        e.setMetadata("PlayerNPC", new FixedMetadataValue(Main.getMain(), this));
        if (e instanceof LivingEntity)
            ((LivingEntity) e).addPotionEffect(new PotionEffect(PotionEffectType.SLOW, 20 * 60 * 60, 20));
        this.tmp = e;
    }

    /**
     * Teleports the player and adjusts figure/disguise state depending on the
     * cause: entering a minigame disguises the player and despawns the board
     * figure; leaving vanishes the disguise and respawns the figure.
     */
    public void teleport(Location to, TeleportCause cause) {
        this.hook.teleport(to);
        if (cause == TeleportCause.MINIGAME_STARTED) {
            DisguiseManager.disguise(this.hook, this.type);
            if (this.tmp != null)
                this.tmp.remove();
        } else if (cause == TeleportCause.MINIGAME_END) {
            DisguiseManager.vanish(this.hook);
            this.spawnFigure();
        } else if (cause == TeleportCause.MINIGAME_ACTION)
            Messages.TELEPORT.send(this.hook);
    }

    /** Receives {@code amount} from {@code player} (credits only this side). */
    public void transferMoneyFrom(Player player, int amount, String reason) {
        this.money.addMoney(this, amount);
        Messages.MONEY_TRANSFER_GAIN.send(this.hook, String.valueOf(amount), player.getDisplay(), reason);
    }

    /** Sends {@code amount} to {@code player} (debits only this side). */
    public void transferMoneyTo(Player player, int amount, String reason) {
        this.money.removeMoney(this, amount);
        Messages.MONEY_TRANSFER_SENT.send(this.hook, String.valueOf(amount), player.getDisplay(), reason);
    }
}
|
src/de/wolfi/minopoly/components/Player.java
|
package de.wolfi.minopoly.components;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.metadata.FixedMetadataValue;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import de.wolfi.minopoly.Main;
import de.wolfi.minopoly.components.fields.Field;
import de.wolfi.minopoly.utils.DisguiseManager;
import de.wolfi.minopoly.utils.FigureType;
import de.wolfi.minopoly.utils.MapFactory;
import de.wolfi.minopoly.utils.Messages;
import de.wolfi.minopoly.utils.TeleportCause;
public class Player {
private final Minopoly game;
private final org.bukkit.entity.Player hook;
private Field location;
private Bank money;
private Entity tmp;
private final FigureType type;
public Player(org.bukkit.entity.Player hook, FigureType t, Minopoly game) {
this.hook = hook;
this.type = t;
this.game = game;
}
public void addMoney(int amount) {
this.addMoney(amount, "Grundlos");
}
public void addMoney(int amount, String reason) {
this.money.addMoney(this, amount);
Messages.MONEY_GAIN.send(this.hook, String.valueOf(amount), reason);
}
@Override
public boolean equals(Object compare) {
if (!(compare instanceof Player))
return false;
return this.getFigure() == ((Player) compare).getFigure();
}
private String getDisplay() {
return this.getName() + "(" + this.getFigure().getName() + ")";
}
public FigureType getFigure() {
return this.type;
}
protected org.bukkit.entity.Player getHook() {
return this.hook;
}
public String getName() {
return this.hook.getName();
}
public void move(int amount) {
Bukkit.getScheduler().runTaskAsynchronously(Main.getMain(), () -> {
for (int i = 0; i < amount; i++) {
Bukkit.getScheduler().runTask(Main.getMain(), () -> {
Player.this.location = Player.this.game.getFieldManager().getNextField(Player.this.location);
Player.this.spawnFigure();
});
try {
Thread.sleep(1000);
} catch (final InterruptedException e) {
e.printStackTrace();
}
}
});
}
public void removeMoney(int amount, String reason) {
this.money.removeMoney(this, amount);
Messages.MONEY_PAYD.send(this.hook, String.valueOf(amount), reason);
}
public void setInventory() {
this.hook.getInventory().clear();
this.hook.sendMap(MapFactory.getMap(this.game, this.hook));
}
private void spawnFigure() {
if (this.tmp != null)
this.tmp.remove();
final Entity e = this.location.getWorld().spawnEntity(this.location.getLocation(), this.type.getEntityType());
e.setCustomName(this.hook.getName());
e.setCustomNameVisible(true);
e.setMetadata("PlayerNPC", new FixedMetadataValue(Main.getMain(), this));
if (e instanceof LivingEntity)
((LivingEntity) e).addPotionEffect(new PotionEffect(PotionEffectType.SLOW, 20 * 60 * 60, 20));
this.tmp = e;
}
public void teleport(Location to, TeleportCause cause) {
this.hook.teleport(to);
if (cause == TeleportCause.MINIGAME_STARTED) {
DisguiseManager.disguise(this.hook, this.type);
if (this.tmp != null)
this.tmp.remove();
} else if (cause == TeleportCause.MINIGAME_END) {
DisguiseManager.vanish(this.hook);
this.spawnFigure();
} else if (cause == TeleportCause.MINIGAME_ACTION)
Messages.TELEPORT.send(this.hook);
}
public void transferMoneyFrom(Player player, int amount, String reason) {
this.money.addMoney(this, amount);
Messages.MONEY_TRANSFER_GAIN.send(this.hook, String.valueOf(amount), player.getDisplay(), reason);
}
public void transferMoneyTo(Player player, int amount, String reason) {
this.money.removeMoney(this, amount);
Messages.MONEY_TRANSFER_SENT.send(this.hook, String.valueOf(amount), player.getDisplay(), reason);
}
}
|
Updated move Method
|
src/de/wolfi/minopoly/components/Player.java
|
Updated move Method
|
|
Java
|
mit
|
6e7720f950ccd19fd1e775d068daefeb43a11330
| 0
|
mzmine/mzmine3,mzmine/mzmine3
|
/*
* Copyright 2006-2008 The MZmine Development Team
*
* This file is part of MZmine.
*
* MZmine is free software; you can redistribute it and/or modify it under the
* terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2 of the License, or (at your option) any later
* version.
*
* MZmine is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* MZmine; if not, write to the Free Software Foundation, Inc., 51 Franklin St,
* Fifth Floor, Boston, MA 02110-1301 USA
*/
package net.sf.mzmine.modules.peakpicking.threestep.massdetection;
import net.sf.mzmine.data.ChromatographicPeak;
import net.sf.mzmine.data.DataPoint;
import net.sf.mzmine.data.MzPeak;
import net.sf.mzmine.data.PeakStatus;
import net.sf.mzmine.data.RawDataFile;
import net.sf.mzmine.util.Range;
/**
*
* This class is only used for visualization terms by MassDetectorSetupDialog.
*
*/
/**
 * Stub ChromatographicPeak used only by MassDetectorSetupDialog for preview
 * rendering: it exposes a single MzPeak at a single scan number and returns
 * neutral values (0 / null) for everything else.
 */
class FakePeak implements ChromatographicPeak {

    private MzPeak peak;
    private int scan;

    /**
     * @param scanNumber scan the peak belongs to
     * @param mzPeak     the detected m/z peak to expose
     */
    public FakePeak(int scanNumber, MzPeak mzPeak) {
        this.peak = mzPeak;
        this.scan = scanNumber;
    }

    public float getArea() {
        return 0;
    }

    public RawDataFile getDataFile() {
        return null;
    }

    /** Always returns the wrapped peak, regardless of the requested scan. */
    public MzPeak getMzPeak(int scanNumber) {
        return peak;
    }

    public float getHeight() {
        return 0;
    }

    public float getMZ() {
        return 0;
    }

    public PeakStatus getPeakStatus() {
        return null;
    }

    public float getRT() {
        return 0;
    }

    public DataPoint[] getRawDataPoints(int scanNumber) {
        return null;
    }

    /** Single-point range at the wrapped peak's intensity. */
    public Range getRawDataPointsIntensityRange() {
        return new Range(peak.getIntensity());
    }

    /** Single-point range at the wrapped peak's m/z. */
    public Range getRawDataPointsMZRange() {
        return new Range(peak.getMZ());
    }

    public Range getRawDataPointsRTRange() {
        return null;
    }

    /** The one scan this fake peak lives in. */
    public int[] getScanNumbers() {
        return new int[] { scan };
    }
}
|
src/net/sf/mzmine/modules/peakpicking/threestep/massdetection/FakePeak.java
|
/*
* Copyright 2006-2008 The MZmine Development Team
*
* This file is part of MZmine.
*
* MZmine is free software; you can redistribute it and/or modify it under the
* terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2 of the License, or (at your option) any later
* version.
*
* MZmine is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* MZmine; if not, write to the Free Software Foundation, Inc., 51 Franklin St,
* Fifth Floor, Boston, MA 02110-1301 USA
*/
package net.sf.mzmine.modules.peakpicking.threestep.massdetection;
import net.sf.mzmine.data.ChromatographicPeak;
import net.sf.mzmine.data.DataPoint;
import net.sf.mzmine.data.MzPeak;
import net.sf.mzmine.data.PeakStatus;
import net.sf.mzmine.data.RawDataFile;
import net.sf.mzmine.util.Range;
/**
*
* This class is only used for visualization terms by MassDetectorSetupDialog.
*
*/
class FakePeak implements ChromatographicPeak {
private MzPeak mzPeak;
private int scanNumber;
/**
*
* @param scanNumber
* @param datapoint
*/
public FakePeak(int scanNumber, MzPeak mzPeak) {
this.mzPeak = mzPeak;
this.scanNumber = scanNumber;
}
public float getArea() {
return 0;
}
public RawDataFile getDataFile() {
return null;
}
public MzPeak getMzPeak(int scanNumber) {
return mzPeak;
}
public float getHeight() {
return 0;
}
public float getMZ() {
return 0;
}
public PeakStatus getPeakStatus() {
return null;
}
public float getRT() {
return 0;
}
public DataPoint[] getRawDataPoints(int scanNumber) {
return null;
}
public Range getRawDataPointsIntensityRange() {
return null;
}
public Range getRawDataPointsMZRange() {
return null;
}
public Range getRawDataPointsRTRange() {
return null;
}
public int[] getScanNumbers() {
int scanNumbers[] = { scanNumber };
return scanNumbers;
}
}
|
Minor bug in fake peak used in mass detector preview
|
src/net/sf/mzmine/modules/peakpicking/threestep/massdetection/FakePeak.java
|
Minor bug in fake peak used in mass detector preview
|
|
Java
|
epl-1.0
|
36af02c9dccf19a2fdc0e7d841d9cd8d084bb92e
| 0
|
Yakindu/statecharts,Yakindu/statecharts,Yakindu/statecharts,Yakindu/statecharts,Yakindu/statecharts,Yakindu/statecharts,Yakindu/statecharts
|
/**
* Copyright (c) 2018 committers of YAKINDU and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
* Contributors:
* committers of YAKINDU - initial API and implementation
*
*/
package org.yakindu.sct.commons;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.Path;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EClass;
public class EMFHelper {

    /**
     * Flattens the inheritance hierarchy of {@code eClass} into one list,
     * level by level: the class itself first, then each layer of supertypes
     * in turn (duplicates are kept, as produced by the recursive traversal).
     */
    public static List<EClass> getAllSuperClasses(EClass eClass) {
        List<List<EClass>> levels = new ArrayList<>();
        getSuperClassTree(levels, eClass, 0);
        List<EClass> flattened = new ArrayList<>();
        levels.forEach(flattened::addAll);
        return flattened;
    }

    /**
     * Records {@code eClass} in the bucket for {@code depth} and recurses into
     * its direct supertypes one level deeper, growing the bucket list on demand.
     */
    protected static void getSuperClassTree(List<List<EClass>> superClasses, EClass eClass, int depth) {
        if (superClasses.size() <= depth) {
            superClasses.add(depth, new ArrayList<>());
        }
        superClasses.get(depth).add(eClass);
        for (EClass superType : eClass.getESuperTypes()) {
            getSuperClassTree(superClasses, superType, depth + 1);
        }
    }

    /** Resolves a platform-resource EMF URI to the corresponding workspace IFile. */
    public static IFile getIFileFromEMFUri(URI uri) {
        String platformPath = uri.toPlatformString(true);
        return ResourcesPlugin.getWorkspace().getRoot().getFile(new Path(platformPath));
    }
}
|
plugins/org.yakindu.sct.commons/src/org/yakindu/sct/commons/EMFHelper.java
|
package org.yakindu.sct.commons;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.emf.ecore.EClass;
public class EMFHelper {
public static List<EClass> getAllSuperClasses(EClass eClass) {
List<List<EClass>> superClasses = new ArrayList<>();
getSuperClassTree(superClasses, eClass, 0);
List<EClass> result = new ArrayList<>();
for (List<EClass> list : superClasses) {
result.addAll(list);
}
return result;
}
protected static void getSuperClassTree(List<List<EClass>> superClasses, EClass eClass, int depth) {
if (superClasses.size() <= depth) {
superClasses.add(depth, new ArrayList<>());
}
List<EClass> superTypes = eClass.getESuperTypes();
superClasses.get(depth).add(eClass);
for (EClass superType : superTypes) {
getSuperClassTree(superClasses, superType, depth + 1);
}
}
}
|
Add copyright header and URI helper func
|
plugins/org.yakindu.sct.commons/src/org/yakindu/sct/commons/EMFHelper.java
|
Add copyright header and URI helper func
|
|
Java
|
epl-1.0
|
f0bd5b3f2935a1f6286c8081110b5a0467c2893b
| 0
|
jesusc/anatlyzer,jesusc/anatlyzer,jesusc/anatlyzer
|
package anatlyzer.atl.graph;
import java.util.HashSet;
import java.util.Set;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import anatlyzer.atl.errors.AnalysisResult;
import anatlyzer.atl.errors.Problem;
import anatlyzer.atl.errors.atl_error.BindingExpectedOneAssignedMany;
import anatlyzer.atl.errors.atl_error.BindingPossiblyUnresolved;
import anatlyzer.atl.errors.atl_error.BindingWithResolvedByIncompatibleRule;
import anatlyzer.atl.errors.atl_error.BindingWithoutRule;
import anatlyzer.atl.errors.atl_error.FeatureNotFound;
import anatlyzer.atl.errors.atl_error.FlattenOverNonNestedCollection;
import anatlyzer.atl.errors.atl_error.LocalProblem;
import anatlyzer.atl.errors.atl_error.NoBindingForCompulsoryFeature;
import anatlyzer.atl.errors.atl_error.OperationNotFound;
import anatlyzer.atl.model.ATLModel;
import anatlyzer.atl.model.ErrorModel;
import anatlyzer.atl.model.TypingModel;
import anatlyzer.atlext.ATL.Binding;
import anatlyzer.atlext.ATL.ContextHelper;
import anatlyzer.atlext.ATL.Helper;
import anatlyzer.atlext.ATL.MatchedRule;
import anatlyzer.atlext.ATL.Rule;
import anatlyzer.atlext.ATL.RuleResolutionInfo;
import anatlyzer.atlext.ATL.RuleWithPattern;
import anatlyzer.atlext.ATL.SimpleOutPatternElement;
import anatlyzer.atlext.ATL.StaticHelper;
import anatlyzer.atlext.ATL.StaticRule;
import anatlyzer.atlext.OCL.IfExp;
import anatlyzer.atlext.OCL.LetExp;
import anatlyzer.atlext.OCL.LoopExp;
import anatlyzer.atlext.OCL.OclExpression;
import anatlyzer.atlext.OCL.PropertyCallExp;
import anatlyzer.atlext.OCL.VariableExp;
/**
* Given an error model, generates the "path" in terms
* of types that are needed to reach each error.
*
* @author jesus
*
*/
public class ErrorPathGenerator {
private ErrorModel errors;
private TypingModel typ;
private ATLModel atlModel;
private ProblemGraph graph;
private ProblemPath currentPath;
/** Captures the analysed ATL model together with its error and typing sub-models. */
public ErrorPathGenerator(ATLModel atlModel) {
    this.errors = atlModel.getErrors();
    this.typ = atlModel.getTyping();
    this.atlModel = atlModel;
}

/**
 * Builds a ProblemGraph with one ProblemPath per local problem in the
 * analysis result. Problems with no dedicated path generator yield null
 * from generatePath() and are skipped with a warning; non-local problems
 * are not supported.
 */
public ProblemGraph perform() {
    // Local variable intentionally shadows the 'graph' field of this class.
    ProblemGraph graph = new ProblemGraph();
    AnalysisResult r = this.errors.getAnalysis();
    for(Problem p : r.getProblems()) {
        if ( p instanceof LocalProblem ) {
            ProblemPath path = generatePath((LocalProblem) p);
            if ( path != null )
                graph.addProblemPath(path);
            else
                System.err.println("ErrorPathGenerator: Ignored " + p.getClass().getSimpleName());
        } else {
            throw new UnsupportedOperationException();
        }
    }
    return graph;
}
/*
public ProblemPath perform(Problem p) {
currentGraph = new ProblemPath();
generatePath((LocalProblem) p);
return currentGraph;
}
*/
/**
 * Dispatches on the concrete LocalProblem subtype to the matching
 * generatePath_* method, which populates the {@code currentPath} field.
 * Returns null for problem kinds with no generator (caller logs and skips).
 */
public ProblemPath generatePath(LocalProblem p) {
    currentPath = null;
    if ( p instanceof NoBindingForCompulsoryFeature ) {
        generatePath_NoBindingForCompulsoryFeature((NoBindingForCompulsoryFeature) p);
    // These two are very similar
    } else if ( p instanceof BindingExpectedOneAssignedMany ) {
        generatePath_BindingExpectedOneAssignedMany((BindingExpectedOneAssignedMany) p);
    } else if ( p instanceof BindingWithoutRule ) {
        generatePath_BindingWithoutRule((BindingWithoutRule) p);
    } else if ( p instanceof BindingWithResolvedByIncompatibleRule ) {
        generatePath_BindingWithResolvedByIncompatibleRule((BindingWithResolvedByIncompatibleRule) p);
    } else if ( p instanceof BindingPossiblyUnresolved ) {
        generatePath_BindingPossiblyUnresolved((BindingPossiblyUnresolved) p);
    } else if ( p instanceof FeatureNotFound ) {
        generatePath_FeatureNotFound((FeatureNotFound) p);
    } else if ( p instanceof OperationNotFound ) {
        generatePath_OperationNotFound((OperationNotFound) p);
    } else if ( p instanceof FlattenOverNonNestedCollection ) {
        generatePath_FlattenOverNonNestedCollection((FlattenOverNonNestedCollection) p);
    }
    return currentPath;
}
/** Path for a missing feature: root the path at the offending navigation and walk back from its source. */
private void generatePath_FeatureNotFound(FeatureNotFound p) {
    PropertyCallExp atlExpr = (PropertyCallExp) p.getElement();
    FeatureOrOperationNotFoundNode node = new FeatureOrOperationNotFoundNode(p, atlExpr);
    currentPath = new ProblemPath(p, node);
    // System.out.println(p);
    pathFromErrorExpression(atlExpr.getSource(), node);
}

/** Path for a missing operation: same shape as the FeatureNotFound case. */
private void generatePath_OperationNotFound(OperationNotFound p) {
    PropertyCallExp atlExpr = (PropertyCallExp) p.getElement();
    FeatureOrOperationNotFoundNode node = new FeatureOrOperationNotFoundNode(p, atlExpr);
    currentPath = new ProblemPath(p, node);
    pathFromErrorExpression(atlExpr.getSource(), node);
}

/** Path for flatten() over a non-nested collection; note it walks from the call itself, not its source. */
private void generatePath_FlattenOverNonNestedCollection(FlattenOverNonNestedCollection p) {
    PropertyCallExp atlExpr = (PropertyCallExp) p.getElement();
    FlattenOverNonNestedCollectionNode node = new FlattenOverNonNestedCollectionNode(p, atlExpr);
    currentPath = new ProblemPath(p, node);
    pathFromErrorExpression(atlExpr, node);
}
/**
 * Binding problems share one recipe: root a problem node at the binding,
 * trace the path to the rule owning the binding's out-pattern, then add the
 * rule-resolution constraint path for the binding's value.
 */
private void generatePath_BindingWithoutRule(BindingWithoutRule p) {
    Binding atlBinding = (Binding) p.getElement();
    ProblemNode node = new BindingWithoutRuleNode(p, atlBinding, atlModel);
    currentPath = new ProblemPath(p, node);
    Rule rule = atlBinding.getOutPatternElement().getOutPattern().getRule();
    pathToRule(rule, node, new TraversedSet(), false);
    pathToBinding(atlBinding, node, new TraversedSet());
}

/** Same recipe as above, with a BindingExpectedOneAssignedMany problem node. */
private void generatePath_BindingExpectedOneAssignedMany(BindingExpectedOneAssignedMany p) {
    Binding atlBinding = (Binding) p.getElement();
    ProblemNode node = new BindingExpectedOneAssignedManyNode(p, atlBinding);
    currentPath = new ProblemPath(p, node);
    Rule rule = atlBinding.getOutPatternElement().getOutPattern().getRule();
    pathToRule(rule, node, new TraversedSet(), false);
    pathToBinding(atlBinding, node, new TraversedSet());
}

/** Same recipe, with a BindingPossiblyUnresolved problem node. */
private void generatePath_BindingPossiblyUnresolved(BindingPossiblyUnresolved p) {
    Binding atlBinding = (Binding) p.getElement();
    ProblemNode node = new BindingPossiblyUnresolvedNode(p, atlBinding, atlModel);
    currentPath = new ProblemPath(p, node);
    Rule rule = atlBinding.getOutPatternElement().getOutPattern().getRule();
    pathToRule(rule, node, new TraversedSet(), false);
    pathToBinding(atlBinding, node, new TraversedSet());
}

/** Same recipe, with a BindingWithResolvedByIncompatibleRule problem node. */
private void generatePath_BindingWithResolvedByIncompatibleRule(BindingWithResolvedByIncompatibleRule p) {
    Binding atlBinding = (Binding) p.getElement();
    ProblemNode node = new BindingWithResolvedByIncompatibleRuleNode(p, atlBinding, atlModel);
    currentPath = new ProblemPath(p, node);
    Rule rule = atlBinding.getOutPatternElement().getOutPattern().getRule();
    pathToRule(rule, node, new TraversedSet(), false);
    pathToBinding(atlBinding, node, new TraversedSet());
}

/** A compulsory feature with no binding: the path only reaches the enclosing rule — there is no binding to constrain. */
private void generatePath_NoBindingForCompulsoryFeature(NoBindingForCompulsoryFeature p) {
    ProblemNode node = new NoBindingAssignmentNode(p);
    currentPath = new ProblemPath(p, node);
    SimpleOutPatternElement op = (SimpleOutPatternElement) p.getElement();
    Rule rule = op.getOutPattern().getRule();
    pathToRule(rule, node, new TraversedSet(), false);
}
//
// End-of errors
//
/**
 * Climbs the containment tree from {@code start} until a control-flow element
 * (rule, helper, if, let) is found, inserting a LoopNode for every enclosing
 * iteration crossed on the way, and then delegates to the element-specific
 * path builder. Returns whether the dependency can actually be executed.
 */
private boolean pathToControlFlow(EObject start, DependencyNode node, TraversedSet traversed) {
    EObject lastParent = (EObject) start;
    EObject parent = start.eContainer();
    while ( ! isControlFlowElement(parent) ) {
        if ( isIteration(parent) ) {
            LoopExp exp = (LoopExp) parent;
            LoopNode loop = new LoopNode(exp.getSource(), exp.getIterators().get(0));
            node.addDependency(loop);
            // ?? Different from the pathToCall
            return checkReachesExecution(pathToControlFlow((OclExpression) parent, loop, traversed), loop);
        }
        lastParent = parent;
        parent = parent.eContainer();
    }
    // Dispatch on the kind of control-flow container found.
    if ( parent instanceof Rule ) {
        return pathToRule((Rule) parent, node, traversed, false);
    } else if ( parent instanceof Helper ){
        return pathToHelper((Helper) parent, node, traversed);
    } else if ( parent instanceof IfExp ) {
        return pathToIfExpr((IfExp) parent, (OclExpression) lastParent, node, traversed);
    } else if ( parent instanceof LetExp ) {
        return pathToLetExpr((LetExp) parent, lastParent, node, traversed);
    } else {
        throw new UnsupportedOperationException();
    }
}

/** Records the reachability result on the node and passes it through. */
private boolean checkReachesExecution(boolean depReachability, DependencyNode current) {
    current.setLeadsToExecution(depReachability);
    return depReachability;
}

/** True for OCL loop expressions (iterate/collect/select/...). */
private boolean isIteration(EObject element) {
    return element instanceof LoopExp;
}

/** Elements at which the upward climb in pathToControlFlow stops. */
public boolean isControlFlowElement(EObject element) {
    return (element instanceof Rule) || (element instanceof Helper) ||
           (element instanceof IfExp || (element instanceof LetExp ));
}
/**
 * Starts a path at the erroneous expression. A bare variable reference is
 * not wrapped in its own node (a variable name trivially denotes an existing
 * object); anything else gets a DelimitedExpressionNode first.
 */
private void pathFromErrorExpression(OclExpression start, DependencyNode depNode) {
    TraversedSet traversed = new TraversedSet();
    // Handle the special case "varName.error", to avoid generating a node with just "varName" which
    // is redundant (variable names represents objects that must exist)
    if ( start instanceof VariableExp ) {
        VariableExp v = (VariableExp) start;
        // thisModule references are not supported as a path start.
        if ( v.getReferredVariable().getVarName().equals("thisModule") ) throw new UnsupportedOperationException();
        pathToControlFlow(start.eContainer(), depNode, traversed);
    } else {
        DelimitedExpressionNode node = new DelimitedExpressionNode(start);
        depNode.addDependency(node);
        pathToControlFlow(start, node, traversed);
    }
}

/** Wraps a call site in a CallExprNode and continues the climb from there. */
private boolean pathForCall(OclExpression call, DependencyNode depNode, TraversedSet traversed) {
    CallExprNode node = new CallExprNode((PropertyCallExp) call, atlModel);
    depNode.addDependency(node);
    return checkReachesExecution(pathToControlFlow(call, node, traversed), node);
}
/**
 * Path through a let expression. If the error comes from the variable's init
 * expression, climb straight past the let; otherwise collapse a chain of
 * nested lets into a single LetScopeNode for the outermost one and continue
 * from its container.
 */
private boolean pathToLetExpr(LetExp start, EObject childToLet, DependencyNode node, TraversedSet traversed) {
    // The error comes to the variable declaration
    if ( start.getVariable() == childToLet ) {
        // ExpressionAnnotation ann = (ExpressionAnnotation) typ.getAnnotation(childToLet.original_());
        // DelimitedExpressionNode newNode = new DelimitedExpressionNode(start.getVariable().getInitExpression(), ann);
        // return checkReachesExecution(pathToControlFlow(childToLet, newNode, traversed), newNode);
        return checkReachesExecution(pathToControlFlow(start, node, traversed), node);
    }
    // Skip upward over any directly nested let expressions.
    EObject lastParent = (EObject) start;
    EObject parent = start.eContainer();
    while ( parent instanceof LetExp ) {
        lastParent = parent;
        parent = parent.eContainer();
    }
    LetScopeNode newNode = new LetScopeNode((LetExp) lastParent);
    node.addDependency(newNode);
    return checkReachesExecution(pathToControlFlow(lastParent.eContainer(), newNode, traversed), newNode);
}

/**
 * Path through an if expression: record which branch the error sits in
 * (then/else) as a ConditionalNode; an error inside the condition itself
 * needs no branch node and just continues the climb.
 */
private boolean pathToIfExpr(IfExp expr, OclExpression directChild, DependencyNode node, TraversedSet traversed) {
    boolean branch;
    if ( expr.getThenExpression() == directChild ) {
        branch = ConditionalNode.TRUE_BRANCH;
    } else if ( expr.getElseExpression() == directChild ){
        branch = ConditionalNode.FALSE_BRANCH;
    } else {
        // must be the condition
        return pathToControlFlow(expr, node, traversed);
    }
    ConditionalNode newNode = new ConditionalNode(expr, branch);
    node.addDependency(newNode);
    return checkReachesExecution(pathToControlFlow(expr, newNode, traversed), newNode);
}
/**
 * Adds the rule-resolution constraint of a binding: every rule that may
 * resolve the binding's value contributes a (constraint) path of its own.
 */
private void pathToBinding(Binding atlBinding , ProblemNode node, TraversedSet traversed) {
    RuleResolutionNode resolutionNode = new RuleResolutionNode(atlBinding);
    node.addConstraint(resolutionNode);
    for(RuleResolutionInfo rr : atlBinding.getResolvedBy()) {
        pathToRule(rr.getRule(), resolutionNode, traversed, true);
    }
}

/** Dispatches to the matched-rule or imperative-rule path builder. */
private boolean pathToRule(Rule rule, DependencyNode dependent, TraversedSet traversed, boolean isConstraint) {
    if ( rule instanceof MatchedRule ) {
        return pathToMatchedRuleOne((MatchedRule) rule, dependent, isConstraint);
    } else if ( rule instanceof StaticRule ) {
        return pathToImperativeRule((StaticRule) rule, dependent, traversed);
    } else {
        throw new UnsupportedOperationException(rule.getClass().getName());
    }
}
/**
 * Path to an imperative (called/lazy) rule: reachable iff at least one of its
 * call sites is reachable. TraversedSet guards against call cycles.
 */
private boolean pathToImperativeRule(StaticRule r, DependencyNode dependent, TraversedSet traversed) {
    if ( ! traversed.addImperativeRule(r) ) {
        // Already visited on this path: cycle — stop without re-expanding.
        return false;
    }
    boolean leadsToExecution = false;
    ImperativeRuleExecution node = new ImperativeRuleExecution(r);
    dependent.addDependency(node);
    for(PropertyCallExp expr : r.getCalledBy()) {
        if ( pathForCall(expr, node, traversed) ) {
            leadsToExecution = true;
        }
    }
    node.setLeadsToExecution(leadsToExecution);
    return leadsToExecution;
}

/**
 * Path to a helper: reachable iff some call site of it is reachable.
 * Static helpers use their direct call sites; context helpers use the
 * polymorphic call sites. TraversedSet guards against recursion cycles.
 */
private boolean pathToHelper(Helper h, DependencyNode depNode, TraversedSet traversed) {
    if ( ! traversed.addHelper(h) )
        return false;
    HelperInvocationNode hNode = new HelperInvocationNode(h);
    depNode.addDependency(hNode);
    boolean leadsToExecution = false;
    if ( h instanceof StaticHelper ) {
        EList<PropertyCallExp> callers = h.getCalledBy();
        for (PropertyCallExp expr : callers) {
            if ( pathForCall(expr, hNode, traversed) ) {
                leadsToExecution = true;
            }
        }
        // throw new UnsupportedOperationException(h.getLocation());
    } else if ( h instanceof ContextHelper ) {
        EList<PropertyCallExp> callers = ((ContextHelper) h).getPolymorphicCalledBy();
        for (PropertyCallExp expr : callers) {
            if ( pathForCall(expr, hNode, traversed) ) {
                leadsToExecution = true;
            }
        }
        //pathToExpression(expr, annotation, depNode)
        //System.out.println(callers.size());
    } else {
        throw new UnsupportedOperationException();
    }
    hNode.setLeadsToExecution(leadsToExecution);
    return leadsToExecution;
}
/**
 * Computes a node representing the execution of a matched rule.
 *
 * Abstract rules get a MatchedRuleAbstract node with one child node per
 * concrete sub-rule; concrete rules get a MatchedRuleExecution node that
 * always leads to execution.
 *
 * @param r            the matched rule to expand
 * @param dependent    the node depending on the rule's execution
 * @param isConstraint true means that the execution is required to satisfy an execution constraint of
 *                     another rule, while false means that the matched rule node to be computed is
 *                     part of the execution path of the error (and is recorded in currentPath)
 * @return always true
 */
private boolean pathToMatchedRuleOne(MatchedRule r, DependencyNode dependent, boolean isConstraint) {
DependencyNode newNode;
if ( r.isIsAbstract() ) {
newNode = new MatchedRuleAbstract(r);
for(RuleWithPattern cr : r.getChildren()) {
MatchedRule mr = (MatchedRule) cr;
pathToMatchedRuleOne(mr, newNode, isConstraint);
// NOTE(review): an earlier comment claimed isConstraint = true is passed to
// children (the abstract rule acting as "launcher"), but the code forwards
// the caller's flag unchanged — confirm which behaviour is intended.
}
} else {
newNode = new MatchedRuleExecution(r);
if ( ! isConstraint )
currentPath.addRule((ExecutionNode) newNode);
newNode.setLeadsToExecution(true);
}
dependent.addDependency(newNode);
// the rule's filter, if present, constrains when the rule actually fires
if ( r.getInPattern().getFilter() != null ) {
ConstraintNode constraint = pathToFilterExpression(r.getInPattern().getFilter());
newNode.addConstraint(constraint);
}
return true;
}
/**
 * Wraps a rule filter expression into a constraint node: the filter must hold
 * for the rule to be applied.
 */
private ConstraintNode pathToFilterExpression(OclExpression expr) {
return new RuleFilterNode(expr);
}
/**
 * Remembers helpers and imperative rules already visited so that cyclic
 * call graphs do not cause infinite recursion.
 */
private class TraversedSet {
    private final Set<Helper> visitedHelpers = new HashSet<Helper>();
    private final Set<Rule> visitedImperativeRules = new HashSet<Rule>();

    /** @return true iff the helper had not been visited before */
    public boolean addHelper(Helper h) {
        return visitedHelpers.add(h);
    }

    /** @return true iff the rule had not been visited before */
    public boolean addImperativeRule(Rule r) {
        return visitedImperativeRules.add(r);
    }
}
}
|
plugins/anatlyzer.atl.typing/src/anatlyzer/atl/graph/ErrorPathGenerator.java
|
package anatlyzer.atl.graph;
import java.util.HashSet;
import java.util.Set;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EObject;
import anatlyzer.atl.errors.AnalysisResult;
import anatlyzer.atl.errors.Problem;
import anatlyzer.atl.errors.atl_error.BindingExpectedOneAssignedMany;
import anatlyzer.atl.errors.atl_error.BindingPossiblyUnresolved;
import anatlyzer.atl.errors.atl_error.BindingWithResolvedByIncompatibleRule;
import anatlyzer.atl.errors.atl_error.BindingWithoutRule;
import anatlyzer.atl.errors.atl_error.FeatureNotFound;
import anatlyzer.atl.errors.atl_error.FlattenOverNonNestedCollection;
import anatlyzer.atl.errors.atl_error.LocalProblem;
import anatlyzer.atl.errors.atl_error.NoBindingForCompulsoryFeature;
import anatlyzer.atl.errors.atl_error.OperationNotFound;
import anatlyzer.atl.model.ATLModel;
import anatlyzer.atl.model.ErrorModel;
import anatlyzer.atl.model.TypingModel;
import anatlyzer.atlext.ATL.Binding;
import anatlyzer.atlext.ATL.ContextHelper;
import anatlyzer.atlext.ATL.Helper;
import anatlyzer.atlext.ATL.MatchedRule;
import anatlyzer.atlext.ATL.OutPattern;
import anatlyzer.atlext.ATL.Rule;
import anatlyzer.atlext.ATL.RuleResolutionInfo;
import anatlyzer.atlext.ATL.RuleWithPattern;
import anatlyzer.atlext.ATL.StaticHelper;
import anatlyzer.atlext.ATL.StaticRule;
import anatlyzer.atlext.OCL.IfExp;
import anatlyzer.atlext.OCL.LetExp;
import anatlyzer.atlext.OCL.LoopExp;
import anatlyzer.atlext.OCL.OclExpression;
import anatlyzer.atlext.OCL.PropertyCallExp;
import anatlyzer.atlext.OCL.VariableExp;
/**
* Given an error model, generates the "path" in terms
* of types that are needed to reach each error.
*
* @author jesus
*
*/
public class ErrorPathGenerator {
// Analysis artefacts of the transformation under scrutiny.
private ErrorModel errors;       // problems reported by the analyser
private TypingModel typ;         // typing information of the ATL model
private ATLModel atlModel;       // the analysed ATL transformation
private ProblemGraph graph;      // NOTE(review): never assigned in this class (perform() uses a local) — confirm it is needed
private ProblemPath currentPath; // path being built for the problem currently processed
/**
 * @param atlModel an already analysed ATL model, i.e. its error and typing
 *                 models are available
 */
public ErrorPathGenerator(ATLModel atlModel) {
this.errors = atlModel.getErrors();
this.typ = atlModel.getTyping();
this.atlModel = atlModel;
}
/**
 * Builds a problem graph with one path per local problem found by the
 * analysis. Problems for which no path could be generated are reported on
 * stderr and skipped; non-local problems are not supported.
 */
public ProblemGraph perform() {
    ProblemGraph result = new ProblemGraph();
    AnalysisResult analysis = this.errors.getAnalysis();
    for (Problem problem : analysis.getProblems()) {
        if (!(problem instanceof LocalProblem)) {
            throw new UnsupportedOperationException();
        }
        ProblemPath path = generatePath((LocalProblem) problem);
        if (path == null) {
            System.err.println("ErrorPathGenerator: Ignored " + problem.getClass().getSimpleName());
        } else {
            result.addProblemPath(path);
        }
    }
    return result;
}
/*
public ProblemPath perform(Problem p) {
currentGraph = new ProblemPath();
generatePath((LocalProblem) p);
return currentGraph;
}
*/
/**
 * Dispatches on the concrete problem type and builds the corresponding path.
 * Each generatePath_* method sets {@link #currentPath} as a side effect.
 *
 * @return the path for the problem, or null if the problem type is not handled
 */
public ProblemPath generatePath(LocalProblem p) {
currentPath = null;
if ( p instanceof NoBindingForCompulsoryFeature ) {
generatePath_NoBindingForCompulsoryFeature((NoBindingForCompulsoryFeature) p);
// These two are very similar
} else if ( p instanceof BindingExpectedOneAssignedMany ) {
generatePath_BindingExpectedOneAssignedMany((BindingExpectedOneAssignedMany) p);
} else if ( p instanceof BindingWithoutRule ) {
generatePath_BindingWithoutRule((BindingWithoutRule) p);
} else if ( p instanceof BindingWithResolvedByIncompatibleRule ) {
generatePath_BindingWithResolvedByIncompatibleRule((BindingWithResolvedByIncompatibleRule) p);
} else if ( p instanceof BindingPossiblyUnresolved ) {
generatePath_BindingPossiblyUnresolved((BindingPossiblyUnresolved) p);
} else if ( p instanceof FeatureNotFound ) {
generatePath_FeatureNotFound((FeatureNotFound) p);
} else if ( p instanceof OperationNotFound ) {
generatePath_OperationNotFound((OperationNotFound) p);
} else if ( p instanceof FlattenOverNonNestedCollection ) {
generatePath_FlattenOverNonNestedCollection((FlattenOverNonNestedCollection) p);
}
// currentPath stays null for unhandled problem types
return currentPath;
}
// Path generation for "feature does not exist" errors: start from the
// receiver expression of the faulty property access.
private void generatePath_FeatureNotFound(FeatureNotFound p) {
PropertyCallExp atlExpr = (PropertyCallExp) p.getElement();
FeatureOrOperationNotFoundNode node = new FeatureOrOperationNotFoundNode(p, atlExpr);
currentPath = new ProblemPath(p, node);
// System.out.println(p);
pathFromErrorExpression(atlExpr.getSource(), node);
}
// Same scheme as FeatureNotFound, for missing operations.
private void generatePath_OperationNotFound(OperationNotFound p) {
PropertyCallExp atlExpr = (PropertyCallExp) p.getElement();
FeatureOrOperationNotFoundNode node = new FeatureOrOperationNotFoundNode(p, atlExpr);
currentPath = new ProblemPath(p, node);
pathFromErrorExpression(atlExpr.getSource(), node);
}
// Here the path starts from the flatten call itself, not from its receiver.
private void generatePath_FlattenOverNonNestedCollection(FlattenOverNonNestedCollection p) {
PropertyCallExp atlExpr = (PropertyCallExp) p.getElement();
FlattenOverNonNestedCollectionNode node = new FlattenOverNonNestedCollectionNode(p, atlExpr);
currentPath = new ProblemPath(p, node);
pathFromErrorExpression(atlExpr, node);
}
// Binding errors: the path covers the rule that owns the binding plus the
// rules that (fail to) resolve the binding.
private void generatePath_BindingWithoutRule(BindingWithoutRule p) {
Binding atlBinding = (Binding) p.getElement();
ProblemNode node = new BindingWithoutRuleNode(p, atlBinding, atlModel);
currentPath = new ProblemPath(p, node);
Rule rule = atlBinding.getOutPatternElement().getOutPattern().getRule();
pathToRule(rule, node, new TraversedSet(), false);
pathToBinding(atlBinding, node, new TraversedSet());
}
private void generatePath_BindingExpectedOneAssignedMany(BindingExpectedOneAssignedMany p) {
Binding atlBinding = (Binding) p.getElement();
ProblemNode node = new BindingExpectedOneAssignedManyNode(p, atlBinding);
currentPath = new ProblemPath(p, node);
Rule rule = atlBinding.getOutPatternElement().getOutPattern().getRule();
pathToRule(rule, node, new TraversedSet(), false);
pathToBinding(atlBinding, node, new TraversedSet());
}
private void generatePath_BindingPossiblyUnresolved(BindingPossiblyUnresolved p) {
Binding atlBinding = (Binding) p.getElement();
ProblemNode node = new BindingPossiblyUnresolvedNode(p, atlBinding, atlModel);
currentPath = new ProblemPath(p, node);
Rule rule = atlBinding.getOutPatternElement().getOutPattern().getRule();
pathToRule(rule, node, new TraversedSet(), false);
pathToBinding(atlBinding, node, new TraversedSet());
}
private void generatePath_BindingWithResolvedByIncompatibleRule(BindingWithResolvedByIncompatibleRule p) {
Binding atlBinding = (Binding) p.getElement();
ProblemNode node = new BindingWithResolvedByIncompatibleRuleNode(p, atlBinding, atlModel);
currentPath = new ProblemPath(p, node);
Rule rule = atlBinding.getOutPatternElement().getOutPattern().getRule();
pathToRule(rule, node, new TraversedSet(), false);
pathToBinding(atlBinding, node, new TraversedSet());
}
// A compulsory target feature is never assigned: the path is simply the
// execution of the rule owning the out-pattern.
private void generatePath_NoBindingForCompulsoryFeature(NoBindingForCompulsoryFeature p) {
ProblemNode node = new NoBindingAssignmentNode(p);
currentPath = new ProblemPath(p, node);
OutPattern op = (OutPattern) p.getElement();
Rule rule = op.getRule();
pathToRule(rule, node, new TraversedSet(), false);
}
//
// End-of errors
//
/**
 * Walks up the containment tree from {@code start} until the closest
 * control-flow element (rule, helper, if, let) and continues the path from
 * there. Loops found on the way are recorded as LoopNode dependencies.
 *
 * @return true if the path finally reaches an execution point
 */
private boolean pathToControlFlow(EObject start, DependencyNode node, TraversedSet traversed) {
EObject lastParent = (EObject) start;
EObject parent = start.eContainer();
while ( ! isControlFlowElement(parent) ) {
if ( isIteration(parent) ) {
LoopExp exp = (LoopExp) parent;
// only the first iterator is recorded for the loop node
LoopNode loop = new LoopNode(exp.getSource(), exp.getIterators().get(0));
node.addDependency(loop);
// ?? Different from the pathToCall
return checkReachesExecution(pathToControlFlow((OclExpression) parent, loop, traversed), loop);
}
lastParent = parent;
parent = parent.eContainer();
}
// lastParent is the direct child of the control-flow element; needed by
// if/let handling to know which branch/declaration contained the error.
if ( parent instanceof Rule ) {
return pathToRule((Rule) parent, node, traversed, false);
} else if ( parent instanceof Helper ){
return pathToHelper((Helper) parent, node, traversed);
} else if ( parent instanceof IfExp ) {
return pathToIfExpr((IfExp) parent, (OclExpression) lastParent, node, traversed);
} else if ( parent instanceof LetExp ) {
return pathToLetExpr((LetExp) parent, lastParent, node, traversed);
} else {
throw new UnsupportedOperationException();
}
}
/** Records on {@code current} whether its dependency chain reaches an execution point. */
private boolean checkReachesExecution(boolean depReachability, DependencyNode current) {
    current.setLeadsToExecution(depReachability);
    return depReachability;
}

/** @return true if the element is an OCL loop expression. */
private boolean isIteration(EObject element) {
    return element instanceof LoopExp;
}

/** Control-flow elements delimit the scopes the path generator walks up to. */
public boolean isControlFlowElement(EObject element) {
    return element instanceof Rule
            || element instanceof Helper
            || element instanceof IfExp
            || element instanceof LetExp;
}
/**
 * Starts a path at the expression where the error occurred.
 */
private void pathFromErrorExpression(OclExpression start, DependencyNode depNode) {
TraversedSet traversed = new TraversedSet();
// Handle the special case "varName.error", to avoid generating a node with just "varName" which
// is redundant (variable names represents objects that must exist)
if ( start instanceof VariableExp ) {
VariableExp v = (VariableExp) start;
// "thisModule" receivers are not supported yet
if ( v.getReferredVariable().getVarName().equals("thisModule") ) throw new UnsupportedOperationException();
pathToControlFlow(start.eContainer(), depNode, traversed);
} else {
DelimitedExpressionNode node = new DelimitedExpressionNode(start);
depNode.addDependency(node);
pathToControlFlow(start, node, traversed);
}
}
/**
 * Adds a call-expression node for {@code call} under {@code depNode} and
 * continues the path up to the enclosing control-flow element.
 */
private boolean pathForCall(OclExpression call, DependencyNode depNode, TraversedSet traversed) {
    CallExprNode callNode = new CallExprNode((PropertyCallExp) call, atlModel);
    depNode.addDependency(callNode);
    boolean reaches = pathToControlFlow(call, callNode, traversed);
    return checkReachesExecution(reaches, callNode);
}
/**
 * Handles an expression nested in a let. If the error lives in the variable
 * initializer, continue upwards directly; otherwise add a LetScopeNode for
 * the outermost let of the (possibly nested) let chain.
 */
private boolean pathToLetExpr(LetExp start, EObject childToLet, DependencyNode node, TraversedSet traversed) {
// The error comes to the variable declaration
if ( start.getVariable() == childToLet ) {
// ExpressionAnnotation ann = (ExpressionAnnotation) typ.getAnnotation(childToLet.original_());
// DelimitedExpressionNode newNode = new DelimitedExpressionNode(start.getVariable().getInitExpression(), ann);
// return checkReachesExecution(pathToControlFlow(childToLet, newNode, traversed), newNode);
return checkReachesExecution(pathToControlFlow(start, node, traversed), node);
}
// Walk up to the outermost let of a chain of nested lets
EObject lastParent = (EObject) start;
EObject parent = start.eContainer();
while ( parent instanceof LetExp ) {
lastParent = parent;
parent = parent.eContainer();
}
LetScopeNode newNode = new LetScopeNode((LetExp) lastParent);
node.addDependency(newNode);
return checkReachesExecution(pathToControlFlow(lastParent.eContainer(), newNode, traversed), newNode);
}
/**
 * Handles an expression nested in an if: record which branch must be taken,
 * or recurse directly when the child is the condition itself.
 */
private boolean pathToIfExpr(IfExp expr, OclExpression directChild, DependencyNode node, TraversedSet traversed) {
    final boolean branch;
    if (directChild == expr.getThenExpression()) {
        branch = ConditionalNode.TRUE_BRANCH;
    } else if (directChild == expr.getElseExpression()) {
        branch = ConditionalNode.FALSE_BRANCH;
    } else {
        // the child is the condition: no branch needs to be recorded
        return pathToControlFlow(expr, node, traversed);
    }
    ConditionalNode condNode = new ConditionalNode(expr, branch);
    node.addDependency(condNode);
    return checkReachesExecution(pathToControlFlow(expr, condNode, traversed), condNode);
}
/**
 * Adds the constraint that some rule resolving the given binding must be executed.
 */
private void pathToBinding(Binding atlBinding, ProblemNode node, TraversedSet traversed) {
    RuleResolutionNode resolution = new RuleResolutionNode(atlBinding);
    node.addConstraint(resolution);
    for (RuleResolutionInfo info : atlBinding.getResolvedBy()) {
        pathToRule(info.getRule(), resolution, traversed, true);
    }
}
/**
 * Dispatches path construction on the concrete kind of rule.
 * @throws UnsupportedOperationException for rule kinds not handled yet
 */
private boolean pathToRule(Rule rule, DependencyNode dependent, TraversedSet traversed, boolean isConstraint) {
    if (rule instanceof MatchedRule) {
        return pathToMatchedRuleOne((MatchedRule) rule, dependent, isConstraint);
    }
    if (rule instanceof StaticRule) {
        return pathToImperativeRule((StaticRule) rule, dependent, traversed);
    }
    throw new UnsupportedOperationException(rule.getClass().getName());
}
/**
 * Expands an imperative (called) rule: it leads to execution only if at least
 * one of its call sites is itself reachable.
 */
private boolean pathToImperativeRule(StaticRule r, DependencyNode dependent, TraversedSet traversed) {
    if (!traversed.addImperativeRule(r)) {
        return false; // already expanded; breaks call-graph cycles
    }
    ImperativeRuleExecution execNode = new ImperativeRuleExecution(r);
    dependent.addDependency(execNode);
    boolean reachable = false;
    for (PropertyCallExp callSite : r.getCalledBy()) {
        // always traverse every call site (side effect: dependencies are added)
        boolean callReaches = pathForCall(callSite, execNode, traversed);
        reachable = reachable || callReaches;
    }
    execNode.setLeadsToExecution(reachable);
    return reachable;
}
/**
 * Expands a helper invocation: the helper leads to execution only if at least
 * one of its call sites is reachable.
 *
 * Improvement: the StaticHelper and ContextHelper branches previously
 * duplicated the identical caller-traversal loop; only the way the caller
 * list is obtained differs, so the loop is now written once.
 */
private boolean pathToHelper(Helper h, DependencyNode depNode, TraversedSet traversed) {
    if (!traversed.addHelper(h))
        return false; // already visited; breaks call-graph cycles
    HelperInvocationNode hNode = new HelperInvocationNode(h);
    depNode.addDependency(hNode);
    EList<PropertyCallExp> callers;
    if (h instanceof StaticHelper) {
        callers = h.getCalledBy();
    } else if (h instanceof ContextHelper) {
        // context helpers can also be reached through polymorphic call sites
        callers = ((ContextHelper) h).getPolymorphicCalledBy();
    } else {
        throw new UnsupportedOperationException();
    }
    boolean leadsToExecution = false;
    for (PropertyCallExp expr : callers) {
        if (pathForCall(expr, hNode, traversed)) {
            leadsToExecution = true;
        }
    }
    hNode.setLeadsToExecution(leadsToExecution);
    return leadsToExecution;
}
/**
 * Computes a node representing the execution of a matched rule.
 *
 * Abstract rules get a MatchedRuleAbstract node with one child node per
 * concrete sub-rule; concrete rules get a MatchedRuleExecution node that
 * always leads to execution.
 *
 * @param r            the matched rule to expand
 * @param dependent    the node depending on the rule's execution
 * @param isConstraint true means that the execution is required to satisfy an execution constraint of
 *                     another rule, while false means that the matched rule node to be computed is
 *                     part of the execution path of the error (and is recorded in currentPath)
 * @return always true
 */
private boolean pathToMatchedRuleOne(MatchedRule r, DependencyNode dependent, boolean isConstraint) {
DependencyNode newNode;
if ( r.isIsAbstract() ) {
newNode = new MatchedRuleAbstract(r);
for(RuleWithPattern cr : r.getChildren()) {
MatchedRule mr = (MatchedRule) cr;
pathToMatchedRuleOne(mr, newNode, isConstraint);
// NOTE(review): an earlier comment claimed isConstraint = true is passed to
// children (the abstract rule acting as "launcher"), but the code forwards
// the caller's flag unchanged — confirm which behaviour is intended.
}
} else {
newNode = new MatchedRuleExecution(r);
if ( ! isConstraint )
currentPath.addRule((ExecutionNode) newNode);
newNode.setLeadsToExecution(true);
}
dependent.addDependency(newNode);
// the rule's filter, if present, constrains when the rule actually fires
if ( r.getInPattern().getFilter() != null ) {
ConstraintNode constraint = pathToFilterExpression(r.getInPattern().getFilter());
newNode.addConstraint(constraint);
}
return true;
}
/**
 * Wraps a rule filter expression into a constraint node: the filter must hold
 * for the rule to be applied.
 */
private ConstraintNode pathToFilterExpression(OclExpression expr) {
return new RuleFilterNode(expr);
}
/**
 * Remembers helpers and imperative rules already visited so that cyclic
 * call graphs do not cause infinite recursion.
 */
private class TraversedSet {
    private final Set<Helper> visitedHelpers = new HashSet<Helper>();
    private final Set<Rule> visitedImperativeRules = new HashSet<Rule>();

    /** @return true iff the helper had not been visited before */
    public boolean addHelper(Helper h) {
        return visitedHelpers.add(h);
    }

    /** @return true iff the rule had not been visited before */
    public boolean addImperativeRule(Rule r) {
        return visitedImperativeRules.add(r);
    }
}
}
|
Fixed cast error
|
plugins/anatlyzer.atl.typing/src/anatlyzer/atl/graph/ErrorPathGenerator.java
|
Fixed cast error
|
|
Java
|
agpl-3.0
|
023a7282a88435943720ea8e3e51cad3157f08d6
| 0
|
OPEN-ENT-NG/community,OPEN-ENT-NG/community,OPEN-ENT-NG/community,OPEN-ENT-NG/community,OPEN-ENT-NG/community
|
/*
* Copyright © Région Nord Pas de Calais-Picardie, Département 91, Région Aquitaine-Limousin-Poitou-Charentes, 2016.
*
* This file is part of OPEN ENT NG. OPEN ENT NG is a versatile ENT Project based on the JVM and ENT Core Project.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation (version 3 of the License).
*
* For the sake of explanation, any module that communicate over native
* Web protocols, such as HTTP, with OPEN ENT NG is outside the scope of this
* license and could be license under its own terms. This is merely considered
* normal use of OPEN ENT NG, and does not fall under the heading of "covered work".
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*/
package net.atos.entng.community.controllers;
import static org.entcore.common.http.response.DefaultResponseHandler.arrayResponseHandler;
import static org.entcore.common.http.response.DefaultResponseHandler.defaultResponseHandler;
import static org.entcore.common.http.response.DefaultResponseHandler.leftToResponse;
import static org.entcore.common.http.response.DefaultResponseHandler.notEmptyResponseHandler;
import static org.entcore.common.user.UserUtils.getUserInfos;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import fr.wseduc.rs.*;
import net.atos.entng.community.Community;
import net.atos.entng.community.services.CommunityService;
import org.entcore.common.notification.TimelineHelper;
import org.entcore.common.events.EventStore;
import org.entcore.common.events.EventStoreFactory;
import org.entcore.common.user.UserInfos;
import org.entcore.common.user.UserUtils;
import org.vertx.java.core.Handler;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.eventbus.Message;
import org.vertx.java.core.http.HttpServerRequest;
import org.vertx.java.core.http.RouteMatcher;
import org.vertx.java.core.json.JsonArray;
import org.vertx.java.core.json.JsonObject;
import org.vertx.java.platform.Container;
import fr.wseduc.rs.Delete;
import fr.wseduc.rs.Get;
import fr.wseduc.rs.Post;
import fr.wseduc.rs.Put;
import fr.wseduc.security.ActionType;
import fr.wseduc.security.SecuredAction;
import fr.wseduc.webutils.Either;
import fr.wseduc.webutils.I18n;
import fr.wseduc.webutils.http.BaseController;
import fr.wseduc.webutils.http.Renders;
import fr.wseduc.webutils.request.RequestUtils;
public class CommunityController extends BaseController {
// Service layer for community persistence; injected via setCommunityService.
private CommunityService communityService;
// Helper used to push share notifications to users' timelines.
private final TimelineHelper timeline;
// Sharing right groups attached to a community page.
private static final JsonArray resourcesTypes = new JsonArray().add("read").add("contrib").add("manager");
// Store for "statistics" access events.
private EventStore eventStore;
private enum CommunityEvent { ACCESS }
@Override
public void init(Vertx vertx, Container container, RouteMatcher rm,
Map<String, fr.wseduc.webutils.security.SecuredAction> securedActions) {
super.init(vertx, container, rm, securedActions);
eventStore = EventStoreFactory.getFactory().getEventStore(Community.class.getSimpleName());
}
public CommunityController(TimelineHelper helper) {
timeline = helper;
}
// Serves the application's main HTML view.
@Get("")
@SecuredAction("community.view")
public void view(HttpServerRequest request) {
renderView(request);
// Create event "access to application Community" and store it, for module "statistics"
eventStore.createAndStoreEvent(CommunityEvent.ACCESS.name(), request);
}
// Validates the request body against the "create" JSON schema, resolves the
// session user, then delegates to the private create(...) overload.
@Post("")
@SecuredAction("community.create")
public void create(final HttpServerRequest request) {
RequestUtils.bodyToJson(request, pathPrefix + "create", new Handler<JsonObject>() {
@Override
public void handle(final JsonObject body) {
UserUtils.getUserInfos(eb, request, new Handler<UserInfos>() {
@Override
public void handle(final UserInfos user) {
if (user != null) {
CommunityController.this.create(user, request, body);
} else {
// no session: reply 401
unauthorized(request);
}
}
});
}
});
}
/**
 * Creates a community: first creates its backing page via the
 * "communityPages" bus address, then persists the community and shares the
 * page with the community groups. On failure, the freshly created page is
 * deleted again so no orphan page is left behind.
 *
 * BUG FIX: the share-result handler previously tested {@code r.isLeft()},
 * which is always false inside the {@code r.isRight()} branch, so sharing
 * failures were never reported nor rolled back. It now tests
 * {@code event.isLeft()}, the actual outcome of {@link #sharePage}.
 */
private void create(final UserInfos user, final HttpServerRequest request, final JsonObject body) {
    UserUtils.getSession(eb, request, new Handler<JsonObject>() {
        @Override
        public void handle(JsonObject u) {
            JsonObject p = new JsonObject()
                    .putString("title", body.getString("name"))
                    .putBoolean("hideInPages", true)
                    .putArray("pages", new JsonArray())
                    .putObject("referencedResources", new JsonObject());
            JsonObject pages = new JsonObject()
                    .putString("action", "create")
                    .putObject("user", u)
                    .putObject("page", p);
            eb.send("communityPages", pages, new Handler<Message<JsonObject>>() {
                @Override
                public void handle(Message<JsonObject> message) {
                    final JsonObject result = message.body().getObject("result");
                    if ("ok".equals(message.body().getString("status")) && result != null &&
                            !result.getString("_id", "").trim().isEmpty()) {
                        final String pageId = result.getString("_id");
                        body.putString("pageId", pageId);
                        communityService.create(body, user, new Handler<Either<String, JsonObject>>() {
                            @Override
                            public void handle(final Either<String, JsonObject> r) {
                                if (r.isRight()) {
                                    sharePage(pageId, user.getUserId(), r.right().getValue(), new Handler<Either<String, JsonObject>>() {
                                        public void handle(Either<String, JsonObject> event) {
                                            if (event.isLeft()) { // was r.isLeft(): always false here
                                                leftToResponse(request, event.left());
                                                // roll back the orphan page
                                                eb.send("communityPages", new JsonObject().putString("action", "delete")
                                                        .putString("pageId", pageId));
                                                return;
                                            }
                                            r.right().getValue().putString("pageId", pageId);
                                            renderJson(request, r.right().getValue(), 201);
                                        }
                                    });
                                    createPageMarkups(pageId);
                                } else {
                                    leftToResponse(request, r.left());
                                    // roll back the orphan page
                                    eb.send("communityPages", new JsonObject().putString("action", "delete")
                                            .putString("pageId", pageId));
                                }
                            }
                        });
                    } else {
                        renderError(request, message.body());
                    }
                }
            });
        }
    });
}
/**
 * Shares the community page with the read, contrib and manager groups, in
 * sequence: each share message is sent only after the previous reply.
 * The handler receives Left on the first failure, Right on full success.
 *
 * NOTE(review): "share", "r", "c" and "m" are the SAME mutable JsonObject —
 * each put* call mutates it in place. This works because the sends happen
 * strictly one after another, but it is fragile; confirm before reordering.
 */
private void sharePage(String pageId, String userId, final JsonObject value, final Handler<Either<String, JsonObject>> handler) {
final JsonObject share = new JsonObject().putString("action", "share")
.putString("pageId", pageId).putString("userId", userId);
JsonObject r = share
.putString("groupId", value.getString("read"))
.putArray("actions", new JsonArray().add("net-atos-entng-community-controllers-PagesController|get"));
eb.send("communityPages", r, new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
if (!"ok".equals(message.body().getString("status"))) {
final String error_msg = "Error while sharing page (read) : " + message.body().getString("message");
log.error(error_msg);
handler.handle(new Either.Left<String, JsonObject>(error_msg));
return;
}
JsonObject c = share
.putString("groupId", value.getString("contrib"))
.putArray("actions", new JsonArray()
.add("net-atos-entng-community-controllers-PagesController|get")
.add("net-atos-entng-community-controllers-PagesController|update")
);
eb.send("communityPages", c, new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
if (!"ok".equals(message.body().getString("status"))) {
final String error_msg = "Error while sharing page (contrib) : " + message.body().getString("message");
log.error(error_msg);
handler.handle(new Either.Left<String, JsonObject>(error_msg));
return;
}
JsonObject m = share
.putString("groupId", value.getString("manager"))
.putArray("actions", new JsonArray()
.add("net-atos-entng-community-controllers-PagesController|get")
.add("net-atos-entng-community-controllers-PagesController|update")
.add("net-atos-entng-community-controllers-PagesController|delete")
);
eb.send("communityPages", m, new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
if (!"ok".equals(message.body().getString("status"))) {
final String error_msg = "Error while sharing page (manager) : " + message.body().getString("message");
log.error(error_msg);
handler.handle(new Either.Left<String, JsonObject>(error_msg));
return;
}
handler.handle(new Either.Right<String, JsonObject>(message.body()));
}
});
}
});
}
});
}
/**
 * Adds the "edit" and "back to list" view markups to the community page.
 * Failures are only logged; there is no caller to notify.
 */
private void createPageMarkups(String pageId) {
    JsonObject editMarkup = new JsonObject()
            .putString("label", "community.edit")
            .putString("resourceRight", "share")
            .putString("href", "/community#/edit/" + pageId);
    JsonObject backMarkup = new JsonObject()
            .putString("label", "community.back.to")
            .putString("resourceRight", "read")
            .putString("href", "/community#/list");
    JsonObject page = new JsonObject().putObject("markups",
            new JsonObject().putArray("view",
                    new JsonArray().addObject(editMarkup).addObject(backMarkup)));
    JsonObject updatePage = new JsonObject()
            .putString("action", "update")
            .putString("pageId", pageId)
            .putObject("page", page);
    eb.send("communityPages", updatePage, new Handler<Message<JsonObject>>() {
        @Override
        public void handle(Message<JsonObject> message) {
            if (!"ok".equals(message.body().getString("status"))) {
                log.error(message.body().getString("message"));
            }
        }
    });
}
// Updates a community. When the name changes, the backing page title and
// markups are updated too (see updatePageHandler).
@Put("/:id")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void update(final HttpServerRequest request) {
RequestUtils.bodyToJson(request, pathPrefix + "update", new Handler<JsonObject>() {
@Override
public void handle(JsonObject body) {
if (body.size() > 0) {
String name = body.getString("name");
// only propagate to the page when a non-blank name was supplied
Handler<Either<String, JsonObject>> handler = (name != null && !name.trim().isEmpty()) ?
updatePageHandler(request, name) : notEmptyResponseHandler(request);
communityService.update(request.params().get("id"), body, handler);
} else {
badRequest(request, "empty.json");
}
}
});
}
/**
 * Builds the handler used after a community rename: propagates the new name
 * to the backing page (title + markups) and then renders the update result.
 * Page-update failures are logged but do not fail the HTTP response.
 */
private Handler<Either<String, JsonObject>> updatePageHandler(final HttpServerRequest request, final String name) {
return new Handler<Either<String, JsonObject>>() {
@Override
public void handle(final Either<String, JsonObject> r) {
if (r.isRight()) {
String pageId = r.right().getValue().getString("pageId");
// pageId is internal; strip it from the HTTP response payload
r.right().getValue().removeField("pageId");
JsonObject updatePage = new JsonObject()
.putString("action", "update")
.putString("pageId", pageId)
.putObject("page", new JsonObject()
.putString("title", name)
.putBoolean("hideInPages", true)
.putObject("markups", new JsonObject()
.putArray("view", new JsonArray()
.addObject(new JsonObject()
.putString("label", "community.edit")
.putString("resourceRight", "share")
.putString("href", "/community#/edit/" + pageId))
.addObject(new JsonObject()
.putString("label", "community.back.to")
.putString("resourceRight", "read")
.putString("href", "/community#/list")))));
eb.send("communityPages", updatePage, new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
if (!"ok".equals(message.body().getString("status"))) {
log.error(message.body().getString("message"));
}
renderJson(request, r.right().getValue());
}
});
} else {
leftToResponse(request, r.left());
}
}
};
}
// Deletes a community and, on success, its backing page with all its
// resources (fire-and-forget bus message).
@Delete("/:id")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void delete(final HttpServerRequest request) {
communityService.delete(request.params().get("id"), new Handler<Either<String, JsonObject>>() {
@Override
public void handle(Either<String, JsonObject> r) {
if (r.isRight()) {
JsonObject deletePage = new JsonObject()
.putString("action", "delete")
.putString("pageId", r.right().getValue().getString("pageId"))
.putBoolean("deleteResources", true);
eb.send("communityPages", deletePage);
ok(request);
} else {
leftToResponse(request, r.left());
}
}
});
}
/** Lists the communities visible to the current user (401 without a session). */
@Get("/list")
@SecuredAction(value = "", type = ActionType.AUTHENTICATED)
public void list(final HttpServerRequest request) {
    UserUtils.getUserInfos(eb, request, new Handler<UserInfos>() {
        @Override
        public void handle(UserInfos user) {
            if (user == null) {
                unauthorized(request);
                return;
            }
            communityService.list(user, arrayResponseHandler(request));
        }
    });
}
// Returns the details of a single community (404 when not found).
@Get("/:id/details")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void details(HttpServerRequest request) {
communityService.get(request.params().get("id"), notEmptyResponseHandler(request));
}
/**
 * Adds/updates community members per right group (read/contrib/manager).
 * On success, renders the result and asynchronously pushes a timeline
 * notification to every newly shared user, tagged with its share type.
 */
@Put("/:id/users")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void manageUsers(final HttpServerRequest request) {
final String id = request.params().get("id");
RequestUtils.bodyToJson(request, pathPrefix + "manageUsers", new Handler<JsonObject>() {
@Override
public void handle(final JsonObject body) {
if (body.size() > 0) {
communityService.manageUsers(id, body, new Handler<Either<String, JsonObject>>() {
@Override
public void handle(Either<String, JsonObject> event) {
if (event.isRight()) {
UserUtils.getUserInfos(eb, request, new Handler<UserInfos>() {
@Override
public void handle(final UserInfos user) {
// fetch the community again to get its display name for the notification
communityService.get(id, user, new Handler<Either<String, JsonObject>>() {
public void handle(Either<String, JsonObject> event) {
if (event.isRight()) {
//Populate notification parameters
JsonObject params = new JsonObject()
.putString("resourceName", event.right().getValue().getString("name", ""))
.putString("resourceUri", "/community#/view/" + id)
.putString("uri", "/userbook/annuaire#" + user.getUserId() + "#" + user.getType())
.putString("username", user.getUsername());
//Get user list
ArrayList<String> readList = new ArrayList<>();
ArrayList<String> contribList = new ArrayList<>();
ArrayList<String> managerList = new ArrayList<>();
for (String field : body.getFieldNames()) {
if ("read".equals(field))
readList.addAll(body.getArray(field).toList());
else if ("contrib".equals(field))
contribList.addAll(body.getArray(field).toList());
else if ("manager".equals(field))
managerList.addAll(body.getArray(field).toList());
}
// one notification batch per right group, reusing params with shareType swapped
if (readList.size() > 0) {
timeline.notifyTimeline(request, "community.share", user, readList,
request.params().get("id"), null, params.putString("shareType", "read"), true);
}
if (contribList.size() > 0) {
params.removeField("shareType");
timeline.notifyTimeline(request, "community.share", user, contribList,
request.params().get("id"), null, params.putString("shareType", "contrib"), true);
}
if (managerList.size() > 0) {
params.removeField("shareType");
timeline.notifyTimeline(request, "community.share", user, managerList,
request.params().get("id"), null, params.putString("shareType", "manager"), true);
}
}
}
});
}
});
// the HTTP response does not wait for the notifications above
Renders.renderJson(request, event.right().getValue(), 200);
} else {
JsonObject error = new JsonObject()
.putString("error", event.left().getValue());
Renders.renderJson(request, error, 400);
}
}
});
} else {
badRequest(request, "empty.json");
}
}
});
}
/**
 * Lists the community members, filtered by the "type" query parameters
 * (defaults to read/contrib/manager), and enriches the result with the
 * users/groups visible to the requester.
 */
@Get("/:id/users")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void listUsers(final HttpServerRequest request) {
List<String> t = request.params().getAll("type");
JsonArray types = (t != null && !t.isEmpty()) ?
new JsonArray(t.toArray()) : resourcesTypes; //new JsonArray(RightsController.allowedSharingRights.toArray());
communityService.listUsers(request.params().get("id"), types, new Handler<Either<String, JsonObject>>() {
@Override
public void handle(final Either<String, JsonObject> event) {
final Handler<Either<String, JsonObject>> handler = defaultResponseHandler(request);
if (event.isRight()) {
final JsonObject res = event.right().getValue();
listVisible(request, I18n.acceptLanguage(request), new Handler<JsonObject>() {
@Override
public void handle(final JsonObject visibles) {
// attach the visible users/groups before replying
res.putObject("visibles", visibles);
handler.handle(event);
}
});
} else {
handler.handle(event);
}
}
});
}
/**
 * GET /visibles — renders the users and groups visible to the current
 * user, with group names localized to the request's Accept-Language.
 */
@Get("/visibles")
@SecuredAction("community.listVisibles")
public void listVisibles(final HttpServerRequest request) {
    final String language = I18n.acceptLanguage(request);
    final Handler<JsonObject> renderHandler = new Handler<JsonObject>() {
        @Override
        public void handle(final JsonObject visibles) {
            renderJson(request, visibles);
        }
    };
    listVisible(request, language, renderHandler);
}
/**
 * Collects the users and profile groups visible to the current user and
 * hands a {"users": [...], "groups": [...]} object to the handler. Group
 * display names are localized with the given Accept-Language.
 */
private void listVisible(final HttpServerRequest request, final String acceptLanguage, final Handler<JsonObject> handler) {
getUserInfos(eb, request, new Handler<UserInfos>() {
@Override
public void handle(final UserInfos user) {
// NOTE(review): user is assumed non-null here; getUserInfos may pass
// null when no session exists — confirm an upstream guard applies.
final JsonObject visibles = new JsonObject();
UserUtils.findVisibleUsers(eb, user.getUserId(), false, new Handler<JsonArray>() {
@Override
public void handle(JsonArray users) {
if (users != null) {
visibles.putArray("users", users);
}
// Groups are fetched only after users so both arrays are in place
// when the final handler runs.
UserUtils.findVisibleProfilsGroups(eb, user.getUserId(), new Handler<JsonArray>() {
@Override
public void handle(JsonArray groups) {
if (groups != null) {
for (Object g : groups) {
if (!(g instanceof JsonObject)) continue;
JsonObject group = (JsonObject) g;
// Localize the group's display name in place.
UserUtils.groupDisplayName(group, acceptLanguage);
}
visibles.putArray("groups", groups);
}
handler.handle(visibles);
}
});
}
});
}
});
}
/** Injects the CommunityService implementation (called at deployment time). */
public void setCommunityService(CommunityService communityService) {
this.communityService = communityService;
}
/**
 * GET /listallpages — lists every community visible to the current user
 * (same payload as /list).
 */
@Get("/listallpages")
@ApiDoc("List communities, visible by current user")
@SecuredAction(value = "", type = ActionType.AUTHENTICATED)
public void listAllPages(final HttpServerRequest request) {
    UserUtils.getUserInfos(eb, request, new Handler<UserInfos>() {
        @Override
        public void handle(final UserInfos user) {
            if (user != null) {
                Handler<Either<String, JsonArray>> handler = arrayResponseHandler(request);
                communityService.list(user, handler);
            } else {
                // Fix: previously the request was left unanswered when no
                // session user could be resolved; respond 401 like list().
                unauthorized(request);
            }
        }
    });
}
}
|
src/main/java/net/atos/entng/community/controllers/CommunityController.java
|
/*
* Copyright © Région Nord Pas de Calais-Picardie, Département 91, Région Aquitaine-Limousin-Poitou-Charentes, 2016.
*
* This file is part of OPEN ENT NG. OPEN ENT NG is a versatile ENT Project based on the JVM and ENT Core Project.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation (version 3 of the License).
*
* For the sake of explanation, any module that communicate over native
* Web protocols, such as HTTP, with OPEN ENT NG is outside the scope of this
* license and could be license under its own terms. This is merely considered
* normal use of OPEN ENT NG, and does not fall under the heading of "covered work".
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*/
package net.atos.entng.community.controllers;
import static org.entcore.common.http.response.DefaultResponseHandler.arrayResponseHandler;
import static org.entcore.common.http.response.DefaultResponseHandler.defaultResponseHandler;
import static org.entcore.common.http.response.DefaultResponseHandler.leftToResponse;
import static org.entcore.common.http.response.DefaultResponseHandler.notEmptyResponseHandler;
import static org.entcore.common.user.UserUtils.getUserInfos;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import fr.wseduc.rs.*;
import net.atos.entng.community.Community;
import net.atos.entng.community.services.CommunityService;
import org.entcore.common.notification.TimelineHelper;
import org.entcore.common.events.EventStore;
import org.entcore.common.events.EventStoreFactory;
import org.entcore.common.user.UserInfos;
import org.entcore.common.user.UserUtils;
import org.vertx.java.core.Handler;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.eventbus.Message;
import org.vertx.java.core.http.HttpServerRequest;
import org.vertx.java.core.http.RouteMatcher;
import org.vertx.java.core.json.JsonArray;
import org.vertx.java.core.json.JsonObject;
import org.vertx.java.platform.Container;
import fr.wseduc.rs.Delete;
import fr.wseduc.rs.Get;
import fr.wseduc.rs.Post;
import fr.wseduc.rs.Put;
import fr.wseduc.security.ActionType;
import fr.wseduc.security.SecuredAction;
import fr.wseduc.webutils.Either;
import fr.wseduc.webutils.I18n;
import fr.wseduc.webutils.http.BaseController;
import fr.wseduc.webutils.http.Renders;
import fr.wseduc.webutils.request.RequestUtils;
public class CommunityController extends BaseController {
private CommunityService communityService;
private final TimelineHelper timeline;
private static final JsonArray resourcesTypes = new JsonArray().add("read").add("contrib").add("manager");
private EventStore eventStore;
private enum CommunityEvent { ACCESS }
@Override
public void init(Vertx vertx, Container container, RouteMatcher rm,
Map<String, fr.wseduc.webutils.security.SecuredAction> securedActions) {
super.init(vertx, container, rm, securedActions);
eventStore = EventStoreFactory.getFactory().getEventStore(Community.class.getSimpleName());
}
public CommunityController(TimelineHelper helper) {
timeline = helper;
}
/** Renders the Community home view and records an ACCESS event for the statistics module. */
@Get("")
@SecuredAction("community.view")
public void view(HttpServerRequest request) {
renderView(request);
// Create event "access to application Community" and store it, for module "statistics"
eventStore.createAndStoreEvent(CommunityEvent.ACCESS.name(), request);
}
/**
 * POST "" — creates a community from a JSON body validated against the
 * "create" schema. Delegates to the private create(...) once the session
 * user is resolved; unauthenticated callers get a 401.
 */
@Post("")
@SecuredAction("community.create")
public void create(final HttpServerRequest request) {
    RequestUtils.bodyToJson(request, pathPrefix + "create", new Handler<JsonObject>() {
        @Override
        public void handle(final JsonObject body) {
            UserUtils.getUserInfos(eb, request, new Handler<UserInfos>() {
                @Override
                public void handle(final UserInfos user) {
                    if (user == null) {
                        unauthorized(request);
                        return;
                    }
                    CommunityController.this.create(user, request, body);
                }
            });
        }
    });
}
/**
 * Creates the hidden "pages" document backing the community, then the
 * community itself, then shares the page with the community's groups.
 * On any failure after the page was created, the page is deleted again so
 * no orphan document is left behind.
 */
private void create(final UserInfos user, final HttpServerRequest request, final JsonObject body) {
    UserUtils.getSession(eb, request, new Handler<JsonObject>() {
        @Override
        public void handle(JsonObject u) {
            // Hidden page that will host the community content.
            JsonObject p = new JsonObject()
                    .putString("title", body.getString("name"))
                    .putBoolean("hideInPages", true)
                    .putArray("pages", new JsonArray())
                    .putObject("referencedResources", new JsonObject());
            JsonObject pages = new JsonObject()
                    .putString("action", "create")
                    .putObject("user", u)
                    .putObject("page", p);
            eb.send("communityPages", pages, new Handler<Message<JsonObject>>() {
                @Override
                public void handle(Message<JsonObject> message) {
                    final JsonObject result = message.body().getObject("result");
                    if ("ok".equals(message.body().getString("status")) && result != null &&
                            !result.getString("_id", "").trim().isEmpty()) {
                        final String pageId = result.getString("_id");
                        body.putString("pageId", pageId);
                        communityService.create(body, user, new Handler<Either<String, JsonObject>>() {
                            @Override
                            public void handle(final Either<String, JsonObject> r) {
                                if (r.isRight()) {
                                    sharePage(pageId, user.getUserId(), r.right().getValue(), new Handler<Either<String, JsonObject>>(){
                                        public void handle(Either<String, JsonObject> event) {
                                            // Fix: this previously tested r.isLeft(), which is
                                            // always false inside the r.isRight() branch, so a
                                            // failed share was silently rendered as success.
                                            if (event.isLeft()) {
                                                leftToResponse(request, event.left());
                                                // Roll back the now-orphan page.
                                                eb.send("communityPages", new JsonObject().putString("action", "delete")
                                                        .putString("pageId", pageId));
                                                return;
                                            }
                                            r.right().getValue().putString("pageId", pageId);
                                            renderJson(request, r.right().getValue(), 201);
                                        }
                                    });
                                    createPageMarkups(pageId);
                                } else {
                                    // Community creation failed: delete the page again.
                                    leftToResponse(request, r.left());
                                    eb.send("communityPages", new JsonObject().putString("action", "delete")
                                            .putString("pageId", pageId));
                                }
                            }
                        });
                    } else {
                        renderError(request, message.body());
                    }
                }
            });
        }
    });
}
/**
 * Grants the community's three groups access to the backing page, in
 * sequence: read -> contrib -> manager, each with an increasing set of
 * page actions. The first failing share aborts the chain and passes a
 * Left to the handler; full success passes the final reply body as Right.
 * NOTE(review): the same "share" JsonObject is mutated and re-sent for each
 * level (groupId/actions overwritten) — this relies on the event bus
 * encoding the message at send time; confirm for the bus in use.
 */
private void sharePage(String pageId, String userId, final JsonObject value, final Handler<Either<String, JsonObject>> handler) {
final JsonObject share = new JsonObject().putString("action", "share")
.putString("pageId", pageId).putString("userId", userId);
// Read group: view-only access.
JsonObject r = share
.putString("groupId", value.getString("read"))
.putArray("actions", new JsonArray().add("net-atos-entng-community-controllers-PagesController|get"));
eb.send("communityPages", r, new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
if (!"ok".equals(message.body().getString("status"))) {
final String error_msg = "Error while sharing page (read) : " + message.body().getString("message");
log.error(error_msg);
handler.handle(new Either.Left<String, JsonObject>(error_msg));
return;
}
// Contrib group: view + update.
JsonObject c = share
.putString("groupId", value.getString("contrib"))
.putArray("actions", new JsonArray()
.add("net-atos-entng-community-controllers-PagesController|get")
.add("net-atos-entng-community-controllers-PagesController|update")
);
eb.send("communityPages", c, new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
if (!"ok".equals(message.body().getString("status"))) {
final String error_msg = "Error while sharing page (contrib) : " + message.body().getString("message");
log.error(error_msg);
handler.handle(new Either.Left<String, JsonObject>(error_msg));
return;
}
// Manager group: view + update + delete.
JsonObject m = share
.putString("groupId", value.getString("manager"))
.putArray("actions", new JsonArray()
.add("net-atos-entng-community-controllers-PagesController|get")
.add("net-atos-entng-community-controllers-PagesController|update")
.add("net-atos-entng-community-controllers-PagesController|delete")
);
eb.send("communityPages", m, new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
if (!"ok".equals(message.body().getString("status"))) {
final String error_msg = "Error while sharing page (manager) : " + message.body().getString("message");
log.error(error_msg);
handler.handle(new Either.Left<String, JsonObject>(error_msg));
return;
}
handler.handle(new Either.Right<String, JsonObject>(message.body()));
}
});
}
});
}
});
}
/**
 * Pushes the navigation markups ("edit" and "back to list" links) onto the
 * community page. Failures are only logged; callers are not notified.
 */
private void createPageMarkups(String pageId) {
    JsonArray viewMarkups = new JsonArray()
            .addObject(new JsonObject()
                    .putString("label", "community.edit")
                    .putString("resourceRight", "share")
                    .putString("href", "/community#/edit/" + pageId))
            .addObject(new JsonObject()
                    .putString("label", "community.back.to")
                    .putString("resourceRight", "read")
                    .putString("href", "/community#/list"));
    JsonObject page = new JsonObject()
            .putObject("markups", new JsonObject().putArray("view", viewMarkups));
    JsonObject updatePage = new JsonObject()
            .putString("action", "update")
            .putString("pageId", pageId)
            .putObject("page", page);
    eb.send("communityPages", updatePage, new Handler<Message<JsonObject>>() {
        @Override
        public void handle(Message<JsonObject> message) {
            if (!"ok".equals(message.body().getString("status"))) {
                log.error(message.body().getString("message"));
            }
        }
    });
}
/**
 * PUT /:id — updates a community. When the name changes, the backing page
 * title and markups are refreshed before the response is rendered.
 */
@Put("/:id")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void update(final HttpServerRequest request) {
    RequestUtils.bodyToJson(request, pathPrefix + "update", new Handler<JsonObject>() {
        @Override
        public void handle(JsonObject body) {
            if (body.size() == 0) {
                badRequest(request, "empty.json");
                return;
            }
            final String name = body.getString("name");
            final boolean renamed = name != null && !name.trim().isEmpty();
            final Handler<Either<String, JsonObject>> handler;
            if (renamed) {
                // A renamed community must also rename its backing page.
                handler = updatePageHandler(request, name);
            } else {
                handler = notEmptyResponseHandler(request);
            }
            communityService.update(request.params().get("id"), body, handler);
        }
    });
}
/**
 * Builds the handler used after a community rename: on success it pushes
 * the new title (and refreshed markups) to the backing page, then renders
 * the update result; page-update failures are logged but do not fail the
 * HTTP response.
 */
private Handler<Either<String, JsonObject>> updatePageHandler(final HttpServerRequest request, final String name) {
return new Handler<Either<String, JsonObject>>() {
@Override
public void handle(final Either<String, JsonObject> r) {
if (r.isRight()) {
// The pageId is internal plumbing: use it, then strip it from the payload.
String pageId = r.right().getValue().getString("pageId");
r.right().getValue().removeField("pageId");
JsonObject updatePage = new JsonObject()
.putString("action", "update")
.putString("pageId", pageId)
.putObject("page", new JsonObject()
.putString("title", name)
.putBoolean("hideInPages", true)
.putObject("markups", new JsonObject()
.putArray("view", new JsonArray()
.addObject(new JsonObject()
.putString("label", "community.edit")
.putString("resourceRight", "share")
.putString("href", "/community#/edit/" + pageId))
.addObject(new JsonObject()
.putString("label", "community.back.to")
.putString("resourceRight", "read")
.putString("href", "/community#/list")))));
eb.send("communityPages", updatePage, new Handler<Message<JsonObject>>() {
@Override
public void handle(Message<JsonObject> message) {
if (!"ok".equals(message.body().getString("status"))) {
// Best-effort: log the page failure but still answer 200.
log.error(message.body().getString("message"));
}
renderJson(request, r.right().getValue());
}
});
} else {
leftToResponse(request, r.left());
}
}
};
}
/**
 * DELETE /:id — removes the community, then asks the pages module to delete
 * its backing page together with the page's resources (fire-and-forget).
 */
@Delete("/:id")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void delete(final HttpServerRequest request) {
    communityService.delete(request.params().get("id"), new Handler<Either<String, JsonObject>>() {
        @Override
        public void handle(Either<String, JsonObject> r) {
            if (r.isLeft()) {
                leftToResponse(request, r.left());
                return;
            }
            JsonObject deletePage = new JsonObject()
                    .putString("action", "delete")
                    .putString("pageId", r.right().getValue().getString("pageId"))
                    .putBoolean("deleteResources", true);
            eb.send("communityPages", deletePage);
            ok(request);
        }
    });
}
/** GET /list — lists the communities visible to the authenticated user. */
@Get("/list")
@SecuredAction(value = "", type = ActionType.AUTHENTICATED)
public void list(final HttpServerRequest request) {
    UserUtils.getUserInfos(eb, request, new Handler<UserInfos>() {
        @Override
        public void handle(UserInfos user) {
            if (user == null) {
                unauthorized(request);
                return;
            }
            communityService.list(user, arrayResponseHandler(request));
        }
    });
}
/** GET /:id/details — returns one community's detail record (404 on empty). */
@Get("/:id/details")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void details(HttpServerRequest request) {
communityService.get(request.params().get("id"), notEmptyResponseHandler(request));
}
/**
 * PUT /:id/users — adds/removes community members per right level, then
 * sends a "community.share" timeline notification to each newly granted
 * list of users. Notifications are sent with the anti-flood check
 * disabled (#14787) so large share lists are delivered in full.
 */
@Put("/:id/users")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void manageUsers(final HttpServerRequest request) {
    final String id = request.params().get("id");
    RequestUtils.bodyToJson(request, pathPrefix + "manageUsers", new Handler<JsonObject>() {
        @Override
        public void handle(final JsonObject body) {
            if (body.size() > 0) {
                communityService.manageUsers(id, body, new Handler<Either<String, JsonObject>>() {
                    @Override
                    public void handle(Either<String, JsonObject> event) {
                        if (event.isRight()) {
                            UserUtils.getUserInfos(eb, request, new Handler<UserInfos>() {
                                @Override
                                public void handle(final UserInfos user) {
                                    communityService.get(id, user, new Handler<Either<String, JsonObject>>() {
                                        public void handle(Either<String, JsonObject> event) {
                                            if (event.isRight()) {
                                                //Populate notification parameters
                                                JsonObject params = new JsonObject()
                                                        .putString("resourceName", event.right().getValue().getString("name", ""))
                                                        .putString("resourceUri", "/community#/view/" + id)
                                                        .putString("uri", "/userbook/annuaire#" + user.getUserId() + "#" + user.getType())
                                                        .putString("username", user.getUsername());
                                                //Collect recipients per right level from the request body
                                                ArrayList<String> readList = new ArrayList<>();
                                                ArrayList<String> contribList = new ArrayList<>();
                                                ArrayList<String> managerList = new ArrayList<>();
                                                for (String field : body.getFieldNames()) {
                                                    if ("read".equals(field))
                                                        readList.addAll(body.getArray(field).toList());
                                                    else if ("contrib".equals(field))
                                                        contribList.addAll(body.getArray(field).toList());
                                                    else if ("manager".equals(field))
                                                        managerList.addAll(body.getArray(field).toList());
                                                }
                                                // Fix #14787: use the notifyTimeline overload whose
                                                // trailing "true" disables the anti-flood filter,
                                                // otherwise only part of a large list gets notified.
                                                if (readList.size() > 0) {
                                                    timeline.notifyTimeline(request, "community.share", user, readList,
                                                            request.params().get("id"), null, params.putString("shareType", "read"), true);
                                                }
                                                if (contribList.size() > 0) {
                                                    params.removeField("shareType");
                                                    timeline.notifyTimeline(request, "community.share", user, contribList,
                                                            request.params().get("id"), null, params.putString("shareType", "contrib"), true);
                                                }
                                                if (managerList.size() > 0) {
                                                    params.removeField("shareType");
                                                    timeline.notifyTimeline(request, "community.share", user, managerList,
                                                            request.params().get("id"), null, params.putString("shareType", "manager"), true);
                                                }
                                            }
                                        }
                                    });
                                }
                            });
                            // Respond immediately; notifications are fire-and-forget.
                            Renders.renderJson(request, event.right().getValue(), 200);
                        } else {
                            JsonObject error = new JsonObject()
                                    .putString("error", event.left().getValue());
                            Renders.renderJson(request, error, 400);
                        }
                    }
                });
            } else {
                badRequest(request, "empty.json");
            }
        }
    });
}
/**
 * GET /:id/users — lists the members of one community grouped by
 * membership type, plus the users/groups visible to the caller.
 */
@Get("/:id/users")
@SecuredAction(value = "", type = ActionType.RESOURCE)
public void listUsers(final HttpServerRequest request) {
    // Default to every sharing right when no "type" filter is supplied.
    final List<String> requestedTypes = request.params().getAll("type");
    final JsonArray types = (requestedTypes != null && !requestedTypes.isEmpty())
            ? new JsonArray(requestedTypes.toArray()) : resourcesTypes; //new JsonArray(RightsController.allowedSharingRights.toArray())
    communityService.listUsers(request.params().get("id"), types, new Handler<Either<String, JsonObject>>() {
        @Override
        public void handle(final Either<String, JsonObject> event) {
            final Handler<Either<String, JsonObject>> responder = defaultResponseHandler(request);
            if (event.isLeft()) {
                responder.handle(event);
                return;
            }
            final JsonObject members = event.right().getValue();
            listVisible(request, I18n.acceptLanguage(request), new Handler<JsonObject>() {
                @Override
                public void handle(final JsonObject visibles) {
                    members.putObject("visibles", visibles);
                    responder.handle(event);
                }
            });
        }
    });
}
/**
 * GET /visibles — renders the raw visible users/groups structure for the
 * current user.
 */
@Get("/visibles")
@SecuredAction("community.listVisibles")
public void listVisibles(final HttpServerRequest request) {
    final Handler<JsonObject> render = new Handler<JsonObject>() {
        @Override
        public void handle(final JsonObject visibles) {
            renderJson(request, visibles);
        }
    };
    listVisible(request, I18n.acceptLanguage(request), render);
}
/**
 * Collects the users and profile groups visible to the current user and
 * hands a {"users": [...], "groups": [...]} object to the handler. Group
 * display names are localized with the given Accept-Language.
 */
private void listVisible(final HttpServerRequest request, final String acceptLanguage, final Handler<JsonObject> handler) {
getUserInfos(eb, request, new Handler<UserInfos>() {
@Override
public void handle(final UserInfos user) {
// NOTE(review): user is assumed non-null here; getUserInfos may pass
// null when no session exists — confirm an upstream guard applies.
final JsonObject visibles = new JsonObject();
UserUtils.findVisibleUsers(eb, user.getUserId(), false, new Handler<JsonArray>() {
@Override
public void handle(JsonArray users) {
if (users != null) {
visibles.putArray("users", users);
}
// Groups are fetched after users so both arrays are ready together.
UserUtils.findVisibleProfilsGroups(eb, user.getUserId(), new Handler<JsonArray>() {
@Override
public void handle(JsonArray groups) {
if (groups != null) {
for (Object g : groups) {
if (!(g instanceof JsonObject)) continue;
JsonObject group = (JsonObject) g;
// Localize the group's display name in place.
UserUtils.groupDisplayName(group, acceptLanguage);
}
visibles.putArray("groups", groups);
}
handler.handle(visibles);
}
});
}
});
}
});
}
/** Injects the CommunityService implementation (called at deployment time). */
public void setCommunityService(CommunityService communityService) {
this.communityService = communityService;
}
/**
 * GET /listallpages — lists every community visible to the current user
 * (same payload as /list).
 */
@Get("/listallpages")
@ApiDoc("List communities, visible by current user")
@SecuredAction(value = "", type = ActionType.AUTHENTICATED)
public void listAllPages(final HttpServerRequest request) {
    UserUtils.getUserInfos(eb, request, new Handler<UserInfos>() {
        @Override
        public void handle(final UserInfos user) {
            if (user != null) {
                Handler<Either<String, JsonArray>> handler = arrayResponseHandler(request);
                communityService.list(user, handler);
            } else {
                // Fix: previously the request was left unanswered when no
                // session user could be resolved; respond 401 like list().
                unauthorized(request);
            }
        }
    });
}
}
|
[Fix] #14787 Disabled AntiFlood for Community sharing
|
src/main/java/net/atos/entng/community/controllers/CommunityController.java
|
[Fix] #14787 Disabled AntiFlood for Community sharing
|
|
Java
|
agpl-3.0
|
427d783d24cd32395fc5ae7483de00d983751cd8
| 0
|
DemandCube/Scribengin,DemandCube/Scribengin,DemandCube/Scribengin,DemandCube/Scribengin
|
package com.neverwinterdp.scribengin;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.consumer.SimpleConsumer;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
public class ScribenginAM extends AbstractApplicationMaster {
// hadoop jar target/scribengin-uber-0.0.1-SNAPSHOT.jar com.neverwinterdp.scribengin.Client -am_mem 300 -container_mem 300 --container_cnt 4 --hdfsjar /scribengin-uber-0.0.1-SNAPSHOT.jar --app_name scribe --command "echo" --am_class_name "com.neverwinterdp.scribengin.ScribenginAM" -topic "scribe" -kafka_seed_brokers "10.0.2.15" -kafka_port 9092
private static final Logger LOG = Logger.getLogger(ScribenginAM.class.getName());
@Parameter(names = {"-" + Constants.OPT_KAFKA_SEED_BROKERS, "--" + Constants.OPT_KAFKA_SEED_BROKERS}, variableArity = true)
private List<String> kafkaSeedBrokers;
@Parameter(names = {"-" + Constants.OPT_KAFKA_PORT, "--" + Constants.OPT_KAFKA_PORT})
private int port;
// TODO: This is not ideal.
// topic is repeated in topicList and topicMetadatMap. However, with jcommander automatically parses and store
// parsed cli arguments to member variables, this is the best I can come up with right now
@Parameter(names = {"-" + Constants.OPT_KAFKA_TOPIC, "--" + Constants.OPT_KAFKA_TOPIC}, variableArity = true)
private List<String> topicList;
// {topic(String) : { partition(integer) : PartitionMetaData }}
private Map<String, Map<Integer, PartitionMetadata> > topicMetadataMap;
/** Initializes the topic -> (partition -> metadata) map; CLI fields are filled by JCommander. */
public ScribenginAM() {
super();
topicMetadataMap = new HashMap<String, Map<Integer, PartitionMetadata>>();
}
/** Runs base initialization, then fetches partition metadata for every requested topic. */
public void init(String[] args) {
super.init(args);
LOG.info("calling init");
for (String topic : topicList) {
getMetaData(kafkaSeedBrokers, port, topic);
}
}
/**
 * Builds the container launch commands. Currently a stub: it only logs the
 * discovered topic/partition/leader metadata and returns an empty list.
 */
@Override
protected List<String> buildCommandList(int startingFrom, int containerCnt, String commandTemplate) {
// TODO: construnct the list of actual commands for containers to execute.
// A container should be able to read from more than one partition.
List<String> r = new ArrayList<String>();
for ( Map.Entry<String, Map<Integer, PartitionMetadata> > entry : topicMetadataMap.entrySet() ) {
String t = entry.getKey();
LOG.info("topic : " + t);
for ( Map.Entry<Integer, PartitionMetadata> innerEntry: entry.getValue().entrySet()) {
Integer partition = innerEntry.getKey();
PartitionMetadata meta = innerEntry.getValue();
LOG.info("\tpartition: " + partition);
LOG.info("\t\t leader: " + meta.leader());
}
}
return r;
}
//TODO: pass the following to each container
// 1) partition number
// 2) topic name
// 3) replica host
//
/**
 * Queries each seed broker for the topic's metadata and records every
 * discovered partition via storeMetadata.
 * NOTE(review): every seed broker is queried (no early exit), so the same
 * partitions may be stored repeatedly, and the SimpleConsumer is never
 * closed — consider breaking after the first successful broker and calling
 * consumer.close().
 */
private void getMetaData(List<String> seedBrokerList, int port, String topic) {
LOG.info("inside getMetaData"); //xxx
LOG.info("seedBrokerList size: " + seedBrokerList); //xxx
for (String seed: seedBrokerList) {
LOG.info("making a simple consumer"); //xxx
SimpleConsumer consumer = new SimpleConsumer(
seed,
port,
10000, // timeout
64*1024, // bufgerSize
"metaLookup" // clientId
);
List <String> topicList = Collections.singletonList(topic);
TopicMetadataRequest req = new TopicMetadataRequest(topicList);
kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);
List<TopicMetadata> metaDataList = resp.topicsMetadata();
LOG.info("metaDataList: " + metaDataList); //xxxx
for (TopicMetadata m: metaDataList) {
LOG.info("inside the metadatalist loop"); //xxx
LOG.info("m partitionsMetadata: " + m.partitionsMetadata()); //xxx
for (PartitionMetadata part : m.partitionsMetadata()) {
LOG.info("inside the partitionmetadata loop"); //xxx
storeMetadata(topic, part);
}
}
}
}
/**
 * Registers one partition's metadata under its topic, creating the
 * per-topic partition map on first sight of the topic.
 */
private void storeMetadata(String topic, PartitionMetadata p) {
    Integer id = p.partitionId();
    Map<Integer, PartitionMetadata> partitionMap;
    // Fix: the lookup previously used containsKey(id) — a partition Integer
    // against a String-keyed map, which is always false — so the per-topic
    // map was recreated on every call and earlier partitions were lost.
    if (topicMetadataMap.containsKey(topic)) {
        LOG.info("already crreated a partitionMap. Just retrieve it."); //xxx
        partitionMap = topicMetadataMap.get(topic);
    } else {
        LOG.info("making a new partitionMap"); //xxx
        partitionMap = new HashMap<Integer, PartitionMetadata>();
        topicMetadataMap.put(topic, partitionMap);
    }
    partitionMap.put(id, p);
}
/** Entry point: parse CLI args with JCommander, initialize and run the AM. */
public static void main(String[] args) {
    AbstractApplicationMaster am = new ScribenginAM();
    new JCommander(am, args);
    am.init(args);
    LOG.info("calling main");
    try {
        am.run();
    } catch (Exception e) {
        System.out.println("am.run throws: " + e);
        e.printStackTrace();
        // Fix: exit non-zero so YARN/the shell sees the failure; the
        // previous exit(0) masked errors as success.
        System.exit(1);
    }
}
}
|
src/main/java/com/neverwinterdp/scribengin/ScribenginAM.java
|
package com.neverwinterdp.scribengin;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import kafka.javaapi.PartitionMetadata;
import kafka.javaapi.TopicMetadata;
import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.consumer.SimpleConsumer;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
public class ScribenginAM extends AbstractApplicationMaster {
//@Parameter(names = {"-" + Constants.OPT_KAFKA_TOPIC, "--" + Constants.OPT_KAFKA_TOPIC})
//private String topic;
@Parameter(names = {"-" + Constants.OPT_KAFKA_SEED_BROKERS, "--" + Constants.OPT_KAFKA_SEED_BROKERS}, variableArity = true)
private List<String> kafkaSeedBrokers;
@Parameter(names = {"-" + Constants.OPT_KAFKA_PORT, "--" + Constants.OPT_KAFKA_PORT})
private int port;
// TODO: This is not ideal.
// topic is repeated in topicList and topicMetadatMap. However, with jcommander automatically parses and store
// parsed cli arguments to member variables, this is the best I can come up with right now
@Parameter(names = {"-" + Constants.OPT_KAFKA_TOPIC, "--" + Constants.OPT_KAFKA_TOPIC}, variableArity = true)
private List<String> topicList;
// {topic(String) : { partition(integer) : PartitionMetaData }}
private Map<String, Map<Integer, PartitionMetadata> > topicMetadataMap;
/** Initializes the topic -> (partition -> metadata) map; CLI fields are filled by JCommander. */
public ScribenginAM() {
super();
topicMetadataMap = new HashMap<String, Map<Integer, PartitionMetadata>>();
}
/** Runs base initialization, then fetches partition metadata for every requested topic. */
public void init(String[] args) {
super.init(args);
LOG.info("calling init");
for (String topic : topicList) {
getMetaData(kafkaSeedBrokers, port, topic);
}
}
/**
 * Builds the container launch commands. Currently a stub: it only logs the
 * discovered topic/partition/leader metadata and returns an empty list.
 */
@Override
protected List<String> buildCommandList(int startingFrom, int containerCnt, String commandTemplate) {
// TODO: construnct the list of actual commands for containers to execute.
// A container should be able to read from more than one partition.
List<String> r = new ArrayList<String>();
for ( Map.Entry<String, Map<Integer, PartitionMetadata> > entry : topicMetadataMap.entrySet() ) {
String t = entry.getKey();
LOG.info("topic : " + t);
for ( Map.Entry<Integer, PartitionMetadata> innerEntry: entry.getValue().entrySet()) {
Integer partition = innerEntry.getKey();
PartitionMetadata meta = innerEntry.getValue();
LOG.info("\tpartition: " + partition);
LOG.info("\t\t leader: " + meta.leader());
}
}
return r;
}
//TODO: pass the following to each container
// 1) partition number
// 2) topic name
// 3) replica host
//
/**
 * Queries each seed broker for the topic's metadata and records every
 * discovered partition via storeMetadata.
 * NOTE(review): every broker is queried in turn and the SimpleConsumer is
 * never closed; consider an early exit plus consumer.close().
 */
private void getMetaData(List<String> seedBrokerList, int port, String topic) {
LOG.info("inside getMetaData"); //xxx
LOG.info("seedBrokerList size: " + seedBrokerList); //xxx
for (String seed: seedBrokerList) {
LOG.info("making a simple consumer"); //xxx
SimpleConsumer consumer = new SimpleConsumer(
seed,
port,
10000, // timeout
64*1024, // bufgerSize
"metaLookup" // clientId
);
List <String> topicList = Collections.singletonList(topic);
TopicMetadataRequest req = new TopicMetadataRequest(topicList);
kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);
List<TopicMetadata> metaDataList = resp.topicsMetadata();
LOG.info("metaDataList: " + metaDataList); //xxxx
for (TopicMetadata m: metaDataList) {
LOG.info("inside the metadatalist loop"); //xxx
LOG.info("m partitionsMetadata: " + m.partitionsMetadata()); //xxx
// QUESTION: If there's no established leader yet, ie. no data in kafka, it will return an empty list.
for (PartitionMetadata part : m.partitionsMetadata()) {
LOG.info("inside the partitionmetadata loop"); //xxx
storeMetadata(topic, part);
}
}
}
}
/**
 * Registers one partition's metadata under its topic, creating the
 * per-topic partition map on first sight of the topic.
 */
private void storeMetadata(String topic, PartitionMetadata p) {
    Integer id = p.partitionId();
    Map<Integer, PartitionMetadata> partitionMap;
    // Fix: the lookup previously used containsKey(id) — a partition Integer
    // against a String-keyed map, which is always false — so the per-topic
    // map was recreated on every call and earlier partitions were lost.
    if (topicMetadataMap.containsKey(topic)) {
        LOG.info("already crreated a partitionMap. Just retrieve it."); //xxx
        partitionMap = topicMetadataMap.get(topic);
    } else {
        LOG.info("making a new partitionMap"); //xxx
        partitionMap = new HashMap<Integer, PartitionMetadata>();
        topicMetadataMap.put(topic, partitionMap);
    }
    partitionMap.put(id, p);
}
/** Entry point: parse CLI args with JCommander, initialize and run the AM. */
public static void main(String[] args) {
    AbstractApplicationMaster am = new ScribenginAM();
    new JCommander(am, args);
    am.init(args);
    LOG.info("calling main");
    try {
        am.run();
    } catch (Exception e) {
        System.out.println("am.run throws: " + e);
        e.printStackTrace();
        // Fix: exit non-zero so YARN/the shell sees the failure; the
        // previous exit(0) masked errors as success.
        System.exit(1);
    }
}
}
|
ScribenginAM creates its own logger.
|
src/main/java/com/neverwinterdp/scribengin/ScribenginAM.java
|
ScribenginAM creates its own logger.
|
|
Java
|
agpl-3.0
|
b654036a43bf783e40d6893472d1602b964d8ec7
| 0
|
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
|
b741d41c-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
b73c65f4-2e60-11e5-9284-b827eb9e62be
|
b741d41c-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
b741d41c-2e60-11e5-9284-b827eb9e62be
|
|
Java
|
agpl-3.0
|
b713d826790535b00dd0c21dbe671a7948eab629
| 0
|
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
|
ca1c1566-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
ca16a2b6-2e60-11e5-9284-b827eb9e62be
|
ca1c1566-2e60-11e5-9284-b827eb9e62be
|
hello.java
|
ca1c1566-2e60-11e5-9284-b827eb9e62be
|
|
Java
|
lgpl-2.1
|
4bfca6c8fbfc2bac80f543bb770a8f362cf7ca2d
| 0
|
blue-systems-group/project.maybe.polyglot,liujed/polyglot-eclipse,liujed/polyglot-eclipse,blue-systems-group/project.maybe.polyglot,xcv58/polyglot,liujed/polyglot-eclipse,xcv58/polyglot,liujed/polyglot-eclipse,blue-systems-group/project.maybe.polyglot,xcv58/polyglot,xcv58/polyglot,blue-systems-group/project.maybe.polyglot
|
package parser;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
import java_cup.lalr_item;
import java_cup.lalr_state;
import java_cup.non_terminal;
import java_cup.production;
import java_cup.symbol;
import java_cup.symbol_part;
import java_cup.terminal;
public class UnifiedExample {
protected static final int PRODUCTION_COST = 50;
protected static final int REDUCE_COST = 1;
protected static final int SHIFT_COST = 1;
protected static final int UNSHIFT_COST = 1;
protected static final int DUPLICATE_PRODUCTION_COST = 0;
protected static final long ASSURANCE_LIMIT = 2 * 1000000000L;
protected static final long TIME_LIMIT = 5 * 1000000000L;
protected lalr_state conflict;
protected lalr_item itm1;
protected lalr_item itm2;
protected terminal nextSym;
protected boolean isShiftReduce;
protected List<StateItem> shortestConflictPath;
protected Set<lalr_state> scpSet;
protected Set<lalr_state> rppSet;
/**
 * Builds an example context for the conflict between itm1 and itm2 in
 * {@code conflict} on lookahead {@code nextSym}. The items are normalized
 * so that this.itm1 is always a reduce item (shift/reduce conflicts swap
 * the pair if needed). Also precomputes the shortest path from the start
 * state to the conflict state, the set of states on that path (scpSet),
 * and the states on the path at or after the point where the reduce
 * production first appears (rppSet).
 */
public UnifiedExample(lalr_state conflict, lalr_item itm1, lalr_item itm2,
terminal nextSym) {
this.conflict = conflict;
this.nextSym = nextSym;
if (itm1.dot_at_end()) {
if (itm2.dot_at_end()) {
// reduce/reduce conflict
this.itm1 = itm1;
this.itm2 = itm2;
isShiftReduce = false;
}
else {
// shift/reduce conflict
this.itm1 = itm1;
this.itm2 = itm2;
isShiftReduce = true;
}
}
else if (itm2.dot_at_end()) {
// shift/reduce conflict: swap so itm1 is the reduce item
this.itm1 = itm2;
this.itm2 = itm1;
isShiftReduce = true;
}
else throw new Error("Expected at least one reduce item.");
shortestConflictPath = findShortestPathFromStart();
scpSet = new HashSet<>(shortestConflictPath.size());
production reduceProd = this.itm1.the_production();
rppSet = new HashSet<>(reduceProd.rhs_length());
// Walk the path once, flipping reduceProdReached at the first StateItem
// whose item belongs to the reduce production; from there on, states are
// also added to rppSet.
boolean reduceProdReached = false;
for (StateItem si : shortestConflictPath) {
scpSet.add(si.state);
reduceProdReached =
reduceProdReached || si.item.the_production() == reduceProd;
if (reduceProdReached) rppSet.add(si.state);
}
}
    /**
     * Computes the shortest lookahead-sensitive path from the start item
     * to the conflict reduce item (itm1) using breadth-first search over
     * (StateItem, lookahead-set) pairs.  Tracking lookahead sets ensures
     * the returned path makes {@code nextSym} a viable lookahead at the
     * conflict.
     * @return the path as a list of StateItems, beginning at the start item
     * @throws Error if the conflict item is unreachable
     */
    protected List<StateItem> findShortestPathFromStart() {
        StateItem.init();
        long start = System.nanoTime();
        lalr_state startState = lalr_state.startState();
        lalr_item startItem = lalr_state.startItem();
        StateItem source = StateItem.lookup(startState, startItem);
        StateItem target = StateItem.lookup(conflict, itm1);
        Queue<List<StateItemWithLookahead>> queue = new LinkedList<>();
        Set<StateItemWithLookahead> visited = new HashSet<>();
        {
            List<StateItemWithLookahead> init = new LinkedList<>();
            init.add(new StateItemWithLookahead(source,
                StateItem.symbolSet(source.item.lookahead())));
            queue.add(init);
        }
        // BFS: the first path that reaches the target is a shortest one.
        while (!queue.isEmpty()) {
            List<StateItemWithLookahead> path = queue.remove();
            StateItemWithLookahead last = path.get(path.size() - 1);
            if (visited.contains(last)) continue;
            visited.add(last);
            if (target.equals(last.si) && last.lookahead.contains(nextSym)) {
                // Done
                // System.err.println(path);
                System.err.println("reachable: " + (System.nanoTime() - start));
                List<StateItem> shortestConflictPath =
                    new ArrayList<>(path.size());
                for (StateItemWithLookahead sil : path)
                    shortestConflictPath.add(sil.si);
                return shortestConflictPath;
            }
            // Transition steps keep the current lookahead set unchanged.
            if (StateItem.trans.containsKey(last.si)) {
                for (Map.Entry<symbol, StateItem> trans : StateItem.trans.get(last.si)
                    .entrySet()) {
                    StateItem nextSI = trans.getValue();
                    StateItemWithLookahead next =
                        new StateItemWithLookahead(nextSI, last.lookahead);
                    List<StateItemWithLookahead> nextPath =
                        new LinkedList<>(path);
                    nextPath.add(next);
                    queue.add(nextPath);
                }
            }
            // Production steps: the lookahead becomes the set of symbols
            // that can start the remainder of the current item (or is
            // kept unchanged if the dot would be at the end).
            if (StateItem.prods.containsKey(last.si)) {
                production prod = last.si.item.the_production();
                int len = prod.rhs_length();
                int pos = last.si.item.dot_pos() + 1;
                Set<symbol> lookahead;
                if (pos == len)
                    lookahead = last.lookahead;
                else {
                    symbol sym = rhs(prod, pos);
                    if (sym instanceof terminal)
                        lookahead = symbolSet(sym);
                    else lookahead =
                        StateItem.symbolSet(((non_terminal) sym).first_set());
                }
                for (lalr_item itm : StateItem.prods.get(last.si)) {
                    StateItem nextSI = StateItem.lookup(last.si.state, itm);
                    StateItemWithLookahead next =
                        new StateItemWithLookahead(nextSI, lookahead);
                    List<StateItemWithLookahead> nextPath =
                        new LinkedList<>(path);
                    nextPath.add(next);
                    queue.add(nextPath);
                }
            }
        }
        throw new Error("Cannot find shortest path to conflict state.");
    }
protected class StateItemWithLookahead {
protected StateItem si;
protected Set<symbol> lookahead;
protected StateItemWithLookahead(StateItem si, Set<symbol> lookahead) {
this.si = si;
this.lookahead = lookahead;
}
@Override
public String toString() {
return si.toString() + lookahead.toString();
}
@Override
public int hashCode() {
return si.hashCode() * 31 + lookahead.hashCode();
}
protected boolean equals(StateItemWithLookahead sil) {
return si.equals(sil.si) && lookahead.equals(sil.lookahead);
}
@Override
public boolean equals(Object o) {
if (!(o instanceof StateItemWithLookahead)) return false;
return equals((StateItemWithLookahead) o);
}
}
    /**
     * Entry point: computes a counterexample for the conflict.
     * @return a unifying counterexample when one is found within the
     *         time limit, otherwise a pair of diverging examples
     */
    public Counterexample find() {
        // Ensure the static StateItem lookup tables have been built.
        StateItem.init();
        return findExample();
    }
protected Counterexample findExample() {
SearchState initial =
new SearchState(StateItem.lookup(conflict, itm1),
StateItem.lookup(conflict, itm2));
long start;
Map<Integer, FixedComplexitySearchState> fcssMap = new HashMap<>();
PriorityQueue<FixedComplexitySearchState> pq = new PriorityQueue<>();
Map<List<StateItem>, Set<List<StateItem>>> visited = new HashMap<>();
add(pq, fcssMap, visited, initial);
start = System.nanoTime();
boolean assurancePrinted = false;
SearchState stage3result = null;
while (!pq.isEmpty()) {
FixedComplexitySearchState fcss = pq.remove();
for (SearchState ss : fcss.sss) {
int stage = 2;
StateItem si1src = ss.states1.get(0);
StateItem si2src = ss.states2.get(0);
visited(visited, ss);
if (ss.reduceDepth < 0 && ss.shiftDepth < 0) {
// Stage 3
if (!scpSet.contains(si1src.state)
|| !scpSet.contains(si2src.state)) {
// The current head state is not on the shortest path.
// Ignore this search state.
continue;
}
stage = 3;
if (si1src.item.the_production().lhs().the_symbol() == si2src.item.the_production()
.lhs()
.the_symbol()
&& hasCommonPrefix(si1src.item, si2src.item)) {
if (ss.derivs1.size() == 1
&& ss.derivs2.size() == 1
&& ss.derivs1.get(0).sym == ss.derivs2.get(0).sym) {
System.err.println(ss.complexity);
return new Counterexample(ss.derivs1.get(0),
ss.derivs2.get(0),
true);
}
if (stage3result == null) stage3result = ss;
stage = 4;
}
}
if (!assurancePrinted
&& System.nanoTime() - start > ASSURANCE_LIMIT
&& stage3result != null) {
System.err.println("Productions leading up to the conflict stage found. Still finding a possible unified example...");
assurancePrinted = true;
}
if (System.nanoTime() - start > TIME_LIMIT
&& stage3result != null) {
System.err.println("time limit exceeded: "
+ (System.nanoTime() - start));
System.err.println(stage3result.complexity);
return completeDivergingExamples(stage3result);
}
StateItem si1 = ss.states1.get(ss.states1.size() - 1);
StateItem si2 = ss.states2.get(ss.states2.size() - 1);
boolean si1reduce = si1.item.dot_at_end();
boolean si2reduce = si2.item.dot_at_end();
symbol si1sym = si1reduce ? null : si1.item.symbol_after_dot();
symbol si2sym = si2reduce ? null : si2.item.symbol_after_dot();
if (!si1reduce && !si2reduce) {
// Both paths are not reduce items.
// Two actions are possible:
// - Make a transition on the next symbol of the items,
// if they are the same.
// - Take a production step, avoiding duplicates as necessary.
if (si1sym == si2sym) {
StateItem si1last =
StateItem.trans.get(si1).get(si1sym);
StateItem si2last =
StateItem.trans.get(si2).get(si2sym);
List<Derivation> derivs1 = new LinkedList<>();
List<Derivation> derivs2 = new LinkedList<>();
List<StateItem> states1 = new LinkedList<>();
List<StateItem> states2 = new LinkedList<>();
derivs1.add(new Derivation(si1sym));
states1.add(si1last);
derivs2.add(new Derivation(si2sym));
states2.add(si2last);
// Check for subsequent nullable nonterminals
production prod1 = si1.item.the_production();
for (int pos = si1.item.dot_pos() + 1, len1 =
prod1.rhs_length(); pos < len1; pos++) {
symbol sp = rhs(prod1, pos);
if (!sp.is_non_term()) break;
non_terminal nt = (non_terminal) sp;
if (!nt.nullable()) break;
si1last = StateItem.trans.get(si1last).get(nt);
derivs1.add(new Derivation(nt,
Collections.<Derivation> emptyList()));
states1.add(si1last);
}
production prod2 = si2.item.the_production();
for (int pos = si1.item.dot_pos() + 1, len =
prod2.rhs_length(); pos < len; pos++) {
symbol sp = rhs(prod2, pos);
if (!sp.is_non_term()) break;
non_terminal nt = (non_terminal) sp;
if (!nt.nullable()) break;
si2last = StateItem.trans.get(si2last).get(nt);
derivs2.add(new Derivation(nt,
Collections.<Derivation> emptyList()));
states2.add(si2last);
}
for (int i = 1, size1 = derivs1.size(); i <= size1; i++) {
List<Derivation> subderivs1 =
new ArrayList<>(derivs1.subList(0, i));
List<StateItem> substates1 =
new ArrayList<>(states1.subList(0, i));
for (int j = 1, size2 = derivs2.size(); j <= size2; j++) {
List<Derivation> subderivs2 =
new ArrayList<>(derivs2.subList(0, j));
List<StateItem> substates2 =
new ArrayList<>(states2.subList(0, j));
SearchState copy = ss.copy();
copy.derivs1.addAll(subderivs1);
copy.states1.addAll(substates1);
copy.derivs2.addAll(subderivs2);
copy.states2.addAll(substates2);
copy.complexity += 2 * SHIFT_COST;
add(pq, fcssMap, visited, copy);
}
}
}
if (si1sym instanceof non_terminal
&& StateItem.prods.containsKey(si1))
for (lalr_item itm1 : StateItem.prods.get(si1)) {
// Take production step only if lhs is not nullable and
// if first rhs symbol is compatible with the other path
boolean applicable =
!itm1.dot_at_end()
&& compatible(itm1.symbol_after_dot(),
si2sym);
if (!applicable) continue;
production prod = si1.item.the_production();
production nextProd = itm1.the_production();
if (!StateItem.productionAllowed(prod, nextProd))
continue;
SearchState copy = ss.copy();
StateItem next = StateItem.lookup(si1.state, itm1);
// TODO
if (copy.states1.contains(next))
copy.complexity += DUPLICATE_PRODUCTION_COST;
copy.states1.add(next);
copy.complexity += PRODUCTION_COST;
add(pq, fcssMap, visited, copy);
}
if (si2sym instanceof non_terminal
&& StateItem.prods.containsKey(si2))
for (lalr_item itm2 : StateItem.prods.get(si2)) {
// Take production step only if lhs is not nullable and
// if first rhs symbol is compatible with the other path
boolean applicable =
!itm2.dot_at_end()
&& compatible(itm2.symbol_after_dot(),
si1sym);
if (!applicable) continue;
production prod = si2.item.the_production();
production nextProd = itm2.the_production();
if (!StateItem.productionAllowed(prod, nextProd))
continue;
SearchState copy = ss.copy();
StateItem next = StateItem.lookup(si2.state, itm2);
// TODO
if (copy.states2.contains(next))
copy.complexity += DUPLICATE_PRODUCTION_COST;
copy.states2.add(next);
if (ss.shiftDepth >= 0) copy.shiftDepth++;
copy.complexity += PRODUCTION_COST;
add(pq, fcssMap, visited, copy);
}
}
else {
// One of the paths requires a reduction.
production prod1 = si1.item.the_production();
int len1 = prod1.rhs_length();
production prod2 = si2.item.the_production();
int len2 = prod2.rhs_length();
int size1 = ss.states1.size();
int size2 = ss.states2.size();
boolean ready1 = si1reduce && size1 > len1;
boolean ready2 = si2reduce && size2 > len2;
// If there is a path ready for reduction
// without being prepended further, reduce.
if (ready1) {
List<SearchState> reduced1 = ss.reduce1(si2sym);
if (ready2) {
reduced1.add(ss);
for (SearchState red1 : reduced1) {
for (SearchState candidate : red1.reduce2(si1sym))
add(pq, fcssMap, visited, candidate);
if (si1reduce && red1 != ss)
add(pq, fcssMap, visited, red1);
}
}
else {
for (SearchState candidate : reduced1)
add(pq, fcssMap, visited, candidate);
}
}
else if (ready2) {
List<SearchState> reduced2 = ss.reduce2(si1sym);
for (SearchState candidate : reduced2)
add(pq, fcssMap, visited, candidate);
}
// Otherwise, prepend both paths and continue.
else {
symbol sym;
if (si1reduce && !ready1)
sym = rhs(prod1, len1 - size1);
else sym = rhs(prod2, len2 - size2);
for (SearchState prepended : ss.prepend(sym,
null,
null,
ss.reduceDepth >= 0
? rppSet
: scpSet
/*: ss.shiftDepth < 0
? scpSet
: null*/)) {
add(pq, fcssMap, visited, prepended);
}
}
}
}
fcssMap.remove(fcss.complexity);
}
// No unifying examples. Construct examples from common shortest path
// to conflict state.
return exampleFromShortestPath();
}
protected void add(PriorityQueue<FixedComplexitySearchState> pq,
Map<Integer, FixedComplexitySearchState> fcssMap,
Map<List<StateItem>, Set<List<StateItem>>> visited, SearchState ss) {
Set<List<StateItem>> visited1 = visited.get(ss.states1);
if (visited1 != null && visited1.contains(ss.states2)) return;
FixedComplexitySearchState fcss = fcssMap.get(ss.complexity);
if (fcss == null) {
fcss = new FixedComplexitySearchState(ss.complexity);
fcssMap.put(ss.complexity, fcss);
pq.add(fcss);
}
fcss.add(ss);
}
protected void visited(Map<List<StateItem>, Set<List<StateItem>>> visited,
SearchState ss) {
Set<List<StateItem>> visited1 = visited.get(ss.states1);
if (visited1 == null) {
visited1 = new HashSet<>();
visited.put(ss.states1, visited1);
}
visited1.add(ss.states2);
}
    /**
     * Constructs a non-unifying counterexample: one derivation follows
     * the precomputed shortest conflict path for itm1; a second path for
     * itm2 is found by walking backwards from the conflict state while
     * staying state-compatible with the shortest path.
     * @return a non-unifying counterexample
     * @throws Error if no derivation for itm2 can be aligned with the path
     */
    protected Counterexample exampleFromShortestPath() {
        StateItem si = StateItem.lookup(conflict, itm2);
        List<StateItem> result = new LinkedList<>();
        result.add(si);
        ListIterator<StateItem> itr =
            shortestConflictPath.listIterator(shortestConflictPath.size());
        // refsi is the last StateItem in this state of the shortest path.
        StateItem refsi = itr.previous();
        for (; refsi != null;) {
            // Construct a list of items in the same state as refsi.
            // prevrefsi is the last StateItem in the previous state.
            List<StateItem> refsis = new LinkedList<>();
            refsis.add(refsi);
            StateItem prevrefsi = itr.hasPrevious() ? itr.previous() : null;
            if (prevrefsi != null) {
                // Walk back over production steps (dot position does not
                // advance by one) to collect same-state items.
                for (int curPos = refsi.item.dot_pos(), prevPos =
                    prevrefsi.item.dot_pos(); prevrefsi != null
                    && prevPos + 1 != curPos;) {
                    refsis.add(0, prevrefsi);
                    curPos = prevPos;
                    if (itr.hasPrevious()) {
                        prevrefsi = itr.previous();
                        prevPos = prevrefsi.item.dot_pos();
                    }
                    else prevrefsi = null;
                }
            }
            if (si == refsi || si.item == lalr_state.startItem()) {
                // Reached common item; prepend to the beginning.
                refsis.remove(refsis.size() - 1);
                result.addAll(0, refsis);
                if (prevrefsi != null) result.add(0, prevrefsi);
                while (itr.hasPrevious())
                    result.add(0, itr.previous());
                Derivation deriv1 =
                    completeDivergingExample(shortestConflictPath);
                Derivation deriv2 = completeDivergingExample(result);
                return new Counterexample(deriv1, deriv2, false);
            }
            int pos = si.item.dot_pos();
            if (pos == 0) {
                // For a production item, find a sequence of items within the
                // same state that leads to this production.
                List<StateItem> init = new LinkedList<>();
                init.add(si);
                Queue<List<StateItem>> queue = new LinkedList<>();
                queue.add(init);
                while (!queue.isEmpty()) {
                    List<StateItem> sis = queue.remove();
                    StateItem sisrc = sis.get(0);
                    if (sisrc.item == lalr_state.startItem()) {
                        sis.remove(sis.size() - 1);
                        result.addAll(0, sis);
                        si = sisrc;
                        break;
                    }
                    int srcpos = sisrc.item.dot_pos();
                    if (srcpos > 0) {
                        // Determine if reverse transition is possible.
                        production prod = sisrc.item.the_production();
                        symbol sym = rhs(prod, srcpos - 1);
                        for (StateItem prevsi : StateItem.revTrans.get(sisrc)
                            .get(sym)) {
                            // Only look for state compatible with the shortest path.
                            // NOTE(review): prevrefsi may be null here when the
                            // shortest path was exhausted above -- confirm that
                            // cannot happen before the common item is reached.
                            if (prevsi.state != prevrefsi.state) continue;
                            sis.remove(sis.size() - 1);
                            result.addAll(0, sis);
                            result.add(0, prevsi);
                            si = prevsi;
                            refsi = prevrefsi;
                            queue.clear();
                            break;
                        }
                    }
                    else {
                        // Still a production item: enqueue the items whose
                        // expansion produced it, avoiding cycles.
                        production prod = sisrc.item.the_production();
                        symbol lhs = prod.lhs().the_symbol();
                        for (lalr_item prev : StateItem.revProds.get(sisrc.state)
                            .get(lhs)) {
                            StateItem prevsi =
                                StateItem.lookup(sisrc.state, prev);
                            if (sis.contains(prevsi)) continue;
                            List<StateItem> prevsis = new LinkedList<>(sis);
                            prevsis.add(0, prevsi);
                            queue.add(prevsis);
                        }
                    }
                }
            }
            else {
                // If not a production item, make a reverse transition.
                production prod = si.item.the_production();
                symbol sym = rhs(prod, pos - 1);
                for (StateItem prevsi : StateItem.revTrans.get(si).get(sym)) {
                    // Only look for state compatible with the shortest path.
                    if (prevsi.state != prevrefsi.state) continue;
                    result.add(0, prevsi);
                    si = prevsi;
                    refsi = prevrefsi;
                    break;
                }
            }
        }
        throw new Error("Cannot find derivation to conflict state.");
    }
protected Counterexample completeDivergingExamples(SearchState ss) {
Derivation deriv1 = completeDivergingExample(ss.states1, ss.derivs1);
Derivation deriv2 = completeDivergingExample(ss.states2, ss.derivs2);
return new Counterexample(deriv1, deriv2, false);
}
    /** Convenience overload: completes a state path with no pre-built derivations. */
    protected Derivation completeDivergingExample(List<StateItem> states) {
        return completeDivergingExample(states,
            Collections.<Derivation> emptyList());
    }
    /**
     * Completes a partial parse into a single derivation by walking the
     * state stack top-down: at each level the symbols around the dot are
     * filled in (consuming pre-built derivations where available) and the
     * accumulated children are wrapped into a derivation of the LHS.
     * @param states the simulated state stack (bottom of stack first)
     * @param derivs derivations already built for the topmost symbols
     * @return the derivation rooted at the bottommost production's LHS
     */
    protected Derivation completeDivergingExample(List<StateItem> states,
        List<Derivation> derivs) {
        List<Derivation> result = new LinkedList<>();
        ListIterator<Derivation> dItr = derivs.listIterator(derivs.size());
        for (ListIterator<StateItem> sItr = states.listIterator(states.size()); sItr.hasPrevious();) {
            StateItem si = sItr.previous();
            int pos = si.item.dot_pos();
            production prod = si.item.the_production();
            int len = prod.rhs_length();
            // symbols after dot
            if (result.isEmpty()) {
                // Innermost level: mark the conflict point with a dot.
                if (derivs.isEmpty()) result.add(Derivation.dot);
                if (!si.item.dot_at_end())
                    result.add(new Derivation(rhs(prod, pos)));
            }
            for (int i = pos + 1; i < len; i++) {
                result.add(new Derivation(rhs(prod, i)));
            }
            // symbols before dot
            for (int i = pos - 1; i >= 0; i--) {
                // Each consumed symbol also consumes one stack entry.
                if (sItr.hasPrevious()) sItr.previous();
                result.add(0, dItr.hasPrevious()
                    ? dItr.previous() : new Derivation(rhs(prod, i)));
            }
            // completing the derivation
            symbol lhs = prod.lhs().the_symbol();
            Derivation deriv = new Derivation(lhs, result);
            result = new LinkedList<>();
            result.add(deriv);
        }
        return result.get(0);
    }
protected static class FixedComplexitySearchState implements
Comparable<FixedComplexitySearchState> {
protected int complexity;
protected Set<SearchState> sss;
protected FixedComplexitySearchState(int complexity) {
this.complexity = complexity;
sss = new HashSet<>();
}
protected void add(SearchState ss) {
sss.add(ss);
}
@Override
public int compareTo(FixedComplexitySearchState o) {
return complexity - o.complexity;
}
}
    /**
     * A search state consists of:
     * - a pair of partial parses, one for each of the conflict LALR
     *   items, each made of:
     *   - a list of derivations, simulating the parser's symbol stack, and
     *   - a list of StateItems, simulating the parser's state stack but
     *     including explicit production steps;
     * - a complexity of the partial parse trees, determined by the number
     *   of states and production steps the parse has to encounter; and
     * - a shift depth, indicating the number of unreduced production
     *   steps that have been taken from the original shift item.  This
     *   helps keep track of when the shift conflict item is reduced.
     * @author Chinawat
     *
     */
    protected class SearchState implements Comparable<SearchState> {
        // Simulated symbol stacks (as derivation trees), one per path.
        protected List<Derivation> derivs1, derivs2;
        // Simulated state stacks, including explicit production steps.
        protected List<StateItem> states1, states2;
        // complexity: accumulated cost of this pair of partial parses.
        // reduceDepth/shiftDepth: go negative once the reduce/shift
        // conflict item, respectively, has been reduced.
        protected int complexity, reduceDepth, shiftDepth;

        /** Starts a search at the two conflict items themselves. */
        protected SearchState(StateItem si1, StateItem si2) {
            derivs1 = new LinkedList<>();
            derivs2 = new LinkedList<>();
            states1 = new LinkedList<>();
            states2 = new LinkedList<>();
            states1.add(si1);
            states2.add(si2);
            complexity = 0;
            reduceDepth = 0;
            shiftDepth = 0;
        }

        /** Copying constructor; lists are copied, elements shared. */
        private SearchState(List<Derivation> derivs1, List<Derivation> derivs2,
            List<StateItem> states1, List<StateItem> states2,
            int complexity, int reduceDepth, int shiftDepth) {
            this.derivs1 = new LinkedList<>(derivs1);
            this.derivs2 = new LinkedList<>(derivs2);
            this.states1 = new LinkedList<>(states1);
            this.states2 = new LinkedList<>(states2);
            this.complexity = complexity;
            this.reduceDepth = reduceDepth;
            this.shiftDepth = shiftDepth;
        }

        protected SearchState copy() {
            return new SearchState(derivs1,
                derivs2,
                states1,
                states2,
                complexity,
                reduceDepth,
                shiftDepth);
        }

        protected List<SearchState> prepend(symbol sym, symbol nextSym1,
            symbol nextSym2) {
            return prepend(sym, nextSym1, nextSym2, null);
        }

        /**
         * Extends both parses backwards over {@code sym}, pairing up
         * predecessor StateItems whose heads land in a common state.
         * {@code guide}, when non-null, restricts the states considered;
         * {@code nextSym1}/{@code nextSym2} override the lookahead sets
         * used for each path (null means the item's own lookahead).
         */
        protected List<SearchState> prepend(symbol sym, symbol nextSym1,
            symbol nextSym2, Set<lalr_state> guide) {
            List<SearchState> result = new LinkedList<>();
            SearchState ss = this;
            StateItem si1src = ss.states1.get(0);
            StateItem si2src = ss.states2.get(0);
            Set<symbol> si1lookahead =
                nextSym1 == null
                    ? StateItem.symbolSet(si1src.item.lookahead())
                    : symbolSet(nextSym1);
            Set<symbol> si2lookahead =
                nextSym2 == null
                    ? StateItem.symbolSet(si2src.item.lookahead())
                    : symbolSet(nextSym2);
            List<List<StateItem>> prev1 =
                si1src.reverseTransition(sym, si1lookahead, guide);
            List<List<StateItem>> prev2 =
                si2src.reverseTransition(sym, si2lookahead, guide);
            for (List<StateItem> psis1 : prev1) {
                StateItem psi1 = psis1.isEmpty() ? si1src : psis1.get(0);
                for (List<StateItem> psis2 : prev2) {
                    StateItem psi2 = psis2.isEmpty() ? si2src : psis2.get(0);
                    // At least one path must move, and both new heads must
                    // be in the same state.
                    if (psi1 == si1src && psi2 == si2src) continue;
                    if (psi1.state != psi2.state) continue;
                    SearchState copy = ss.copy();
                    copy.states1.addAll(0, psis1);
                    copy.states2.addAll(0, psis2);
                    // Record the symbol only when BOTH paths made an actual
                    // transition (dot advanced); mixed cases are discarded.
                    if (!psis1.isEmpty()
                        && copy.states1.get(0).item.dot_pos() + 1 == copy.states1.get(1).item.dot_pos()) {
                        if (!psis2.isEmpty()
                            && copy.states2.get(0).item.dot_pos() + 1 == copy.states2.get(1).item.dot_pos()) {
                            Derivation deriv = new Derivation(sym);
                            copy.derivs1.add(0, deriv);
                            copy.derivs2.add(0, deriv);
                        }
                        else continue;
                    }
                    else if (!psis2.isEmpty()
                        && copy.states2.get(0).item.dot_pos() + 1 == copy.states2.get(1).item.dot_pos()) {
                        continue;
                    }
                    int prependSize = psis1.size() + psis2.size();
                    int productionSteps =
                        productionSteps(psis1, si1src)
                            + productionSteps(psis2, si2src);
                    copy.complexity +=
                        UNSHIFT_COST * (prependSize - productionSteps)
                            + PRODUCTION_COST * productionSteps;
                    result.add(copy);
                }
            }
            return result;
        }

        /**
         * Reduces the completed item at the top of path 1, replacing its
         * RHS derivations with one derivation of the LHS.  {@code nextSym},
         * when non-null, must intersect the item's lookahead.  If the state
         * stack does not contain the full RHS, the head is prepended with
         * possible predecessor items first.
         */
        protected List<SearchState> reduce1(symbol nextSym) {
            List<StateItem> states = states1;
            List<Derivation> derivs = derivs1;
            int sSize = states.size();
            lalr_item item = states.get(sSize - 1).item;
            if (!item.dot_at_end())
                throw new Error("Cannot reduce item without dot at end.");
            List<SearchState> result = new LinkedList<>();
            Set<symbol> symbolSet =
                nextSym == null
                    ? StateItem.symbolSet(item.lookahead())
                    : symbolSet(nextSym);
            if (!StateItem.intersect(item.lookahead(), symbolSet))
                return result;
            production prod = item.the_production();
            symbol lhs = prod.lhs().the_symbol();
            int len = prod.rhs_length();
            int dSize = derivs.size();
            Derivation deriv =
                new Derivation(lhs, new LinkedList<>(derivs.subList(dSize
                    - len, dSize)));
            if (reduceDepth == 0) {
                // This is the reduction of the conflict reduce item itself;
                // mark the conflict position with a dot.
                deriv.deriv.add(itm1.dot_pos(), Derivation.dot);
                reduceDepth--;
            }
            derivs = new LinkedList<>(derivs.subList(0, dSize - len));
            derivs.add(deriv);
            if (sSize == len + 1) {
                // The head StateItem is a production item, so we need to prepend
                // with possible source StateItems.
                List<List<StateItem>> prev =
                    states.get(0).reverseProduction(symbolSet);
                for (List<StateItem> psis : prev) {
                    SearchState copy = copy();
                    copy.derivs1 = derivs;
                    copy.states1 =
                        new LinkedList<>(states1.subList(0, sSize - len - 1));
                    copy.states1.addAll(0, psis);
                    copy.states1.add(StateItem.trans.get(copy.states1.get(copy.states1.size() - 1))
                        .get(lhs));
                    int statesSize = copy.states1.size();
                    int productionSteps =
                        productionSteps(copy.states1, states.get(0));
                    copy.complexity +=
                        UNSHIFT_COST * (statesSize - productionSteps)
                            + PRODUCTION_COST * productionSteps;
                    result.add(copy);
                }
            }
            else {
                SearchState copy = copy();
                copy.derivs1 = derivs;
                copy.states1 =
                    new LinkedList<>(states1.subList(0, sSize - len - 1));
                copy.states1.add(StateItem.trans.get(copy.states1.get(copy.states1.size() - 1))
                    .get(lhs));
                copy.complexity += REDUCE_COST;
                result.add(copy);
            }
            return result;
        }

        /**
         * Mirror of {@link #reduce1} for path 2, which carries the shift
         * conflict item: the dot is inserted when shiftDepth reaches 0 and
         * shiftDepth is decremented on each reduction.  The prepend case
         * charges SHIFT_COST where reduce1 charges UNSHIFT_COST --
         * presumably an intentional asymmetry; TODO confirm.
         */
        protected List<SearchState> reduce2(symbol nextSym) {
            List<StateItem> states = states2;
            List<Derivation> derivs = derivs2;
            int sSize = states.size();
            lalr_item item = states.get(sSize - 1).item;
            if (!item.dot_at_end())
                throw new Error("Cannot reduce item without dot at end.");
            List<SearchState> result = new LinkedList<>();
            Set<symbol> symbolSet =
                nextSym == null
                    ? StateItem.symbolSet(item.lookahead())
                    : symbolSet(nextSym);
            if (!StateItem.intersect(item.lookahead(), symbolSet))
                return result;
            production prod = item.the_production();
            symbol lhs = prod.lhs().the_symbol();
            int len = prod.rhs_length();
            int dSize = derivs.size();
            Derivation deriv =
                new Derivation(lhs, new LinkedList<>(derivs.subList(dSize
                    - len, dSize)));
            if (shiftDepth == 0)
                deriv.deriv.add(itm2.dot_pos(), Derivation.dot);
            derivs = new LinkedList<>(derivs.subList(0, dSize - len));
            derivs.add(deriv);
            if (sSize == len + 1) {
                // The head StateItem is a production item, so we need to prepend
                // with possible source StateItems.
                List<List<StateItem>> prev =
                    states.get(0).reverseProduction(symbolSet);
                for (List<StateItem> psis : prev) {
                    SearchState copy = copy();
                    copy.derivs2 = derivs;
                    copy.states2 =
                        new LinkedList<>(states.subList(0, sSize - len - 1));
                    copy.states2.addAll(0, psis);
                    copy.states2.add(StateItem.trans.get(copy.states2.get(copy.states2.size() - 1))
                        .get(lhs));
                    int statesSize = copy.states2.size();
                    int productionSteps =
                        productionSteps(copy.states2, states.get(0));
                    copy.complexity +=
                        SHIFT_COST * (statesSize - productionSteps)
                            + PRODUCTION_COST * productionSteps;
                    if (copy.shiftDepth >= 0) copy.shiftDepth--;
                    result.add(copy);
                }
            }
            else {
                SearchState copy = copy();
                copy.derivs2 = derivs;
                copy.states2 =
                    new LinkedList<>(states.subList(0, sSize - len - 1));
                copy.states2.add(StateItem.trans.get(copy.states2.get(copy.states2.size() - 1))
                    .get(lhs));
                copy.complexity += REDUCE_COST;
                if (copy.shiftDepth >= 0) copy.shiftDepth--;
                result.add(copy);
            }
            return result;
        }

        /**
         * Orders by complexity; ties are broken in favor of the state
         * with the smaller total reduction streak.
         */
        @Override
        public int compareTo(SearchState o) {
            int diff = complexity - o.complexity;
            if (diff != 0) return diff;
            return reductionStreak(states1) + reductionStreak(states2)
                - (reductionStreak(o.states1) + reductionStreak(o.states2));
        }

        @Override
        public int hashCode() {
            return states1.hashCode() * 31 + states2.hashCode();
        }

        // Equality considers only the state stacks; derivations and
        // costs are ignored (matches the visited-set semantics).
        protected boolean equals(SearchState ss) {
            return states1.equals(ss.states1) && states2.equals(ss.states2);
        }

        @Override
        public boolean equals(Object o) {
            if (o instanceof SearchState) return equals((SearchState) o);
            return false;
        }
    }
protected static boolean samePrefix(lalr_item itm1, lalr_item itm2) {
if (itm1.dot_pos() != itm2.dot_pos()) return false;
production prod1 = itm1.the_production();
production prod2 = itm2.the_production();
for (int i = 0, len = itm1.dot_pos(); i < len; i++)
if (rhs(prod1, i) != rhs(prod2, i)) return false;
return true;
}
protected static int productionSteps(List<StateItem> sis, StateItem last) {
int count = 0;
lalr_state lastState = last.state;
for (ListIterator<StateItem> itr = sis.listIterator(sis.size()); itr.hasPrevious();) {
StateItem si = itr.previous();
lalr_state state = si.state;
if (state == lastState) count++;
lastState = state;
}
return count;
}
protected static boolean compatible(symbol sym1, symbol sym2) {
if (sym1 instanceof terminal) {
if (sym2 instanceof terminal) return sym1 == sym2;
non_terminal nt2 = (non_terminal) sym2;
return nt2.first_set().contains((terminal) sym1);
}
else {
non_terminal nt1 = (non_terminal) sym1;
if (sym2 instanceof terminal)
return nt1.first_set().contains((terminal) sym2);
non_terminal nt2 = (non_terminal) sym2;
return nt1 == nt2 || nt1.first_set().intersects(nt2.first_set());
}
}
    /**
     * Counts how many items immediately preceding the tail of the
     * sequence share the tail item's state, i.e. the length of the
     * pending run of production steps at the end of the stack.
     */
    protected static int reductionStreak(List<StateItem> sis) {
        int count = 0;
        StateItem last = null;
        for (ListIterator<StateItem> itr = sis.listIterator(sis.size()); itr.hasPrevious();) {
            StateItem si = itr.previous();
            if (last == null) {
                // Seed with the tail item; it is not counted itself.
                last = si;
                continue;
            }
            if (si.state != last.state) break;
            count++;
        }
        return count;
    }
    /**
     * Returns true if the two items have the same dot position and agree
     * on every right-hand-side symbol before the dot.
     */
    protected static boolean hasCommonPrefix(lalr_item itm1, lalr_item itm2) {
        if (itm1.dot_pos() != itm2.dot_pos()) return false;
        int dotPos = itm1.dot_pos();
        production prod1 = itm1.the_production();
        production prod2 = itm2.the_production();
        for (int i = 0; i < dotPos; i++)
            if (rhs(prod1, i) != rhs(prod2, i)) return false;
        return true;
    }
protected static Set<symbol> symbolSet(symbol sym) {
Set<symbol> result = new HashSet<>();
result.add(sym);
return result;
}
protected static symbol rhs(production prod, int pos) {
symbol_part sp = (symbol_part) prod.rhs(pos);
return sp.the_symbol();
}
}
// ===== Concatenation artifact: what follows is a second, truncated copy of
// tools/java_cup/src/parser/UnifiedExample.java =====
package parser;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Set;
import java_cup.lalr_item;
import java_cup.lalr_state;
import java_cup.non_terminal;
import java_cup.production;
import java_cup.symbol;
import java_cup.symbol_part;
import java_cup.terminal;
public class UnifiedExample {
    // Relative costs used to rank partial parses during the search.
    protected static final int PRODUCTION_COST = 50;
    protected static final int REDUCE_COST = 1;
    protected static final int SHIFT_COST = 1;
    protected static final int UNSHIFT_COST = 1;
    protected static final int DUPLICATE_PRODUCTION_COST = 0;
    // Nanosecond budgets for progress reporting and for giving up.
    protected static final long ASSURANCE_LIMIT = 2 * 1000000000L;
    protected static final long TIME_LIMIT = 5 * 1000000000L;
    // The conflict state and its two conflicting items (itm1 is the
    // reduce item after constructor normalization).
    protected lalr_state conflict;
    protected lalr_item itm1;
    protected lalr_item itm2;
    // The lookahead terminal on which the conflict occurs.
    protected terminal nextSym;
    protected boolean isShiftReduce;
    // Shortest lookahead-sensitive path to the conflict, and the state
    // sets derived from it used to guide the search.
    protected List<StateItem> shortestConflictPath;
    protected Set<lalr_state> scpSet;
    protected Set<lalr_state> rppSet;
    /**
     * Creates a counterexample finder for the given conflict, normalizing
     * the items so that itm1 is always a reduce item (dot at end).
     * @param conflict the LALR state containing the conflict
     * @param itm1 one conflicting item
     * @param itm2 the other conflicting item
     * @param nextSym the lookahead terminal on which the conflict occurs
     */
    public UnifiedExample(lalr_state conflict, lalr_item itm1, lalr_item itm2,
        terminal nextSym) {
        this.conflict = conflict;
        this.nextSym = nextSym;
        if (itm1.dot_at_end()) {
            if (itm2.dot_at_end()) {
                // reduce/reduce conflict
                this.itm1 = itm1;
                this.itm2 = itm2;
                isShiftReduce = false;
            }
            else {
                // shift/reduce conflict
                this.itm1 = itm1;
                this.itm2 = itm2;
                isShiftReduce = true;
            }
        }
        else if (itm2.dot_at_end()) {
            // shift/reduce conflict; swap so that itm1 is the reduce item
            this.itm1 = itm2;
            this.itm2 = itm1;
            isShiftReduce = true;
        }
        else throw new Error("Expected at least one reduce item.");
        shortestConflictPath = findShortestPathFromStart();
        scpSet = new HashSet<>(shortestConflictPath.size());
        production reduceProd = this.itm1.the_production();
        rppSet = new HashSet<>(reduceProd.rhs_length());
        boolean reduceProdReached = false;
        for (StateItem si : shortestConflictPath) {
            scpSet.add(si.state);
            // rppSet: states from the first occurrence of the reduce
            // production on the path onward.
            reduceProdReached =
                reduceProdReached || si.item.the_production() == reduceProd;
            if (reduceProdReached) rppSet.add(si.state);
        }
    }
    /**
     * Computes the shortest lookahead-sensitive path from the start item
     * to the conflict reduce item via BFS over (StateItem, lookahead-set)
     * pairs, ensuring nextSym is viable at the conflict.
     * @return the path as a list of StateItems
     * @throws Error if the conflict item is unreachable
     */
    protected List<StateItem> findShortestPathFromStart() {
        StateItem.init();
        long start = System.nanoTime();
        lalr_state startState = lalr_state.startState();
        lalr_item startItem = lalr_state.startItem();
        StateItem source = StateItem.lookup(startState, startItem);
        StateItem target = StateItem.lookup(conflict, itm1);
        Queue<List<StateItemWithLookahead>> queue = new LinkedList<>();
        Set<StateItemWithLookahead> visited = new HashSet<>();
        {
            List<StateItemWithLookahead> init = new LinkedList<>();
            init.add(new StateItemWithLookahead(source,
                StateItem.symbolSet(source.item.lookahead())));
            queue.add(init);
        }
        // BFS: the first path reaching the target is a shortest one.
        while (!queue.isEmpty()) {
            List<StateItemWithLookahead> path = queue.remove();
            StateItemWithLookahead last = path.get(path.size() - 1);
            if (visited.contains(last)) continue;
            visited.add(last);
            if (target.equals(last.si) && last.lookahead.contains(nextSym)) {
                // Done
                // System.err.println(path);
                System.err.println("reachable: " + (System.nanoTime() - start));
                List<StateItem> shortestConflictPath =
                    new ArrayList<>(path.size());
                for (StateItemWithLookahead sil : path)
                    shortestConflictPath.add(sil.si);
                return shortestConflictPath;
            }
            // Transition steps preserve the current lookahead set.
            if (StateItem.trans.containsKey(last.si)) {
                for (Map.Entry<symbol, StateItem> trans : StateItem.trans.get(last.si)
                    .entrySet()) {
                    StateItem nextSI = trans.getValue();
                    StateItemWithLookahead next =
                        new StateItemWithLookahead(nextSI, last.lookahead);
                    List<StateItemWithLookahead> nextPath =
                        new LinkedList<>(path);
                    nextPath.add(next);
                    queue.add(nextPath);
                }
            }
            // Production steps update the lookahead to what can follow
            // the expanded nonterminal in the current item.
            if (StateItem.prods.containsKey(last.si)) {
                production prod = last.si.item.the_production();
                int len = prod.rhs_length();
                int pos = last.si.item.dot_pos() + 1;
                Set<symbol> lookahead;
                if (pos == len)
                    lookahead = last.lookahead;
                else {
                    symbol sym = rhs(prod, pos);
                    if (sym instanceof terminal)
                        lookahead = symbolSet(sym);
                    else lookahead =
                        StateItem.symbolSet(((non_terminal) sym).first_set());
                }
                for (lalr_item itm : StateItem.prods.get(last.si)) {
                    StateItem nextSI = StateItem.lookup(last.si.state, itm);
                    StateItemWithLookahead next =
                        new StateItemWithLookahead(nextSI, lookahead);
                    List<StateItemWithLookahead> nextPath =
                        new LinkedList<>(path);
                    nextPath.add(next);
                    queue.add(nextPath);
                }
            }
        }
        throw new Error("Cannot find shortest path to conflict state.");
    }
    /**
     * A StateItem paired with the lookahead set under which it was
     * reached during the shortest-path search; equality and hashing
     * cover both components.
     */
    protected class StateItemWithLookahead {
        protected StateItem si;
        protected Set<symbol> lookahead;
        protected StateItemWithLookahead(StateItem si, Set<symbol> lookahead) {
            this.si = si;
            this.lookahead = lookahead;
        }
        @Override
        public String toString() {
            return si.toString() + lookahead.toString();
        }
        @Override
        public int hashCode() {
            return si.hashCode() * 31 + lookahead.hashCode();
        }
        protected boolean equals(StateItemWithLookahead sil) {
            return si.equals(sil.si) && lookahead.equals(sil.lookahead);
        }
        @Override
        public boolean equals(Object o) {
            if (!(o instanceof StateItemWithLookahead)) return false;
            return equals((StateItemWithLookahead) o);
        }
    }
    /**
     * Entry point: computes a counterexample for the conflict.
     * @return a unifying counterexample when possible, otherwise a pair
     *         of diverging examples
     */
    public Counterexample find() {
        // Ensure the static StateItem lookup tables have been built.
        StateItem.init();
        return findExample();
    }
/**
 * Core search: looks for a counterexample explaining the conflict between
 * {@code itm1} and {@code itm2} in the conflict state.
 * <p>
 * The search is a uniform-cost (lowest-complexity-first) exploration over
 * pairs of partial parses, one per conflict item. If a unified example (a
 * single sentence exercising both parses) is found, it is returned; when
 * the time limit expires — or the queue is exhausted — the method falls
 * back to two diverging examples built from the shortest conflict path.
 * @return a unifying or diverging counterexample; never null
 */
protected Counterexample findExample() {
    // The initial search state pairs the two conflict items in the
    // conflict state itself.
    SearchState initial =
        new SearchState(StateItem.lookup(conflict, itm1),
            StateItem.lookup(conflict, itm2));
    long start;
    // Search states are grouped into buckets by complexity; the priority
    // queue yields the cheapest bucket first.
    Map<Integer, FixedComplexitySearchState> fcssMap = new HashMap<>();
    PriorityQueue<FixedComplexitySearchState> pq = new PriorityQueue<>();
    // Pairs of state stacks already expanded, to avoid re-exploration.
    Map<List<StateItem>, Set<List<StateItem>>> visited = new HashMap<>();
    add(pq, fcssMap, visited, initial);
    start = System.nanoTime();
    boolean assurancePrinted = false;
    // Best stage-3 (diverging) candidate found so far; used as a fallback
    // when the time limit expires.
    SearchState stage3result = null;
    while (!pq.isEmpty()) {
        FixedComplexitySearchState fcss = pq.remove();
        for (SearchState ss : fcss.sss) {
            // NOTE(review): 'stage' is assigned but never read below;
            // presumably left over from debugging/instrumentation.
            int stage = 2;
            StateItem si1src = ss.states1.get(0);
            StateItem si2src = ss.states2.get(0);
            visited(visited, ss);
            if (ss.reduceDepth < 0 && ss.shiftDepth < 0) {
                // Stage 3
                if (!scpSet.contains(si1src.state)
                    || !scpSet.contains(si2src.state)) {
                    // The current head state is not on the shortest path.
                    // Ignore this search state.
                    continue;
                }
                stage = 3;
                // A unified example is possible only when both paths derive
                // from the same nonterminal and share the symbols before
                // the dot.
                if (si1src.item.the_production().lhs().the_symbol() == si2src.item.the_production()
                    .lhs()
                    .the_symbol()
                    && hasCommonPrefix(si1src.item, si2src.item)) {
                    // Each path has collapsed into a single derivation and
                    // both are rooted at the same symbol: a unified
                    // counterexample has been found.
                    if (ss.derivs1.size() == 1
                        && ss.derivs2.size() == 1
                        && ss.derivs1.get(0).sym == ss.derivs2.get(0).sym) {
                        System.err.println(ss.complexity);
                        return new Counterexample(ss.derivs1.get(0),
                            ss.derivs2.get(0),
                            true);
                    }
                    if (stage3result == null) stage3result = ss;
                    stage = 4;
                }
            }
            // After ASSURANCE_LIMIT, reassure the user (once) that
            // progress has been made while the unified search continues.
            if (!assurancePrinted
                && System.nanoTime() - start > ASSURANCE_LIMIT
                && stage3result != null) {
                System.err.println("Productions leading up to the conflict stage found. Still finding a possible unified example...");
                assurancePrinted = true;
            }
            // After TIME_LIMIT, give up on unification and emit two
            // diverging examples from the best stage-3 candidate.
            if (System.nanoTime() - start > TIME_LIMIT
                && stage3result != null) {
                System.err.println("time limit exceeded: "
                    + (System.nanoTime() - start));
                System.err.println(stage3result.complexity);
                return completeDivergingExamples(stage3result);
            }
            // si1/si2 are the current heads (tops of the state stacks).
            StateItem si1 = ss.states1.get(ss.states1.size() - 1);
            StateItem si2 = ss.states2.get(ss.states2.size() - 1);
            boolean si1reduce = si1.item.dot_at_end();
            boolean si2reduce = si2.item.dot_at_end();
            symbol si1sym = si1reduce ? null : si1.item.symbol_after_dot();
            symbol si2sym = si2reduce ? null : si2.item.symbol_after_dot();
            if (!si1reduce && !si2reduce) {
                // Both paths are not reduce items.
                // Two actions are possible:
                // - Make a transition on the next symbol of the items,
                //   if they are the same.
                // - Take a production step, avoiding duplicates as necessary.
                if (si1sym == si2sym) {
                    StateItem si1last =
                        StateItem.trans.get(si1).get(si1sym);
                    StateItem si2last =
                        StateItem.trans.get(si2).get(si2sym);
                    List<Derivation> derivs1 = new LinkedList<>();
                    List<Derivation> derivs2 = new LinkedList<>();
                    List<StateItem> states1 = new LinkedList<>();
                    List<StateItem> states2 = new LinkedList<>();
                    derivs1.add(new Derivation(si1sym));
                    states1.add(si1last);
                    derivs2.add(new Derivation(si2sym));
                    states2.add(si2last);
                    // Check for subsequent nullable nonterminals
                    production prod1 = si1.item.the_production();
                    for (int pos = si1.item.dot_pos() + 1, len1 =
                        prod1.rhs_length(); pos < len1; pos++) {
                        symbol sp = rhs(prod1, pos);
                        if (!sp.is_non_term()) break;
                        non_terminal nt = (non_terminal) sp;
                        if (!nt.nullable()) break;
                        si1last = StateItem.trans.get(si1last).get(nt);
                        derivs1.add(new Derivation(nt,
                            Collections.<Derivation> emptyList()));
                        states1.add(si1last);
                    }
                    production prod2 = si2.item.the_production();
                    // NOTE(review): this loop walks prod2 but its start
                    // index is si1.item.dot_pos() + 1; verify whether
                    // si2.item.dot_pos() was intended here.
                    for (int pos = si1.item.dot_pos() + 1, len =
                        prod2.rhs_length(); pos < len; pos++) {
                        symbol sp = rhs(prod2, pos);
                        if (!sp.is_non_term()) break;
                        non_terminal nt = (non_terminal) sp;
                        if (!nt.nullable()) break;
                        si2last = StateItem.trans.get(si2last).get(nt);
                        derivs2.add(new Derivation(nt,
                            Collections.<Derivation> emptyList()));
                        states2.add(si2last);
                    }
                    // Enqueue every combination of prefix lengths of the
                    // nullable extensions on both sides.
                    for (int i = 1, size1 = derivs1.size(); i <= size1; i++) {
                        List<Derivation> subderivs1 =
                            new ArrayList<>(derivs1.subList(0, i));
                        List<StateItem> substates1 =
                            new ArrayList<>(states1.subList(0, i));
                        for (int j = 1, size2 = derivs2.size(); j <= size2; j++) {
                            List<Derivation> subderivs2 =
                                new ArrayList<>(derivs2.subList(0, j));
                            List<StateItem> substates2 =
                                new ArrayList<>(states2.subList(0, j));
                            SearchState copy = ss.copy();
                            copy.derivs1.addAll(subderivs1);
                            copy.states1.addAll(substates1);
                            copy.derivs2.addAll(subderivs2);
                            copy.states2.addAll(substates2);
                            copy.complexity += 2 * SHIFT_COST;
                            add(pq, fcssMap, visited, copy);
                        }
                    }
                }
                if (si1sym instanceof non_terminal
                    && StateItem.prods.containsKey(si1))
                    for (lalr_item itm1 : StateItem.prods.get(si1)) {
                        // Take production step only if lhs is not nullable and
                        // if first rhs symbol is compatible with the other path
                        boolean applicable =
                            !itm1.dot_at_end()
                                && compatible(itm1.symbol_after_dot(),
                                    si2sym);
                        if (!applicable) continue;
                        production prod = si1.item.the_production();
                        production nextProd = itm1.the_production();
                        if (!StateItem.productionAllowed(prod, nextProd))
                            continue;
                        SearchState copy = ss.copy();
                        StateItem next = StateItem.lookup(si1.state, itm1);
                        // TODO
                        // Penalize revisiting the same production item.
                        if (copy.states1.contains(next))
                            copy.complexity += DUPLICATE_PRODUCTION_COST;
                        copy.states1.add(next);
                        copy.complexity += PRODUCTION_COST;
                        add(pq, fcssMap, visited, copy);
                    }
                if (si2sym instanceof non_terminal
                    && StateItem.prods.containsKey(si2))
                    for (lalr_item itm2 : StateItem.prods.get(si2)) {
                        // Take production step only if lhs is not nullable and
                        // if first rhs symbol is compatible with the other path
                        boolean applicable =
                            !itm2.dot_at_end()
                                && compatible(itm2.symbol_after_dot(),
                                    si1sym);
                        if (!applicable) continue;
                        production prod = si2.item.the_production();
                        production nextProd = itm2.the_production();
                        if (!StateItem.productionAllowed(prod, nextProd))
                            continue;
                        SearchState copy = ss.copy();
                        StateItem next = StateItem.lookup(si2.state, itm2);
                        // TODO
                        if (copy.states2.contains(next))
                            copy.complexity += DUPLICATE_PRODUCTION_COST;
                        copy.states2.add(next);
                        // Path 2 follows the shift item, so production
                        // steps taken here deepen the shift depth.
                        if (ss.shiftDepth >= 0) copy.shiftDepth++;
                        copy.complexity += PRODUCTION_COST;
                        add(pq, fcssMap, visited, copy);
                    }
            }
            else {
                // One of the paths requires a reduction.
                production prod1 = si1.item.the_production();
                int len1 = prod1.rhs_length();
                production prod2 = si2.item.the_production();
                int len2 = prod2.rhs_length();
                int size1 = ss.states1.size();
                int size2 = ss.states2.size();
                // A path is "ready" when its state stack already holds the
                // whole right-hand side of the production to reduce.
                boolean ready1 = si1reduce && size1 > len1;
                boolean ready2 = si2reduce && size2 > len2;
                // If there is a path ready for reduction
                // without being prepended further, reduce.
                if (ready1) {
                    List<SearchState> reduced1 = ss.reduce1(si2sym);
                    if (ready2) {
                        reduced1.add(ss);
                        for (SearchState red1 : reduced1) {
                            for (SearchState candidate : red1.reduce2(si1sym))
                                add(pq, fcssMap, visited, candidate);
                            if (si1reduce && red1 != ss)
                                add(pq, fcssMap, visited, red1);
                        }
                    }
                    else {
                        for (SearchState candidate : reduced1)
                            add(pq, fcssMap, visited, candidate);
                    }
                }
                else if (ready2) {
                    List<SearchState> reduced2 = ss.reduce2(si1sym);
                    for (SearchState candidate : reduced2)
                        add(pq, fcssMap, visited, candidate);
                }
                // Otherwise, prepend both paths and continue.
                else {
                    symbol sym;
                    if (si1reduce && !ready1)
                        sym = rhs(prod1, len1 - size1);
                    else sym = rhs(prod2, len2 - size2);
                    for (SearchState prepended : ss.prepend(sym,
                        null,
                        null,
                        ss.reduceDepth >= 0
                            ? rppSet
                            : scpSet
                        /*: ss.shiftDepth < 0
                        ? scpSet
                        : null*/)) {
                        add(pq, fcssMap, visited, prepended);
                    }
                }
            }
        }
        // The whole bucket has been expanded; drop it from the map so a
        // later state of the same complexity gets a fresh bucket.
        fcssMap.remove(fcss.complexity);
    }
    // No unifying examples. Construct examples from common shortest path
    // to conflict state.
    return exampleFromShortestPath();
}
/**
 * Queues a search state for exploration unless its pair of state stacks
 * has been visited already. States sharing the same complexity are
 * grouped into a single {@link FixedComplexitySearchState} bucket, which
 * is what actually sits on the priority queue.
 */
protected void add(PriorityQueue<FixedComplexitySearchState> pq,
        Map<Integer, FixedComplexitySearchState> fcssMap,
        Map<List<StateItem>, Set<List<StateItem>>> visited, SearchState ss) {
    // Skip state-stack pairs that have already been expanded.
    Set<List<StateItem>> seen = visited.get(ss.states1);
    if (seen != null && seen.contains(ss.states2)) {
        return;
    }
    // Look up the bucket for this complexity, lazily creating and
    // enqueueing it on first use.
    FixedComplexitySearchState bucket = fcssMap.get(ss.complexity);
    if (bucket == null) {
        bucket = new FixedComplexitySearchState(ss.complexity);
        fcssMap.put(ss.complexity, bucket);
        pq.add(bucket);
    }
    bucket.add(ss);
}
/**
 * Marks the pair of state stacks of {@code ss} as visited, so that
 * {@link #add} will refuse to enqueue an equivalent state again.
 */
protected void visited(Map<List<StateItem>, Set<List<StateItem>>> visited,
        SearchState ss) {
    // Lazily create the secondary set keyed by the first state stack.
    if (!visited.containsKey(ss.states1)) {
        visited.put(ss.states1, new HashSet<List<StateItem>>());
    }
    visited.get(ss.states1).add(ss.states2);
}
/**
 * Fallback when no unified example exists: builds two diverging examples
 * by walking backwards along the shortest conflict path (for itm1) and a
 * compatible reverse path for itm2.
 * @return a diverging {@link Counterexample}
 * @throws Error if no derivation to the conflict state can be constructed
 */
protected Counterexample exampleFromShortestPath() {
    StateItem si = StateItem.lookup(conflict, itm2);
    List<StateItem> result = new LinkedList<>();
    result.add(si);
    ListIterator<StateItem> itr =
        shortestConflictPath.listIterator(shortestConflictPath.size());
    // refsi is the last StateItem in this state of the shortest path.
    StateItem refsi = itr.previous();
    for (; refsi != null;) {
        // Construct a list of items in the same state as refsi.
        // prevrefsi is the last StateItem in the previous state.
        List<StateItem> refsis = new LinkedList<>();
        refsis.add(refsi);
        StateItem prevrefsi = itr.hasPrevious() ? itr.previous() : null;
        if (prevrefsi != null) {
            // Walk further back while the dot positions indicate the items
            // still belong to the same state (production steps).
            for (int curPos = refsi.item.dot_pos(), prevPos =
                prevrefsi.item.dot_pos(); prevrefsi != null
                && prevPos + 1 != curPos;) {
                refsis.add(prevrefsi);
                curPos = prevPos;
                if (itr.hasPrevious()) {
                    prevrefsi = itr.previous();
                    prevPos = prevrefsi.item.dot_pos();
                }
                else prevrefsi = null;
            }
        }
        if (si == refsi || si.item == lalr_state.startItem()) {
            // Reached common item; prepend to the beginning.
            refsis.remove(refsis.size() - 1);
            result.addAll(0, refsis);
            if (prevrefsi != null) result.add(0, prevrefsi);
            while (itr.hasPrevious())
                result.add(0, itr.previous());
            Derivation deriv1 =
                completeDivergingExample(shortestConflictPath);
            Derivation deriv2 = completeDivergingExample(result);
            return new Counterexample(deriv1, deriv2, false);
        }
        int pos = si.item.dot_pos();
        if (pos == 0) {
            // For a production item, find a sequence of items within the
            // same state that leads to this production.
            List<StateItem> init = new LinkedList<>();
            init.add(si);
            Queue<List<StateItem>> queue = new LinkedList<>();
            queue.add(init);
            // BFS over reverse production/transition steps.
            while (!queue.isEmpty()) {
                List<StateItem> sis = queue.remove();
                StateItem sisrc = sis.get(0);
                if (sisrc.item == lalr_state.startItem()) {
                    sis.remove(sis.size() - 1);
                    result.addAll(0, sis);
                    si = sisrc;
                    break;
                }
                int srcpos = sisrc.item.dot_pos();
                if (srcpos > 0) {
                    // Determine if reverse transition is possible.
                    production prod = sisrc.item.the_production();
                    symbol sym = rhs(prod, srcpos - 1);
                    for (StateItem prevsi : StateItem.revTrans.get(sisrc)
                        .get(sym)) {
                        // Only look for state compatible with the shortest path.
                        // NOTE(review): prevrefsi may be null here if the
                        // shortest path was exhausted above — confirm this
                        // branch is unreachable in that case.
                        if (prevsi.state != prevrefsi.state) continue;
                        sis.remove(sis.size() - 1);
                        result.addAll(0, sis);
                        result.add(0, prevsi);
                        si = prevsi;
                        refsi = prevrefsi;
                        break;
                    }
                }
                else {
                    // Reverse a production step within the same state.
                    production prod = sisrc.item.the_production();
                    symbol lhs = prod.lhs().the_symbol();
                    for (lalr_item prev : StateItem.revProds.get(sisrc.state)
                        .get(lhs)) {
                        StateItem prevsi =
                            StateItem.lookup(sisrc.state, prev);
                        if (sis.contains(prevsi)) continue;
                        List<StateItem> prevsis = new LinkedList<>(sis);
                        prevsis.add(0, prevsi);
                        queue.add(prevsis);
                    }
                }
            }
        }
        else {
            // If not a production item, make a reverse transition.
            production prod = si.item.the_production();
            symbol sym = rhs(prod, pos - 1);
            for (StateItem prevsi : StateItem.revTrans.get(si).get(sym)) {
                // Only look for state compatible with the shortest path.
                if (prevsi.state != prevrefsi.state) continue;
                result.add(0, prevsi);
                si = prevsi;
                refsi = prevrefsi;
                break;
            }
        }
    }
    throw new Error("Cannot find derivation to conflict state.");
}
/**
 * Builds a non-unified counterexample from a search state by completing
 * each of its two partial parses into a full derivation.
 */
protected Counterexample completeDivergingExamples(SearchState ss) {
    return new Counterexample(
        completeDivergingExample(ss.states1, ss.derivs1),
        completeDivergingExample(ss.states2, ss.derivs2),
        false);
}
/**
 * Convenience overload: completes a derivation from a state stack with no
 * pre-existing partial derivations.
 */
protected Derivation completeDivergingExample(List<StateItem> states) {
    return completeDivergingExample(states,
        Collections.<Derivation> emptyList());
}
/**
 * Completes a derivation from a simulated parser configuration.
 * <p>
 * Walks the state stack from top to bottom; at each item it fills in the
 * symbols of the production to the right of the dot, consumes already
 * computed derivations (from {@code derivs}) for the symbols to the left,
 * and wraps the collected children under the production's left-hand side.
 * @param states state stack, including explicit production steps
 * @param derivs partial derivations for symbols already parsed (consumed
 *               from the end); may be empty
 * @return the single root derivation for the whole configuration
 */
protected Derivation completeDivergingExample(List<StateItem> states,
    List<Derivation> derivs) {
    List<Derivation> result = new LinkedList<>();
    // Both stacks are consumed back to front.
    ListIterator<Derivation> dItr = derivs.listIterator(derivs.size());
    for (ListIterator<StateItem> sItr = states.listIterator(states.size()); sItr.hasPrevious();) {
        StateItem si = sItr.previous();
        int pos = si.item.dot_pos();
        production prod = si.item.the_production();
        int len = prod.rhs_length();
        // symbols after dot
        if (result.isEmpty()) {
            // Mark the conflict point with the dot placeholder when no
            // partial derivations were supplied.
            if (derivs.isEmpty()) result.add(Derivation.dot);
            if (!si.item.dot_at_end())
                result.add(new Derivation(rhs(prod, pos)));
        }
        for (int i = pos + 1; i < len; i++) {
            result.add(new Derivation(rhs(prod, i)));
        }
        // symbols before dot
        for (int i = pos - 1; i >= 0; i--) {
            // Each symbol before the dot corresponds to one state below
            // on the stack; skip past it.
            if (sItr.hasPrevious()) sItr.previous();
            result.add(0, dItr.hasPrevious()
                ? dItr.previous() : new Derivation(rhs(prod, i)));
        }
        // completing the derivation
        symbol lhs = prod.lhs().the_symbol();
        Derivation deriv = new Derivation(lhs, result);
        result = new LinkedList<>();
        result.add(deriv);
    }
    return result.get(0);
}
/**
 * A bucket of search states sharing the same complexity score. Buckets,
 * rather than individual states, are placed on the priority queue so that
 * all states of equal cost are expanded together.
 * <p>
 * Note: ordering is by complexity only and is intentionally not
 * consistent with {@code equals}; that is acceptable for use in a
 * {@link java.util.PriorityQueue}.
 */
protected static class FixedComplexitySearchState implements
        Comparable<FixedComplexitySearchState> {
    /** Cost shared by every search state in this bucket. */
    protected int complexity;
    /** The search states at this complexity, deduplicated by state stacks. */
    protected Set<SearchState> sss;

    protected FixedComplexitySearchState(int complexity) {
        this.complexity = complexity;
        sss = new HashSet<>();
    }

    protected void add(SearchState ss) {
        sss.add(ss);
    }

    /**
     * Orders buckets by ascending complexity. Uses Integer.compare
     * instead of the original subtraction, which can overflow and
     * invert the ordering for extreme values.
     */
    @Override
    public int compareTo(FixedComplexitySearchState o) {
        return Integer.compare(complexity, o.complexity);
    }
}
/**
* A search state consists of
* - a pair of the following items:
* - a list of derivations, simulating the parser's symbol stack
* - a list of StateItems, simulating the parser's state stack but
* including explicit production steps
* Each item in the pair corresponds to a possible parse that involves
* each of the conflict LALR items.
* - a complexity of the partial parse trees, determined by the number of
* states and production steps the parse has to encounter
* - a shift depth, indicating the number of unreduced production steps
* that have been made from the original shift item. This helps keep
* track of when the shift conflict item is reduced.
* @author Chinawat
*
*/
protected class SearchState implements Comparable<SearchState> {
    // Simulated parser symbol stacks (partial derivations), one per path.
    protected List<Derivation> derivs1, derivs2;
    // Simulated parser state stacks, including explicit production steps.
    protected List<StateItem> states1, states2;
    // complexity: accumulated search cost; reduceDepth/shiftDepth track
    // progress of the reduce item (path 1) and shift item (path 2) — see
    // the class comment above.
    protected int complexity, reduceDepth, shiftDepth;

    /** Creates the initial search state from the two conflict items. */
    protected SearchState(StateItem si1, StateItem si2) {
        derivs1 = new LinkedList<>();
        derivs2 = new LinkedList<>();
        states1 = new LinkedList<>();
        states2 = new LinkedList<>();
        states1.add(si1);
        states2.add(si2);
        complexity = 0;
        reduceDepth = 0;
        shiftDepth = 0;
    }

    /** Copy constructor; defensively copies all four stacks. */
    private SearchState(List<Derivation> derivs1, List<Derivation> derivs2,
        List<StateItem> states1, List<StateItem> states2,
        int complexity, int reduceDepth, int shiftDepth) {
        this.derivs1 = new LinkedList<>(derivs1);
        this.derivs2 = new LinkedList<>(derivs2);
        this.states1 = new LinkedList<>(states1);
        this.states2 = new LinkedList<>(states2);
        this.complexity = complexity;
        this.reduceDepth = reduceDepth;
        this.shiftDepth = shiftDepth;
    }

    /** Returns an independent copy of this search state. */
    protected SearchState copy() {
        return new SearchState(derivs1,
            derivs2,
            states1,
            states2,
            complexity,
            reduceDepth,
            shiftDepth);
    }

    protected List<SearchState> prepend(symbol sym, symbol nextSym1,
        symbol nextSym2) {
        return prepend(sym, nextSym1, nextSym2, null);
    }

    /**
     * Extends both paths backwards over {@code sym}, producing one new
     * search state per compatible pair of reverse transitions.
     * @param sym      the symbol to prepend to both paths
     * @param nextSym1 lookahead restriction for path 1, or null to use
     *                 the head item's own lookahead
     * @param nextSym2 lookahead restriction for path 2, or null likewise
     * @param guide    optional set of states the reverse search must stay
     *                 within (e.g. the shortest conflict path), or null
     * @return new search states whose heads are in a common parser state
     */
    protected List<SearchState> prepend(symbol sym, symbol nextSym1,
        symbol nextSym2, Set<lalr_state> guide) {
        List<SearchState> result = new LinkedList<>();
        SearchState ss = this;
        StateItem si1src = ss.states1.get(0);
        StateItem si2src = ss.states2.get(0);
        Set<symbol> si1lookahead =
            nextSym1 == null
                ? StateItem.symbolSet(si1src.item.lookahead())
                : symbolSet(nextSym1);
        Set<symbol> si2lookahead =
            nextSym2 == null
                ? StateItem.symbolSet(si2src.item.lookahead())
                : symbolSet(nextSym2);
        List<List<StateItem>> prev1 =
            si1src.reverseTransition(sym, si1lookahead, guide);
        List<List<StateItem>> prev2 =
            si2src.reverseTransition(sym, si2lookahead, guide);
        for (List<StateItem> psis1 : prev1) {
            StateItem psi1 = psis1.isEmpty() ? si1src : psis1.get(0);
            for (List<StateItem> psis2 : prev2) {
                StateItem psi2 = psis2.isEmpty() ? si2src : psis2.get(0);
                // Both heads must move and land in the same parser state.
                if (psi1 == si1src && psi2 == si2src) continue;
                if (psi1.state != psi2.state) continue;
                SearchState copy = ss.copy();
                copy.states1.addAll(0, psis1);
                copy.states2.addAll(0, psis2);
                // Record the shifted symbol only when both paths actually
                // made a transition (dot position advanced by one);
                // mismatched combinations are discarded.
                if (!psis1.isEmpty()
                    && copy.states1.get(0).item.dot_pos() + 1 == copy.states1.get(1).item.dot_pos()) {
                    if (!psis2.isEmpty()
                        && copy.states2.get(0).item.dot_pos() + 1 == copy.states2.get(1).item.dot_pos()) {
                        Derivation deriv = new Derivation(sym);
                        copy.derivs1.add(0, deriv);
                        copy.derivs2.add(0, deriv);
                    }
                    else continue;
                }
                else if (!psis2.isEmpty()
                    && copy.states2.get(0).item.dot_pos() + 1 == copy.states2.get(1).item.dot_pos()) {
                    continue;
                }
                // Cost: production steps are cheaper than real unshifts.
                int prependSize = psis1.size() + psis2.size();
                int productionSteps =
                    productionSteps(psis1, si1src)
                        + productionSteps(psis2, si2src);
                copy.complexity +=
                    UNSHIFT_COST * (prependSize - productionSteps)
                        + PRODUCTION_COST * productionSteps;
                result.add(copy);
            }
        }
        return result;
    }

    /**
     * Reduces the item on top of path 1 (the reduce-conflict path).
     * NOTE(review): reduce1 and reduce2 are near-duplicates differing
     * only in which path they act on, the cost constant, and the depth
     * counter they maintain; consider factoring out the common logic.
     * @param nextSym lookahead under which the reduction must be legal,
     *                or null to use the item's own lookahead set
     * @return successor states (possibly several when the head must be
     *         prepended with reverse production steps); empty if the
     *         lookahead rules out the reduction
     */
    protected List<SearchState> reduce1(symbol nextSym) {
        List<StateItem> states = states1;
        List<Derivation> derivs = derivs1;
        int sSize = states.size();
        lalr_item item = states.get(sSize - 1).item;
        if (!item.dot_at_end())
            throw new Error("Cannot reduce item without dot at end.");
        List<SearchState> result = new LinkedList<>();
        Set<symbol> symbolSet =
            nextSym == null
                ? StateItem.symbolSet(item.lookahead())
                : symbolSet(nextSym);
        if (!StateItem.intersect(item.lookahead(), symbolSet))
            return result;
        production prod = item.the_production();
        symbol lhs = prod.lhs().the_symbol();
        int len = prod.rhs_length();
        int dSize = derivs.size();
        // Fold the top |rhs| derivations into one rooted at the lhs.
        Derivation deriv =
            new Derivation(lhs, new LinkedList<>(derivs.subList(dSize
                - len, dSize)));
        if (reduceDepth == 0) {
            // This is the reduction of the conflict item itself; mark the
            // conflict position with the dot placeholder.
            deriv.deriv.add(itm1.dot_pos(), Derivation.dot);
            reduceDepth--;
        }
        derivs = new LinkedList<>(derivs.subList(0, dSize - len));
        derivs.add(deriv);
        if (sSize == len + 1) {
            // The head StateItem is a production item, so we need to prepend
            // with possible source StateItems.
            List<List<StateItem>> prev =
                states.get(0).reverseProduction(symbolSet);
            for (List<StateItem> psis : prev) {
                SearchState copy = copy();
                copy.derivs1 = derivs;
                copy.states1 =
                    new LinkedList<>(states1.subList(0, sSize - len - 1));
                copy.states1.addAll(0, psis);
                // Push the goto target for the reduced nonterminal.
                copy.states1.add(StateItem.trans.get(copy.states1.get(copy.states1.size() - 1))
                    .get(lhs));
                int statesSize = copy.states1.size();
                int productionSteps =
                    productionSteps(copy.states1, states.get(0));
                copy.complexity +=
                    UNSHIFT_COST * (statesSize - productionSteps)
                        + PRODUCTION_COST * productionSteps;
                result.add(copy);
            }
        }
        else {
            SearchState copy = copy();
            copy.derivs1 = derivs;
            copy.states1 =
                new LinkedList<>(states1.subList(0, sSize - len - 1));
            copy.states1.add(StateItem.trans.get(copy.states1.get(copy.states1.size() - 1))
                .get(lhs));
            copy.complexity += REDUCE_COST;
            result.add(copy);
        }
        return result;
    }

    /**
     * Reduces the item on top of path 2 (the shift-conflict path).
     * Mirrors {@link #reduce1} but maintains {@code shiftDepth} instead of
     * {@code reduceDepth} and charges SHIFT_COST for prepended states.
     * @param nextSym lookahead under which the reduction must be legal,
     *                or null to use the item's own lookahead set
     */
    protected List<SearchState> reduce2(symbol nextSym) {
        List<StateItem> states = states2;
        List<Derivation> derivs = derivs2;
        int sSize = states.size();
        lalr_item item = states.get(sSize - 1).item;
        if (!item.dot_at_end())
            throw new Error("Cannot reduce item without dot at end.");
        List<SearchState> result = new LinkedList<>();
        Set<symbol> symbolSet =
            nextSym == null
                ? StateItem.symbolSet(item.lookahead())
                : symbolSet(nextSym);
        if (!StateItem.intersect(item.lookahead(), symbolSet))
            return result;
        production prod = item.the_production();
        symbol lhs = prod.lhs().the_symbol();
        int len = prod.rhs_length();
        int dSize = derivs.size();
        Derivation deriv =
            new Derivation(lhs, new LinkedList<>(derivs.subList(dSize
                - len, dSize)));
        // shiftDepth == 0 means the original shift item is being reduced;
        // mark the conflict position with the dot placeholder.
        if (shiftDepth == 0)
            deriv.deriv.add(itm2.dot_pos(), Derivation.dot);
        derivs = new LinkedList<>(derivs.subList(0, dSize - len));
        derivs.add(deriv);
        if (sSize == len + 1) {
            // The head StateItem is a production item, so we need to prepend
            // with possible source StateItems.
            List<List<StateItem>> prev =
                states.get(0).reverseProduction(symbolSet);
            for (List<StateItem> psis : prev) {
                SearchState copy = copy();
                copy.derivs2 = derivs;
                copy.states2 =
                    new LinkedList<>(states.subList(0, sSize - len - 1));
                copy.states2.addAll(0, psis);
                copy.states2.add(StateItem.trans.get(copy.states2.get(copy.states2.size() - 1))
                    .get(lhs));
                int statesSize = copy.states2.size();
                int productionSteps =
                    productionSteps(copy.states2, states.get(0));
                copy.complexity +=
                    SHIFT_COST * (statesSize - productionSteps)
                        + PRODUCTION_COST * productionSteps;
                if (copy.shiftDepth >= 0) copy.shiftDepth--;
                result.add(copy);
            }
        }
        else {
            SearchState copy = copy();
            copy.derivs2 = derivs;
            copy.states2 =
                new LinkedList<>(states.subList(0, sSize - len - 1));
            copy.states2.add(StateItem.trans.get(copy.states2.get(copy.states2.size() - 1))
                .get(lhs));
            copy.complexity += REDUCE_COST;
            if (copy.shiftDepth >= 0) copy.shiftDepth--;
            result.add(copy);
        }
        return result;
    }

    /**
     * Orders by complexity; ties are broken by preferring fewer pending
     * same-state (reduction) items on the stacks.
     * NOTE(review): uses int subtraction, which can overflow for extreme
     * complexities; also intentionally not consistent with equals.
     */
    @Override
    public int compareTo(SearchState o) {
        int diff = complexity - o.complexity;
        if (diff != 0) return diff;
        return reductionStreak(states1) + reductionStreak(states2)
            - (reductionStreak(o.states1) + reductionStreak(o.states2));
    }

    @Override
    public int hashCode() {
        return states1.hashCode() * 31 + states2.hashCode();
    }

    // Equality considers only the state stacks, not the derivations:
    // two states with the same stacks explore the same frontier.
    protected boolean equals(SearchState ss) {
        return states1.equals(ss.states1) && states2.equals(ss.states2);
    }

    @Override
    public boolean equals(Object o) {
        if (o instanceof SearchState) return equals((SearchState) o);
        return false;
    }
}
/**
 * Tests whether two items have the same dot position and agree on every
 * right-hand-side symbol to the left of the dot.
 * NOTE(review): this predicate duplicates hasCommonPrefix below; consider
 * merging the two.
 */
protected static boolean samePrefix(lalr_item itm1, lalr_item itm2) {
    int len = itm1.dot_pos();
    if (len != itm2.dot_pos()) return false;
    production prod1 = itm1.the_production();
    production prod2 = itm2.the_production();
    int i = 0;
    while (i < len) {
        if (rhs(prod1, i) != rhs(prod2, i)) return false;
        i++;
    }
    return true;
}
/**
 * Counts how many of the transitions in {@code sis} (walked from the
 * tail, starting against {@code last}) stay within the same parser state
 * — i.e. are production steps rather than shifts.
 */
protected static int productionSteps(List<StateItem> sis, StateItem last) {
    int count = 0;
    lalr_state prevState = last.state;
    ListIterator<StateItem> it = sis.listIterator(sis.size());
    while (it.hasPrevious()) {
        lalr_state state = it.previous().state;
        if (state == prevState) {
            count++;
        }
        prevState = state;
    }
    return count;
}
/**
 * Determines whether two symbols can begin the same token sequence:
 * terminals must be identical; a terminal and a nonterminal are
 * compatible when the terminal is in the nonterminal's FIRST set; two
 * nonterminals are compatible when equal or when their FIRST sets
 * intersect.
 */
protected static boolean compatible(symbol sym1, symbol sym2) {
    boolean term1 = sym1 instanceof terminal;
    boolean term2 = sym2 instanceof terminal;
    if (term1 && term2) {
        return sym1 == sym2;
    }
    if (term1) {
        return ((non_terminal) sym2).first_set().contains((terminal) sym1);
    }
    if (term2) {
        return ((non_terminal) sym1).first_set().contains((terminal) sym2);
    }
    non_terminal nt1 = (non_terminal) sym1;
    non_terminal nt2 = (non_terminal) sym2;
    return nt1 == nt2 || nt1.first_set().intersects(nt2.first_set());
}
/**
 * Returns the number of trailing items (excluding the very last one) that
 * sit in the same parser state as the tail of {@code sis} — the length of
 * the pending reduction streak.
 */
protected static int reductionStreak(List<StateItem> sis) {
    if (sis.isEmpty()) {
        return 0;
    }
    ListIterator<StateItem> it = sis.listIterator(sis.size());
    // All comparisons are against the state of the tail element.
    lalr_state tailState = it.previous().state;
    int streak = 0;
    while (it.hasPrevious()) {
        if (it.previous().state != tailState) {
            break;
        }
        streak++;
    }
    return streak;
}
/**
 * Returns true when both items carry the dot at the same position and the
 * right-hand-side symbols before the dot are pairwise identical.
 */
protected static boolean hasCommonPrefix(lalr_item itm1, lalr_item itm2) {
    int dotPos = itm1.dot_pos();
    if (dotPos != itm2.dot_pos()) {
        return false;
    }
    production prod1 = itm1.the_production();
    production prod2 = itm2.the_production();
    int i = 0;
    while (i < dotPos) {
        if (rhs(prod1, i) != rhs(prod2, i)) {
            return false;
        }
        i++;
    }
    return true;
}
/** Returns a fresh mutable set containing only the given symbol. */
protected static Set<symbol> symbolSet(symbol sym) {
    return new HashSet<>(Collections.singletonList(sym));
}
/** Returns the symbol at position {@code pos} of the production's rhs. */
protected static symbol rhs(production prod, int pos) {
    return ((symbol_part) prod.rhs(pos)).the_symbol();
}
}
|
Fixed bug in stage 2 finisher.
|
tools/java_cup/src/parser/UnifiedExample.java
|
Fixed bug in stage 2 finisher.
|
|
Java
|
apache-2.0
|
c0cf076398fe8dd9b488071a4dd4ae1ed184d94a
| 0
|
devth/calcite,devth/calcite
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.avatica;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap;
import java.lang.reflect.Field;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Command handler for getting various metadata. Should be implemented by each
* driver.
*
* <p>Also holds other abstract methods that are not related to metadata
* that each provider must implement. This is not ideal.</p>
*/
public interface Meta {
String getSqlKeywords();
String getNumericFunctions();
String getStringFunctions();
String getSystemFunctions();
String getTimeDateFunctions();
MetaResultSet getTables(String catalog,
Pat schemaPattern,
Pat tableNamePattern,
List<String> typeList);
MetaResultSet getColumns(String catalog,
Pat schemaPattern,
Pat tableNamePattern,
Pat columnNamePattern);
MetaResultSet getSchemas(String catalog, Pat schemaPattern);
MetaResultSet getCatalogs();
MetaResultSet getTableTypes();
MetaResultSet getProcedures(String catalog,
Pat schemaPattern,
Pat procedureNamePattern);
MetaResultSet getProcedureColumns(String catalog,
Pat schemaPattern,
Pat procedureNamePattern,
Pat columnNamePattern);
MetaResultSet getColumnPrivileges(String catalog,
String schema,
String table,
Pat columnNamePattern);
MetaResultSet getTablePrivileges(String catalog,
Pat schemaPattern,
Pat tableNamePattern);
MetaResultSet getBestRowIdentifier(String catalog,
String schema,
String table,
int scope,
boolean nullable);
MetaResultSet getVersionColumns(String catalog, String schema, String table);
MetaResultSet getPrimaryKeys(String catalog, String schema, String table);
MetaResultSet getImportedKeys(String catalog, String schema, String table);
MetaResultSet getExportedKeys(String catalog, String schema, String table);
MetaResultSet getCrossReference(String parentCatalog,
String parentSchema,
String parentTable,
String foreignCatalog,
String foreignSchema,
String foreignTable);
MetaResultSet getTypeInfo();
MetaResultSet getIndexInfo(String catalog,
String schema,
String table,
boolean unique,
boolean approximate);
MetaResultSet getUDTs(String catalog,
Pat schemaPattern,
Pat typeNamePattern,
int[] types);
MetaResultSet getSuperTypes(String catalog,
Pat schemaPattern,
Pat typeNamePattern);
MetaResultSet getSuperTables(String catalog,
Pat schemaPattern,
Pat tableNamePattern);
MetaResultSet getAttributes(String catalog,
Pat schemaPattern,
Pat typeNamePattern,
Pat attributeNamePattern);
MetaResultSet getClientInfoProperties();
MetaResultSet getFunctions(String catalog,
Pat schemaPattern,
Pat functionNamePattern);
MetaResultSet getFunctionColumns(String catalog,
Pat schemaPattern,
Pat functionNamePattern,
Pat columnNamePattern);
MetaResultSet getPseudoColumns(String catalog,
Pat schemaPattern,
Pat tableNamePattern,
Pat columnNamePattern);
/** Creates an iterable for a result set.
*
* <p>The default implementation just returns {@code iterable}, which it
* requires to be not null; derived classes may instead choose to execute the
* relational expression in {@code signature}. */
Iterable<Object> createIterable(StatementHandle handle, Signature signature,
Iterable<Object> iterable);
/** Prepares a statement.
*
* @param h Statement handle
* @param sql SQL query
* @param maxRowCount Negative for no limit (different meaning than JDBC)
* @return Signature of prepared statement
*/
Signature prepare(StatementHandle h, String sql, int maxRowCount);
/** Prepares and executes a statement.
*
* @param h Statement handle
* @param sql SQL query
* @param maxRowCount Negative for no limit (different meaning than JDBC)
* @param callback Callback to lock, clear and assign cursor
* @return Signature of prepared statement
*/
MetaResultSet prepareAndExecute(StatementHandle h, String sql,
int maxRowCount, PrepareCallback callback);
/** Called during the creation of a statement to allocate a new handle.
*
* @param ch Connection handle
*/
StatementHandle createStatement(ConnectionHandle ch);
/** Factory to create instances of {@link Meta}. */
interface Factory {
Meta create(List<String> args);
}
/** Wrapper to remind API calls that a parameter is a pattern (allows '%' and
* '_' wildcards, per the JDBC spec) rather than a string to be matched
* exactly. */
class Pat {
public final String s;
private Pat(String s) {
this.s = s;
}
@JsonCreator
public static Pat of(@JsonProperty("s") String name) {
return new Pat(name);
}
}
/** Meta data from which a result set can be constructed. */
class MetaResultSet {
public final int statementId;
public final boolean ownStatement;
public final Iterable<Object> iterable;
public final Signature signature;
public MetaResultSet(int statementId, boolean ownStatement,
Signature signature, Iterable<Object> iterable) {
this.signature = signature;
this.statementId = statementId;
this.ownStatement = ownStatement;
this.iterable = iterable;
}
}
/** Information necessary to convert an {@link Iterable} into a
* {@link org.apache.calcite.avatica.util.Cursor}. */
final class CursorFactory {
public final Style style;
public final Class clazz;
@JsonIgnore
public final List<Field> fields;
public final List<String> fieldNames;
private CursorFactory(Style style, Class clazz, List<Field> fields,
List<String> fieldNames) {
assert (fieldNames != null)
== (style == Style.RECORD_PROJECTION || style == Style.MAP);
assert (fields != null) == (style == Style.RECORD_PROJECTION);
this.style = style;
this.clazz = clazz;
this.fields = fields;
this.fieldNames = fieldNames;
}
@JsonCreator
public static CursorFactory create(@JsonProperty("style") Style style,
@JsonProperty("clazz") Class clazz,
@JsonProperty("fieldNames") List<String> fieldNames) {
switch (style) {
case OBJECT:
return OBJECT;
case ARRAY:
return ARRAY;
case LIST:
return LIST;
case RECORD:
return record(clazz);
case RECORD_PROJECTION:
return record(clazz, null, fieldNames);
case MAP:
return map(fieldNames);
default:
throw new AssertionError("unknown style: " + style);
}
}
public static final CursorFactory OBJECT =
new CursorFactory(Style.OBJECT, null, null, null);
public static final CursorFactory ARRAY =
new CursorFactory(Style.ARRAY, null, null, null);
public static final CursorFactory LIST =
new CursorFactory(Style.LIST, null, null, null);
public static CursorFactory record(Class resultClazz) {
return new CursorFactory(Style.RECORD, resultClazz, null, null);
}
public static CursorFactory record(Class resultClass, List<Field> fields,
List<String> fieldNames) {
if (fields == null) {
fields = new ArrayList<Field>();
for (String fieldName : fieldNames) {
try {
fields.add(resultClass.getField(fieldName));
} catch (NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
}
return new CursorFactory(Style.RECORD_PROJECTION, resultClass, fields,
fieldNames);
}
public static CursorFactory map(List<String> fieldNames) {
return new CursorFactory(Style.MAP, null, null, fieldNames);
}
public static CursorFactory deduce(List<ColumnMetaData> columns,
Class resultClazz) {
if (columns.size() == 1) {
return OBJECT;
} else if (resultClazz != null && !resultClazz.isArray()) {
return record(resultClazz);
} else {
return ARRAY;
}
}
}
/** How logical fields are represented in the objects returned by the
* iterator. */
enum Style {
OBJECT,
RECORD,
RECORD_PROJECTION,
ARRAY,
LIST,
MAP
}
/** Result of preparing a statement.
 *
 * <p>{@code internalParameters} is marked {@code transient} and is omitted
 * from the JSON-creator factory below, so it is intentionally excluded from
 * the serialized wire form; a deserialized Signature gets an empty map. */
class Signature {
public final List<ColumnMetaData> columns;
public final String sql;
public final List<AvaticaParameter> parameters;
public final transient Map<String, Object> internalParameters;
public final CursorFactory cursorFactory;
/** Creates a Signature. */
public Signature(List<ColumnMetaData> columns,
String sql,
List<AvaticaParameter> parameters,
Map<String, Object> internalParameters,
CursorFactory cursorFactory) {
this.columns = columns;
this.sql = sql;
this.parameters = parameters;
this.internalParameters = internalParameters;
this.cursorFactory = cursorFactory;
}
/** Used by Jackson to create a Signature by de-serializing JSON.
 * Note: internal parameters are not part of the payload, so the new
 * instance carries an empty, immutable map. */
@JsonCreator
public static Signature create(
@JsonProperty("columns") List<ColumnMetaData> columns,
@JsonProperty("sql") String sql,
@JsonProperty("parameters") List<AvaticaParameter> parameters,
@JsonProperty("cursorFactory") CursorFactory cursorFactory) {
return new Signature(columns, sql, parameters,
ImmutableMap.<String, Object>of(), cursorFactory);
}
/** Returns a copy of this Signature, substituting given CursorFactory. */
public Signature setCursorFactory(CursorFactory cursorFactory) {
return new Signature(columns, sql, parameters, internalParameters,
cursorFactory);
}
}
/** Handle identifying a connection by its integer id. */
class ConnectionHandle {
  public final int id;

  @JsonCreator
  public ConnectionHandle(@JsonProperty("id") int id) {
    this.id = id;
  }

  @Override public String toString() {
    return String.valueOf(id);
  }
}
/** Handle identifying a statement by its integer id. */
class StatementHandle {
  public final int id;

  @JsonCreator
  public StatementHandle(@JsonProperty("id") int id) {
    this.id = id;
  }

  @Override public String toString() {
    return String.valueOf(id);
  }
}
/** API to put a result set into a statement, being careful to enforce
 * thread-safety and not to overwrite existing open result sets. */
interface PrepareCallback {
/** Returns the object callers should synchronize on while mutating the
 * statement's result-set state. */
Object getMonitor();
/** Clears any existing result set from the statement. */
void clear() throws SQLException;
/** Installs the given signature and row iterable as the statement's
 * current result set. */
void assign(Signature signature, Iterable<Object> iterable)
throws SQLException;
/** Completes the execution once the result set has been assigned. */
void execute() throws SQLException;
}
}
// End Meta.java
|
avatica/src/main/java/org/apache/calcite/avatica/Meta.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.avatica;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.lang.reflect.Field;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Command handler for getting various metadata. Should be implemented by each
* driver.
*
* <p>Also holds other abstract methods that are not related to metadata
* that each provider must implement. This is not ideal.</p>
*/
public interface Meta {
// String-valued metadata; names mirror the corresponding getters on
// java.sql.DatabaseMetaData (keyword and built-in function lists).
String getSqlKeywords();
String getNumericFunctions();
String getStringFunctions();
String getSystemFunctions();
String getTimeDateFunctions();
// Catalog/schema/table metadata queries. Each mirrors the same-named
// method on java.sql.DatabaseMetaData and returns the rows as a
// MetaResultSet. Parameters typed Pat are JDBC search patterns ('%' and
// '_' wildcards); plain String parameters are matched exactly.
MetaResultSet getTables(String catalog,
Pat schemaPattern,
Pat tableNamePattern,
List<String> typeList);
MetaResultSet getColumns(String catalog,
Pat schemaPattern,
Pat tableNamePattern,
Pat columnNamePattern);
MetaResultSet getSchemas(String catalog, Pat schemaPattern);
MetaResultSet getCatalogs();
MetaResultSet getTableTypes();
MetaResultSet getProcedures(String catalog,
Pat schemaPattern,
Pat procedureNamePattern);
MetaResultSet getProcedureColumns(String catalog,
Pat schemaPattern,
Pat procedureNamePattern,
Pat columnNamePattern);
MetaResultSet getColumnPrivileges(String catalog,
String schema,
String table,
Pat columnNamePattern);
MetaResultSet getTablePrivileges(String catalog,
Pat schemaPattern,
Pat tableNamePattern);
MetaResultSet getBestRowIdentifier(String catalog,
String schema,
String table,
int scope,
boolean nullable);
MetaResultSet getVersionColumns(String catalog, String schema, String table);
// Key relationships between tables.
MetaResultSet getPrimaryKeys(String catalog, String schema, String table);
MetaResultSet getImportedKeys(String catalog, String schema, String table);
MetaResultSet getExportedKeys(String catalog, String schema, String table);
MetaResultSet getCrossReference(String parentCatalog,
String parentSchema,
String parentTable,
String foreignCatalog,
String foreignSchema,
String foreignTable);
MetaResultSet getTypeInfo();
MetaResultSet getIndexInfo(String catalog,
String schema,
String table,
boolean unique,
boolean approximate);
// User-defined types and their hierarchy.
MetaResultSet getUDTs(String catalog,
Pat schemaPattern,
Pat typeNamePattern,
int[] types);
MetaResultSet getSuperTypes(String catalog,
Pat schemaPattern,
Pat typeNamePattern);
MetaResultSet getSuperTables(String catalog,
Pat schemaPattern,
Pat tableNamePattern);
MetaResultSet getAttributes(String catalog,
Pat schemaPattern,
Pat typeNamePattern,
Pat attributeNamePattern);
MetaResultSet getClientInfoProperties();
// Functions and columns that are not physically stored.
MetaResultSet getFunctions(String catalog,
Pat schemaPattern,
Pat functionNamePattern);
MetaResultSet getFunctionColumns(String catalog,
Pat schemaPattern,
Pat functionNamePattern,
Pat columnNamePattern);
MetaResultSet getPseudoColumns(String catalog,
Pat schemaPattern,
Pat tableNamePattern,
Pat columnNamePattern);
/** Creates an iterable for a result set.
*
* <p>The default implementation just returns {@code iterable}, which it
* requires to be not null; derived classes may instead choose to execute the
* relational expression in {@code signature}. */
Iterable<Object> createIterable(StatementHandle handle, Signature signature,
Iterable<Object> iterable);
/** Prepares a statement.
*
* @param h Statement handle
* @param sql SQL query
* @param maxRowCount Negative for no limit (different meaning than JDBC)
* @return Signature of prepared statement
*/
Signature prepare(StatementHandle h, String sql, int maxRowCount);
/** Prepares and executes a statement.
*
* @param h Statement handle
* @param sql SQL query
* @param maxRowCount Negative for no limit (different meaning than JDBC)
* @param callback Callback to lock, clear and assign cursor
* @return Signature of prepared statement
*/
MetaResultSet prepareAndExecute(StatementHandle h, String sql,
int maxRowCount, PrepareCallback callback);
/** Called during the creation of a statement to allocate a new handle.
*
* @param ch Connection handle
*/
StatementHandle createStatement(ConnectionHandle ch);
/** Factory to create instances of {@link Meta}.
 *
 * <p>Implementations receive driver-specific arguments as a list of
 * strings. */
interface Factory {
Meta create(List<String> args);
}
/** Wrapper to remind API calls that a parameter is a pattern (allows '%' and
 * '_' wildcards, per the JDBC spec) rather than a string to be matched
 * exactly. */
class Pat {
  public final String s;

  private Pat(String pattern) {
    this.s = pattern;
  }

  /** Wraps a pattern string; also serves as the Jackson creator. */
  @JsonCreator
  public static Pat of(@JsonProperty("s") String name) {
    return new Pat(name);
  }
}
/** Meta data from which a result set can be constructed. */
class MetaResultSet {
  /** Id of the statement this result set belongs to. */
  public final int statementId;
  /** Whether this result set owns (and should close) its statement. */
  public final boolean ownStatement;
  /** The rows of the result set. */
  public final Iterable<Object> iterable;
  /** Column and cursor description of the rows. */
  public final Signature signature;

  public MetaResultSet(int statementId, boolean ownStatement,
      Signature signature, Iterable<Object> iterable) {
    this.statementId = statementId;
    this.ownStatement = ownStatement;
    this.iterable = iterable;
    this.signature = signature;
  }
}
/** Information necessary to convert an {@link Iterable} into a
 * {@link org.apache.calcite.avatica.util.Cursor}.
 *
 * <p>{@code fields} is {@code @JsonIgnore}d because reflective Field
 * objects do not serialize; on deserialization {@link #create} rebuilds
 * them from {@code fieldNames} via {@link #record(Class, List, List)}. */
final class CursorFactory {
public final Style style;
public final Class clazz;
@JsonIgnore
public final List<Field> fields;
public final List<String> fieldNames;
// Private: instances come from the static factories below, which keep
// the style/clazz/fields/fieldNames combinations consistent (enforced
// by the asserts).
private CursorFactory(Style style, Class clazz, List<Field> fields,
List<String> fieldNames) {
assert (fieldNames != null)
== (style == Style.RECORD_PROJECTION || style == Style.MAP);
assert (fields != null) == (style == Style.RECORD_PROJECTION);
this.style = style;
this.clazz = clazz;
this.fields = fields;
this.fieldNames = fieldNames;
}
/** Used by Jackson to re-create a CursorFactory from JSON; dispatches
 * to the canonical constant or factory for the given style. */
@JsonCreator
public static CursorFactory create(@JsonProperty("style") Style style,
@JsonProperty("clazz") Class clazz,
@JsonProperty("fieldNames") List<String> fieldNames) {
switch (style) {
case OBJECT:
return OBJECT;
case ARRAY:
return ARRAY;
case LIST:
return LIST;
case RECORD:
return record(clazz);
case RECORD_PROJECTION:
// fields are re-resolved from fieldNames by reflection.
return record(clazz, null, fieldNames);
case MAP:
return map(fieldNames);
default:
throw new AssertionError("unknown style: " + style);
}
}
// Shared singletons for the styles that carry no extra state.
public static final CursorFactory OBJECT =
new CursorFactory(Style.OBJECT, null, null, null);
public static final CursorFactory ARRAY =
new CursorFactory(Style.ARRAY, null, null, null);
public static final CursorFactory LIST =
new CursorFactory(Style.LIST, null, null, null);
/** Creates a factory whose rows are instances of {@code resultClazz}. */
public static CursorFactory record(Class resultClazz) {
return new CursorFactory(Style.RECORD, resultClazz, null, null);
}
/** Creates a factory projecting the named public fields of
 * {@code resultClass}; resolves {@code fields} by reflection when null.
 * Wraps {@link NoSuchFieldException} in a RuntimeException. */
public static CursorFactory record(Class resultClass, List<Field> fields,
List<String> fieldNames) {
if (fields == null) {
fields = new ArrayList<Field>();
for (String fieldName : fieldNames) {
try {
fields.add(resultClass.getField(fieldName));
} catch (NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
}
return new CursorFactory(Style.RECORD_PROJECTION, resultClass, fields,
fieldNames);
}
/** Creates a factory whose rows are maps keyed by {@code fieldNames}. */
public static CursorFactory map(List<String> fieldNames) {
return new CursorFactory(Style.MAP, null, null, fieldNames);
}
/** Deduces a factory: single column becomes OBJECT, a non-array result
 * class becomes RECORD, anything else becomes ARRAY. */
public static CursorFactory deduce(List<ColumnMetaData> columns,
Class resultClazz) {
if (columns.size() == 1) {
return OBJECT;
} else if (resultClazz != null && !resultClazz.isArray()) {
return record(resultClazz);
} else {
return ARRAY;
}
}
}
/** How logical fields are represented in the objects returned by the
 * iterator.
 *
 * <p>Constant order matters to any ordinal-based serialization; do not
 * reorder. */
enum Style {
/** Each row is a single value (single-column results). */
OBJECT,
/** Each row is an instance of a record class; all public fields are
 * columns. */
RECORD,
/** Like RECORD but only a named subset of fields is exposed. */
RECORD_PROJECTION,
/** Each row is an array of column values. */
ARRAY,
/** Each row is a list of column values. */
LIST,
/** Each row is a map keyed by field name. */
MAP
}
/** Result of preparing a statement.
 *
 * <p>{@code internalParameters} may hold server-internal engine objects,
 * so it must never travel over the JSON wire: the field is
 * {@code transient} and Jackson deserialization goes through the static
 * {@link #create} factory, which supplies an empty map instead. */
class Signature {
  public final List<ColumnMetaData> columns;
  public final String sql;
  public final List<AvaticaParameter> parameters;
  public final transient Map<String, Object> internalParameters;
  public final CursorFactory cursorFactory;

  /** Creates a Signature. */
  public Signature(List<ColumnMetaData> columns,
      String sql,
      List<AvaticaParameter> parameters,
      Map<String, Object> internalParameters,
      CursorFactory cursorFactory) {
    this.columns = columns;
    this.sql = sql;
    this.parameters = parameters;
    this.internalParameters = internalParameters;
    this.cursorFactory = cursorFactory;
  }

  /** Used by Jackson to create a Signature by de-serializing JSON.
   * Internal parameters are not part of the payload; the result carries
   * an empty map. */
  @JsonCreator
  public static Signature create(
      @JsonProperty("columns") List<ColumnMetaData> columns,
      @JsonProperty("sql") String sql,
      @JsonProperty("parameters") List<AvaticaParameter> parameters,
      @JsonProperty("cursorFactory") CursorFactory cursorFactory) {
    return new Signature(columns, sql, parameters,
        java.util.Collections.<String, Object>emptyMap(), cursorFactory);
  }

  /** Returns a copy of this Signature, substituting given CursorFactory. */
  public Signature setCursorFactory(CursorFactory cursorFactory) {
    return new Signature(columns, sql, parameters, internalParameters,
        cursorFactory);
  }
}
/** Connection handle: identifies a server-side connection by integer id. */
class ConnectionHandle {
public final int id;
@Override public String toString() {
return Integer.toString(id);
}
/** Used by Jackson for JSON de-serialization. */
@JsonCreator
public ConnectionHandle(@JsonProperty("id") int id) {
this.id = id;
}
}
/** Statement handle: identifies a server-side statement by integer id. */
class StatementHandle {
public final int id;
@Override public String toString() {
return Integer.toString(id);
}
/** Used by Jackson for JSON de-serialization. */
@JsonCreator
public StatementHandle(@JsonProperty("id") int id) {
this.id = id;
}
}
/** API to put a result set into a statement, being careful to enforce
 * thread-safety and not to overwrite existing open result sets. */
interface PrepareCallback {
/** Returns the object callers should synchronize on while mutating the
 * statement's result-set state. */
Object getMonitor();
/** Clears any existing result set from the statement. */
void clear() throws SQLException;
/** Installs the given signature and row iterable as the statement's
 * current result set. */
void assign(Signature signature, Iterable<Object> iterable)
throws SQLException;
/** Completes the execution once the result set has been assigned. */
void execute() throws SQLException;
}
}
// End Meta.java
|
[CALCITE-586] Prevent JSON serialization of Signature.internalParameters
|
avatica/src/main/java/org/apache/calcite/avatica/Meta.java
|
[CALCITE-586] Prevent JSON serialization of Signature.internalParameters
|
|
Java
|
apache-2.0
|
62bcc759faabb4ba8e8248f545d68b092abd18dc
| 0
|
blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,gstevey/gradle,blindpirate/gradle,lsmaira/gradle,blindpirate/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,gradle/gradle,gstevey/gradle,gradle/gradle,lsmaira/gradle,blindpirate/gradle,gstevey/gradle,lsmaira/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,lsmaira/gradle,robinverduijn/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,gradle/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,gstevey/gradle,robinverduijn/gradle,lsmaira/gradle,lsmaira/gradle,lsmaira/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,blindpirate/gradle,lsmaira/gradle,gstevey/gradle,lsmaira/gradle
|
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.ivyservice.moduleconverter.dependencies;
import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor;
import org.apache.ivy.core.module.descriptor.ModuleDescriptor;
import org.apache.ivy.core.module.id.ModuleRevisionId;
import org.gradle.api.artifacts.Module;
import org.gradle.api.artifacts.ModuleDependency;
import org.gradle.api.artifacts.ProjectDependency;
import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal;
import org.gradle.api.internal.artifacts.ivyservice.IvyUtil;
import org.gradle.api.internal.artifacts.ivyservice.moduleconverter.ExcludeRuleConverter;
import org.gradle.internal.component.local.model.DefaultProjectDependencyMetaData;
import org.gradle.internal.component.local.model.ProjectDependencyMetaData;
import org.gradle.api.internal.project.ProjectInternal;
/** Converts project dependencies into Ivy dependency descriptors. */
public class ProjectIvyDependencyDescriptorFactory extends AbstractIvyDependencyDescriptorFactory {
    public ProjectIvyDependencyDescriptorFactory(ExcludeRuleConverter excludeRuleConverter) {
        super(excludeRuleConverter);
    }

    /** Handles only {@link ProjectDependency} instances. */
    public boolean canConvert(ModuleDependency dependency) {
        return dependency instanceof ProjectDependency;
    }

    public ProjectDependencyMetaData createDependencyDescriptor(String configuration, ModuleDependency dependency, ModuleDescriptor parent) {
        ProjectDependencyInternal projectDependency = (ProjectDependencyInternal) dependency;
        // Let the dependency finalize its state before it is converted.
        projectDependency.beforeResolved();
        ModuleRevisionId revisionId = createModuleRevisionId(dependency);
        DefaultDependencyDescriptor descriptor = new DefaultDependencyDescriptor(parent, revisionId, false, false, dependency.isTransitive());
        addExcludesArtifactsAndDependencies(configuration, dependency, descriptor);
        return new DefaultProjectDependencyMetaData(descriptor, projectDependency);
    }

    private ModuleRevisionId createModuleRevisionId(ModuleDependency dependency) {
        ProjectDependency projectDependency = (ProjectDependency) dependency;
        ProjectInternal project = (ProjectInternal) projectDependency.getDependencyProject();
        Module module = project.getModule();
        return IvyUtil.createModuleRevisionId(module);
    }
}
|
subprojects/dependency-management/src/main/java/org/gradle/api/internal/artifacts/ivyservice/moduleconverter/dependencies/ProjectIvyDependencyDescriptorFactory.java
|
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.ivyservice.moduleconverter.dependencies;
import org.apache.ivy.core.module.descriptor.DefaultDependencyDescriptor;
import org.apache.ivy.core.module.descriptor.ModuleDescriptor;
import org.apache.ivy.core.module.id.ModuleRevisionId;
import org.gradle.api.artifacts.Module;
import org.gradle.api.artifacts.ModuleDependency;
import org.gradle.api.artifacts.ProjectDependency;
import org.gradle.api.internal.artifacts.dependencies.ProjectDependencyInternal;
import org.gradle.api.internal.artifacts.ivyservice.IvyUtil;
import org.gradle.api.internal.artifacts.ivyservice.moduleconverter.ExcludeRuleConverter;
import org.gradle.api.internal.project.ProjectInternal;
import org.gradle.internal.component.local.model.DefaultProjectDependencyMetaData;
import org.gradle.internal.component.local.model.ProjectDependencyMetaData;
/** Converts project dependencies into Ivy dependency descriptors. */
public class ProjectIvyDependencyDescriptorFactory extends AbstractIvyDependencyDescriptorFactory {
public ProjectIvyDependencyDescriptorFactory(ExcludeRuleConverter excludeRuleConverter) {
super(excludeRuleConverter);
}
/** Builds the Ivy descriptor for a project dependency within the given
 * configuration and parent module descriptor. */
public ProjectDependencyMetaData createDependencyDescriptor(String configuration, ModuleDependency dependency, ModuleDescriptor parent) {
ProjectDependencyInternal projectDependency = (ProjectDependencyInternal) dependency;
// Let the dependency finalize its state before it is converted.
projectDependency.beforeResolved();
ModuleRevisionId moduleRevisionId = createModuleRevisionId(dependency);
DefaultDependencyDescriptor dependencyDescriptor = new DefaultDependencyDescriptor(parent, moduleRevisionId, false, false, dependency.isTransitive());
addExcludesArtifactsAndDependencies(configuration, dependency, dependencyDescriptor);
return new DefaultProjectDependencyMetaData(dependencyDescriptor, projectDependency);
}
/** This factory handles only project dependencies. */
public boolean canConvert(ModuleDependency dependency) {
return dependency instanceof ProjectDependency;
}
// Derives the Ivy revision id from the target project's module.
private ModuleRevisionId createModuleRevisionId(ModuleDependency dependency) {
ProjectDependency projectDependency = (ProjectDependency) dependency;
Module module = ((ProjectInternal) projectDependency.getDependencyProject()).getModule();
return IvyUtil.createModuleRevisionId(module);
}
}
|
Reverted unnecessary change
|
subprojects/dependency-management/src/main/java/org/gradle/api/internal/artifacts/ivyservice/moduleconverter/dependencies/ProjectIvyDependencyDescriptorFactory.java
|
Reverted unnecessary change
|
|
Java
|
apache-2.0
|
d303a2cb94a51c9c4db38acef731fa0743eeac42
| 0
|
marsounjan/floatingsearchview,arimorty/floatingsearchview,maxee/floatingsearchview,maxee/floatingsearchview,marsounjan/floatingsearchview,arimorty/floatingsearchview
|
/*
* Copyright (C) 2015 Arlib
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arlib.floatingsearchview;
import android.animation.ObjectAnimator;
import android.animation.ValueAnimator;
import android.app.Activity;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.Intent;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.speech.RecognizerIntent;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPropertyAnimatorListenerAdapter;
import android.support.v7.graphics.drawable.DrawerArrowDrawable;
import android.support.v7.view.SupportMenuInflater;
import android.support.v7.view.ViewPropertyAnimatorCompatSet;
import android.support.v7.view.menu.MenuBuilder;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.Layout;
import android.text.StaticLayout;
import android.text.TextPaint;
import android.util.AttributeSet;
import android.util.Log;
import android.util.TypedValue;
import android.view.GestureDetector;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.WindowManager;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import android.view.inputmethod.EditorInfo;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.arlib.floatingsearchview.suggestions.SearchSuggestionsAdapter;
import com.arlib.floatingsearchview.suggestions.model.SearchSuggestion;
import com.arlib.floatingsearchview.util.MenuPopupHelper;
import com.arlib.floatingsearchview.util.Util;
import com.arlib.floatingsearchview.util.adapter.GestureDetectorListenerAdapter;
import com.arlib.floatingsearchview.util.adapter.OnItemTouchListenerAdapter;
import com.arlib.floatingsearchview.util.adapter.TextWatcherAdapter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A search UI widget that implements a floating search box also called persistent
* search.
*/
public class FloatingSearchView extends FrameLayout {
private static final String TAG = "FloatingSearchView";
private final int BACKGROUND_DRAWABLE_ALPHA_SEARCH_ACTIVE = 150;
private final int BACKGROUND_DRAWABLE_ALPHA_SEARCH_INACTIVE = 0;
private final int MENU_ICON_ANIM_DURATION = 250;
private final int BACKGROUND_FADE__ANIM_DURATION = 250;
private final int ATTRS_SEARCH_BAR_MARGIN_DEFAULT = 0;
private final boolean ATTRS_SEARCH_BAR_SHOW_MENU_ACTION_DEFAULT = true;
private final boolean ATTRS_DISMISS_ON_OUTSIDE_TOUCH_DEFAULT = false;
private final boolean ATTRS_SEARCH_BAR_SHOW_VOICE_ACTION_DEFAULT = false;
private final boolean ATTRS_SEARCH_BAR_SHOW_SEARCH_KEY_DEFAULT = true;
private final boolean ATTRS_SEARCH_BAR_SHOW_SEARCH_HINT_NOT_FOCUSED_DEFAULT = true;
private final boolean ATTRS_HIDE_OVERFLOW_MENU_FOCUSED_DEFAULT = true;
private final boolean ATTRS_SHOW_OVERFLOW_MENU_DEFAULT = true;
private final int SUGGEST_LIST_COLLAPSE_ANIM_DURATION = 200;
private final Interpolator SUGGEST_LIST_COLLAPSE_ANIM_INTERPOLATOR = new LinearInterpolator();
private final int SUGGEST_ITEM_ADD_ANIM_DURATION = 250;
private final Interpolator SUGGEST_ITEM_ADD_ANIM_INTERPOLATOR = new LinearInterpolator();
private final int VOICE_REC_DEFAULT_REQUEST_CODE = 1024;
private final int SUGGESTION_ITEM_HEIGHT_DP = 48;
private final int SUGGESTION_ITEM_ANIM_DURATION = 120;
private final int OVERFLOW_ICON_WIDTH_DP = 36;
private Activity mHostActivity;
private Drawable mBackgroundDrawable;
private boolean mDismissOnOutsideTouch = true;
private View mQuerySection;
private ImageView mVoiceInputOrClearButton;
private EditText mSearchInput;
private boolean mShowSearchKey;
private boolean mIsFocused;
private TextView mSearchBarTitle;
private OnQueryChangeListener mQueryListener;
private ProgressBar mSearchProgress;
private ImageView mMenuSearchOrExitButton;
private OnLeftMenuClickListener mOnMenuClickListener;
private DrawerArrowDrawable mMenuBtnDrawable;
private Drawable mIconClear;
private Drawable mIconMic;
private Drawable mIconOverflowMenu;
private Drawable mIconBackArrow;
private Drawable mIconSearch;
private OnFocusChangeListener mFocusChangeListener;
private boolean mShowMenuAction;
private String mOldQuery = "";
private OnSearchListener mSearchListener;
private int mVoiceRecRequestCode = VOICE_REC_DEFAULT_REQUEST_CODE;
private String mVoiceRecHint;
private boolean mShowVoiceInput;
private boolean mShowHintNotFocused;
private String mSearchHint;
private boolean mMenuOpen = false;
private boolean mIsActiveOnClick;
private ImageView mOverflowMenu;
private MenuBuilder mMenuBuilder;
private MenuPopupHelper mMenuPopupHelper;
private SupportMenuInflater mMenuInflater;
private OnMenuItemClickListener mOnOverflowMenuItemListener;
private boolean mHideOverflowMenuFocused;
private boolean mShowOverFlowMenu;
private boolean mSearchEnabled;
private boolean mSkipQueryFocusChangeEvent;
private boolean mSkipTextChangeEvent;
private View mDivider;
private RelativeLayout mSuggestionsSection;
private View mSuggestionListContainer;
private RecyclerView mSuggestionsList;
private SearchSuggestionsAdapter mSuggestionsAdapter;
private boolean isCollapsing = false;
/**
 * Callback invoked when the left (navigation) menu
 * button is clicked.
 */
public interface OnLeftMenuClickListener{
/**
 * Called when the menu button was
 * clicked and the menu's state is now opened.
 */
void onMenuOpened();
/**
 * Called when the back button was
 * clicked and the menu's state is now closed.
 */
void onMenuClosed();
}
/**
 * Listener notified when the current search has completed,
 * either via a suggestion click or the keyboard search key.
 */
public interface OnSearchListener {
/**
 * Called when a suggestion was clicked, indicating
 * that the current search has completed.
 *
 * @param searchSuggestion the suggestion the user selected
 */
void onSuggestionClicked(SearchSuggestion searchSuggestion);
/**
 * Called when the current search has completed
 * as a result of pressing the search key on the keyboard.
 */
void onSearchAction();
}
/**
 * Listener notified when an item in the overflow
 * menu has been selected.
 */
public interface OnMenuItemClickListener{
/**
 * Called when a menu item has been
 * selected.
 *
 * @param item the selected menu item
 */
void onMenuItemSelected(MenuItem item);
}
/**
 * Listener notified of changes in the query text.
 */
public interface OnQueryChangeListener{
/**
 * Called whenever one or more characters of the
 * query have changed.
 *
 * @param oldQuery the previous query
 * @param newQuery the new query
 */
void onSearchTextChanged(String oldQuery, String newQuery);
}
/**
 * Listener notified of focus state changes of the search bar.
 */
public interface OnFocusChangeListener{
/**
 * Called when the search bar has gained focus
 * and listeners are now active.
 */
void onFocus();
/**
 * Called when the search bar has lost focus
 * and listeners are no longer active.
 */
void onFocusCleared();
}
/** Creates the view programmatically (no XML attributes). */
public FloatingSearchView(Context context) {
this(context, null);
}
/** Creates the view from XML; {@code attrs} may carry custom
 * FloatingSearchView attributes. */
public FloatingSearchView(Context context, AttributeSet attrs){
super(context, attrs);
init(attrs);
}
/**
 * Shared constructor logic: inflates the layout, caches child-view
 * references, builds the overflow-menu plumbing and applies any XML
 * attributes.
 *
 * @param attrs attributes from XML, or null when constructed in code
 */
private void init(AttributeSet attrs){
    mHostActivity = getHostActivity();
    // Note: the previous LAYOUT_INFLATER_SERVICE lookup was unused;
    // View.inflate() does the inflation itself.
    inflate(getContext(), R.layout.floating_search_layout, this);
    // Scrim drawn behind the search bar while it has focus.
    mBackgroundDrawable = new ColorDrawable(Color.BLACK);
    mQuerySection = findViewById(R.id.search_query_section);
    mVoiceInputOrClearButton = (ImageView)findViewById(R.id.search_bar_mic_or_ex);
    mSearchInput = (EditText)findViewById(R.id.search_bar_text);
    mSearchBarTitle = (TextView)findViewById(R.id.search_bar_title);
    mMenuSearchOrExitButton = (ImageView)findViewById(R.id.search_bar_exit);
    mSearchProgress = (ProgressBar)findViewById(R.id.search_bar_search_progress);
    mShowMenuAction = true;
    mOverflowMenu = (ImageView)findViewById(R.id.search_bar_overflow_menu);
    mMenuBuilder = new MenuBuilder(getContext());
    mMenuPopupHelper = new MenuPopupHelper(getContext(), mMenuBuilder, mOverflowMenu);
    initDrawables();
    mVoiceInputOrClearButton.setImageDrawable(mIconMic);
    mOverflowMenu.setImageDrawable(mIconOverflowMenu);
    mDivider = findViewById(R.id.divider);
    mSuggestionsSection = (RelativeLayout)findViewById(R.id.search_suggestions_section);
    mSuggestionListContainer = findViewById(R.id.suggestions_list_container);
    mSuggestionsList = (RecyclerView)findViewById(R.id.suggestions_list);
    setupViews(attrs);
}
// Loads all icon drawables, wraps them for compat tinting, and applies
// the default (inactive gray) tint.
private void initDrawables(){
mMenuBtnDrawable = new DrawerArrowDrawable(getContext());
// Each drawable is wrapped with DrawableCompat so setTint works on
// pre-Lollipop devices.
mIconClear = getResources().getDrawable(R.drawable.ic_clear_black_24dp);
mIconClear = DrawableCompat.wrap(mIconClear);
mIconMic = getResources().getDrawable(R.drawable.ic_mic_black_24dp);
mIconMic = DrawableCompat.wrap(mIconMic);
mIconOverflowMenu = getResources().getDrawable(R.drawable.ic_more_vert_black_24dp);
mIconOverflowMenu = DrawableCompat.wrap(mIconOverflowMenu);
mIconBackArrow = getResources().getDrawable(R.drawable.ic_arrow_back_black_24dp);
mIconBackArrow = DrawableCompat.wrap(mIconBackArrow);
mIconSearch = getResources().getDrawable(R.drawable.ic_search_black_24dp);
mIconSearch = DrawableCompat.wrap(mIconSearch);
setIconsColor(getResources().getColor(R.color.gray_active_icon));
}
/** Applies the given tint to every icon used by the view, including the
 * hamburger/back-arrow drawable. */
private void setIconsColor(int tint){
    mMenuBtnDrawable.setColor(tint);
    DrawableCompat.setTint(mIconSearch, tint);
    DrawableCompat.setTint(mIconBackArrow, tint);
    DrawableCompat.setTint(mIconMic, tint);
    DrawableCompat.setTint(mIconClear, tint);
    DrawableCompat.setTint(mIconOverflowMenu, tint);
}
// True until the first layout pass has run; used to apply the one-time
// suggestion-section height adjustment below.
private boolean mIsInitialLayout = true;
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if(mIsInitialLayout) {
int addedHeight = Util.dpToPx(5*3);
//we need to add 5dp to the mSuggestionsSection because we are
//going to move it up by 5dp in order o cover the search bar's
//rounded corners. We also need to add an additional 10dp to
//mSuggestionsSection in order to hide mSuggestionListContainer
//rounded corners and top/bottom padding.
mSuggestionsSection.getLayoutParams().height = mSuggestionsSection.getMeasuredHeight()
+addedHeight;
mIsInitialLayout = false;
}
//todo check if this is safe here
adjustSearchInputPadding();
//pass on the layout
super.onLayout(changed, l, t, r, b);
}
/**
 * Applies XML attributes (if any) and wires up the background scrim,
 * the query bar and the suggestions section.
 *
 * @param attrs attributes from XML, or null when constructed in code
 */
private void setupViews(AttributeSet attrs){
    if (attrs != null) {
        applyXmlAttributes(attrs);
    }
    // Start fully transparent; the scrim becomes visible when search
    // gains focus.
    mBackgroundDrawable.setAlpha(BACKGROUND_DRAWABLE_ALPHA_SEARCH_INACTIVE);
    // setBackgroundDrawable was deprecated in API 16 in favor of
    // setBackground.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
        setBackgroundDrawable(mBackgroundDrawable);
    } else {
        setBackground(mBackgroundDrawable);
    }
    setupQueryBar();
    // Skip the suggestion list in the layout-editor preview.
    if (!isInEditMode()) {
        setupSuggestionSection();
    }
}
    /**
     * Reads this view's custom XML attributes (search bar width and margins,
     * hint text, action visibility flags, overflow menu resource, etc.) and
     * applies them via the corresponding setters. The TypedArray is always
     * recycled via the finally block.
     */
    private void applyXmlAttributes(AttributeSet attrs){
        TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.FloatingSearchView);
        try {
            //default value, overridden by the attribute read further below
            setDismissOnOutsideClick(true);
            int searchBarWidth = a.getDimensionPixelSize(R.styleable.FloatingSearchView_floatingSearch_searchBarWidth, ViewGroup.LayoutParams.MATCH_PARENT);
            //the bar, its divider, and the suggestion container share one width
            mQuerySection.getLayoutParams().width = searchBarWidth;
            mDivider.getLayoutParams().width = searchBarWidth;
            mSuggestionListContainer.getLayoutParams().width = searchBarWidth;
            int searchBarLeftMargin = a.getDimensionPixelSize(R.styleable.FloatingSearchView_floatingSearch_searchBarMarginLeft, ATTRS_SEARCH_BAR_MARGIN_DEFAULT);
            int searchBarTopMargin = a.getDimensionPixelSize(R.styleable.FloatingSearchView_floatingSearch_searchBarMarginTop, ATTRS_SEARCH_BAR_MARGIN_DEFAULT);
            int searchBarRightMargin = a.getDimensionPixelSize(R.styleable.FloatingSearchView_floatingSearch_searchBarMarginRight, ATTRS_SEARCH_BAR_MARGIN_DEFAULT);
            LayoutParams querySectionLP = (LayoutParams)mQuerySection.getLayoutParams();
            LayoutParams dividerLP = (LayoutParams)mDivider.getLayoutParams();
            LinearLayout.LayoutParams suggestListSectionLP = (LinearLayout.LayoutParams)mSuggestionsSection.getLayoutParams();
            //only the query section gets the top margin; the divider keeps its
            //existing bottom margin
            querySectionLP.setMargins(searchBarLeftMargin, searchBarTopMargin, searchBarRightMargin, 0);
            dividerLP.setMargins(searchBarLeftMargin, 0, searchBarRightMargin, ((MarginLayoutParams) mDivider.getLayoutParams()).bottomMargin);
            suggestListSectionLP.setMargins(searchBarLeftMargin, 0, searchBarRightMargin, 0);
            mQuerySection.setLayoutParams(querySectionLP);
            mDivider.setLayoutParams(dividerLP);
            mSuggestionsSection.setLayoutParams(suggestListSectionLP);
            setSearchHint(a.getString(R.styleable.FloatingSearchView_floatingSearch_searchHint));
            setShowHintWhenNotFocused(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showSearchHintWhenNotFocused, ATTRS_SEARCH_BAR_SHOW_SEARCH_HINT_NOT_FOCUSED_DEFAULT));
            setLeftShowMenu(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showMenuAction, ATTRS_SEARCH_BAR_SHOW_MENU_ACTION_DEFAULT));
            setShowVoiceInput(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showVoiceInput, ATTRS_SEARCH_BAR_SHOW_VOICE_ACTION_DEFAULT));
            setShowSearchKey(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showSearchKey, ATTRS_SEARCH_BAR_SHOW_SEARCH_KEY_DEFAULT));
            setVoiceSearchHint(a.getString(R.styleable.FloatingSearchView_floatingSearch_voiceRecHint));
            setDismissOnOutsideClick(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_dismissOnOutsideTouch, ATTRS_DISMISS_ON_OUTSIDE_TOUCH_DEFAULT));
            setShowOverflowMenu(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showOverFlowMenu, ATTRS_SHOW_OVERFLOW_MENU_DEFAULT));
            if (a.hasValue(R.styleable.FloatingSearchView_floatingSearch_menu)) {
                inflateOverflowMenu(a.getResourceId(R.styleable.FloatingSearchView_floatingSearch_menu, 0));
            }
            setHideOverflowMenuWhenFocused(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_hideOverflowMenuWhenFocused, ATTRS_HIDE_OVERFLOW_MENU_FOCUSED_DEFAULT));
        } finally {
            a.recycle();
        }
    }
private Activity getHostActivity() {
Context context = getContext();
while (context instanceof ContextWrapper) {
if (context instanceof Activity) {
return (Activity)context;
}
context = ((ContextWrapper)context).getBaseContext();
}
return null;
}
    /**
     * Wires up every interactive element of the search bar itself: the window
     * soft-input mode, overflow menu, the combined voice/clear button, the
     * title, the query text watcher, focus/key listeners, and the combined
     * menu/search/back button on the left.
     */
    private void setupQueryBar(){
        if(!isInEditMode() && mHostActivity!=null)
            //pan the window so the bar stays visible above the soft keyboard
            mHostActivity.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN);
        //tapping the overflow icon shows the popup menu
        mOverflowMenu.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                mMenuPopupHelper.show();
            }
        });
        //forward overflow menu selections to the client listener
        mMenuBuilder.setCallback(new MenuBuilder.Callback() {
            @Override
            public boolean onMenuItemSelected(MenuBuilder menu, MenuItem item) {
                if (mOnOverflowMenuItemListener != null)
                    mOnOverflowMenuItemListener.onMenuItemSelected(item);
                //todo check if we should care about this return or not
                return false;
            }
            @Override
            public void onMenuModeChange(MenuBuilder menu) {
            }
        });
        mVoiceInputOrClearButton.setVisibility(mShowVoiceInput ? View.VISIBLE : View.INVISIBLE);
        //one button serves two roles: starts voice input when the query is
        //empty, clears the query otherwise
        mVoiceInputOrClearButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mSearchInput.getText().length() == 0) {
                    startVoiceInput();
                } else {
                    mSearchInput.setText("");
                }
            }
        });
        mSearchBarTitle.setVisibility(GONE);
        mSearchBarTitle.setTextColor(getResources().getColor(R.color.gray_active_icon));
        //swaps the mic/clear icon as the query transitions between empty and
        //non-empty, and notifies the query listener of changes
        mSearchInput.addTextChangedListener(new TextWatcherAdapter() {
            public void onTextChanged(final CharSequence s, int start, int before, int count) {
                if (mSkipTextChangeEvent) {
                    //one-shot suppression used when text is set programmatically
                    mSkipTextChangeEvent = false;
                } else {
                    if (mSearchInput.getText().length() == 0) {
                        if (mShowVoiceInput)
                            changeIcon(mVoiceInputOrClearButton, mIconMic, true);
                        else
                            mVoiceInputOrClearButton.setVisibility(View.INVISIBLE);
                    } else if (mOldQuery.length() == 0) {
                        changeIcon(mVoiceInputOrClearButton, mIconClear, true);
                        mVoiceInputOrClearButton.setVisibility(View.VISIBLE);
                    }
                    if (mQueryListener != null && mIsFocused)
                        mQueryListener.onSearchTextChanged(mOldQuery, mSearchInput.getText().toString());
                    mOldQuery = mSearchInput.getText().toString();
                }
            }
        });
        mSearchInput.setOnFocusChangeListener(new TextView.OnFocusChangeListener() {
            @Override
            public void onFocusChange(View v, boolean hasFocus) {
                if(mSkipQueryFocusChangeEvent){
                    //one-shot suppression used when focus is set programmatically
                    mSkipQueryFocusChangeEvent = false;
                }else {
                    if (hasFocus != mIsFocused)
                        setSearchFocused(hasFocus);
                }
            }
        });
        //the soft-keyboard search key commits the search and drops focus
        mSearchInput.setOnKeyListener(new OnKeyListener() {
            public boolean onKey(View view, int keyCode, KeyEvent keyEvent) {
                if (mShowSearchKey && keyCode == KeyEvent.KEYCODE_ENTER) {
                    setSearchFocused(false);
                    if (mSearchListener != null)
                        mSearchListener.onSearchAction();
                    return true;
                }
                return false;
            }
        });
        refreshLeftIcon();
        //the left button exits search when the input is focused; otherwise it
        //either toggles the menu or starts search, depending on mShowMenuAction
        mMenuSearchOrExitButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mSearchInput.isFocused()) {
                    setSearchFocused(false);
                } else {
                    if (mShowMenuAction) {
                        toggleMenu();
                    } else {
                        setSearchFocused(true);
                    }
                }
            }
        });
    }
private void refreshLeftIcon(){
if(mShowMenuAction)
mMenuSearchOrExitButton.setImageDrawable(mMenuBtnDrawable);
else
mMenuSearchOrExitButton.setImageDrawable(mIconSearch);
}
/**
* Sets the menu button's color.
*
* @param color the color to be applied to the
* left menu button.
*/
public void setLeftMenuIconColor(int color){
mMenuBtnDrawable.setColor(color);
}
/**
* Set if search is enabled.
*
* <p>When enabled, the search
* input will gain focus when clicking on it and action
* items that are associated with search only will become
* visible.</p>
*
* @param enabled True to enable search
*/
public void setSearchEnabled(boolean enabled){
//todo avoid unnecessary work
if(enabled)
showSearchDependentActions();
else
hideSearchDependentActions();
this.mSearchEnabled = enabled;
mSearchInput.setEnabled(enabled);
}
private void showSearchDependentActions(){
ViewCompat.animate(mSearchBarTitle).alpha(0.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationStart(View view) {
mSearchBarTitle.setVisibility(INVISIBLE);
}
}).start();
ViewCompat.animate(mSearchInput).alpha(1.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationStart(View view) {
mSearchInput.setVisibility(VISIBLE);
}
}).start();
ViewCompat.animate(mVoiceInputOrClearButton).alpha(1.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationEnd(View view) {
mVoiceInputOrClearButton.setVisibility(VISIBLE);
}
}).start();
}
private void hideSearchDependentActions(){
ViewCompat.animate(mSearchBarTitle).alpha(1.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationStart(View view) {
mSearchBarTitle.setVisibility(VISIBLE);
}
}).start();
ViewCompat.animate(mSearchInput).alpha(0.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationStart(View view) {
mSearchInput.setVisibility(INVISIBLE);
}
}).start();
ViewCompat.animate(mVoiceInputOrClearButton).alpha(0.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationEnd(View view) {
mVoiceInputOrClearButton.setVisibility(INVISIBLE);
}
}).start();
}
/*
* Sets the padding for the search input TextView. This
* will set the available space for text before the TextView
* needs to scroll to make space for the text.
*/
private void adjustSearchInputPadding(){
int newPaddingEnd = 0;
newPaddingEnd += mVoiceInputOrClearButton.getWidth();
if(mShowOverFlowMenu) {
if(!(mHideOverflowMenuFocused && mSearchInput.isFocused()))
newPaddingEnd += mOverflowMenu.getWidth();
}
mSearchInput.setPadding(0, 0, newPaddingEnd, 0);
mSearchBarTitle.setPadding(0, 0, newPaddingEnd, 0);
}
/**
* Mimics a menu click that opens the menu. Useful when for navigation
* drawers when they open as a result of dragging.
*/
public void openMenu(boolean withAnim) {
openMenu(true, withAnim, false);
}
/**
* Mimics a menu click that closes. Useful when fo navigation
* drawers when they close as a result of selecting and item.
*
* @param withAnim true, will close the menu button with
* the Material animation
*/
public void closeMenu(boolean withAnim){
closeMenu(true, withAnim, false);
}
/**
* Set the visibility of the menu action
* button.
*
* @param show true to make the menu button visible, false
* to make it invisible and place a search icon
* instead.
*/
public void setLeftShowMenu(boolean show){
mShowMenuAction = show;
refreshLeftIcon();
}
/**
* Sets whether the overflow menu icon
* will slide off the search bar when search
* gains focus.
*
* @param hide
*/
public void setHideOverflowMenuWhenFocused(boolean hide){
this.mHideOverflowMenuFocused = hide;
}
/**
* Control whether the overflow menu is
* present or not.
*
* @param show
*/
public void setShowOverflowMenu(boolean show){
this.mShowOverFlowMenu = show;
if(mShowVoiceInput)
if(show)showOverflowMenuWithAnim(false);
else hideOverflowMenu(false);
}
private void showOverflowMenuWithAnim(boolean withAnim){
mOverflowMenu.setClickable(true);
if(withAnim) {
ViewPropertyAnimatorCompatSet hidAnimSet = new ViewPropertyAnimatorCompatSet();
hidAnimSet.playSequentially(ViewCompat.animate(mVoiceInputOrClearButton).translationX(0),
ViewCompat.animate(mOverflowMenu).alpha(1.0f)).setDuration(150).start();
} else{
mOverflowMenu.setAlpha(1.0f);
mVoiceInputOrClearButton.setTranslationX(0);
}
}
private void hideOverflowMenu(boolean withAnim){
//this is needed because we're going to move
//mVoiceInputOrClearButton right into the position of
//mOverflowMenu, and we don't want to receive click events
//from mOverflowMenu.
mOverflowMenu.setClickable(false);
//accounts for anim direction based on the language direction
int deltaX = isRTL() ? -Util.dpToPx(OVERFLOW_ICON_WIDTH_DP)
: Util.dpToPx(OVERFLOW_ICON_WIDTH_DP);
if(withAnim) {
ViewPropertyAnimatorCompatSet hidAnimSet = new ViewPropertyAnimatorCompatSet();
hidAnimSet.playSequentially(ViewCompat.animate(mOverflowMenu).alpha(0.0f),
ViewCompat.animate(mVoiceInputOrClearButton).translationXBy(deltaX)).setDuration(150).start();
}else {
mOverflowMenu.setAlpha(0.0f);
mVoiceInputOrClearButton.setTranslationX(deltaX);
}
}
/**
* Inflates the menu items from
* an xml resource.
*
* @param menuId a menu xml resource reference
*/
public void inflateOverflowMenu(int menuId){
mMenuBuilder.clearAll();
getMenuInflater().inflate(menuId, mMenuBuilder);
}
private MenuInflater getMenuInflater() {
if (mMenuInflater == null) {
mMenuInflater = new SupportMenuInflater(getContext());
}
return mMenuInflater;
}
private void toggleMenu(){
if(mMenuOpen){
closeMenu(true, true, true);
}else{
openMenu(true, true, true);
}
}
private void openMenu(boolean changeMenuIcon, boolean withAnim, boolean notifyListener){
if (mOnMenuClickListener != null && notifyListener)
mOnMenuClickListener.onMenuOpened();
mMenuOpen = true;
if (changeMenuIcon)
openMenuDrawable(mMenuBtnDrawable, withAnim);
}
private void closeMenu(boolean changeMenuIcon, boolean withAnim, boolean notifyListener) {
if (mOnMenuClickListener != null && notifyListener)
mOnMenuClickListener.onMenuClosed();
mMenuOpen = false;
if(changeMenuIcon)
closeMenuDrawable(mMenuBtnDrawable, withAnim);
}
    /**
     * Enables clients to directly manipulate
     * the menu icon's progress.
     *
     * <p>Useful for custom animation/behaviors.</p>
     *
     * @param progress the desired progress of the menu
     *                 icon's rotation. 0.0 == hamburger
     *                 shape, 1.0 == back arrow shape
     */
    public void setMenuIconProgress(float progress){
        mMenuBtnDrawable.setProgress(progress);
        //only the exact endpoint values also toggle the logical open/closed
        //state (without re-animating the icon, which was just set above)
        if(progress == 0)
            closeMenu(false, true, true);
        else if(progress == 1.0)
            openMenu(false);
    }
/**
* Set a hint that will appear in the
* search input. Default hint is R.string.abc_search_hint
* which is "search..." (when device language is set to english)
*
* @param searchHint
*/
public void setSearchHint(String searchHint){
mSearchHint = searchHint != null ? searchHint : getResources().getString(R.string.abc_search_hint);
if(mShowHintNotFocused || mSearchInput.isFocused())
mSearchInput.setHint(mSearchHint);
else
mSearchInput.setHint("");
}
/**
* Control whether the hint will be shown
* when the search is not focused.
*
* @param show true to show hint when search
* is inactive
*/
public void setShowHintWhenNotFocused(boolean show){
mShowHintNotFocused = show;
if(mShowHintNotFocused)
mSearchInput.setHint(mSearchHint);
}
/**
* Sets the title for the search bar.
*
* <p>Note that this is the regular text, not the
* hint. It won't have any effect if called when
* mShowHintInactive is true.</p>
*
* @param title the title to be shown when search
* is not focused
*/
public void setSearchBarTitle(CharSequence title){
mSearchBarTitle.setText(title);
}
/**
* Set the visibility of the voice recognition action
* button.
*
* @param show true to make the voice button visible, false
* to make it invisible.
*/
public void setShowVoiceInput(boolean show){
mShowVoiceInput = show;
mVoiceInputOrClearButton.setVisibility(mShowVoiceInput ? View.VISIBLE : View.GONE);
}
/**
* Sets the request code with which the voice
* recognition intent will be fired.
*
* <p>
* The default request code is 1024. Clients should use
* this methods to provide a unique request code if the
* default code conflicts with one of their codes in order
* to avoid undesired results.
* </p>
*
* @param requestCode
*/
public void setVoiceRecRequestCode(int requestCode){
mVoiceRecRequestCode = requestCode;
}
private void startVoiceInput(){
Intent voiceIntent = createVoiceRecIntent(mHostActivity, mVoiceRecHint);
if(mHostActivity!=null)
mHostActivity.startActivityForResult(voiceIntent, mVoiceRecRequestCode);
}
private Intent createVoiceRecIntent(Activity activity, String hint){
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_PROMPT, hint);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
return intent;
}
/**
* Sets the hit that will appear in the
* voice recognition dialog.
*
* @param hint
*/
public void setVoiceSearchHint(String hint){
mVoiceRecHint = hint != null ? hint : getResources().getString(R.string.abc_search_hint);
}
/**
* Handles voice recognition activity return.
*
* <p>In order for voice rec to work, this must be called from
* the client activity's onActivityResult()</p>
*
* @param requestCode the code with which the voice recognition intent
* was started.
* @param resultCode
* @param data
*/
public void onHostActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == mVoiceRecRequestCode){
if(resultCode == Activity.RESULT_OK){
ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
if (!matches.isEmpty()) {
String newQuery = matches.get(0);
mSearchInput.requestFocus();
mSearchInput.setText(newQuery);
mSearchInput.setSelection(mSearchInput.getText().length());
}
}
}
}
/**
* Sets whether the the button with the search icon
* will appear in the soft-keyboard or not.
*
* <p>Notice that if this is set to false,
* {@link OnSearchListener#onSearchAction()} onSearchAction}, will
* not get called.</p>
*
* @param show to show the search button in
* the soft-keyboard.
*/
public void setShowSearchKey(boolean show){
mShowSearchKey = show;
if(show)
mSearchInput.setImeOptions(EditorInfo.IME_ACTION_SEARCH);
else
mSearchInput.setImeOptions(EditorInfo.IME_ACTION_NONE);
}
/**
* Returns the current query text.
*
* @return the current query
*/
public String getQuery(){
return mSearchInput.getText().toString();
}
/**
* Shows a circular progress on top of the
* menu action button.
*
* <p>Call hidProgress()
* to change back to normal and make the menu
* action visible.</p>
*/
public void showProgress(){
mMenuSearchOrExitButton.setVisibility(View.GONE);
mSearchProgress.setVisibility(View.VISIBLE);
ObjectAnimator fadeInProgress = new ObjectAnimator().ofFloat(mSearchProgress, "alpha", 0.0f, 1.0f);
fadeInProgress.start();
}
/**
* Hides the progress bar after
* a prior call to showProgress()
*/
public void hideProgress() {
mMenuSearchOrExitButton.setVisibility(View.VISIBLE);
mSearchProgress.setVisibility(View.GONE);
ObjectAnimator fadeInExit = new ObjectAnimator().ofFloat(mMenuSearchOrExitButton, "alpha", 0.0f, 1.0f);
fadeInExit.start();
}
    /**
     * Configures the suggestions RecyclerView: a bottom-anchored (reversed)
     * vertical layout, keyboard dismissal on scroll, the adapter's click
     * callbacks, and an upward offset so the section tucks under the search
     * bar's rounded corners.
     */
    private void setupSuggestionSection() {
        //reverseLayout: items are stacked from the bottom of the list upward
        boolean showItemsFromBottom = true;
        LinearLayoutManager layoutManager = new LinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, showItemsFromBottom);
        mSuggestionsList.setLayoutManager(layoutManager);
        //disable item change animations
        mSuggestionsList.setItemAnimator(null);
        //close the soft keyboard as soon as the user scrolls the suggestions
        final GestureDetector gestureDetector = new GestureDetector(getContext(),new GestureDetectorListenerAdapter(){
            @Override
            public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
                if(mHostActivity!=null)
                    Util.closeSoftKeyboard(mHostActivity);
                return false;
            }
        });
        mSuggestionsList.addOnItemTouchListener(new OnItemTouchListenerAdapter() {
            @Override
            public boolean onInterceptTouchEvent(RecyclerView rv, MotionEvent e) {
                //feed touches to the detector without intercepting them
                gestureDetector.onTouchEvent(e);
                return false;
            }
        });
        mSuggestionsAdapter = new SearchSuggestionsAdapter(getContext(), new SearchSuggestionsAdapter.Listener() {
            @Override
            public void onItemSelected(SearchSuggestion item) {
                //selecting a suggestion completes the search session
                setSearchFocused(false);
                if(mSearchListener!=null)
                    mSearchListener.onSuggestionClicked(item);
            }
            @Override
            public void onMoveItemToSearchClicked(SearchSuggestion item) {
                mSearchInput.setText(item.getBody());
                //move cursor to end of text
                mSearchInput.setSelection(mSearchInput.getText().length());
            }
        });
        mSuggestionsList.setAdapter(mSuggestionsAdapter);
        int cardViewBottomPadding = Util.dpToPx(5);
        //move up the suggestions section enough to cover the search bar
        //card's bottom left and right corners
        mSuggestionsSection.setTranslationY(-cardViewBottomPadding);
    }
private void moveSuggestListToInitialPos(){
//move the suggestions list to the collapsed position
//which is translationY of -listHeight
mSuggestionListContainer.setTranslationY(-mSuggestionListContainer.getMeasuredHeight());
}
/**
* Clears the current suggestions and replaces it
* with the provided list of new suggestions.
*
* @param newSearchSuggestions a list containing the new suggestions
*/
public void swapSuggestions(final List<? extends SearchSuggestion> newSearchSuggestions){
Collections.reverse(newSearchSuggestions);
swapSuggestions(newSearchSuggestions, true);
}
    /**
     * Replaces the adapter's data and slides the suggestion list container so
     * that exactly the new items' combined visible height shows (the list is
     * anchored at the bottom and populated bottom-up). The divider is shown
     * only when at least one suggestion is present.
     */
    private void swapSuggestions(final List<? extends SearchSuggestion> newSearchSuggestions, boolean withAnim){
        mDivider.setVisibility(View.GONE);
        //update adapter
        mSuggestionsAdapter.swapData(newSearchSuggestions);
        //todo inspect line
        //this is needed because the list gets populated
        //from bottom up.
        mSuggestionsList.scrollBy(0, -(newSearchSuggestions.size() * getTotalItemsHeight(newSearchSuggestions)));
        //NOTE(review): despite their names, fiveDp is actually 6dp in pixels
        int fiveDp = Util.dpToPx(6);
        int threeDp = Util.dpToPx(3);
        ViewCompat.animate(mSuggestionListContainer).cancel();
        //negative when the items don't fill the container; the offsets below
        //compensate for corner rounding/padding
        float translationY = (-mSuggestionListContainer.getHeight())+getVisibleItemsHeight(newSearchSuggestions);
        //todo refactor go over and make more clear
        final float newTranslationY = translationY<0 ?
                newSearchSuggestions.size()==0 ? translationY : translationY+threeDp
                : -fiveDp;
        if(withAnim) {
            ViewCompat.animate(mSuggestionListContainer).
                    setStartDelay(SUGGESTION_ITEM_ANIM_DURATION).
                    setInterpolator(SUGGEST_ITEM_ADD_ANIM_INTERPOLATOR).
                    setDuration(SUGGEST_ITEM_ADD_ANIM_DURATION).
                    translationY(newTranslationY).
                    setListener(new ViewPropertyAnimatorListenerAdapter() {
                        @Override
                        public void onAnimationCancel(View view) {
                            //snap to the final position if the animation is interrupted
                            mSuggestionListContainer.setTranslationY(newTranslationY);
                        }
                    }).start();
        }else{
            //todo refactor
            //the extra -3*fiveDp is because this will only
            //get called from onRestoreSavedState(), and when it is called,
            //the full height of mSuggestionListContainer is not known.
            //*refactor* as soon as possible to eliminate confusion.
            float transY = translationY<0 ?
                    newSearchSuggestions.size()==0 ? translationY : translationY+threeDp-3*fiveDp
                    : -fiveDp;
            mSuggestionListContainer.setTranslationY(transY);
        }
        if(newSearchSuggestions.size()>0)
            mDivider.setVisibility(View.VISIBLE);
        else
            mDivider.setVisibility(View.GONE);
    }
//returns the height that a given suggestion list's items
//will take up.
private int getVisibleItemsHeight(List<? extends SearchSuggestion> suggestions){
int visibleItemsHeight = 0;
for(SearchSuggestion suggestion: suggestions) {
visibleItemsHeight += getSuggestionItemHeight(suggestion);
//if the current total is more than the list container's height, we
//don't care about the rest of the items' heights because they won't be
//visible.
if(visibleItemsHeight>mSuggestionListContainer.getHeight())
break;
}
return visibleItemsHeight;
}
private int getTotalItemsHeight(List<? extends SearchSuggestion> suggestions){
int totalItemHeight = 0;
for(SearchSuggestion suggestion: suggestions)
totalItemHeight += getSuggestionItemHeight(suggestion);
return totalItemHeight;
}
//returns the height of a given suggestion item based on it's text length
private int getSuggestionItemHeight(SearchSuggestion suggestion) {
TextPaint paint = new TextPaint(Paint.ANTI_ALIAS_FLAG | Paint.LINEAR_TEXT_FLAG);
paint.setTextSize(Util.spToPx(18));
paint.setTypeface(Typeface.DEFAULT);
//the width taken up by the left and right icon and the text's left padding
//todo use res dimensions for measuring
int leftRightMarginsWidth = Util.dpToPx(124);
StaticLayout textLayout = new StaticLayout( suggestion.getBody(), paint, mSuggestionsList.getWidth()-leftRightMarginsWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 1.0f, false);
int heightPlusPadding = textLayout.getHeight()+Util.dpToPx(8);
//min height is the dictated by the left and right icons' height
//todo use res dimensions for measuring
int minHeight = Util.dpToPx(48);
int height = heightPlusPadding >= minHeight ? heightPlusPadding : minHeight;
return height;
}
    /**
     * Collapses the suggestions list and
     * then clears its suggestion items.
     */
    public void clearSuggestions(){
        clearSuggestions(null);
    }
    //Callback invoked once the suggestion data set has been cleared.
    private interface OnSuggestionsClearListener{
        void onCleared();
    }
    //Collapses the list first, then clears the adapter so the removal is not
    //visible; no-ops when a collapse is already in flight.
    private void clearSuggestions(final OnSuggestionsClearListener listener) {
        if(!isCollapsing) {
            collapseSuggestionsSection(new OnSuggestionsCollapsedListener() {
                @Override
                public void onCollapsed() {
                    mSuggestionsAdapter.clearDataSet();
                    if (listener != null)
                        listener.onCleared();
                    mDivider.setVisibility(GONE);
                }
            });
        }
    }
    //Callback invoked once the suggestions section has finished collapsing.
    private interface OnSuggestionsCollapsedListener{
        void onCollapsed();
    }
    //Slides the suggestion list container fully up (its height plus 3dp) and
    //reports completion; isCollapsing guards against overlapping collapses.
    private void collapseSuggestionsSection(final OnSuggestionsCollapsedListener listener){
        isCollapsing = true;
        final int destTranslationY = -(mSuggestionListContainer.getHeight()+Util.dpToPx(3));
        ViewCompat.animate(mSuggestionListContainer).
                translationY(destTranslationY).
                setDuration(SUGGEST_LIST_COLLAPSE_ANIM_DURATION).
                setListener(new ViewPropertyAnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(View view) {
                        if (listener != null)
                            listener.onCollapsed();
                        isCollapsing = false;
                    }
                    @Override
                    public void onAnimationCancel(View view) {
                        //snap to the end position if the animation is interrupted
                        mSuggestionListContainer.setTranslationY(destTranslationY);
                    }
                }).start();
    }
    //Programmatically removes (logical) focus from the search input.
    public void clearSearchFocus(){
        setSearchFocused(false);
    }
    //Whether the search bar currently holds logical focus.
    public boolean isSearchBarFocused(){
        return mIsFocused;
    }
    /**
     * Moves the whole view between its focused and unfocused states: left
     * icon morph, suggestion section visibility, background dim, soft
     * keyboard, overflow menu slide, input padding, and listener notification.
     * Statement order matters here — padding is adjusted only after the
     * overflow menu's visibility change.
     */
    private void setSearchFocused(boolean focused){
        this.mIsFocused = focused;
        if(focused){
            if(mShowMenuAction && !mMenuOpen)
                openMenuDrawable(mMenuBtnDrawable, true);
            else if(!mShowMenuAction)
                changeIcon(mMenuSearchOrExitButton, mIconBackArrow, true);
            if(mMenuOpen)
                closeMenu(false, true, true);
            moveSuggestListToInitialPos();
            mSuggestionsSection.setVisibility(VISIBLE);
            fadeInBackground();
            mSearchInput.requestFocus();
            if(mShowOverFlowMenu && mHideOverflowMenuFocused)
                hideOverflowMenu(true);
            adjustSearchInputPadding();
            Util.showSoftKeyboard(getContext(), mSearchInput);
            if(mFocusChangeListener!=null)
                mFocusChangeListener.onFocus();
        }else{
            if(mShowMenuAction)
                closeMenuDrawable(mMenuBtnDrawable, true);
            else
                changeIcon(mMenuSearchOrExitButton, mIconSearch, true);
            clearSuggestions(new OnSuggestionsClearListener() {
                @Override
                public void onCleared() {
                    mSuggestionsSection.setVisibility(View.INVISIBLE);
                }
            });
            fadeOutBackground();
            //move view focus back to the bar itself so the EditText loses it
            findViewById(R.id.search_bar).requestFocus();
            if(mHostActivity!=null)
                Util.closeSoftKeyboard(mHostActivity);
            if(mShowVoiceInput && !mHideOverflowMenuFocused)
                changeIcon(mVoiceInputOrClearButton, mIconMic, true);
            if(mSearchInput.length()!=0)
                mSearchInput.setText("");
            if(mShowOverFlowMenu && mHideOverflowMenuFocused)
                showOverflowMenuWithAnim(true);
            adjustSearchInputPadding();
            if(mFocusChangeListener!=null) {
                mFocusChangeListener.onFocusCleared();
            }
        }
    }
private void changeIcon(ImageView imageView, Drawable newIcon, boolean withAnim) {
imageView.setImageDrawable(newIcon);
if(withAnim) {
ObjectAnimator fadeInVoiceInputOrClear = new ObjectAnimator().ofFloat(imageView, "alpha", 0.0f, 1.0f);
fadeInVoiceInputOrClear.start();
}else{
imageView.setAlpha(1.0f);
}
}
    /**
     * Sets the listener that will listen for query
     * changes as they are being typed.
     *
     * @param listener listener for query changes
     */
    public void setOnQueryChangeListener(OnQueryChangeListener listener){
        this.mQueryListener = listener;
    }
    /**
     * Sets the listener that will be called when
     * an action that completes the current search
     * session has occurred and the search lost focus.
     *
     * <p>When called, a client would ideally grab the
     * search or suggestion query from the callback parameter or
     * from {@link #getQuery() getquery} and perform the necessary
     * query against its data source.</p>
     *
     * @param listener listener for query completion
     */
    public void setOnSearchListener(OnSearchListener listener) {
        this.mSearchListener = listener;
    }
    /**
     * Sets the listener that will be called when the focus
     * of the search has changed.
     *
     * @param listener listener for search focus changes
     */
    public void setOnFocusChangeListener(OnFocusChangeListener listener){
        this.mFocusChangeListener = listener;
    }
    /**
     * Sets the listener that will be called when the
     * left/start menu (or navigation menu) is clicked.
     *
     * <p>Note that this is different from the overflow menu
     * that has a separate listener.</p>
     *
     * @param listener listener for open/close events of the left menu
     */
    public void setOnLeftMenuClickListener(OnLeftMenuClickListener listener){
        this.mOnMenuClickListener = listener;
    }
    /**
     * Sets the listener that will be called when
     * an item in the overflow menu is clicked.
     *
     * @param listener listener to listen to menu item clicks
     */
    public void setOnMenuItemClickListener(OnMenuItemClickListener listener){
        this.mOnOverflowMenuItemListener = listener;
    }
private void openMenuDrawable(final DrawerArrowDrawable drawerArrowDrawable, boolean withAnim){
if(withAnim){
ValueAnimator anim = ValueAnimator.ofFloat(0.0f, 1.0f);
anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
float value = (Float) animation.getAnimatedValue();
drawerArrowDrawable.setProgress(value);
}
});
anim.setDuration(MENU_ICON_ANIM_DURATION);
anim.start();
}else{
drawerArrowDrawable.setProgress(1.0f);
}
}
private void closeMenuDrawable(final DrawerArrowDrawable drawerArrowDrawable, boolean withAnim) {
if(withAnim) {
ValueAnimator anim = ValueAnimator.ofFloat(1.0f, 0.0f);
anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
float value = (Float) animation.getAnimatedValue();
drawerArrowDrawable.setProgress(value);
}
});
anim.setDuration(MENU_ICON_ANIM_DURATION);
anim.start();
}else{
drawerArrowDrawable.setProgress(0.0f);
}
}
private void fadeOutBackground(){
ValueAnimator anim = ValueAnimator.ofInt(BACKGROUND_DRAWABLE_ALPHA_SEARCH_ACTIVE, BACKGROUND_DRAWABLE_ALPHA_SEARCH_INACTIVE);
anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
int value = (Integer) animation.getAnimatedValue();
mBackgroundDrawable.setAlpha(value);
}
});
anim.setDuration(BACKGROUND_FADE__ANIM_DURATION);
anim.start();
}
private void fadeInBackground(){
ValueAnimator anim = ValueAnimator.ofInt(BACKGROUND_DRAWABLE_ALPHA_SEARCH_INACTIVE, BACKGROUND_DRAWABLE_ALPHA_SEARCH_ACTIVE);
anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
int value = (Integer) animation.getAnimatedValue();
mBackgroundDrawable.setAlpha(value);
}
});
anim.setDuration(BACKGROUND_FADE__ANIM_DURATION);
anim.start();
}
    /**
     * Set whether a touch outside of the
     * search bar's bounds will cause the search bar to
     * lose focus.
     *
     * @param enable true to dismiss on outside touch, false otherwise.
     */
    public void setDismissOnOutsideClick(boolean enable){
        mDismissOnOutsideTouch = enable;
        //the suggestions section covers the area under the search bar, so a
        //touch on it counts as an "outside" touch
        mSuggestionsSection.setOnTouchListener(new OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                //todo check if this is called twice
                if (mDismissOnOutsideTouch && mIsFocused)
                    setSearchFocused(false);
                //always consume so touches don't fall through to views below
                return true;
            }
        });
    }
private boolean isRTL(){
Configuration config = getResources().getConfiguration();
return ViewCompat.getLayoutDirection(this) == ViewCompat.LAYOUT_DIRECTION_RTL;
}
@Override
public Parcelable onSaveInstanceState() {
Parcelable superState = super.onSaveInstanceState();
return new SavedState(superState, mIsFocused, mSuggestionsAdapter.getDataSet(), getQuery());
}
@Override
public void onRestoreInstanceState(Parcelable state) {
final SavedState savedState = (SavedState) state;
super.onRestoreInstanceState(savedState.getSuperState());
if(savedState.isFocused) {
mBackgroundDrawable.setAlpha(BACKGROUND_DRAWABLE_ALPHA_SEARCH_ACTIVE);
mSkipTextChangeEvent = savedState.isFocused;
mSkipQueryFocusChangeEvent = savedState.isFocused;
mIsFocused = savedState.isFocused;
mSuggestionsSection.setVisibility(VISIBLE);
ViewTreeObserver vto = mSuggestionListContainer.getViewTreeObserver();
vto.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
if (Build.VERSION.SDK_INT < 16) {
mSuggestionListContainer.getViewTreeObserver().removeGlobalOnLayoutListener(this);
} else {
mSuggestionListContainer.getViewTreeObserver().removeOnGlobalLayoutListener(this);
}
swapSuggestions(savedState.suggestions, false);
}
});
if (savedState.mQuery.length() == 0) {
if (mShowVoiceInput)
changeIcon(mVoiceInputOrClearButton, mIconMic, false);
else
mVoiceInputOrClearButton.setVisibility(View.INVISIBLE);
} else if (mOldQuery.length() == 0) {
changeIcon(mVoiceInputOrClearButton, mIconClear, false);
mVoiceInputOrClearButton.setVisibility(View.VISIBLE);
}
if(mShowOverFlowMenu && mHideOverflowMenuFocused)
hideOverflowMenu(false);
if(mShowMenuAction && !mMenuOpen)
openMenuDrawable(mMenuBtnDrawable, false);
else if(!mShowMenuAction)
changeIcon(mMenuSearchOrExitButton, mIconBackArrow, false);
adjustSearchInputPadding();
Util.showSoftKeyboard(getContext(), mSearchInput);
}
}
/**
 * Saved state for the search view: focus flag, current query and — in-memory
 * only — the last suggestion list.
 *
 * <p>NOTE(review): the original implementation always wrote the suggestion list
 * to the Parcel but could never read it back: {@code SUGGEST_CREATOR} is an
 * instance field that is always {@code null} when the Parcel constructor runs,
 * so {@code readTypedList} was skipped, leaving the Parcel misaligned and
 * corrupting the subsequent {@code mQuery} read. The list is therefore no
 * longer written to the Parcel; suggestions survive only when this SavedState
 * instance itself is kept in memory (which is how onRestoreInstanceState uses
 * it via {@code savedState.suggestions}).</p>
 */
static class SavedState extends BaseSavedState {
    List<? extends SearchSuggestion> suggestions = new ArrayList<>();
    // Kept for source compatibility; no longer used for parceling (see class doc).
    Creator SUGGEST_CREATOR;
    boolean isFocused;
    String mQuery;

    SavedState(Parcelable superState, boolean isFocused, List<? extends SearchSuggestion> suggestions, String query){
        super(superState);
        this.isFocused = isFocused;
        this.suggestions = suggestions;
        if(!suggestions.isEmpty())
            SUGGEST_CREATOR = suggestions.get(0).getCreator();
        this.mQuery = query;
    }

    private SavedState(Parcel in) {
        super(in);
        isFocused = (in.readInt() != 0);
        // The suggestion list is intentionally not read: it is never written
        // (see writeToParcel), keeping the read/write order symmetric.
        mQuery = in.readString();
    }

    @Override
    public void writeToParcel(Parcel out, int flags) {
        super.writeToParcel(out, flags);
        out.writeInt(isFocused ? 1 : 0);
        out.writeString(mQuery);
    }

    public static final Creator<SavedState> CREATOR
            = new Creator<SavedState>() {
        public SavedState createFromParcel(Parcel in) {
            return new SavedState(in);
        }

        public SavedState[] newArray(int size) {
            return new SavedState[size];
        }
    };
}
@Override
protected void onDetachedFromWindow() {
super.onDetachedFromWindow();
// Cancel any in-flight suggestion-list animation so its listener does not
// keep a reference to this (detached) view hierarchy — prevents leaks.
ViewCompat.animate(mSuggestionListContainer).cancel();
}
}
|
library/src/main/java/com/arlib/floatingsearchview/FloatingSearchView.java
|
/*
* Copyright (C) 2015 Arlib
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arlib.floatingsearchview;
import android.animation.ObjectAnimator;
import android.animation.ValueAnimator;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.speech.RecognizerIntent;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPropertyAnimatorListenerAdapter;
import android.support.v7.graphics.drawable.DrawerArrowDrawable;
import android.support.v7.view.SupportMenuInflater;
import android.support.v7.view.ViewPropertyAnimatorCompatSet;
import android.support.v7.view.menu.MenuBuilder;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.Layout;
import android.text.StaticLayout;
import android.text.TextPaint;
import android.util.AttributeSet;
import android.util.Log;
import android.util.TypedValue;
import android.view.GestureDetector;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.WindowManager;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import android.view.inputmethod.EditorInfo;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.arlib.floatingsearchview.suggestions.SearchSuggestionsAdapter;
import com.arlib.floatingsearchview.suggestions.model.SearchSuggestion;
import com.arlib.floatingsearchview.util.MenuPopupHelper;
import com.arlib.floatingsearchview.util.Util;
import com.arlib.floatingsearchview.util.adapter.GestureDetectorListenerAdapter;
import com.arlib.floatingsearchview.util.adapter.OnItemTouchListenerAdapter;
import com.arlib.floatingsearchview.util.adapter.TextWatcherAdapter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A search UI widget that implements a floating search box also called persistent
* search.
*/
public class FloatingSearchView extends FrameLayout {
// Log tag for this widget.
private static final String TAG = "FloatingSearchView";
// Alpha of the dimmed background drawable when search is active / inactive.
private final int BACKGROUND_DRAWABLE_ALPHA_SEARCH_ACTIVE = 150;
private final int BACKGROUND_DRAWABLE_ALPHA_SEARCH_INACTIVE = 0;
// Animation durations (ms).
private final int MENU_ICON_ANIM_DURATION = 250;
private final int BACKGROUND_FADE__ANIM_DURATION = 250;
// Defaults for the custom XML attributes (see applyXmlAttributes()).
private final int ATTRS_SEARCH_BAR_MARGIN_DEFAULT = 0;
private final boolean ATTRS_SEARCH_BAR_SHOW_MENU_ACTION_DEFAULT = true;
private final boolean ATTRS_DISMISS_ON_OUTSIDE_TOUCH_DEFAULT = false;
private final boolean ATTRS_SEARCH_BAR_SHOW_VOICE_ACTION_DEFAULT = false;
private final boolean ATTRS_SEARCH_BAR_SHOW_SEARCH_KEY_DEFAULT = true;
private final boolean ATTRS_SEARCH_BAR_SHOW_SEARCH_HINT_NOT_FOCUSED_DEFAULT = true;
private final boolean ATTRS_HIDE_OVERFLOW_MENU_FOCUSED_DEFAULT = true;
private final boolean ATTRS_SHOW_OVERFLOW_MENU_DEFAULT = true;
// Suggestion-list animation tuning.
private final int SUGGEST_LIST_COLLAPSE_ANIM_DURATION = 200;
private final Interpolator SUGGEST_LIST_COLLAPSE_ANIM_INTERPOLATOR = new LinearInterpolator();
private final int SUGGEST_ITEM_ADD_ANIM_DURATION = 250;
private final Interpolator SUGGEST_ITEM_ADD_ANIM_INTERPOLATOR = new LinearInterpolator();
// Voice recognition request code default and item sizing (dp).
private final int VOICE_REC_DEFAULT_REQUEST_CODE = 1024;
private final int SUGGESTION_ITEM_HEIGHT_DP = 48;
private final int SUGGESTION_ITEM_ANIM_DURATION = 120;
private final int OVERFLOW_ICON_WIDTH_DP = 36;
// --- query bar views and state ---
private Drawable mBackgroundDrawable;
private boolean mDismissOnOutsideTouch = true;
private View mQuerySection;
private ImageView mVoiceInputOrClearButton;
private EditText mSearchInput;
private boolean mShowSearchKey;
private boolean mIsFocused;
private TextView mSearchBarTitle;
private OnQueryChangeListener mQueryListener;
private ProgressBar mSearchProgress;
private ImageView mMenuSearchOrExitButton;
private OnLeftMenuClickListener mOnMenuClickListener;
private DrawerArrowDrawable mMenuBtnDrawable;
// Tinted icon drawables shared by the action buttons (see initDrawables()).
private Drawable mIconClear;
private Drawable mIconMic;
private Drawable mIconOverflowMenu;
private Drawable mIconBackArrow;
private Drawable mIconSearch;
private OnFocusChangeListener mFocusChangeListener;
private boolean mShowMenuAction;
// Previous query text, used to report old/new pairs to mQueryListener.
private String mOldQuery = "";
private OnSearchListener mSearchListener;
private int mVoiceRecRequestCode = VOICE_REC_DEFAULT_REQUEST_CODE;
private String mVoiceRecHint;
private boolean mShowVoiceInput;
private boolean mShowHintNotFocused;
private String mSearchHint;
private boolean mMenuOpen = false;
private boolean mIsActiveOnClick;
// --- overflow menu ---
private ImageView mOverflowMenu;
private MenuBuilder mMenuBuilder;
private MenuPopupHelper mMenuPopupHelper;
private SupportMenuInflater mMenuInflater;
private OnMenuItemClickListener mOnOverflowMenuItemListener;
private boolean mHideOverflowMenuFocused;
private boolean mShowOverFlowMenu;
private boolean mSearchEnabled;
// Flags used to suppress listener callbacks during programmatic changes.
private boolean mSkipQueryFocusChangeEvent;
private boolean mSkipTextChangeEvent;
// --- suggestions section ---
private View mDivider;
private RelativeLayout mSuggestionsSection;
private View mSuggestionListContainer;
private RecyclerView mSuggestionsList;
private SearchSuggestionsAdapter mSuggestionsAdapter;
private boolean isCollapsing = false;
/**
 * Interface for implementing a callback to be
 * invoked when the left menu (navigation menu) is
 * clicked.
 */
public interface OnLeftMenuClickListener{
/**
 * Called when the menu button was
 * clicked and the menu's state is now opened.
 */
void onMenuOpened();
/**
 * Called when the back button was
 * clicked and the menu's state is now closed.
 */
void onMenuClosed();
}
/**
 * Interface for implementing a listener to listen
 * to when the current search has completed.
 */
public interface OnSearchListener {
/**
 * Called when a suggestion was clicked indicating
 * that the current search has completed.
 *
 * @param searchSuggestion the suggestion that was selected
 */
void onSuggestionClicked(SearchSuggestion searchSuggestion);
/**
 * Called when the current search has completed
 * as a result of pressing search key in the keyboard.
 */
void onSearchAction();
}
/**
 * Interface for implementing a listener to listen
 * when a menu item in the overflow menu has been selected.
 */
public interface OnMenuItemClickListener{
/**
 * Called when a menu item has been
 * selected.
 *
 * @param item the selected menu item.
 */
void onMenuItemSelected(MenuItem item);
}
/**
 * Interface for implementing a listener to listen
 * to for state changes in the query text.
 */
public interface OnQueryChangeListener{
/**
 * Called when the query has changed. It will
 * be invoked when one or more characters in the
 * query was changed.
 *
 * @param oldQuery the previous query
 * @param newQuery the new query
 */
void onSearchTextChanged(String oldQuery, String newQuery);
}
/**
 * Interface for implementing a listener to listen
 * to for focus state changes.
 */
public interface OnFocusChangeListener{
/**
 * Called when the search bar has gained focus
 * and listeners are now active.
 */
void onFocus();
/**
 * Called when the search bar has lost focus
 * and listeners are no longer active.
 */
void onFocusCleared();
}
/**
 * Creates the view programmatically with default attributes.
 *
 * @param context the host context; cast to Activity internally, so it must
 *                be an Activity context
 */
public FloatingSearchView(Context context) {
this(context, null);
}
/**
 * Creates the view from XML.
 *
 * @param context the host context; cast to Activity internally, so it must
 *                be an Activity context
 * @param attrs   XML attributes, may be null
 */
public FloatingSearchView(Context context, AttributeSet attrs){
super(context, attrs);
init(attrs);
}
/**
 * Inflates the widget layout and binds all child views and menu helpers,
 * then applies any XML attributes via setupViews().
 *
 * @param attrs XML attributes, may be null when constructed from code
 */
private void init(AttributeSet attrs){
    // The static View.inflate() call below performs the inflation; the
    // explicit LayoutInflater lookup in the original was dead code (unused
    // local) and has been removed.
    inflate(getContext(), R.layout.floating_search_layout, this);
    mBackgroundDrawable = new ColorDrawable(Color.BLACK);
    mQuerySection = findViewById(R.id.search_query_section);
    mVoiceInputOrClearButton = (ImageView)findViewById(R.id.search_bar_mic_or_ex);
    mSearchInput = (EditText)findViewById(R.id.search_bar_text);
    mSearchBarTitle = (TextView)findViewById(R.id.search_bar_title);
    mMenuSearchOrExitButton = (ImageView)findViewById(R.id.search_bar_exit);
    mSearchProgress = (ProgressBar)findViewById(R.id.search_bar_search_progress);
    mShowMenuAction = true;
    mOverflowMenu = (ImageView)findViewById(R.id.search_bar_overflow_menu);
    mMenuBuilder = new MenuBuilder(getContext());
    mMenuPopupHelper = new MenuPopupHelper(getContext(), mMenuBuilder, mOverflowMenu);
    initDrawables();
    mVoiceInputOrClearButton.setImageDrawable(mIconMic);
    mOverflowMenu.setImageDrawable(mIconOverflowMenu);
    mDivider = findViewById(R.id.divider);
    mSuggestionsSection = (RelativeLayout)findViewById(R.id.search_suggestions_section);
    mSuggestionListContainer = findViewById(R.id.suggestions_list_container);
    mSuggestionsList = (RecyclerView)findViewById(R.id.suggestions_list);
    setupViews(attrs);
}
/**
 * Loads and tint-wraps every icon drawable used by the search bar, then
 * applies the default active-icon color.
 */
private void initDrawables(){
    mMenuBtnDrawable = new DrawerArrowDrawable(getContext());
    mIconClear = wrappedDrawable(R.drawable.ic_clear_black_24dp);
    mIconMic = wrappedDrawable(R.drawable.ic_mic_black_24dp);
    mIconOverflowMenu = wrappedDrawable(R.drawable.ic_more_vert_black_24dp);
    mIconBackArrow = wrappedDrawable(R.drawable.ic_arrow_back_black_24dp);
    mIconSearch = wrappedDrawable(R.drawable.ic_search_black_24dp);
    setIconsColor(getResources().getColor(R.color.gray_active_icon));
}

// Loads a drawable resource and wraps it with DrawableCompat so it can be
// tinted consistently on all API levels.
private Drawable wrappedDrawable(int resId){
    return DrawableCompat.wrap(getResources().getDrawable(resId));
}
/**
 * Applies a single tint color to the menu drawable and every icon drawable.
 *
 * @param color the ARGB tint color
 */
private void setIconsColor(int color){
    mMenuBtnDrawable.setColor(color);
    Drawable[] icons = {mIconClear, mIconMic, mIconOverflowMenu, mIconBackArrow, mIconSearch};
    for (Drawable icon : icons) {
        DrawableCompat.setTint(icon, color);
    }
}
// True until the first layout pass has run; used to size the suggestions
// section exactly once.
private boolean mIsInitialLayout = true;
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if(mIsInitialLayout) {
int addedHeight = Util.dpToPx(5*3);
//we need to add 5dp to the mSuggestionsSection because we are
//going to move it up by 5dp in order to cover the search bar's
//rounded corners. We also need to add an additional 10dp to
//mSuggestionsSection in order to hide mSuggestionListContainer
//rounded corners and top/bottom padding.
mSuggestionsSection.getLayoutParams().height = mSuggestionsSection.getMeasuredHeight()
+addedHeight;
mIsInitialLayout = false;
}
//todo check if this is safe here
adjustSearchInputPadding();
//pass on the layout
super.onLayout(changed, l, t, r, b);
}
/**
 * Applies XML attributes (if any), installs the dim background drawable and
 * wires up the query bar and — outside the layout editor — the suggestions
 * section.
 *
 * @param attrs XML attributes, may be null
 */
private void setupViews(AttributeSet attrs){
    if(attrs!=null)
        applyXmlAttributes(attrs);
    mBackgroundDrawable.setAlpha(BACKGROUND_DRAWABLE_ALPHA_SEARCH_INACTIVE);
    // setBackgroundDrawable() was replaced by setBackground() in Jelly Bean.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        setBackground(mBackgroundDrawable);
    } else {
        setBackgroundDrawable(mBackgroundDrawable);
    }
    setupQueryBar();
    if(!isInEditMode())
        setupSuggestionSection();
}
/*
 * Reads the widget's custom XML attributes and applies them to the query
 * section, divider and suggestions section.
 */
private void applyXmlAttributes(AttributeSet attrs){
TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.FloatingSearchView);
try {
// NOTE(review): this hard-coded default is immediately superseded by the
// attribute-driven setDismissOnOutsideClick(...) call further down, whose
// default is false — confirm which default is actually intended.
setDismissOnOutsideClick(true);
int searchBarWidth = a.getDimensionPixelSize(R.styleable.FloatingSearchView_floatingSearch_searchBarWidth, ViewGroup.LayoutParams.MATCH_PARENT);
mQuerySection.getLayoutParams().width = searchBarWidth;
mDivider.getLayoutParams().width = searchBarWidth;
mSuggestionListContainer.getLayoutParams().width = searchBarWidth;
int searchBarLeftMargin = a.getDimensionPixelSize(R.styleable.FloatingSearchView_floatingSearch_searchBarMarginLeft, ATTRS_SEARCH_BAR_MARGIN_DEFAULT);
int searchBarTopMargin = a.getDimensionPixelSize(R.styleable.FloatingSearchView_floatingSearch_searchBarMarginTop, ATTRS_SEARCH_BAR_MARGIN_DEFAULT);
int searchBarRightMargin = a.getDimensionPixelSize(R.styleable.FloatingSearchView_floatingSearch_searchBarMarginRight, ATTRS_SEARCH_BAR_MARGIN_DEFAULT);
LayoutParams querySectionLP = (LayoutParams)mQuerySection.getLayoutParams();
LayoutParams dividerLP = (LayoutParams)mDivider.getLayoutParams();
LinearLayout.LayoutParams suggestListSectionLP = (LinearLayout.LayoutParams)mSuggestionsSection.getLayoutParams();
// The three sections share the same horizontal margins so they line up.
querySectionLP.setMargins(searchBarLeftMargin, searchBarTopMargin, searchBarRightMargin, 0);
dividerLP.setMargins(searchBarLeftMargin, 0, searchBarRightMargin, ((MarginLayoutParams) mDivider.getLayoutParams()).bottomMargin);
suggestListSectionLP.setMargins(searchBarLeftMargin, 0, searchBarRightMargin, 0);
mQuerySection.setLayoutParams(querySectionLP);
mDivider.setLayoutParams(dividerLP);
mSuggestionsSection.setLayoutParams(suggestListSectionLP);
setSearchHint(a.getString(R.styleable.FloatingSearchView_floatingSearch_searchHint));
setShowHintWhenNotFocused(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showSearchHintWhenNotFocused, ATTRS_SEARCH_BAR_SHOW_SEARCH_HINT_NOT_FOCUSED_DEFAULT));
setLeftShowMenu(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showMenuAction, ATTRS_SEARCH_BAR_SHOW_MENU_ACTION_DEFAULT));
setShowVoiceInput(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showVoiceInput, ATTRS_SEARCH_BAR_SHOW_VOICE_ACTION_DEFAULT));
setShowSearchKey(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showSearchKey, ATTRS_SEARCH_BAR_SHOW_SEARCH_KEY_DEFAULT));
setVoiceSearchHint(a.getString(R.styleable.FloatingSearchView_floatingSearch_voiceRecHint));
setDismissOnOutsideClick(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_dismissOnOutsideTouch, ATTRS_DISMISS_ON_OUTSIDE_TOUCH_DEFAULT));
setShowOverflowMenu(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_showOverFlowMenu, ATTRS_SHOW_OVERFLOW_MENU_DEFAULT));
if (a.hasValue(R.styleable.FloatingSearchView_floatingSearch_menu)) {
inflateOverflowMenu(a.getResourceId(R.styleable.FloatingSearchView_floatingSearch_menu, 0));
}
setHideOverflowMenuWhenFocused(a.getBoolean(R.styleable.FloatingSearchView_floatingSearch_hideOverflowMenuWhenFocused, ATTRS_HIDE_OVERFLOW_MENU_FOCUSED_DEFAULT));
} finally {
a.recycle();
}
}
/*
 * Wires up all listeners on the query bar: overflow menu, voice/clear
 * button, text, focus and key handling on the input, and the left
 * menu/search/exit button. Note the listener registration order matters:
 * refreshLeftIcon() must run after mShowMenuAction has been set.
 */
private void setupQueryBar(){
// ADJUST_PAN keeps the search bar visible when the keyboard opens.
if(!isInEditMode())
((Activity) getContext()).getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_PAN);
mOverflowMenu.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mMenuPopupHelper.show();
}
});
mMenuBuilder.setCallback(new MenuBuilder.Callback() {
@Override
public boolean onMenuItemSelected(MenuBuilder menu, MenuItem item) {
if (mOnOverflowMenuItemListener != null)
mOnOverflowMenuItemListener.onMenuItemSelected(item);
//todo check if we should care about this return or not
return false;
}
@Override
public void onMenuModeChange(MenuBuilder menu) {
}
});
mVoiceInputOrClearButton.setVisibility(mShowVoiceInput ? View.VISIBLE : View.INVISIBLE);
// Same button doubles as mic (empty query) and clear (non-empty query).
mVoiceInputOrClearButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (mSearchInput.getText().length() == 0) {
startVoiceInput();
} else {
mSearchInput.setText("");
}
}
});
mSearchBarTitle.setVisibility(GONE);
mSearchBarTitle.setTextColor(getResources().getColor(R.color.gray_active_icon));
mSearchInput.addTextChangedListener(new TextWatcherAdapter() {
public void onTextChanged(final CharSequence s, int start, int before, int count) {
// mSkipTextChangeEvent suppresses one callback after programmatic changes.
if (mSkipTextChangeEvent) {
mSkipTextChangeEvent = false;
} else {
// Swap the right-hand icon between mic and clear as the query
// transitions between empty and non-empty.
if (mSearchInput.getText().length() == 0) {
if (mShowVoiceInput)
changeIcon(mVoiceInputOrClearButton, mIconMic, true);
else
mVoiceInputOrClearButton.setVisibility(View.INVISIBLE);
} else if (mOldQuery.length() == 0) {
changeIcon(mVoiceInputOrClearButton, mIconClear, true);
mVoiceInputOrClearButton.setVisibility(View.VISIBLE);
}
if (mQueryListener != null && mIsFocused)
mQueryListener.onSearchTextChanged(mOldQuery, mSearchInput.getText().toString());
mOldQuery = mSearchInput.getText().toString();
}
}
});
mSearchInput.setOnFocusChangeListener(new TextView.OnFocusChangeListener() {
@Override
public void onFocusChange(View v, boolean hasFocus) {
if(mSkipQueryFocusChangeEvent){
mSkipQueryFocusChangeEvent = false;
}else {
if (hasFocus != mIsFocused)
setSearchFocused(hasFocus);
}
}
});
mSearchInput.setOnKeyListener(new OnKeyListener() {
public boolean onKey(View view, int keyCode, KeyEvent keyEvent) {
if (mShowSearchKey && keyCode == KeyEvent.KEYCODE_ENTER) {
setSearchFocused(false);
if (mSearchListener != null)
mSearchListener.onSearchAction();
return true;
}
return false;
}
});
refreshLeftIcon();
// Left button: exits search when focused; otherwise toggles the menu or
// focuses search depending on mShowMenuAction.
mMenuSearchOrExitButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (mSearchInput.isFocused()) {
setSearchFocused(false);
} else {
if (mShowMenuAction) {
toggleMenu();
} else {
setSearchFocused(true);
}
}
}
});
}
// Shows the hamburger drawable when the left action is the menu, otherwise
// the plain search icon.
private void refreshLeftIcon(){
    mMenuSearchOrExitButton.setImageDrawable(mShowMenuAction ? mMenuBtnDrawable : mIconSearch);
}
/**
 * Sets the menu button's color.
 *
 * @param color the ARGB color to be applied to the
 *              left menu button.
 */
public void setLeftMenuIconColor(int color){
mMenuBtnDrawable.setColor(color);
}
/**
 * Set if search is enabled.
 *
 * <p>When enabled, the search input will gain focus when clicking on it and
 * action items that are associated with search only will become
 * visible.</p>
 *
 * @param enabled True to enable search
 */
public void setSearchEnabled(boolean enabled){
    //todo avoid unnecessary work
    this.mSearchEnabled = enabled;
    mSearchInput.setEnabled(enabled);
    if (enabled) {
        showSearchDependentActions();
    } else {
        hideSearchDependentActions();
    }
}
/*
 * Cross-fades from the title to the search input and fades in the
 * voice/clear button. Visibility is flipped at animation start for the
 * views fading in and at animation end for the button.
 */
private void showSearchDependentActions(){
ViewCompat.animate(mSearchBarTitle).alpha(0.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationStart(View view) {
mSearchBarTitle.setVisibility(INVISIBLE);
}
}).start();
ViewCompat.animate(mSearchInput).alpha(1.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationStart(View view) {
mSearchInput.setVisibility(VISIBLE);
}
}).start();
ViewCompat.animate(mVoiceInputOrClearButton).alpha(1.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationEnd(View view) {
mVoiceInputOrClearButton.setVisibility(VISIBLE);
}
}).start();
}
/*
 * Inverse of showSearchDependentActions(): cross-fades from the search
 * input back to the title and fades out the voice/clear button.
 */
private void hideSearchDependentActions(){
ViewCompat.animate(mSearchBarTitle).alpha(1.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationStart(View view) {
mSearchBarTitle.setVisibility(VISIBLE);
}
}).start();
ViewCompat.animate(mSearchInput).alpha(0.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationStart(View view) {
mSearchInput.setVisibility(INVISIBLE);
}
}).start();
ViewCompat.animate(mVoiceInputOrClearButton).alpha(0.0f).setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationEnd(View view) {
mVoiceInputOrClearButton.setVisibility(INVISIBLE);
}
}).start();
}
/*
 * Sets the end padding for the search input and the title so their text
 * stops before the action icons instead of sliding underneath them. This
 * determines the space available before the TextView needs to scroll.
 */
private void adjustSearchInputPadding(){
    int endPadding = mVoiceInputOrClearButton.getWidth();
    boolean overflowOccupiesSpace = mShowOverFlowMenu
            && !(mHideOverflowMenuFocused && mSearchInput.isFocused());
    if (overflowOccupiesSpace) {
        endPadding += mOverflowMenu.getWidth();
    }
    mSearchInput.setPadding(0, 0, endPadding, 0);
    mSearchBarTitle.setPadding(0, 0, endPadding, 0);
}
/**
 * Mimics a menu click that opens the menu. Useful for navigation
 * drawers when they open as a result of dragging.
 *
 * @param withAnim true to animate the hamburger-to-arrow icon transition
 */
public void openMenu(boolean withAnim) {
openMenu(true, withAnim, false);
}
/**
 * Mimics a menu click that closes. Useful for navigation
 * drawers when they close as a result of selecting an item.
 *
 * @param withAnim true, will close the menu button with
 *                 the Material animation
 */
public void closeMenu(boolean withAnim){
closeMenu(true, withAnim, false);
}
/**
 * Set the visibility of the menu action
 * button.
 *
 * @param show true to make the menu button visible, false
 *             to make it invisible and place a search icon
 *             instead.
 */
public void setLeftShowMenu(boolean show){
mShowMenuAction = show;
refreshLeftIcon();
}
/**
 * Sets whether the overflow menu icon
 * will slide off the search bar when search
 * gains focus.
 *
 * @param hide true to hide the overflow menu while the search input is focused
 */
public void setHideOverflowMenuWhenFocused(boolean hide){
this.mHideOverflowMenuFocused = hide;
}
/**
 * Control whether the overflow menu is
 * present or not.
 *
 * @param show true to show the overflow menu icon, false to hide it
 */
public void setShowOverflowMenu(boolean show){
    this.mShowOverFlowMenu = show;
    // Fix: the original gated the show/hide calls on mShowVoiceInput, so
    // toggling the overflow menu silently did nothing whenever voice input
    // was disabled. The overflow menu's visibility is independent of the
    // voice action, so apply it unconditionally.
    if (show)
        showOverflowMenuWithAnim(false);
    else
        hideOverflowMenu(false);
}
/*
 * Re-enables and reveals the overflow icon, sliding the voice/clear button
 * back to its normal position (sequenced animation when withAnim is true,
 * instant otherwise).
 */
private void showOverflowMenuWithAnim(boolean withAnim){
mOverflowMenu.setClickable(true);
if(withAnim) {
ViewPropertyAnimatorCompatSet hidAnimSet = new ViewPropertyAnimatorCompatSet();
hidAnimSet.playSequentially(ViewCompat.animate(mVoiceInputOrClearButton).translationX(0),
ViewCompat.animate(mOverflowMenu).alpha(1.0f)).setDuration(150).start();
} else{
mOverflowMenu.setAlpha(1.0f);
mVoiceInputOrClearButton.setTranslationX(0);
}
}
/*
 * Fades out the overflow icon and slides the voice/clear button into its
 * place (RTL-aware), disabling clicks on the hidden icon.
 */
private void hideOverflowMenu(boolean withAnim){
//this is needed because we're going to move
//mVoiceInputOrClearButton right into the position of
//mOverflowMenu, and we don't want to receive click events
//from mOverflowMenu.
mOverflowMenu.setClickable(false);
//accounts for anim direction based on the language direction
int deltaX = isRTL() ? -Util.dpToPx(OVERFLOW_ICON_WIDTH_DP)
: Util.dpToPx(OVERFLOW_ICON_WIDTH_DP);
if(withAnim) {
ViewPropertyAnimatorCompatSet hidAnimSet = new ViewPropertyAnimatorCompatSet();
hidAnimSet.playSequentially(ViewCompat.animate(mOverflowMenu).alpha(0.0f),
ViewCompat.animate(mVoiceInputOrClearButton).translationXBy(deltaX)).setDuration(150).start();
}else {
mOverflowMenu.setAlpha(0.0f);
mVoiceInputOrClearButton.setTranslationX(deltaX);
}
}
/**
 * Inflates the menu items from
 * an xml resource, replacing any previously inflated items.
 *
 * @param menuId a menu xml resource reference
 */
public void inflateOverflowMenu(int menuId){
mMenuBuilder.clearAll();
getMenuInflater().inflate(menuId, mMenuBuilder);
}
// Lazily creates (and caches) the inflater used for the overflow menu.
private MenuInflater getMenuInflater() {
    if (mMenuInflater != null) {
        return mMenuInflater;
    }
    mMenuInflater = new SupportMenuInflater(getContext());
    return mMenuInflater;
}
// Flips the left menu between open and closed, animating the icon and
// notifying the click listener in both directions.
private void toggleMenu(){
    if (!mMenuOpen) {
        openMenu(true, true, true);
    } else {
        closeMenu(true, true, true);
    }
}
// Marks the menu as open; optionally notifies the listener and morphs the
// hamburger drawable into the arrow shape.
private void openMenu(boolean changeMenuIcon, boolean withAnim, boolean notifyListener){
if (mOnMenuClickListener != null && notifyListener)
mOnMenuClickListener.onMenuOpened();
mMenuOpen = true;
if (changeMenuIcon)
openMenuDrawable(mMenuBtnDrawable, withAnim);
}
// Marks the menu as closed; optionally notifies the listener and morphs the
// arrow drawable back into the hamburger shape.
private void closeMenu(boolean changeMenuIcon, boolean withAnim, boolean notifyListener) {
if (mOnMenuClickListener != null && notifyListener)
mOnMenuClickListener.onMenuClosed();
mMenuOpen = false;
if(changeMenuIcon)
closeMenuDrawable(mMenuBtnDrawable, withAnim);
}
/**
 * Enables clients to directly manipulate
 * the menu icon's progress.
 *
 * <p>Useful for custom animation/behaviors.</p>
 *
 * @param progress the desired progress of the menu
 *                 icon's rotation. 0.0 == hamburger
 *                 shape, 1.0 == back arrow shape
 */
public void setMenuIconProgress(float progress){
mMenuBtnDrawable.setProgress(progress);
// NOTE(review): closing notifies the listener (notifyListener=true) but
// opening goes through openMenu(false), which does not notify — this
// asymmetry looks unintentional; confirm desired behavior.
if(progress == 0)
closeMenu(false, true, true);
else if(progress == 1.0)
openMenu(false);
}
/**
 * Set a hint that will appear in the search input. Default hint is
 * R.string.abc_search_hint, which is "search..." when the device language
 * is set to English.
 *
 * @param searchHint the hint text, or null to use the default
 */
public void setSearchHint(String searchHint){
    mSearchHint = (searchHint == null)
            ? getResources().getString(R.string.abc_search_hint)
            : searchHint;
    boolean hintVisible = mShowHintNotFocused || mSearchInput.isFocused();
    mSearchInput.setHint(hintVisible ? mSearchHint : "");
}
/**
 * Control whether the hint will be shown
 * when the search is not focused.
 *
 * @param show true to show hint when search
 *             is inactive
 */
public void setShowHintWhenNotFocused(boolean show){
mShowHintNotFocused = show;
// NOTE(review): when show == false the current hint is not cleared here;
// it only takes effect on the next setSearchHint() call — confirm intended.
if(mShowHintNotFocused)
mSearchInput.setHint(mSearchHint);
}
/**
 * Sets the title for the search bar.
 *
 * <p>Note that this is the regular text, not the
 * hint. It won't have any effect if called when
 * mShowHintInactive is true.</p>
 *
 * @param title the title to be shown when search
 *              is not focused
 */
public void setSearchBarTitle(CharSequence title){
mSearchBarTitle.setText(title);
}
/**
 * Set the visibility of the voice recognition action
 * button.
 *
 * @param show true to make the voice button visible, false
 *             to make it invisible.
 */
public void setShowVoiceInput(boolean show){
mShowVoiceInput = show;
// NOTE(review): uses GONE here while setupQueryBar() uses INVISIBLE for the
// same button — the two disagree on whether the button keeps its layout
// space; confirm which is intended.
mVoiceInputOrClearButton.setVisibility(mShowVoiceInput ? View.VISIBLE : View.GONE);
}
/**
 * Sets the request code with which the voice
 * recognition intent will be fired.
 *
 * <p>
 * The default request code is 1024. Clients should use
 * this method to provide a unique request code if the
 * default code conflicts with one of their codes in order
 * to avoid undesired results.
 * </p>
 *
 * @param requestCode the activity-result request code to use
 */
public void setVoiceRecRequestCode(int requestCode){
mVoiceRecRequestCode = requestCode;
}
// Fires the speech-recognizer activity on behalf of the host Activity.
// Results are delivered back through onHostActivityResult().
private void startVoiceInput(){
Activity hostActivity = (Activity)getContext();
Intent voiceIntent = createVoiceRecIntent(hostActivity, mVoiceRecHint);
hostActivity.startActivityForResult(voiceIntent, mVoiceRecRequestCode);
}
// Builds the speech-recognition intent shown when the mic action is tapped:
// free-form language model, single best match, custom prompt.
private Intent createVoiceRecIntent(Activity activity, String hint){
    Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH)
            .putExtra(RecognizerIntent.EXTRA_PROMPT, hint)
            .putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
            .putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
    return recognizerIntent;
}
/**
 * Sets the hint that will appear in the
 * voice recognition dialog.
 *
 * @param hint the prompt text, or null to fall back to the default search hint
 */
public void setVoiceSearchHint(String hint){
mVoiceRecHint = hint != null ? hint : getResources().getString(R.string.abc_search_hint);
}
/**
 * Handles voice recognition activity return.
 *
 * <p>In order for voice rec to work, this must be called from
 * the client activity's onActivityResult()</p>
 *
 * @param requestCode the code with which the voice recognition intent
 *                    was started.
 * @param resultCode  the result code returned by the recognizer activity
 * @param data        the result intent; may be null (e.g. on cancel)
 */
public void onHostActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode != mVoiceRecRequestCode || resultCode != Activity.RESULT_OK)
        return;
    // Fix: guard against a null intent or missing result list to avoid an
    // NPE when the recognizer returns OK without the expected extras.
    if (data == null)
        return;
    ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
    if (matches != null && !matches.isEmpty()) {
        String newQuery = matches.get(0);
        mSearchInput.requestFocus();
        mSearchInput.setText(newQuery);
        // place the cursor at the end of the inserted query
        mSearchInput.setSelection(mSearchInput.getText().length());
    }
}
/**
 * Sets whether the button with the search icon
 * will appear in the soft-keyboard or not.
 *
 * <p>Notice that if this is set to false,
 * {@link OnSearchListener#onSearchAction()} will
 * not get called.</p>
 *
 * @param show true to show the search button in
 *             the soft-keyboard.
 */
public void setShowSearchKey(boolean show){
    mShowSearchKey = show;
    int imeAction = show ? EditorInfo.IME_ACTION_SEARCH : EditorInfo.IME_ACTION_NONE;
    mSearchInput.setImeOptions(imeAction);
}
/**
 * Returns the current query text.
 *
 * @return the current query
 */
public String getQuery(){
    CharSequence currentText = mSearchInput.getText();
    return currentText.toString();
}
/**
 * Shows a circular progress on top of the
 * menu action button.
 *
 * <p>Call hideProgress()
 * to change back to normal and make the menu
 * action visible.</p>
 */
public void showProgress(){
    mMenuSearchOrExitButton.setVisibility(View.GONE);
    mSearchProgress.setVisibility(View.VISIBLE);
    // Fix: ofFloat() is static; calling it through "new ObjectAnimator()"
    // allocated a pointless throwaway instance.
    ObjectAnimator fadeInProgress = ObjectAnimator.ofFloat(mSearchProgress, "alpha", 0.0f, 1.0f);
    fadeInProgress.start();
}
/**
 * Hides the progress bar after
 * a prior call to showProgress()
 */
public void hideProgress() {
    mMenuSearchOrExitButton.setVisibility(View.VISIBLE);
    mSearchProgress.setVisibility(View.GONE);
    // Fix: ofFloat() is static; calling it through "new ObjectAnimator()"
    // allocated a pointless throwaway instance.
    ObjectAnimator fadeInExit = ObjectAnimator.ofFloat(mMenuSearchOrExitButton, "alpha", 0.0f, 1.0f);
    fadeInExit.start();
}
/*
 * Configures the suggestions RecyclerView: bottom-up vertical layout,
 * keyboard dismissal on scroll, the suggestions adapter with its click
 * callbacks, and the upward shift that makes the section overlap the
 * search bar's rounded corners.
 */
private void setupSuggestionSection() {
boolean showItemsFromBottom = true;
LinearLayoutManager layoutManager = new LinearLayoutManager(getContext(), LinearLayoutManager.VERTICAL, showItemsFromBottom);
mSuggestionsList.setLayoutManager(layoutManager);
mSuggestionsList.setItemAnimator(null);
// Close the soft keyboard as soon as the user starts scrolling the list.
final GestureDetector gestureDetector = new GestureDetector(getContext(),new GestureDetectorListenerAdapter(){
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
Util.closeSoftKeyboard((Activity) getContext());
return false;
}
});
mSuggestionsList.addOnItemTouchListener(new OnItemTouchListenerAdapter() {
@Override
public boolean onInterceptTouchEvent(RecyclerView rv, MotionEvent e) {
gestureDetector.onTouchEvent(e);
return false;
}
});
mSuggestionsAdapter = new SearchSuggestionsAdapter(getContext(), new SearchSuggestionsAdapter.Listener() {
@Override
public void onItemSelected(SearchSuggestion item) {
setSearchFocused(false);
if(mSearchListener!=null)
mSearchListener.onSuggestionClicked(item);
}
@Override
public void onMoveItemToSearchClicked(SearchSuggestion item) {
mSearchInput.setText(item.getBody());
//move cursor to end of text
mSearchInput.setSelection(mSearchInput.getText().length());
}
});
mSuggestionsList.setAdapter(mSuggestionsAdapter);
int cardViewBottomPadding = Util.dpToPx(5);
//move up the suggestions section enough to cover the search bar
//card's bottom left and right corners
mSuggestionsSection.setTranslationY(-cardViewBottomPadding);
}
private void moveSuggestListToInitialPos(){
//move the suggestions list to the collapsed position
//which is translationY of -listHeight (fully hidden above its section)
mSuggestionListContainer.setTranslationY(-mSuggestionListContainer.getMeasuredHeight());
}
/**
 * Clears the current suggestions and replaces it
 * with the provided list of new suggestions.
 *
 * @param newSearchSuggestions a list containing the new suggestions;
 *                             the list is not modified
 */
public void swapSuggestions(final List<? extends SearchSuggestion> newSearchSuggestions){
    // Fix: reverse a defensive copy — the original reversed the caller's
    // list in place, a surprising side effect for a public API.
    List<SearchSuggestion> reversed = new ArrayList<SearchSuggestion>(newSearchSuggestions);
    Collections.reverse(reversed);
    swapSuggestions(reversed, true);
}
/*
 * Pushes the new suggestion data into the adapter and animates the list
 * container to a translation that shows exactly the visible items. The
 * translation math carries the author's own todos and is order-sensitive;
 * left as-is.
 */
private void swapSuggestions(final List<? extends SearchSuggestion> newSearchSuggestions, boolean withAnim){
mDivider.setVisibility(View.GONE);
//update adapter
mSuggestionsAdapter.swapData(newSearchSuggestions);
//todo inspect line
//this is needed because the list gets populated
//from bottom up.
mSuggestionsList.scrollBy(0, -(newSearchSuggestions.size() * getTotalItemsHeight(newSearchSuggestions)));
// NOTE(review): variable names say five/three dp but the values are 6 and 3.
int fiveDp = Util.dpToPx(6);
int threeDp = Util.dpToPx(3);
ViewCompat.animate(mSuggestionListContainer).cancel();
float translationY = (-mSuggestionListContainer.getHeight())+getVisibleItemsHeight(newSearchSuggestions);
//todo refactor go over and make more clear
final float newTranslationY = translationY<0 ?
newSearchSuggestions.size()==0 ? translationY : translationY+threeDp
: -fiveDp;
if(withAnim) {
ViewCompat.animate(mSuggestionListContainer).
setStartDelay(SUGGESTION_ITEM_ANIM_DURATION).
setInterpolator(SUGGEST_ITEM_ADD_ANIM_INTERPOLATOR).
setDuration(SUGGEST_ITEM_ADD_ANIM_DURATION).
translationY(newTranslationY).
setListener(new ViewPropertyAnimatorListenerAdapter() {
@Override
public void onAnimationCancel(View view) {
mSuggestionListContainer.setTranslationY(newTranslationY);
}
}).start();
}else{
//todo refactor
//the extra -3*fiveDp is because this will only
//get called from onRestoreSavedState(), and when it is called,
//the full height of mSuggestionListContainer is not known.
//*refactor* as soon as possible to eliminate confusion.
float transY = translationY<0 ?
newSearchSuggestions.size()==0 ? translationY : translationY+threeDp-3*fiveDp
: -fiveDp;
mSuggestionListContainer.setTranslationY(transY);
}
if(newSearchSuggestions.size()>0)
mDivider.setVisibility(View.VISIBLE);
else
mDivider.setVisibility(View.GONE);
}
//returns the height that a given suggestion list's items
//will take up.
private int getVisibleItemsHeight(List<? extends SearchSuggestion> suggestions){
int visibleItemsHeight = 0;
for(SearchSuggestion suggestion: suggestions) {
visibleItemsHeight += getSuggestionItemHeight(suggestion);
//if the current total is more than the list container's height, we
//don't care about the rest of the items' heights because they won't be
//visible.
if(visibleItemsHeight>mSuggestionListContainer.getHeight())
break;
}
return visibleItemsHeight;
}
private int getTotalItemsHeight(List<? extends SearchSuggestion> suggestions){
int totalItemHeight = 0;
for(SearchSuggestion suggestion: suggestions)
totalItemHeight += getSuggestionItemHeight(suggestion);
return totalItemHeight;
}
//returns the height of a given suggestion item based on it's text length
private int getSuggestionItemHeight(SearchSuggestion suggestion) {
TextPaint paint = new TextPaint(Paint.ANTI_ALIAS_FLAG | Paint.LINEAR_TEXT_FLAG);
paint.setTextSize(Util.spToPx(18));
paint.setTypeface(Typeface.DEFAULT);
//the width taken up by the left and right icon and the text's left padding
//todo use res dimensions for measuring
int leftRightMarginsWidth = Util.dpToPx(124);
StaticLayout textLayout = new StaticLayout( suggestion.getBody(), paint, mSuggestionsList.getWidth()-leftRightMarginsWidth, Layout.Alignment.ALIGN_CENTER, 1.0f, 1.0f, false);
int heightPlusPadding = textLayout.getHeight()+Util.dpToPx(8);
//min height is the dictated by the left and right icons' height
//todo use res dimensions for measuring
int minHeight = Util.dpToPx(48);
int height = heightPlusPadding >= minHeight ? heightPlusPadding : minHeight;
return height;
}
/**
* Collapses the suggestions list and
* then clears its suggestion items.
*/
public void clearSuggestions(){
clearSuggestions(null);
}
    //callback invoked once the suggestions have been collapsed and cleared
    private interface OnSuggestionsClearListener{
        void onCleared();
    }
    /**
     * Collapses the suggestions section and, once the collapse animation
     * completes, clears the adapter's data set and hides the divider.
     * A no-op while a collapse is already in progress.
     *
     * @param listener optional callback fired after the data set is cleared
     */
    private void clearSuggestions(final OnSuggestionsClearListener listener) {
        if(!isCollapsing) {
            collapseSuggestionsSection(new OnSuggestionsCollapsedListener() {
                @Override
                public void onCollapsed() {
                    mSuggestionsAdapter.clearDataSet();
                    if (listener != null)
                        listener.onCleared();
                    mDivider.setVisibility(GONE);
                }
            });
        }
    }
    //callback invoked when the collapse animation of the suggestions section ends
    private interface OnSuggestionsCollapsedListener{
        void onCollapsed();
    }
    /**
     * Animates the suggestions container up and out of view.
     * {@code isCollapsing} is held true for the duration of the animation so
     * concurrent collapse requests are ignored.
     *
     * @param listener optional callback fired when the collapse animation ends
     */
    private void collapseSuggestionsSection(final OnSuggestionsCollapsedListener listener){
        isCollapsing = true;
        //target: fully above the visible area, plus a small 3dp margin
        final int destTranslationY = -(mSuggestionListContainer.getHeight()+Util.dpToPx(3));
        ViewCompat.animate(mSuggestionListContainer).
                translationY(destTranslationY).
                setDuration(SUGGEST_LIST_COLLAPSE_ANIM_DURATION).
                setListener(new ViewPropertyAnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(View view) {
                        if (listener != null)
                            listener.onCollapsed();
                        isCollapsing = false;
                    }
                    @Override
                    public void onAnimationCancel(View view) {
                        //snap to the end position if cancelled mid-flight;
                        //NOTE(review): isCollapsing is reset only in onAnimationEnd —
                        //confirm that cancel also delivers an end callback here.
                        mSuggestionListContainer.setTranslationY(destTranslationY);
                    }
                }).start();
    }
public void clearSearchFocus(){
setSearchFocused(false);
}
public boolean isSearchBarFocused(){
return mIsFocused;
}
    /**
     * Central focus-state switch: transitions the view between its focused
     * (search session active) and unfocused states, updating the icons, menu
     * drawable, suggestions section, background dim, soft keyboard and the
     * registered focus listener.
     *
     * @param focused true to enter the focused state, false to leave it
     */
    private void setSearchFocused(boolean focused){
        this.mIsFocused = focused;
        if(focused){
            //morph hamburger into arrow, or swap the search icon for a back arrow
            if(mShowMenuAction && !mMenuOpen)
                openMenuDrawable(mMenuBtnDrawable, true);
            else if(!mShowMenuAction)
                changeIcon(mMenuSearchOrExitButton, mIconBackArrow, true);
            if(mMenuOpen)
                closeMenu(false, true, true);
            moveSuggestListToInitialPos();
            mSuggestionsSection.setVisibility(VISIBLE);
            fadeInBackground();
            mSearchInput.requestFocus();
            if(mShowOverFlowMenu && mHideOverflowMenuFocused)
                hideOverflowMenu(true);
            adjustSearchInputPadding();
            Util.showSoftKeyboard(getContext(), mSearchInput);
            if(mFocusChangeListener!=null)
                mFocusChangeListener.onFocus();
        }else{
            if(mShowMenuAction)
                closeMenuDrawable(mMenuBtnDrawable, true);
            else
                changeIcon(mMenuSearchOrExitButton, mIconSearch, true);
            //collapse + clear the suggestions, then hide the section
            clearSuggestions(new OnSuggestionsClearListener() {
                @Override
                public void onCleared() {
                    mSuggestionsSection.setVisibility(View.INVISIBLE);
                }
            });
            fadeOutBackground();
            findViewById(R.id.search_bar).requestFocus();
            //NOTE(review): unsafe cast — getContext() is not guaranteed to be an
            //Activity (e.g. it may be a ContextThemeWrapper); confirm or guard.
            Util.closeSoftKeyboard((Activity) getContext());
            if(mShowVoiceInput && !mHideOverflowMenuFocused)
                changeIcon(mVoiceInputOrClearButton, mIconMic, true);
            if(mSearchInput.length()!=0)
                mSearchInput.setText("");
            if(mShowOverFlowMenu && mHideOverflowMenuFocused)
                showOverflowMenuWithAnim(true);
            adjustSearchInputPadding();
            if(mFocusChangeListener!=null) {
                mFocusChangeListener.onFocusCleared();
            }
        }
    }
private void changeIcon(ImageView imageView, Drawable newIcon, boolean withAnim) {
imageView.setImageDrawable(newIcon);
if(withAnim) {
ObjectAnimator fadeInVoiceInputOrClear = new ObjectAnimator().ofFloat(imageView, "alpha", 0.0f, 1.0f);
fadeInVoiceInputOrClear.start();
}else{
imageView.setAlpha(1.0f);
}
}
    /**
     * Sets the listener that will listen for query
     * changes as they are being typed.
     *
     * @param listener listener for query changes; replaces any
     *                 previously set listener
     */
    public void setOnQueryChangeListener(OnQueryChangeListener listener){
        this.mQueryListener = listener;
    }
    /**
     * Sets the listener that will be called when
     * an action that completes the current search
     * session has occurred and the search lost focus.
     *
     * <p>When called, a client would ideally grab the
     * search or suggestion query from the callback parameter or
     * from {@link #getQuery() getQuery} and perform the necessary
     * query against its data source.</p>
     *
     * @param listener listener for query completion; replaces any
     *                 previously set listener
     */
    public void setOnSearchListener(OnSearchListener listener) {
        this.mSearchListener = listener;
    }
    /**
     * Sets the listener that will be called when the focus
     * of the search has changed.
     *
     * @param listener listener for search focus changes; replaces any
     *                 previously set listener
     */
    public void setOnFocusChangeListener(OnFocusChangeListener listener){
        this.mFocusChangeListener = listener;
    }
    /**
     * Sets the listener that will be called when the
     * left/start menu (or navigation menu) is clicked.
     *
     * <p>Note that this is different from the overflow menu
     * that has a separate listener.</p>
     *
     * @param listener listener for left/navigation menu clicks; replaces
     *                 any previously set listener
     */
    public void setOnLeftMenuClickListener(OnLeftMenuClickListener listener){
        this.mOnMenuClickListener = listener;
    }
    /**
     * Sets the listener that will be called when
     * an item in the overflow menu is clicked.
     *
     * @param listener listener to listen to menu item clicks; replaces
     *                 any previously set listener
     */
    public void setOnMenuItemClickListener(OnMenuItemClickListener listener){
        this.mOnOverflowMenuItemListener = listener;
    }
private void openMenuDrawable(final DrawerArrowDrawable drawerArrowDrawable, boolean withAnim){
if(withAnim){
ValueAnimator anim = ValueAnimator.ofFloat(0.0f, 1.0f);
anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
float value = (Float) animation.getAnimatedValue();
drawerArrowDrawable.setProgress(value);
}
});
anim.setDuration(MENU_ICON_ANIM_DURATION);
anim.start();
}else{
drawerArrowDrawable.setProgress(1.0f);
}
}
private void closeMenuDrawable(final DrawerArrowDrawable drawerArrowDrawable, boolean withAnim) {
if(withAnim) {
ValueAnimator anim = ValueAnimator.ofFloat(1.0f, 0.0f);
anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
float value = (Float) animation.getAnimatedValue();
drawerArrowDrawable.setProgress(value);
}
});
anim.setDuration(MENU_ICON_ANIM_DURATION);
anim.start();
}else{
drawerArrowDrawable.setProgress(0.0f);
}
}
private void fadeOutBackground(){
ValueAnimator anim = ValueAnimator.ofInt(BACKGROUND_DRAWABLE_ALPHA_SEARCH_ACTIVE, BACKGROUND_DRAWABLE_ALPHA_SEARCH_INACTIVE);
anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
int value = (Integer) animation.getAnimatedValue();
mBackgroundDrawable.setAlpha(value);
}
});
anim.setDuration(BACKGROUND_FADE__ANIM_DURATION);
anim.start();
}
private void fadeInBackground(){
ValueAnimator anim = ValueAnimator.ofInt(BACKGROUND_DRAWABLE_ALPHA_SEARCH_INACTIVE, BACKGROUND_DRAWABLE_ALPHA_SEARCH_ACTIVE);
anim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
int value = (Integer) animation.getAnimatedValue();
mBackgroundDrawable.setAlpha(value);
}
});
anim.setDuration(BACKGROUND_FADE__ANIM_DURATION);
anim.start();
}
    /**
     * Set whether a touch outside of the
     * search bar's bounds will cause the search bar to
     * lose focus.
     *
     * <p>Note: the touch listener below is installed regardless of
     * {@code enable}; the flag is only consulted at event time, and the
     * listener always consumes the touch so it does not reach views
     * underneath the suggestions section.</p>
     *
     * @param enable true to dismiss on outside touch, false otherwise.
     */
    public void setDismissOnOutsideClick(boolean enable){
        mDismissOnOutsideTouch = enable;
        mSuggestionsSection.setOnTouchListener(new OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                //todo check if this is called twice
                if (mDismissOnOutsideTouch && mIsFocused)
                    setSearchFocused(false);
                return true;
            }
        });
    }
private boolean isRTL(){
Configuration config = getResources().getConfiguration();
return ViewCompat.getLayoutDirection(this) == ViewCompat.LAYOUT_DIRECTION_RTL;
}
    /**
     * Saves the focus flag, the current suggestion data set and the
     * current query alongside the superclass state.
     */
    @Override
    public Parcelable onSaveInstanceState() {
        Parcelable superState = super.onSaveInstanceState();
        return new SavedState(superState, mIsFocused, mSuggestionsAdapter.getDataSet(), getQuery());
    }
    /**
     * Restores focus state, query and suggestions saved by
     * {@link #onSaveInstanceState()}. When the view was focused, the
     * suggestions are re-swapped in (without animation) once the
     * container has been laid out.
     */
    @Override
    public void onRestoreInstanceState(Parcelable state) {
        final SavedState savedState = (SavedState) state;
        super.onRestoreInstanceState(savedState.getSuperState());
        if(savedState.isFocused) {
            mBackgroundDrawable.setAlpha(BACKGROUND_DRAWABLE_ALPHA_SEARCH_ACTIVE);
            //suppress the listeners that would otherwise react to the
            //programmatic focus/text restoration below
            mSkipTextChangeEvent = savedState.isFocused;
            mSkipQueryFocusChangeEvent = savedState.isFocused;
            mIsFocused = savedState.isFocused;
            mSuggestionsSection.setVisibility(VISIBLE);
            ViewTreeObserver vto = mSuggestionListContainer.getViewTreeObserver();
            vto.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
                @Override
                public void onGlobalLayout() {
                    //self-removing listener; the pre-API-16 removal method has a different name
                    if (Build.VERSION.SDK_INT < 16) {
                        mSuggestionListContainer.getViewTreeObserver().removeGlobalOnLayoutListener(this);
                    } else {
                        mSuggestionListContainer.getViewTreeObserver().removeOnGlobalLayoutListener(this);
                    }
                    //swap without animation: the container is now laid out
                    swapSuggestions(savedState.suggestions, false);
                }
            });
            if (savedState.mQuery.length() == 0) {
                if (mShowVoiceInput)
                    changeIcon(mVoiceInputOrClearButton, mIconMic, false);
                else
                    mVoiceInputOrClearButton.setVisibility(View.INVISIBLE);
            } else if (mOldQuery.length() == 0) {
                //NOTE(review): this branch checks mOldQuery rather than
                //savedState.mQuery — confirm that is intentional
                changeIcon(mVoiceInputOrClearButton, mIconClear, false);
                mVoiceInputOrClearButton.setVisibility(View.VISIBLE);
            }
            if(mShowOverFlowMenu && mHideOverflowMenuFocused)
                hideOverflowMenu(false);
            if(mShowMenuAction && !mMenuOpen)
                openMenuDrawable(mMenuBtnDrawable, false);
            else if(!mShowMenuAction)
                changeIcon(mMenuSearchOrExitButton, mIconBackArrow, false);
            adjustSearchInputPadding();
            Util.showSoftKeyboard(getContext(), mSearchInput);
        }
    }
    /**
     * Parcelable state holder for the view: focus flag, suggestion
     * list and the current query.
     */
    static class SavedState extends BaseSavedState {
        List<? extends SearchSuggestion> suggestions = new ArrayList<>();
        //creator of the concrete suggestion type, captured from the first item
        Creator SUGGEST_CREATOR;
        boolean isFocused;
        String mQuery;
        SavedState(Parcelable superState, boolean isFocused, List<? extends SearchSuggestion> suggestions, String query){
            super(superState);
            this.isFocused = isFocused;
            this.suggestions = suggestions;
            if(!suggestions.isEmpty())
                SUGGEST_CREATOR = suggestions.get(0).getCreator();
            this.mQuery = query;
        }
        private SavedState(Parcel in) {
            super(in);
            isFocused = (in.readInt() != 0);
            //NOTE(review): SUGGEST_CREATOR is an instance field and is always
            //null when this constructor runs on a freshly created object, so
            //readTypedList is never invoked and the suggestions written by
            //writeToParcel are never restored — confirm and fix upstream.
            if(SUGGEST_CREATOR!=null)
                in.readTypedList(suggestions, SUGGEST_CREATOR);
            mQuery = in.readString();
        }
        @Override
        public void writeToParcel(Parcel out, int flags) {
            super.writeToParcel(out, flags);
            out.writeInt(isFocused ? 1 : 0);
            out.writeTypedList(suggestions);
            out.writeString(mQuery);
        }
        public static final Creator<SavedState> CREATOR
                = new Creator<SavedState>() {
            public SavedState createFromParcel(Parcel in) {
                return new SavedState(in);
            }
            public SavedState[] newArray(int size) {
                return new SavedState[size];
            }
        };
    }
    /**
     * Cancels any running suggestion-container animation when the view is
     * detached, so the animator does not keep the view alive.
     */
    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        //remove any ongoing animations to prevent leaks
        ViewCompat.animate(mSuggestionListContainer).cancel();
    }
}
|
removed unsafe Activity cast
|
library/src/main/java/com/arlib/floatingsearchview/FloatingSearchView.java
|
removed unsafe Activity cast
|
|
Java
|
apache-2.0
|
1a276c7a3b2fb82c965e5c3e593390d1b1d4d19c
| 0
|
thanujalk/carbon-identity,thilina27/carbon-identity,kesavany/carbon-identity,darshanasbg/carbon-identity,IsuraD/carbon-identity,IsuraD/carbon-identity,kesavany/carbon-identity,thanujalk/carbon-identity,keerthu/carbon-identity,malithie/carbon-identity,thusithathilina/carbon-identity,IndunilRathnayake/carbon-identity,virajsenevirathne/carbon-identity,nuwand/carbon-identity,thariyarox/carbon-identity,pulasthi7/carbon-identity,virajsenevirathne/carbon-identity,keerthu/carbon-identity,madurangasiriwardena/carbon-identity,pulasthi7/carbon-identity,nuwandi-is/carbon-identity,mefarazath/carbon-identity,Niranjan-K/carbon-identity,darshanasbg/carbon-identity,nuwand/carbon-identity,0xkasun/carbon-identity,wso2/carbon-identity,GayanM/carbon-identity,DMHP/carbon-identity,Niranjan-K/carbon-identity,DMHP/carbon-identity,darshanasbg/carbon-identity,thariyarox/carbon-identity,madurangasiriwardena/carbon-identity,nuwandi-is/carbon-identity,0xkasun/carbon-identity,thilina27/carbon-identity,wso2/carbon-identity,IndunilRathnayake/carbon-identity,GayanM/carbon-identity,keerthu/carbon-identity,DMHP/carbon-identity,pulasthi7/carbon-identity,thusithathilina/carbon-identity,thariyarox/carbon-identity,malithie/carbon-identity,malithie/carbon-identity,kesavany/carbon-identity,nuwand/carbon-identity,wso2/carbon-identity,madurangasiriwardena/carbon-identity,IndunilRathnayake/carbon-identity,ChamaraPhilipsuom/carbon-identity,mefarazath/carbon-identity,ChamaraPhilipsuom/carbon-identity,0xkasun/carbon-identity,mefarazath/carbon-identity,GayanM/carbon-identity,thanujalk/carbon-identity,nuwandi-is/carbon-identity,thusithathilina/carbon-identity,thilina27/carbon-identity,virajsenevirathne/carbon-identity,IsuraD/carbon-identity,ChamaraPhilipsuom/carbon-identity,Niranjan-K/carbon-identity
|
/*
* Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.application.authentication.framework.handler.request.impl;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.base.MultitenantConstants;
import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCache;
import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheEntry;
import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheKey;
import org.wso2.carbon.identity.application.authentication.framework.config.ConfigurationFacade;
import org.wso2.carbon.identity.application.authentication.framework.config.model.SequenceConfig;
import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext;
import org.wso2.carbon.identity.application.authentication.framework.context.SessionContext;
import org.wso2.carbon.identity.application.authentication.framework.exception.FrameworkException;
import org.wso2.carbon.identity.application.authentication.framework.handler.request.RequestCoordinator;
import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceComponent;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils;
import org.wso2.carbon.registry.core.utils.UUIDGenerator;
import org.wso2.carbon.user.api.Tenant;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
/**
* Request Coordinator
*/
public class DefaultRequestCoordinator implements RequestCoordinator {
private static final Log log = LogFactory.getLog(DefaultRequestCoordinator.class);
private static volatile DefaultRequestCoordinator instance;
public static DefaultRequestCoordinator getInstance() {
if (instance == null) {
synchronized (DefaultRequestCoordinator.class) {
if (instance == null) {
instance = new DefaultRequestCoordinator();
}
}
}
return instance;
}
    /**
     * Entry point for every commonauth request. Distinguishes a flow start
     * (the "type" parameter is present) from a returning request, builds or
     * restores the {@link AuthenticationContext}, and dispatches to the
     * authentication or logout request handler. Any failure sends the
     * caller to the retry page.
     *
     * @param request  the incoming servlet request
     * @param response the servlet response
     * @throws IOException if sending the retry page fails
     */
    @Override
    public void handle(HttpServletRequest request, HttpServletResponse response) throws IOException {
        try {
            AuthenticationContext context;
            AuthenticationRequestCacheEntry authRequest = null;
            String sessionDataKey = request.getParameter("sessionDataKey");
            boolean returning = false;
            // Check whether this is the start of the authentication flow.
            // 'type' parameter should be present if so. This parameter contains
            // the request type (e.g. samlsso) set by the calling servlet.
            // TODO: use a different mechanism to determine the flow start.
            if (request.getParameter("type") != null) {
                // Retrieve AuthenticationRequestCache Entry which was stored by the servlet.
                if (sessionDataKey != null) {
                    log.debug("retrieving authentication request from cache..");
                    authRequest = FrameworkUtils.getAuthenticationRequestFromCache(sessionDataKey);
                    if (authRequest == null) {
                        // authRequest cannot be retrieved from cache. Cache
                        throw new FrameworkException("Invalid authentication request. Session data key : " + sessionDataKey);
                    }
                } else if (!Boolean.parseBoolean(request.getParameter(FrameworkConstants.LOGOUT))) {
                    // sessionDataKey is null and not a logout request
                    if (log.isDebugEnabled()) {
                        log.debug("Session data key is null in the request and not a logout request.");
                    }
                    // NOTE(review): execution continues after sendToRetryPage() —
                    // initializeFlow() below still runs for this request; confirm
                    // whether an early return was intended here.
                    FrameworkUtils.sendToRetryPage(request, response);
                }
                // if there is a cache entry, wrap the original request with params in cache entry
                if (authRequest != null) {
                    request = FrameworkUtils.getCommonAuthReqWithParams(request, authRequest);
                    FrameworkUtils.removeAuthenticationRequestFromCache(sessionDataKey);
                }
                context = initializeFlow(request, response);
            } else {
                returning = true;
                context = FrameworkUtils.getContextData(request);
            }
            if (context != null) {
                context.setReturning(returning);
                // if this is the flow start, store the original request in the context
                if (!context.isReturning() && authRequest != null) {
                    context.setAuthenticationRequest(authRequest.getAuthenticationRequest());
                }
                if (!context.isLogoutRequest()) {
                    FrameworkUtils.getAuthenticationRequestHandler().handle(request, response,
                            context);
                } else {
                    FrameworkUtils.getLogoutRequestHandler().handle(request, response, context);
                }
            } else {
                if (log.isDebugEnabled()) {
                    String key = request.getParameter("sessionDataKey");
                    if (key == null) {
                        log.debug("Session data key is null in the request");
                    } else {
                        log.debug("Session data key : " + key);
                    }
                }
                log.error("Context does not exist. Probably due to invalidated cache");
                FrameworkUtils.sendToRetryPage(request, response);
            }
        } catch (Throwable e) {
            // NOTE(review): catching Throwable also swallows Errors; intentional
            // framework boundary, but worth confirming.
            log.error("Exception in Authentication Framework", e);
            FrameworkUtils.sendToRetryPage(request, response);
        }
    }
    /**
     * Handles the initial request (from the calling servlet) by building a
     * new {@link AuthenticationContext} from the caller-supplied parameters,
     * then wiring in any previously authenticated session and the outbound
     * query string. For logout requests without a relying party, the context
     * is returned early with only the session identifier set.
     *
     * @param request  the initial commonauth request
     * @param response the response being built
     * @return the newly initialized authentication context
     * @throws FrameworkException if the caller path cannot be decoded, the
     *                            tenant cannot be resolved, or the sequence
     *                            configuration cannot be built
     */
    protected AuthenticationContext initializeFlow(HttpServletRequest request,
                                                   HttpServletResponse response) throws FrameworkException {
        if (log.isDebugEnabled()) {
            log.debug("Initializing the flow");
        }
        // "sessionDataKey" - calling servlet maintains its state information
        // using this
        String callerSessionDataKey = request.getParameter(FrameworkConstants.SESSION_DATA_KEY);
        // "commonAuthCallerPath" - path of the calling servlet. This is the url
        // response should be sent to
        String callerPath = getCallerPath(request);
        // "type" - type of the request. e.g. samlsso, openid, oauth, passivests
        String requestType = request.getParameter(FrameworkConstants.RequestParams.TYPE);
        // "relyingParty"
        String relyingParty = request.getParameter(FrameworkConstants.RequestParams.ISSUER);
        // tenant domain
        String tenantDomain = getTenantDomain(request);
        // Store the request data sent by the caller
        AuthenticationContext context = new AuthenticationContext();
        context.setCallerSessionKey(callerSessionDataKey);
        context.setCallerPath(callerPath);
        context.setRequestType(requestType);
        context.setRelyingParty(relyingParty);
        context.setTenantDomain(tenantDomain);
        // generate a new key to hold the context data object
        String contextId = UUIDGenerator.generateUUID();
        context.setContextIdentifier(contextId);
        if (log.isDebugEnabled()) {
            log.debug("Framework contextId: " + contextId);
        }
        // if this a logout request from the calling servlet
        if (request.getParameter(FrameworkConstants.RequestParams.LOGOUT) != null) {
            if (log.isDebugEnabled()) {
                log.debug("Starting a logout flow");
            }
            context.setLogoutRequest(true);
            if (context.getRelyingParty() == null || context.getRelyingParty().trim().length() == 0) {
                if (log.isDebugEnabled()) {
                    log.debug("relyingParty param is null. This is a possible logout scenario.");
                }
                // identify the session to terminate from the commonauth cookie
                Cookie cookie = FrameworkUtils.getAuthCookie(request);
                if (cookie != null) {
                    context.setSessionIdentifier(cookie.getValue());
                }
                return context;
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Starting an authentication flow");
            }
        }
        findPreviousAuthenticatedSession(request, context);
        buildOutboundQueryString(request, context);
        return context;
    }
private String getCallerPath(HttpServletRequest request) throws FrameworkException {
String callerPath = request.getParameter(FrameworkConstants.RequestParams.CALLER_PATH);
try {
if (callerPath != null) {
callerPath = URLDecoder.decode(callerPath, "UTF-8");
}
} catch (UnsupportedEncodingException e) {
throw new FrameworkException(e.getMessage(), e);
}
return callerPath;
}
    /**
     * Resolves the tenant domain for the request: uses the "tenantDomain"
     * parameter when present and non-"null", otherwise looks the tenant up
     * by "tenantId", falling back to the super-tenant domain.
     *
     * @param request the incoming commonauth request
     * @return the resolved tenant domain
     * @throws FrameworkException if the tenant lookup fails
     */
    private String getTenantDomain(HttpServletRequest request) throws FrameworkException {
        String tenantDomain = request.getParameter(FrameworkConstants.RequestParams.TENANT_DOMAIN);
        if (tenantDomain == null || tenantDomain.isEmpty() || "null".equals(tenantDomain)) {
            String tenantId = request.getParameter(FrameworkConstants.RequestParams.TENANT_ID);
            //"-1234" is presumably the super-tenant id — TODO confirm and
            //replace with the corresponding constant
            if (tenantId != null && !"-1234".equals(tenantId)) {
                try {
                    Tenant tenant = FrameworkServiceComponent.getRealmService().getTenantManager()
                            .getTenant(Integer.parseInt(tenantId));
                    if (tenant != null) {
                        tenantDomain = tenant.getDomain();
                    }
                } catch (Exception e) {
                    //broad catch: any lookup/parse failure becomes a FrameworkException
                    throw new FrameworkException(e.getMessage(), e);
                }
            } else {
                tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
            }
        }
        return tenantDomain;
    }
protected void findPreviousAuthenticatedSession(HttpServletRequest request,
AuthenticationContext context) throws FrameworkException {
// Get service provider chain
SequenceConfig sequenceConfig = ConfigurationFacade.getInstance().getSequenceConfig(
context.getRequestType(),
request.getParameter(FrameworkConstants.RequestParams.ISSUER),
context.getTenantDomain());
Cookie cookie = FrameworkUtils.getAuthCookie(request);
// if cookie exists user has previously authenticated
if (cookie != null) {
if (log.isDebugEnabled()) {
log.debug(FrameworkConstants.COMMONAUTH_COOKIE
+ " cookie is available with the value: " + cookie.getValue());
}
// get the authentication details from the cache
SessionContext sessionContext = FrameworkUtils.getSessionContextFromCache(cookie
.getValue());
if (sessionContext != null) {
context.setSessionIdentifier(cookie.getValue());
String appName = sequenceConfig.getApplicationConfig().getApplicationName();
if (log.isDebugEnabled()) {
log.debug("Service Provider is: " + appName);
}
SequenceConfig previousAuthenticatedSeq = sessionContext
.getAuthenticatedSequences().get(appName);
if (previousAuthenticatedSeq != null) {
if (log.isDebugEnabled()) {
log.debug("A previously authenticated sequence found for the SP: "
+ appName);
}
context.setPreviousSessionFound(true);
sequenceConfig = previousAuthenticatedSeq;
AuthenticatedUser authenticatedUser = sequenceConfig.getAuthenticatedUser();
String authenticatedUserTenantDomain = sequenceConfig.getAuthenticatedUser().getTenantDomain();
if (authenticatedUser != null) {
// set the user for the current authentication/logout flow
context.setSubject(authenticatedUser);
if (log.isDebugEnabled()) {
log.debug("Already authenticated by username: " +
authenticatedUser.getAuthenticatedSubjectIdentifier());
}
if (authenticatedUserTenantDomain != null) {
// set the user tenant domain for the current authentication/logout flow
context.setProperty("user-tenant-domain", authenticatedUserTenantDomain);
if (log.isDebugEnabled()) {
log.debug("Authenticated user tenant domain: " + authenticatedUserTenantDomain);
}
}
}
}
context.setPreviousAuthenticatedIdPs(sessionContext.getAuthenticatedIdPs());
} else {
if (log.isDebugEnabled()) {
log.debug("Failed to find the SessionContext from the cache. Possible cache timeout.");
}
}
}
context.setServiceProviderName(sequenceConfig.getApplicationConfig().getApplicationName());
// set the sequence for the current authentication/logout flow
context.setSequenceConfig(sequenceConfig);
}
    /**
     * Builds the query string forwarded to the authentication endpoint and
     * federated IdPs: the configured pass-through parameters plus the context
     * identifier, relying party, request type, SP name and SaaS flag. The
     * result is stored on the context twice (contextId-included and original
     * request query params).
     *
     * @throws FrameworkException if URL-encoding the relying party fails
     */
    private void buildOutboundQueryString(HttpServletRequest request, AuthenticationContext context) throws FrameworkException {
        // Build the outbound query string that will be sent to the authentication endpoint and
        // federated IdPs
        StringBuilder outboundQueryStringBuilder = new StringBuilder();
        outboundQueryStringBuilder.append(FrameworkUtils.getQueryStringWithConfiguredParams(request));
        if (StringUtils.isNotEmpty(outboundQueryStringBuilder.toString())) {
            outboundQueryStringBuilder.append("&");
        }
        try {
            // NOTE(review): getRelyingParty() may be null when the request carried no
            // "issuer" parameter; URLEncoder.encode would then throw an NPE — confirm.
            outboundQueryStringBuilder.append("sessionDataKey=").append(context.getContextIdentifier())
                    .append("&relyingParty=").append(URLEncoder.encode(context.getRelyingParty(), "UTF-8")).append("&type=")
                    .append(context.getRequestType()).append("&sp=")
                    .append(context.getServiceProviderName()).append("&isSaaSApp=")
                    .append(context.getSequenceConfig().getApplicationConfig().isSaaSApp());
        } catch (UnsupportedEncodingException e) {
            throw new FrameworkException("Error while URL Encoding", e);
        }
        if (log.isDebugEnabled()) {
            log.debug("Outbound Query String: " + outboundQueryStringBuilder.toString());
        }
        context.setContextIdIncludedQueryParams(outboundQueryStringBuilder.toString());
        context.setOrignalRequestQueryParams(outboundQueryStringBuilder.toString());
    }
}
|
components/authentication-framework/org.wso2.carbon.identity.application.authentication.framework/src/main/java/org/wso2/carbon/identity/application/authentication/framework/handler/request/impl/DefaultRequestCoordinator.java
|
/*
* Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.application.authentication.framework.handler.request.impl;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.base.MultitenantConstants;
import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCache;
import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheEntry;
import org.wso2.carbon.identity.application.authentication.framework.cache.AuthenticationRequestCacheKey;
import org.wso2.carbon.identity.application.authentication.framework.config.ConfigurationFacade;
import org.wso2.carbon.identity.application.authentication.framework.config.model.SequenceConfig;
import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext;
import org.wso2.carbon.identity.application.authentication.framework.context.SessionContext;
import org.wso2.carbon.identity.application.authentication.framework.exception.FrameworkException;
import org.wso2.carbon.identity.application.authentication.framework.handler.request.RequestCoordinator;
import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceComponent;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils;
import org.wso2.carbon.registry.core.utils.UUIDGenerator;
import org.wso2.carbon.user.api.Tenant;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
/**
* Request Coordinator
*/
public class DefaultRequestCoordinator implements RequestCoordinator {
private static final Log log = LogFactory.getLog(DefaultRequestCoordinator.class);
private static volatile DefaultRequestCoordinator instance;
    /**
     * Returns the lazily created singleton, using double-checked locking
     * on the volatile {@code instance} field.
     */
    public static DefaultRequestCoordinator getInstance() {
        if (instance == null) {
            synchronized (DefaultRequestCoordinator.class) {
                if (instance == null) {
                    instance = new DefaultRequestCoordinator();
                }
            }
        }
        return instance;
    }
    /**
     * Entry point for every commonauth request: distinguishes a flow start
     * ("type" parameter present) from a returning request, builds or restores
     * the {@link AuthenticationContext}, and dispatches to the authentication
     * or logout request handler. Failures send the caller to the retry page.
     *
     * @param request  the incoming servlet request
     * @param response the servlet response
     * @throws IOException if sending the retry page fails
     */
    @Override
    public void handle(HttpServletRequest request, HttpServletResponse response) throws IOException {
        try {
            AuthenticationContext context;
            AuthenticationRequestCacheEntry authRequest = null;
            String sessionDataKey = request.getParameter("sessionDataKey");
            boolean returning = false;
            // Check whether this is the start of the authentication flow.
            // 'type' parameter should be present if so. This parameter contains
            // the request type (e.g. samlsso) set by the calling servlet.
            // TODO: use a different mechanism to determine the flow start.
            if (request.getParameter("type") != null) {
                // Retrieve AuthenticationRequestCache Entry which was stored by the servlet.
                if (sessionDataKey != null) {
                    log.debug("retrieving authentication request from cache..");
                    authRequest = FrameworkUtils.getAuthenticationRequestFromCache(sessionDataKey);
                    if (authRequest == null) {
                        // authRequest cannot be retrieved from cache. Cache
                        throw new FrameworkException("Invalid authentication request. Session data key : " + sessionDataKey);
                    }
                } else if (!Boolean.parseBoolean(request.getParameter(FrameworkConstants.LOGOUT))) {
                    // sessionDataKey is null and not a logout request
                    if (log.isDebugEnabled()) {
                        log.debug("Session data key is null in the request and not a logout request.");
                    }
                    // NOTE(review): execution continues after sendToRetryPage() —
                    // initializeFlow() below still runs; confirm whether an early
                    // return was intended here.
                    FrameworkUtils.sendToRetryPage(request, response);
                }
                // if there is a cache entry, wrap the original request with params in cache entry
                if (authRequest != null) {
                    request = FrameworkUtils.getCommonAuthReqWithParams(request, authRequest);
                    FrameworkUtils.removeAuthenticationRequestFromCache(sessionDataKey);
                }
                context = initializeFlow(request, response);
            } else {
                returning = true;
                context = FrameworkUtils.getContextData(request);
            }
            if (context != null) {
                context.setReturning(returning);
                // if this is the flow start, store the original request in the context
                if (!context.isReturning() && authRequest != null) {
                    context.setAuthenticationRequest(authRequest.getAuthenticationRequest());
                }
                if (!context.isLogoutRequest()) {
                    FrameworkUtils.getAuthenticationRequestHandler().handle(request, response,
                            context);
                } else {
                    FrameworkUtils.getLogoutRequestHandler().handle(request, response, context);
                }
            } else {
                if (log.isDebugEnabled()) {
                    String key = request.getParameter("sessionDataKey");
                    if (key == null) {
                        log.debug("Session data key is null in the request");
                    } else {
                        log.debug("Session data key : " + key);
                    }
                }
                log.error("Context does not exist. Probably due to invalidated cache");
                FrameworkUtils.sendToRetryPage(request, response);
            }
        } catch (Throwable e) {
            // NOTE(review): catching Throwable also swallows Errors; intentional
            // framework boundary, but worth confirming.
            log.error("Exception in Authentication Framework", e);
            FrameworkUtils.sendToRetryPage(request, response);
        }
    }
/**
* Handles the initial request (from the calling servlet)
*
* @param request
* @param response
* @throws ServletException
* @throws IOException
* @throws
*/
protected AuthenticationContext initializeFlow(HttpServletRequest request,
HttpServletResponse response) throws FrameworkException {
if (log.isDebugEnabled()) {
log.debug("Initializing the flow");
}
// "sessionDataKey" - calling servlet maintains its state information
// using this
String callerSessionDataKey = request.getParameter(FrameworkConstants.SESSION_DATA_KEY);
// "commonAuthCallerPath" - path of the calling servlet. This is the url
// response should be sent to
String callerPath = getCallerPath(request);
// "type" - type of the request. e.g. samlsso, openid, oauth, passivests
String requestType = request.getParameter(FrameworkConstants.RequestParams.TYPE);
// "relyingParty"
String relyingParty = request.getParameter(FrameworkConstants.RequestParams.ISSUER);
// tenant domain
String tenantDomain = getTenantDomain(request);
// Store the request data sent by the caller
AuthenticationContext context = new AuthenticationContext();
context.setCallerSessionKey(callerSessionDataKey);
context.setCallerPath(callerPath);
context.setRequestType(requestType);
try {
context.setRelyingParty(URLEncoder.encode(relyingParty, "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new FrameworkException(e.getMessage(), e);
}
context.setTenantDomain(tenantDomain);
// generate a new key to hold the context data object
String contextId = UUIDGenerator.generateUUID();
context.setContextIdentifier(contextId);
if (log.isDebugEnabled()) {
log.debug("Framework contextId: " + contextId);
}
// if this a logout request from the calling servlet
if (request.getParameter(FrameworkConstants.RequestParams.LOGOUT) != null) {
if (log.isDebugEnabled()) {
log.debug("Starting a logout flow");
}
context.setLogoutRequest(true);
if (context.getRelyingParty() == null || context.getRelyingParty().trim().length() == 0) {
if (log.isDebugEnabled()) {
log.debug("relyingParty param is null. This is a possible logout scenario.");
}
Cookie cookie = FrameworkUtils.getAuthCookie(request);
if (cookie != null) {
context.setSessionIdentifier(cookie.getValue());
}
return context;
}
} else {
if (log.isDebugEnabled()) {
log.debug("Starting an authentication flow");
}
}
findPreviousAuthenticatedSession(request, context);
buildOutboundQueryString(request, context);
return context;
}
private String getCallerPath(HttpServletRequest request) throws FrameworkException {
String callerPath = request.getParameter(FrameworkConstants.RequestParams.CALLER_PATH);
try {
if (callerPath != null) {
callerPath = URLDecoder.decode(callerPath, "UTF-8");
}
} catch (UnsupportedEncodingException e) {
throw new FrameworkException(e.getMessage(), e);
}
return callerPath;
}
private String getTenantDomain(HttpServletRequest request) throws FrameworkException {
String tenantDomain = request.getParameter(FrameworkConstants.RequestParams.TENANT_DOMAIN);
if (tenantDomain == null || tenantDomain.isEmpty() || "null".equals(tenantDomain)) {
String tenantId = request.getParameter(FrameworkConstants.RequestParams.TENANT_ID);
if (tenantId != null && !"-1234".equals(tenantId)) {
try {
Tenant tenant = FrameworkServiceComponent.getRealmService().getTenantManager()
.getTenant(Integer.parseInt(tenantId));
if (tenant != null) {
tenantDomain = tenant.getDomain();
}
} catch (Exception e) {
throw new FrameworkException(e.getMessage(), e);
}
} else {
tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
}
}
return tenantDomain;
}
protected void findPreviousAuthenticatedSession(HttpServletRequest request,
AuthenticationContext context) throws FrameworkException {
// Get service provider chain
SequenceConfig sequenceConfig = ConfigurationFacade.getInstance().getSequenceConfig(
context.getRequestType(),
request.getParameter(FrameworkConstants.RequestParams.ISSUER),
context.getTenantDomain());
Cookie cookie = FrameworkUtils.getAuthCookie(request);
// if cookie exists user has previously authenticated
if (cookie != null) {
if (log.isDebugEnabled()) {
log.debug(FrameworkConstants.COMMONAUTH_COOKIE
+ " cookie is available with the value: " + cookie.getValue());
}
// get the authentication details from the cache
SessionContext sessionContext = FrameworkUtils.getSessionContextFromCache(cookie
.getValue());
if (sessionContext != null) {
context.setSessionIdentifier(cookie.getValue());
String appName = sequenceConfig.getApplicationConfig().getApplicationName();
if (log.isDebugEnabled()) {
log.debug("Service Provider is: " + appName);
}
SequenceConfig previousAuthenticatedSeq = sessionContext
.getAuthenticatedSequences().get(appName);
if (previousAuthenticatedSeq != null) {
if (log.isDebugEnabled()) {
log.debug("A previously authenticated sequence found for the SP: "
+ appName);
}
context.setPreviousSessionFound(true);
sequenceConfig = previousAuthenticatedSeq;
AuthenticatedUser authenticatedUser = sequenceConfig.getAuthenticatedUser();
String authenticatedUserTenantDomain = sequenceConfig.getAuthenticatedUser().getTenantDomain();
if (authenticatedUser != null) {
// set the user for the current authentication/logout flow
context.setSubject(authenticatedUser);
if (log.isDebugEnabled()) {
log.debug("Already authenticated by username: " +
authenticatedUser.getAuthenticatedSubjectIdentifier());
}
if (authenticatedUserTenantDomain != null) {
// set the user tenant domain for the current authentication/logout flow
context.setProperty("user-tenant-domain", authenticatedUserTenantDomain);
if (log.isDebugEnabled()) {
log.debug("Authenticated user tenant domain: " + authenticatedUserTenantDomain);
}
}
}
}
context.setPreviousAuthenticatedIdPs(sessionContext.getAuthenticatedIdPs());
} else {
if (log.isDebugEnabled()) {
log.debug("Failed to find the SessionContext from the cache. Possible cache timeout.");
}
}
}
context.setServiceProviderName(sequenceConfig.getApplicationConfig().getApplicationName());
// set the sequence for the current authentication/logout flow
context.setSequenceConfig(sequenceConfig);
}
private void buildOutboundQueryString(HttpServletRequest request, AuthenticationContext context) {
// Build the outbound query string that will be sent to the authentication endpoint and
// federated IdPs
StringBuilder outboundQueryStringBuilder = new StringBuilder();
outboundQueryStringBuilder.append(FrameworkUtils.getQueryStringWithConfiguredParams(request));
if (StringUtils.isNotEmpty(outboundQueryStringBuilder.toString())) {
outboundQueryStringBuilder.append("&");
}
outboundQueryStringBuilder.append("sessionDataKey=").append(context.getContextIdentifier())
.append("&relyingParty=").append(context.getRelyingParty()).append("&type=")
.append(context.getRequestType()).append("&sp=")
.append(context.getServiceProviderName()).append("&isSaaSApp=")
.append(context.getSequenceConfig().getApplicationConfig().isSaaSApp());
if (log.isDebugEnabled()) {
log.debug("Outbound Query String: " + outboundQueryStringBuilder.toString());
}
context.setContextIdIncludedQueryParams(outboundQueryStringBuilder.toString());
context.setOrignalRequestQueryParams(outboundQueryStringBuilder.toString());
}
}
|
Implementing comment suggestions
|
components/authentication-framework/org.wso2.carbon.identity.application.authentication.framework/src/main/java/org/wso2/carbon/identity/application/authentication/framework/handler/request/impl/DefaultRequestCoordinator.java
|
Implementing comment suggestions
|
|
Java
|
apache-2.0
|
616b50d4851e712ae563e337e7f12c155697b1c9
| 0
|
cunningt/camel,cunningt/camel,cunningt/camel,tadayosi/camel,adessaigne/camel,adessaigne/camel,christophd/camel,tadayosi/camel,adessaigne/camel,apache/camel,apache/camel,tadayosi/camel,apache/camel,cunningt/camel,apache/camel,tadayosi/camel,christophd/camel,cunningt/camel,christophd/camel,christophd/camel,apache/camel,tadayosi/camel,cunningt/camel,adessaigne/camel,tadayosi/camel,christophd/camel,adessaigne/camel,adessaigne/camel,christophd/camel,apache/camel
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.resume.kafka;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import org.apache.camel.Resumable;
import org.apache.camel.ResumeCache;
import org.apache.camel.Service;
import org.apache.camel.UpdatableConsumerResumeStrategy;
import org.apache.camel.util.StringHelper;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for Kafka-backed resume strategies: offsets are produced to a
 * Kafka topic on update and read back into a {@link ResumeCache} on start.
 */
public abstract class AbstractKafkaResumeStrategy<K, V>
        implements UpdatableConsumerResumeStrategy<K, V, Resumable<K, V>>, Service {
    public static final int UNLIMITED = -1;

    private static final Logger LOG = LoggerFactory.getLogger(AbstractKafkaResumeStrategy.class);

    private final String topic;
    private Consumer<K, V> consumer;
    private Producer<K, V> producer;
    // NOTE(review): written from the producer send-callback (Kafka I/O thread) and
    // reset from the caller thread in produce(); the counter is best-effort only.
    private long errorCount;
    private final List<Future<RecordMetadata>> sentItems = new ArrayList<>();
    private final ResumeCache<K, V> resumeCache;
    private boolean subscribed;
    private Properties producerConfig;
    private Properties consumerConfig;

    public AbstractKafkaResumeStrategy(String bootstrapServers, String topic, ResumeCache<K, V> resumeCache) {
        this.topic = topic;
        this.producerConfig = createProducer(bootstrapServers);
        this.consumerConfig = createConsumer(bootstrapServers);
        this.resumeCache = resumeCache;

        init();
    }

    public AbstractKafkaResumeStrategy(String topic, ResumeCache<K, V> resumeCache, Properties producerConfig,
                                       Properties consumerConfig) {
        this.topic = topic;
        this.resumeCache = resumeCache;
        this.producerConfig = producerConfig;
        this.consumerConfig = consumerConfig;

        init();
    }

    /**
     * Creates a basic string-based producer configuration
     *
     * @param  bootstrapServers the Kafka host
     * @return                  A set of default properties for producing string-based key/pair records to Kafka
     */
    public static Properties createProducer(String bootstrapServers) {
        Properties config = new Properties();
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        StringHelper.notEmpty(bootstrapServers, "bootstrapServers");
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);

        return config;
    }

    /**
     * Creates a basic string-based consumer configuration
     *
     * @param  bootstrapServers the Kafka host
     * @return                  A set of default properties for consuming string-based key/pair records from Kafka
     */
    public static Properties createConsumer(String bootstrapServers) {
        Properties config = new Properties();
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        StringHelper.notEmpty(bootstrapServers, "bootstrapServers");
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);

        // A random group id so every strategy instance reads the topic independently.
        String groupId = UUID.randomUUID().toString();
        LOG.debug("Creating consumer with {}[{}]", ConsumerConfig.GROUP_ID_CONFIG, groupId);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, Boolean.TRUE.toString());

        return config;
    }

    /**
     * Sends a record to the configured topic asynchronously; the returned future
     * is tracked via {@link #getSentItems()} and failed sends increment the error
     * count reported by {@link #getErrorCount()}.
     *
     * @param  key                  the record key
     * @param  message              the message to send
     * @throws ExecutionException
     * @throws InterruptedException
     */
    public void produce(K key, V message) throws ExecutionException, InterruptedException {
        ProducerRecord<K, V> record = new ProducerRecord<>(topic, key, message);

        errorCount = 0;
        Future<RecordMetadata> future = producer.send(record, (recordMetadata, e) -> {
            if (e != null) {
                LOG.error("Failed to send message {}", e.getMessage(), e);
                errorCount++;
            }
        });

        sentItems.add(future);
    }

    /**
     * Persists the latest offset of the given resumable to Kafka and mirrors it
     * in the local resume cache.
     */
    @Override
    public void updateLastOffset(Resumable<K, V> offset) throws Exception {
        K key = offset.getAddressable();
        V offsetValue = offset.getLastOffset().offset();

        LOG.debug("Updating offset on Kafka with key {} to {}", key, offsetValue);

        produce(key, offsetValue);

        resumeCache.add(key, offsetValue);
    }

    /**
     * Reads previously stored offsets from the topic into the resume cache,
     * stopping when the topic yields no more records or the cache is full.
     */
    protected void loadCache() throws Exception {
        subscribe();

        LOG.debug("Loading records from topic {}", topic);

        ConsumerRecords<K, V> records;
        do {
            records = consume();

            if (records.isEmpty()) {
                break;
            }

            for (ConsumerRecord<K, V> record : records) {
                V value = record.value();

                LOG.trace("Read from Kafka: {}", value);
                resumeCache.add(record.key(), record.value());

                if (resumeCache.isFull()) {
                    break;
                }
            }
        } while (true);

        unsubscribe();
    }

    // TODO: bad method ...
    /**
     * Subscribes the consumer to the topic, if not already subscribed.
     *
     * @param topic the topic to consume the messages from
     */
    public void checkAndSubscribe(String topic) {
        if (!subscribed) {
            consumer.subscribe(Collections.singletonList(topic));

            subscribed = true;
        }
    }

    // TODO: bad method ...
    /**
     * Subscribes the consumer to the topic and, on partition assignment, seeks
     * back {@code remaining} records from the end of each partition.
     *
     * @param topic the topic to consume the messages from
     */
    public void checkAndSubscribe(String topic, long remaining) {
        if (!subscribed) {
            consumer.subscribe(Collections.singletonList(topic), new ConsumerRebalanceListener() {
                @Override
                public void onPartitionsRevoked(Collection<TopicPartition> collection) {

                }

                @Override
                public void onPartitionsAssigned(Collection<TopicPartition> assignments) {
                    consumer.seekToEnd(assignments);
                    for (TopicPartition assignment : assignments) {
                        final long endPosition = consumer.position(assignment);
                        final long startPosition = endPosition - remaining;

                        if (startPosition >= 0) {
                            consumer.seek(assignment, startPosition);
                        } else {
                            LOG.info(
                                    "Ignoring the seek command because the initial offset is negative (the topic is likely empty)");
                        }
                    }
                }
            });

            subscribed = true;
        }
    }

    public abstract void subscribe() throws Exception;

    /**
     * Unsubscribes from the topic, tolerating a consumer that was already closed.
     */
    public void unsubscribe() {
        try {
            consumer.unsubscribe();
        } catch (IllegalStateException e) {
            LOG.warn("The consumer is likely already closed. Skipping unsubscribing from {}", topic);
        } catch (Exception e) {
            LOG.error("Error unsubscribing from the Kafka topic {}: {}", topic, e.getMessage(), e);
        }
    }

    /**
     * Polls the topic until a non-empty batch of records arrives or the default
     * number of retries (10) is exhausted.
     *
     * @return the consumed records (possibly empty)
     */
    public ConsumerRecords<K, V> consume() {
        int retries = 10;

        return consume(retries);
    }

    /**
     * Polls the topic until a non-empty batch of records arrives or the given
     * number of retries is exhausted.
     *
     * @param  retries the maximum number of 100 ms polls to attempt
     * @return         the consumed records (possibly empty)
     */
    public ConsumerRecords<K, V> consume(int retries) {
        while (retries > 0) {

            ConsumerRecords<K, V> records = consumer.poll(Duration.ofMillis(100));
            if (!records.isEmpty()) {
                return records;
            }

            retries--;
        }

        return ConsumerRecords.empty();
    }

    public long getErrorCount() {
        return errorCount;
    }

    public List<Future<RecordMetadata>> getSentItems() {
        return Collections.unmodifiableList(sentItems);
    }

    @Override
    public void build() {
        Service.super.build();
    }

    @Override
    public void init() {
        Service.super.init();

        LOG.debug("Initializing the Kafka resume strategy");
        if (consumer == null) {
            consumer = new KafkaConsumer<>(consumerConfig);
        }

        if (producer == null) {
            producer = new KafkaProducer<>(producerConfig);
        }
    }

    @Override
    public void stop() {
        // Fix: release the Kafka clients (sockets, buffers, threads) instead of
        // leaking them. Fields are reset to null so a subsequent init() recreates them.
        LOG.info("Stopping the Kafka resume strategy");
        if (producer != null) {
            try {
                producer.close();
            } catch (Exception e) {
                LOG.warn("Error closing the Kafka producer: {}", e.getMessage(), e);
            } finally {
                producer = null;
            }
        }

        if (consumer != null) {
            try {
                consumer.close();
            } catch (Exception e) {
                LOG.warn("Error closing the Kafka consumer: {}", e.getMessage(), e);
            } finally {
                consumer = null;
            }
        }
    }

    @Override
    public void close() throws IOException {
        Service.super.close();
    }

    @Override
    public void start() {
        LOG.info("Starting the kafka resume strategy");

        try {
            loadCache();
        } catch (Exception e) {
            LOG.error("Failed to load already processed items: {}", e.getMessage(), e);
        }
    }
}
|
components/camel-kafka/src/main/java/org/apache/camel/processor/resume/kafka/AbstractKafkaResumeStrategy.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.resume.kafka;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import org.apache.camel.Resumable;
import org.apache.camel.ResumeCache;
import org.apache.camel.Service;
import org.apache.camel.UpdatableConsumerResumeStrategy;
import org.apache.camel.util.StringHelper;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for Kafka-backed resume strategies: offsets are produced to a
 * Kafka topic on update and read back into a {@link ResumeCache} on start.
 */
public abstract class AbstractKafkaResumeStrategy<K, V>
        implements UpdatableConsumerResumeStrategy<K, V, Resumable<K, V>>, Service {
    public static final int UNLIMITED = -1;

    private static final Logger LOG = LoggerFactory.getLogger(AbstractKafkaResumeStrategy.class);

    private final String topic;
    private Consumer<K, V> consumer;
    private Producer<K, V> producer;
    // NOTE(review): written from the producer send-callback (Kafka I/O thread) and
    // reset from the caller thread in produce(); the counter is best-effort only.
    private long errorCount;
    private final List<Future<RecordMetadata>> sentItems = new ArrayList<>();
    private final ResumeCache<K, V> resumeCache;
    private boolean subscribed;
    private Properties producerConfig;
    private Properties consumerConfig;

    public AbstractKafkaResumeStrategy(String bootstrapServers, String topic, ResumeCache<K, V> resumeCache) {
        this.topic = topic;
        this.producerConfig = createProducer(bootstrapServers);
        this.consumerConfig = createConsumer(bootstrapServers);
        this.resumeCache = resumeCache;

        init();
    }

    public AbstractKafkaResumeStrategy(String topic, ResumeCache<K, V> resumeCache, Properties producerConfig,
                                       Properties consumerConfig) {
        this.topic = topic;
        this.resumeCache = resumeCache;
        this.producerConfig = producerConfig;
        this.consumerConfig = consumerConfig;

        init();
    }

    /**
     * Creates a basic string-based producer configuration.
     */
    private Properties createProducer(String bootstrapServers) {
        Properties config = new Properties();
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        StringHelper.notEmpty(bootstrapServers, "bootstrapServers");
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);

        // set up the producer to remove all batching on send, we want all sends
        // to be fully synchronous
        config.putIfAbsent(ProducerConfig.ACKS_CONFIG, "1");
        config.putIfAbsent(ProducerConfig.BATCH_SIZE_CONFIG, "0");

        return config;
    }

    /**
     * Creates a basic string-based consumer configuration.
     */
    private Properties createConsumer(String bootstrapServers) {
        Properties config = new Properties();
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        StringHelper.notEmpty(bootstrapServers, "bootstrapServers");
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);

        // A random group id so every strategy instance reads the topic independently.
        String groupId = UUID.randomUUID().toString();
        LOG.debug("Creating consumer with {}[{}]", ConsumerConfig.GROUP_ID_CONFIG, groupId);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, Boolean.TRUE.toString());

        return config;
    }

    /**
     * Sends a record to a topic asynchronously; the returned future is tracked
     * via {@link #getSentItems()} and failed sends increment the error count
     * reported by {@link #getErrorCount()}.
     *
     * @param  topic                the topic to send data to
     * @param  key                  the record key
     * @param  message              the message to send
     * @throws ExecutionException
     * @throws InterruptedException
     */
    public void produce(String topic, K key, V message) throws ExecutionException, InterruptedException {
        ProducerRecord<K, V> record = new ProducerRecord<>(topic, key, message);

        errorCount = 0;
        Future<RecordMetadata> future = producer.send(record, (recordMetadata, e) -> {
            if (e != null) {
                LOG.error("Failed to send message {}", e.getMessage(), e);
                errorCount++;
            }
        });

        sentItems.add(future);
    }

    /**
     * Persists the latest offset of the given resumable to Kafka and mirrors it
     * in the local resume cache.
     */
    @Override
    public void updateLastOffset(Resumable<K, V> offset) throws Exception {
        K key = offset.getAddressable();
        V offsetValue = offset.getLastOffset().offset();

        LOG.debug("Updating offset on Kafka with key {} to {}", key, offsetValue);

        produce(topic, key, offsetValue);

        resumeCache.add(key, offsetValue);
    }

    /**
     * Reads previously stored offsets from the topic into the resume cache,
     * stopping when the topic yields no more records or the cache is full.
     */
    protected void loadCache() throws Exception {
        subscribe();

        LOG.debug("Loading records from topic {}", topic);

        ConsumerRecords<K, V> records;
        do {
            records = consume();

            if (records.isEmpty()) {
                break;
            }

            for (ConsumerRecord<K, V> record : records) {
                V value = record.value();

                LOG.trace("Read from Kafka: {}", value);
                resumeCache.add(record.key(), record.value());

                if (resumeCache.isFull()) {
                    break;
                }
            }
        } while (true);

        unsubscribe();
    }

    // TODO: bad method ...
    /**
     * Subscribes the consumer to the topic, if not already subscribed.
     *
     * @param topic the topic to consume the messages from
     */
    public void checkAndSubscribe(String topic) {
        if (!subscribed) {
            consumer.subscribe(Collections.singletonList(topic));

            subscribed = true;
        }
    }

    // TODO: bad method ...
    /**
     * Subscribes the consumer to the topic and, on partition assignment, seeks
     * back {@code remaining} records from the end of each partition.
     *
     * @param topic the topic to consume the messages from
     */
    public void checkAndSubscribe(String topic, long remaining) {
        if (!subscribed) {
            consumer.subscribe(Collections.singletonList(topic), new ConsumerRebalanceListener() {
                @Override
                public void onPartitionsRevoked(Collection<TopicPartition> collection) {

                }

                @Override
                public void onPartitionsAssigned(Collection<TopicPartition> assignments) {
                    consumer.seekToEnd(assignments);
                    for (TopicPartition assignment : assignments) {
                        final long endPosition = consumer.position(assignment);
                        final long startPosition = endPosition - remaining;

                        if (startPosition >= 0) {
                            consumer.seek(assignment, startPosition);
                        } else {
                            LOG.info(
                                    "Ignoring the seek command because the initial offset is negative (the topic is likely empty)");
                        }
                    }
                }
            });

            subscribed = true;
        }
    }

    public abstract void subscribe() throws Exception;

    /**
     * Unsubscribes from the topic.
     *
     * Fix: KafkaConsumer.unsubscribe() throws IllegalStateException when the
     * consumer has already been closed; tolerate that instead of propagating.
     */
    public void unsubscribe() {
        try {
            consumer.unsubscribe();
        } catch (IllegalStateException e) {
            LOG.warn("The consumer is likely already closed. Skipping unsubscribing from {}", topic);
        } catch (Exception e) {
            LOG.error("Error unsubscribing from the Kafka topic {}: {}", topic, e.getMessage(), e);
        }
    }

    /**
     * Polls the topic until a non-empty batch of records arrives or the default
     * number of retries (10) is exhausted.
     *
     * @return the consumed records (possibly empty)
     */
    public ConsumerRecords<K, V> consume() {
        int retries = 10;

        return consume(retries);
    }

    /**
     * Polls the topic until a non-empty batch of records arrives or the given
     * number of retries is exhausted.
     *
     * @param  retries the maximum number of 100 ms polls to attempt
     * @return         the consumed records (possibly empty)
     */
    public ConsumerRecords<K, V> consume(int retries) {
        while (retries > 0) {

            ConsumerRecords<K, V> records = consumer.poll(Duration.ofMillis(100));
            if (!records.isEmpty()) {
                return records;
            }

            retries--;
        }

        return ConsumerRecords.empty();
    }

    public long getErrorCount() {
        return errorCount;
    }

    public List<Future<RecordMetadata>> getSentItems() {
        return Collections.unmodifiableList(sentItems);
    }

    @Override
    public void build() {
        Service.super.build();
    }

    @Override
    public void init() {
        Service.super.init();

        LOG.debug("Initializing the Kafka resume strategy");
        if (consumer == null) {
            consumer = new KafkaConsumer<>(consumerConfig);
        }

        if (producer == null) {
            producer = new KafkaProducer<>(producerConfig);
        }
    }

    @Override
    public void stop() {
        // Fix: release the Kafka clients (sockets, buffers, threads) instead of
        // leaking them. Fields are reset to null so a subsequent init() recreates them.
        LOG.info("Stopping the Kafka resume strategy");
        if (producer != null) {
            try {
                producer.close();
            } catch (Exception e) {
                LOG.warn("Error closing the Kafka producer: {}", e.getMessage(), e);
            } finally {
                producer = null;
            }
        }

        if (consumer != null) {
            try {
                consumer.close();
            } catch (Exception e) {
                LOG.warn("Error closing the Kafka consumer: {}", e.getMessage(), e);
            } finally {
                consumer = null;
            }
        }
    }

    @Override
    public void close() throws IOException {
        Service.super.close();
    }

    @Override
    public void start() {
        LOG.info("Starting the kafka resume strategy");

        try {
            loadCache();
        } catch (Exception e) {
            LOG.error("Failed to load already processed items: {}", e.getMessage(), e);
        }
    }
}
|
CAMEL-15562: minor cleanups for the Kafka strategy
|
components/camel-kafka/src/main/java/org/apache/camel/processor/resume/kafka/AbstractKafkaResumeStrategy.java
|
CAMEL-15562: minor cleanups for the Kafka strategy
|
|
Java
|
apache-2.0
|
4c536288b364ab1eb11fdf08d5fc3b18374cae9c
| 0
|
wso2/carbon-identity-framework,wso2/carbon-identity-framework,wso2/carbon-identity-framework,wso2/carbon-identity-framework
|
/*
* Copyright (c) 2014 WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.idp.mgt;
import org.apache.axiom.om.util.Base64;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.core.util.KeyStoreManager;
import org.wso2.carbon.identity.application.common.ApplicationAuthenticatorService;
import org.wso2.carbon.identity.application.common.ProvisioningConnectorService;
import org.wso2.carbon.identity.application.common.model.ClaimConfig;
import org.wso2.carbon.identity.application.common.model.ClaimMapping;
import org.wso2.carbon.identity.application.common.model.FederatedAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.IdentityProvider;
import org.wso2.carbon.identity.application.common.model.IdentityProviderProperty;
import org.wso2.carbon.identity.application.common.model.LocalRole;
import org.wso2.carbon.identity.application.common.model.PermissionsAndRoleConfig;
import org.wso2.carbon.identity.application.common.model.Property;
import org.wso2.carbon.identity.application.common.model.ProvisioningConnectorConfig;
import org.wso2.carbon.identity.application.common.model.RoleMapping;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationConstants;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationManagementUtil;
import org.wso2.carbon.identity.base.IdentityConstants;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.ServiceURLBuilder;
import org.wso2.carbon.identity.core.URLBuilderException;
import org.wso2.carbon.identity.core.model.ExpressionNode;
import org.wso2.carbon.identity.core.model.FilterTreeBuilder;
import org.wso2.carbon.identity.core.model.Node;
import org.wso2.carbon.identity.core.model.OperationNode;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.role.mgt.core.IdentityRoleManagementException;
import org.wso2.carbon.identity.role.mgt.core.RoleManagementService;
import org.wso2.carbon.idp.mgt.dao.CacheBackedIdPMgtDAO;
import org.wso2.carbon.idp.mgt.dao.FileBasedIdPMgtDAO;
import org.wso2.carbon.idp.mgt.dao.IdPManagementDAO;
import org.wso2.carbon.idp.mgt.internal.IdPManagementServiceComponent;
import org.wso2.carbon.idp.mgt.internal.IdpMgtServiceComponentHolder;
import org.wso2.carbon.idp.mgt.listener.IdentityProviderMgtListener;
import org.wso2.carbon.idp.mgt.model.ConnectedAppsResult;
import org.wso2.carbon.idp.mgt.model.IdpSearchResult;
import org.wso2.carbon.idp.mgt.util.IdPManagementConstants;
import org.wso2.carbon.idp.mgt.util.IdPManagementUtil;
import org.wso2.carbon.idp.mgt.util.MetadataConverter;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.KeyStore;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.xml.stream.XMLStreamException;
import static org.wso2.carbon.user.core.UserCoreConstants.INTERNAL_DOMAIN;
import static org.wso2.carbon.user.core.UserCoreConstants.WORKFLOW_DOMAIN;
import static org.wso2.carbon.user.mgt.UserMgtConstants.APPLICATION_DOMAIN;
public class IdentityProviderManager implements IdpManager {
// Class-wide logger for this manager.
private static final Log log = LogFactory.getLog(IdentityProviderManager.class);
// Dedicated "diagnostics" logger; presumably used by other methods of this class outside this view.
private static final Log diagnosticLog = LogFactory.getLog("diagnostics");
// Property name under which the resident IdP's OIDC entity id is stored.
private static final String OPENID_IDP_ENTITY_ID = "IdPEntityId";
// Cache-backed DAO wrapping the JDBC IdP management DAO; shared by all instances.
private static CacheBackedIdPMgtDAO dao = new CacheBackedIdPMgtDAO(new IdPManagementDAO());
// Eagerly-initialized singleton instance (see getInstance()).
private static volatile IdentityProviderManager instance = new IdentityProviderManager();
// Private constructor: this class is a singleton accessed via getInstance().
private IdentityProviderManager() {
}
/**
 * Returns the singleton {@code IdentityProviderManager} instance.
 *
 * @return the shared, eagerly-initialized manager instance
 */
public static IdentityProviderManager getInstance() {
return instance;
}
/**
 * Retrieves the resident Identity Provider for a given tenant.
 * <p>
 * The stored resident IdP is loaded from the DAO and then enriched with the
 * effective endpoint URLs (OpenID, OAuth1/2, OIDC, Passive-STS, WS-Trust,
 * SCIM 1.1/2.0) resolved from identity.xml overrides or server defaults,
 * plus the tenant's primary certificate.
 *
 * @param tenantDomain Tenant domain whose resident IdP is requested
 * @return <code>LocalIdentityProvider</code>
 * @throws IdentityProviderManagementException Error when getting Resident Identity Providers
 */
@Override
public IdentityProvider getResidentIdP(String tenantDomain)
throws IdentityProviderManagementException {
IdPManagementUtil.setTenantSpecifiers(tenantDomain);
// Endpoint URLs to be resolved below: file-configured override first, server default otherwise.
String openIdUrl;
String oauth1RequestTokenUrl;
String oauth1AuthorizeUrl;
String oauth1AccessTokenUrl;
String oauth2AuthzEPUrl;
String oauth2TokenEPUrl;
String oauth2RevokeEPUrl;
String oauth2IntrospectEpUrl;
String oauth2UserInfoEPUrl;
String oidcCheckSessionEPUrl;
String oidcLogoutEPUrl;
String oIDCWebFingerEPUrl;
String oAuth2DCREPUrl;
String oAuth2JWKSPage;
String oIDCDiscoveryEPUrl;
String passiveStsUrl;
String stsUrl;
String scimUsersEndpoint;
String scimGroupsEndpoint;
String scim2UsersEndpoint;
String scim2GroupsEndpoint;
// Read optional endpoint overrides from server configuration (identity.xml).
openIdUrl = IdentityUtil.getProperty(IdentityConstants.ServerConfig.OPENID_SERVER_URL);
oauth1RequestTokenUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_REQUEST_TOKEN_URL);
oauth1AuthorizeUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_AUTHORIZE_URL);
oauth1AccessTokenUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_ACCESSTOKEN_URL);
oauth2AuthzEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_AUTHZ_EP_URL);
oauth2TokenEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_TOKEN_EP_URL);
oauth2UserInfoEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_USERINFO_EP_URL);
oidcCheckSessionEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_CHECK_SESSION_EP_URL);
oidcLogoutEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_LOGOUT_EP_URL);
passiveStsUrl = IdentityUtil.getProperty(IdentityConstants.STS.PSTS_IDENTITY_PROVIDER_URL);
stsUrl = IdentityUtil.getProperty(IdentityConstants.STS.STS_IDENTITY_PROVIDER_URL);
scimUsersEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM.USER_EP_URL);
scimGroupsEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM.GROUP_EP_URL);
scim2UsersEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM2.USER_EP_URL);
scim2GroupsEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM2.GROUP_EP_URL);
oauth2RevokeEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_REVOKE_EP_URL);
oauth2IntrospectEpUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_INTROSPECT_EP_URL);
oIDCWebFingerEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_WEB_FINGER_EP_URL);
oAuth2DCREPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_DCR_EP_URL);
oAuth2JWKSPage = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_JWKS_EP_URL);
oIDCDiscoveryEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_DISCOVERY_EP_URL);
// Fall back to server-derived defaults for endpoints with no configured override.
if (StringUtils.isBlank(openIdUrl)) {
openIdUrl = IdentityUtil.getServerURL(IdentityConstants.OpenId.OPENID, true, true);
}
if (StringUtils.isBlank(oauth1RequestTokenUrl)) {
oauth1RequestTokenUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.REQUEST_TOKEN, true, true);
}
if (StringUtils.isBlank(oauth1AuthorizeUrl)) {
oauth1AuthorizeUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.AUTHORIZE_URL, true, true);
}
if (StringUtils.isBlank(oauth1AccessTokenUrl)) {
oauth1AccessTokenUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.ACCESS_TOKEN, true, true);
}
// OAuth2/OIDC endpoints: resolve to absolute URLs; some additionally get the tenant
// path segment appended when running in legacy (non tenant-qualified URL) mode.
oauth2AuthzEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.AUTHORIZE, oauth2AuthzEPUrl);
oauth2TokenEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.TOKEN, oauth2TokenEPUrl);
oauth2RevokeEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.REVOKE, oauth2RevokeEPUrl);
oauth2IntrospectEpUrl = resolveAbsoluteURL(IdentityConstants.OAuth.INTROSPECT, oauth2IntrospectEpUrl);
oauth2IntrospectEpUrl = addTenantPathParamInLegacyMode(oauth2IntrospectEpUrl, tenantDomain);
oauth2UserInfoEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.USERINFO, oauth2UserInfoEPUrl);
oidcCheckSessionEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.CHECK_SESSION, oidcCheckSessionEPUrl);
oidcLogoutEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.LOGOUT, oidcLogoutEPUrl);
oAuth2DCREPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.DCR, oAuth2DCREPUrl);
oAuth2DCREPUrl = addTenantPathParamInLegacyMode(oAuth2DCREPUrl, tenantDomain);
oAuth2JWKSPage = resolveAbsoluteURL(IdentityConstants.OAuth.JWKS, oAuth2JWKSPage);
oAuth2JWKSPage = addTenantPathParamInLegacyMode(oAuth2JWKSPage, tenantDomain);
oIDCDiscoveryEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.DISCOVERY, oIDCDiscoveryEPUrl);
oIDCDiscoveryEPUrl = addTenantPathParamInLegacyMode(oIDCDiscoveryEPUrl, tenantDomain);
passiveStsUrl = resolveAbsoluteURL(IdentityConstants.STS.PASSIVE_STS, passiveStsUrl);
// If sts url is configured in file, change it according to tenant domain. If not configured, add a default url
if (StringUtils.isNotBlank(stsUrl)) {
stsUrl = stsUrl.replace(IdentityConstants.STS.WSO2_CARBON_STS, getTenantContextFromTenantDomain(tenantDomain) +
IdentityConstants.STS.WSO2_CARBON_STS);
} else {
stsUrl = IdentityUtil.getServerURL("services/" + getTenantContextFromTenantDomain(tenantDomain) +
IdentityConstants.STS.WSO2_CARBON_STS, true, true);
}
// NOTE(review): SCIM 1.1 endpoints below are NOT tenant-qualified, while the SCIM2
// endpoints are — confirm this asymmetry is intentional.
if (StringUtils.isBlank(scimUsersEndpoint)) {
scimUsersEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM.USER_EP, true, false);
}
if (StringUtils.isBlank(scimGroupsEndpoint)) {
scimGroupsEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM.GROUP_EP, true, false);
}
if (StringUtils.isBlank(scim2UsersEndpoint)) {
scim2UsersEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM2.USER_EP, true, false);
}
// Tenant-qualify SCIM2 endpoints for non-super tenants; a malformed URL is logged and left as-is.
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
scim2UsersEndpoint = getTenantUrl(scim2UsersEndpoint, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("SCIM 2.0 Users endpoint is malformed");
}
if (StringUtils.isBlank(scim2GroupsEndpoint)) {
scim2GroupsEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM2.GROUP_EP, true, false);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
scim2GroupsEndpoint = getTenantUrl(scim2GroupsEndpoint, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("SCIM 2.0 Groups endpoint is malformed");
}
// Load the stored resident IdP record; its absence is an error for the tenant.
IdentityProvider identityProvider = dao.getIdPByName(null,
IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME,
IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
if (identityProvider == null) {
String message = "Could not find Resident Identity Provider for tenant " + tenantDomain;
throw new IdentityProviderManagementException(message);
}
int tenantId = -1;
try {
tenantId = IdPManagementServiceComponent.getRealmService().getTenantManager().getTenantId(tenantDomain);
} catch (UserStoreException e) {
throw new IdentityProviderManagementException(
"Exception occurred while retrieving Tenant ID from Tenant Domain " + tenantDomain, e);
}
// Load the tenant's primary certificate within a tenant flow; always end the flow.
X509Certificate cert = null;
try {
IdentityTenantUtil.initializeRegistry(tenantId);
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext();
carbonContext.setTenantDomain(tenantDomain, true);
KeyStoreManager keyStoreManager = KeyStoreManager.getInstance(tenantId);
if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
// derive key store name
String ksName = tenantDomain.trim().replace(".", "-");
// derive JKS name
String jksName = ksName + ".jks";
KeyStore keyStore = keyStoreManager.getKeyStore(jksName);
cert = (X509Certificate) keyStore.getCertificate(tenantDomain);
} else {
cert = keyStoreManager.getDefaultPrimaryCertificate();
}
} catch (Exception e) {
String msg = "Error retrieving primary certificate for tenant : " + tenantDomain;
throw new IdentityProviderManagementException(msg, e);
} finally {
PrivilegedCarbonContext.endTenantFlow();
}
if (cert == null) {
throw new IdentityProviderManagementException(
"Cannot find the primary certificate for tenant " + tenantDomain);
}
try {
identityProvider.setCertificate(Base64.encode(cert.getEncoded()));
} catch (CertificateEncodingException e) {
String msg = "Error occurred while encoding primary certificate for tenant domain " + tenantDomain;
throw new IdentityProviderManagementException(msg, e);
}
// Rebuild the federated authenticator configs, merging stored properties with
// the resolved endpoint URLs computed above.
List<FederatedAuthenticatorConfig> fedAuthnCofigs = new ArrayList<FederatedAuthenticatorConfig>();
List<Property> propertiesList = null;
// OpenID authenticator: add the OpenID server URL only if not already stored.
FederatedAuthenticatorConfig openIdFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.OpenID.NAME);
if (openIdFedAuthn == null) {
openIdFedAuthn = new FederatedAuthenticatorConfig();
openIdFedAuthn.setName(IdentityApplicationConstants.Authenticator.OpenID.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(openIdFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(openIdFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OpenID.OPEN_ID_URL) == null) {
Property openIdUrlProp = new Property();
openIdUrlProp.setName(IdentityApplicationConstants.Authenticator.OpenID.OPEN_ID_URL);
openIdUrlProp.setValue(openIdUrl);
propertiesList.add(openIdUrlProp);
}
openIdFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(openIdFedAuthn);
// SAML2 related endpoints.
FederatedAuthenticatorConfig saml2SSOFedAuthn = buildSAMLProperties(identityProvider, tenantDomain);
fedAuthnCofigs.add(saml2SSOFedAuthn);
// OAuth 1.0a authenticator: fill in any missing endpoint URLs.
FederatedAuthenticatorConfig oauth1FedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.OAuth10A.NAME);
if (oauth1FedAuthn == null) {
oauth1FedAuthn = new FederatedAuthenticatorConfig();
oauth1FedAuthn.setName(IdentityApplicationConstants.OAuth10A.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(oauth1FedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_REQUEST_TOKEN_URL) == null) {
Property oauth1ReqTokUrlProp = new Property();
oauth1ReqTokUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_REQUEST_TOKEN_URL);
oauth1ReqTokUrlProp.setValue(oauth1RequestTokenUrl);
propertiesList.add(oauth1ReqTokUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_AUTHORIZE_URL) == null) {
Property oauth1AuthzUrlProp = new Property();
oauth1AuthzUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_AUTHORIZE_URL);
oauth1AuthzUrlProp.setValue(oauth1AuthorizeUrl);
propertiesList.add(oauth1AuthzUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_ACCESS_TOKEN_URL) == null) {
Property oauth1AccessTokUrlProp = new Property();
oauth1AccessTokUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_ACCESS_TOKEN_URL);
oauth1AccessTokUrlProp.setValue(oauth1AccessTokenUrl);
propertiesList.add(oauth1AccessTokUrlProp);
}
oauth1FedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(oauth1FedAuthn);
// OIDC authenticator: resolveFedAuthnProperty prefers the stored value unless
// tenant-qualified URLs are enabled, in which case the calculated URL wins.
FederatedAuthenticatorConfig oidcFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.OIDC.NAME);
if (oidcFedAuthn == null) {
oidcFedAuthn = new FederatedAuthenticatorConfig();
oidcFedAuthn.setName(IdentityApplicationConstants.Authenticator.OIDC.NAME);
}
propertiesList = new ArrayList<Property>();
Property idPEntityIdProp = resolveFedAuthnProperty(getOIDCResidentIdPEntityId(), oidcFedAuthn,
OPENID_IDP_ENTITY_ID);
propertiesList.add(idPEntityIdProp);
Property authzUrlProp = resolveFedAuthnProperty(oauth2AuthzEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_AUTHZ_URL);
propertiesList.add(authzUrlProp);
Property tokenUrlProp = resolveFedAuthnProperty(oauth2TokenEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_TOKEN_URL);
propertiesList.add(tokenUrlProp);
Property revokeUrlProp = resolveFedAuthnProperty(oauth2RevokeEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_REVOKE_URL);
propertiesList.add(revokeUrlProp);
Property instropsectUrlProp = resolveFedAuthnProperty(oauth2IntrospectEpUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_INTROSPECT_URL);
propertiesList.add(instropsectUrlProp);
Property userInfoUrlProp = resolveFedAuthnProperty(oauth2UserInfoEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_USER_INFO_EP_URL);
propertiesList.add(userInfoUrlProp);
Property checkSessionUrlProp = resolveFedAuthnProperty(oidcCheckSessionEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OIDC_CHECK_SESSION_URL);
propertiesList.add(checkSessionUrlProp);
Property logoutUrlProp = resolveFedAuthnProperty(oidcLogoutEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OIDC_LOGOUT_URL);
propertiesList.add(logoutUrlProp);
Property dcrUrlProp = resolveFedAuthnProperty(oAuth2DCREPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_DCR_EP_URL);
propertiesList.add(dcrUrlProp);
Property webFingerUrlProp = resolveFedAuthnProperty(oIDCWebFingerEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OIDC_WEB_FINGER_EP_URL);
propertiesList.add(webFingerUrlProp);
Property jwksUrlProp = resolveFedAuthnProperty(oAuth2JWKSPage, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_JWKS_EP_URL);
propertiesList.add(jwksUrlProp);
Property discoveryUrlProp = resolveFedAuthnProperty(oIDCDiscoveryEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OIDC_DISCOVERY_EP_URL);
propertiesList.add(discoveryUrlProp);
oidcFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(oidcFedAuthn);
// Passive STS authenticator: always overwrite the provider URL with the calculated
// value; keep all other stored properties.
FederatedAuthenticatorConfig passiveSTSFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
if (passiveSTSFedAuthn == null) {
passiveSTSFedAuthn = new FederatedAuthenticatorConfig();
passiveSTSFedAuthn.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
}
propertiesList = new ArrayList<>();
Property passiveSTSUrlProperty = IdentityApplicationManagementUtil.getProperty(passiveSTSFedAuthn
.getProperties(), IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL);
if (passiveSTSUrlProperty == null) {
passiveSTSUrlProperty = new Property();
passiveSTSUrlProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL);
}
passiveSTSUrlProperty.setValue(passiveStsUrl);
propertiesList.add(passiveSTSUrlProperty);
Property stsIdPEntityIdProperty = IdentityApplicationManagementUtil.getProperty(passiveSTSFedAuthn
.getProperties(), IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_ENTITY_ID);
if (stsIdPEntityIdProperty == null) {
stsIdPEntityIdProperty = new Property();
stsIdPEntityIdProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS
.IDENTITY_PROVIDER_ENTITY_ID);
stsIdPEntityIdProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
}
propertiesList.add(stsIdPEntityIdProperty);
// Carry over remaining stored properties, excluding the two handled above.
for (Property property : passiveSTSFedAuthn.getProperties()) {
if (property != null && !IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL
.equals(property.getName()) && !IdentityApplicationConstants.Authenticator.PassiveSTS
.IDENTITY_PROVIDER_ENTITY_ID.equals(property.getName())) {
propertiesList.add(property);
}
}
passiveSTSFedAuthn
.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(passiveSTSFedAuthn);
// WS-Trust authenticator: add the STS URL only if not already stored.
FederatedAuthenticatorConfig stsFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.WSTrust.NAME);
if (stsFedAuthn == null) {
stsFedAuthn = new FederatedAuthenticatorConfig();
stsFedAuthn.setName(IdentityApplicationConstants.Authenticator.WSTrust.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(stsFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(stsFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.WSTrust.IDENTITY_PROVIDER_URL) == null) {
Property stsUrlProp = new Property();
stsUrlProp.setName(IdentityApplicationConstants.Authenticator.WSTrust.IDENTITY_PROVIDER_URL);
stsUrlProp.setValue(stsUrl);
propertiesList.add(stsUrlProp);
}
stsFedAuthn
.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(stsFedAuthn);
// NOTE(review): identityProviderProperties is created but never used in this method.
List<IdentityProviderProperty> identityProviderProperties = new ArrayList<IdentityProviderProperty>();
// Session clean-up config: validate the configured clean-up period, defaulting when
// blank or non-numeric.
FederatedAuthenticatorConfig sessionTimeoutConfig = new FederatedAuthenticatorConfig();
sessionTimeoutConfig.setName(IdentityApplicationConstants.NAME);
propertiesList = new ArrayList<Property>(Arrays.asList(sessionTimeoutConfig.getProperties()));
Property cleanUpPeriodProp = new Property();
cleanUpPeriodProp.setName(IdentityApplicationConstants.CLEAN_UP_PERIOD);
String cleanUpPeriod = IdentityUtil.getProperty(IdentityConstants.ServerConfig.CLEAN_UP_PERIOD);
if (StringUtils.isBlank(cleanUpPeriod)) {
cleanUpPeriod = IdentityApplicationConstants.CLEAN_UP_PERIOD_DEFAULT;
} else if (!StringUtils.isNumeric(cleanUpPeriod)) {
log.warn("PersistanceCleanUpPeriod in identity.xml should be a numeric value");
cleanUpPeriod = IdentityApplicationConstants.CLEAN_UP_PERIOD_DEFAULT;
}
cleanUpPeriodProp.setValue(cleanUpPeriod);
propertiesList.add(cleanUpPeriodProp);
sessionTimeoutConfig.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(sessionTimeoutConfig);
identityProvider.setFederatedAuthenticatorConfigs(fedAuthnCofigs
.toArray(new FederatedAuthenticatorConfig[fedAuthnCofigs.size()]));
// SCIM provisioning connector: ensure user/group endpoint properties exist and
// match the freshly resolved endpoint URLs.
ProvisioningConnectorConfig scimProvConn = IdentityApplicationManagementUtil
.getProvisioningConnector(identityProvider.getProvisioningConnectorConfigs(),
"scim");
if (scimProvConn == null) {
scimProvConn = new ProvisioningConnectorConfig();
scimProvConn.setName("scim");
}
propertiesList = new ArrayList<>(Arrays.asList(scimProvConn.getProvisioningProperties()));
Property scimUserEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM.USERS_EP_URL);
if (scimUserEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM.USERS_EP_URL);
property.setValue(scimUsersEndpoint);
propertiesList.add(property);
} else if (!scimUsersEndpoint.equalsIgnoreCase(scimUserEndpointProperty.getValue())) {
scimUserEndpointProperty.setValue(scimUsersEndpoint);
}
Property scimGroupEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM.GROUPS_EP_URL);
if (scimGroupEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM.GROUPS_EP_URL);
property.setValue(scimGroupsEndpoint);
propertiesList.add(property);
} else if (!scimGroupsEndpoint.equalsIgnoreCase(scimGroupEndpointProperty.getValue())) {
scimGroupEndpointProperty.setValue(scimGroupsEndpoint);
}
Property scim2UserEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM2.USERS_EP_URL);
if (scim2UserEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM2.USERS_EP_URL);
property.setValue(scim2UsersEndpoint);
propertiesList.add(property);
} else if (!scim2UsersEndpoint.equalsIgnoreCase(scim2UserEndpointProperty.getValue())) {
scim2UserEndpointProperty.setValue(scim2UsersEndpoint);
}
Property scim2GroupEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM2.GROUPS_EP_URL);
if (scim2GroupEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM2.GROUPS_EP_URL);
property.setValue(scim2GroupsEndpoint);
propertiesList.add(property);
} else if (!scim2GroupsEndpoint.equalsIgnoreCase(scim2GroupEndpointProperty.getValue())) {
scim2GroupEndpointProperty.setValue(scim2GroupsEndpoint);
}
scimProvConn.setProvisioningProperties(propertiesList.toArray(new Property[propertiesList.size()]));
identityProvider.setProvisioningConnectorConfigs(new ProvisioningConnectorConfig[]{scimProvConn});
return identityProvider;
}
/**
 * Resolves the effective SAML endpoint URL for a tenant.
 * <p>
 * Uses the file-configured URL when present; otherwise builds one from the default
 * context. In legacy mode the tenant domain is appended as a query parameter for
 * non-super tenants. The result is finally converted to an absolute URL.
 *
 * @param urlFromConfigFile            URL override from identity.xml (may be blank)
 * @param tenantDomain                 tenant the URL is being built for
 * @param defaultContext               server context used when no override exists
 * @param appendTenantDomainInLegacyMode whether to append {@code tenantDomain} as a
 *                                     query parameter in legacy (non tenant-qualified) mode
 * @return the resolved absolute URL
 * @throws IdentityProviderManagementException on URL building/encoding failures
 */
private String buildSAMLUrl(String urlFromConfigFile, String tenantDomain, String defaultContext,
boolean appendTenantDomainInLegacyMode) throws IdentityProviderManagementException {
    String resolvedUrl = urlFromConfigFile;
    if (StringUtils.isBlank(resolvedUrl)) {
        // No file-based override; derive the URL from the default context.
        try {
            resolvedUrl = ServiceURLBuilder.create().addPath(defaultContext).build().getAbsolutePublicURL();
        } catch (URLBuilderException ex) {
            throw new IdentityProviderManagementException("Error while building URL for context: "
                    + defaultContext + " for tenantDomain: " + tenantDomain, ex);
        }
    }
    // The tenant domain query parameter is only relevant for non-super tenants.
    if (appendTenantDomainInLegacyMode && isNotSuperTenant(tenantDomain)) {
        Map<String, String[]> tenantQueryParams = new HashMap<>();
        tenantQueryParams.put(MultitenantConstants.TENANT_DOMAIN, new String[] {tenantDomain});
        try {
            resolvedUrl = IdentityUtil.buildQueryUrl(resolvedUrl, tenantQueryParams);
        } catch (UnsupportedEncodingException e) {
            throw new IdentityProviderManagementException("Error while building URL for context: "
                    + defaultContext + " for tenantDomain: " + tenantDomain, e);
        }
    }
    return resolveAbsoluteURL(defaultContext, resolvedUrl);
}
/**
 * Checks whether the given tenant domain is NOT the super tenant domain.
 *
 * @param tenantDomain tenant domain to check (may be null)
 * @return {@code true} unless {@code tenantDomain} equals the super tenant domain
 */
private boolean isNotSuperTenant(String tenantDomain) {
    // Constant-first equals is null-safe for tenantDomain, matching StringUtils.equals semantics.
    return !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain);
}
/**
 * Builds the SAML2 SSO federated authenticator config for the resident IdP.
 * <p>
 * Resolves the SSO, logout, ECP and artifact-resolution URLs (file override or
 * server default), merges them with any stored properties, and ensures the IdP
 * entity id and SAML-metadata related properties have values (defaulting when
 * absent).
 * <p>
 * Fix: the carry-over loop previously excluded only SSO/logout/ECP/entity-id
 * properties, so existing ARTIFACT_RESOLVE_URL and SAML-metadata properties
 * (validity period, metadata signing, authn-request signing) were added to the
 * list twice — once by the loop and once by the explicit add below it. Those
 * names are now excluded from the carry-over loop as well, so each property
 * appears exactly once.
 *
 * @param identityProvider stored resident IdP whose SAML config is being rebuilt
 * @param tenantDomain     tenant the URLs are resolved for
 * @return the fully-populated SAML2 SSO authenticator config
 * @throws IdentityProviderManagementException on URL building failures
 */
private FederatedAuthenticatorConfig buildSAMLProperties(IdentityProvider identityProvider, String tenantDomain)
        throws IdentityProviderManagementException {

    String samlSSOUrl = buildSAMLUrl(IdentityUtil.getProperty(IdentityConstants.ServerConfig.SSO_IDP_URL),
            tenantDomain, IdPManagementConstants.SAMLSSO, true);
    // Logout requests are served from the same endpoint as SSO.
    String samlLogoutUrl = buildSAMLUrl(IdentityUtil.getProperty(IdentityConstants.ServerConfig.SSO_IDP_URL),
            tenantDomain, IdPManagementConstants.SAMLSSO, true);
    String samlECPUrl = buildSAMLUrl(IdentityUtil.getProperty(IdentityConstants.ServerConfig.SAML_ECP_URL),
            tenantDomain, IdPManagementConstants.SAML_ECP_URL, true);
    // Artifact resolution does not take the tenant domain query parameter.
    String samlArtifactUrl = buildSAMLUrl(IdentityUtil.getProperty(IdentityConstants.ServerConfig.SSO_ARTIFACT_URL),
            tenantDomain, IdPManagementConstants.SSO_ARTIFACT_URL, false);

    FederatedAuthenticatorConfig samlFederatedAuthConfig = IdentityApplicationManagementUtil
            .getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
    if (samlFederatedAuthConfig == null) {
        samlFederatedAuthConfig = new FederatedAuthenticatorConfig();
        samlFederatedAuthConfig.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
    }

    List<Property> propertiesList = new ArrayList<>();

    // Endpoint URLs: stored value wins unless tenant-qualified URLs are enabled.
    Property samlSSOUrlProperty = resolveFedAuthnProperty(samlSSOUrl, samlFederatedAuthConfig,
            IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL);
    propertiesList.add(samlSSOUrlProperty);

    Property samlLogoutUrlProperty = resolveFedAuthnProperty(samlLogoutUrl, samlFederatedAuthConfig,
            IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL);
    propertiesList.add(samlLogoutUrlProperty);

    Property samlECPUrlProperty = resolveFedAuthnProperty(samlECPUrl, samlFederatedAuthConfig,
            IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL);
    propertiesList.add(samlECPUrlProperty);

    Property samlArtifactUrlProperty = resolveFedAuthnProperty(samlArtifactUrl, samlFederatedAuthConfig,
            IdentityApplicationConstants.Authenticator.SAML2SSO.ARTIFACT_RESOLVE_URL);
    propertiesList.add(samlArtifactUrlProperty);

    // IdP entity id: keep the stored value or default to the resident IdP entity id.
    Property idPEntityIdProperty =
            IdentityApplicationManagementUtil.getProperty(samlFederatedAuthConfig.getProperties(),
                    IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
    if (idPEntityIdProperty == null) {
        idPEntityIdProperty = new Property();
        idPEntityIdProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
        idPEntityIdProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
    }
    propertiesList.add(idPEntityIdProperty);

    if (IdentityTenantUtil.isTenantQualifiedUrlsEnabled()) {
        // Add SSO URL as a destination URL if not already available.
        addSSOUrlAsDestinationUrl(samlFederatedAuthConfig, samlSSOUrl, propertiesList);
    }

    // Carry over any remaining stored properties, excluding every property name that is
    // added explicitly in this method (to avoid duplicate entries in the result).
    for (Property property : samlFederatedAuthConfig.getProperties()) {
        if (property != null &&
                !IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL.equals(property.getName()) &&
                !IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL.equals(property.getName()) &&
                !IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL.equals(property.getName()) &&
                !IdentityApplicationConstants.Authenticator.SAML2SSO.ARTIFACT_RESOLVE_URL
                        .equals(property.getName()) &&
                !IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.getName()) &&
                !IdentityApplicationConstants.Authenticator.SAML2SSO.SAML_METADATA_VALIDITY_PERIOD
                        .equals(property.getName()) &&
                !IdentityApplicationConstants.Authenticator.SAML2SSO.SAML_METADATA_SIGNING_ENABLED
                        .equals(property.getName()) &&
                !IdentityApplicationConstants.Authenticator.SAML2SSO.SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED
                        .equals(property.getName())) {
            propertiesList.add(property);
        }
    }

    // SAML metadata properties: keep stored values, defaulting when absent.
    Property samlMetadataValidityPeriodProperty =
            IdentityApplicationManagementUtil.getProperty(samlFederatedAuthConfig.
                    getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                    SAML_METADATA_VALIDITY_PERIOD);
    if (samlMetadataValidityPeriodProperty == null) {
        samlMetadataValidityPeriodProperty = new Property();
        samlMetadataValidityPeriodProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_VALIDITY_PERIOD);
        samlMetadataValidityPeriodProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_VALIDITY_PERIOD_DEFAULT);
    }
    propertiesList.add(samlMetadataValidityPeriodProperty);

    Property samlMetadataSigningEnabledProperty =
            IdentityApplicationManagementUtil.getProperty(samlFederatedAuthConfig.
                    getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                    SAML_METADATA_SIGNING_ENABLED);
    if (samlMetadataSigningEnabledProperty == null) {
        samlMetadataSigningEnabledProperty = new Property();
        samlMetadataSigningEnabledProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_SIGNING_ENABLED);
        samlMetadataSigningEnabledProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_SIGNING_ENABLED_DEFAULT);
    }
    propertiesList.add(samlMetadataSigningEnabledProperty);

    Property samlAuthnRequestSigningProperty =
            IdentityApplicationManagementUtil.getProperty(samlFederatedAuthConfig.
                    getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                    SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED);
    if (samlAuthnRequestSigningProperty == null) {
        samlAuthnRequestSigningProperty = new Property();
        samlAuthnRequestSigningProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED);
        samlAuthnRequestSigningProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_AUTHN_REQUESTS_SIGNING_DEFAULT);
    }
    propertiesList.add(samlAuthnRequestSigningProperty);

    samlFederatedAuthConfig.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
    return samlFederatedAuthConfig;
}
/**
 * Adds the SSO URL as a destination-URL property unless an existing destination
 * property already carries the same value.
 * <p>
 * The new property's index suffix is one past the number of destination-URL
 * properties currently configured.
 *
 * @param federatedAuthenticatorConfig stored SAML authenticator config to inspect
 * @param ssoUrl                       SSO URL to register as a destination
 * @param propertiesList               output list the new property is appended to
 */
private void addSSOUrlAsDestinationUrl(FederatedAuthenticatorConfig federatedAuthenticatorConfig,
String ssoUrl,
List<Property> propertiesList) {
    // Count configured destination URLs and check whether the SSO URL is among them.
    int destinationUrlCount = 0;
    boolean ssoUrlAlreadyPresent = false;
    for (Property candidate : federatedAuthenticatorConfig.getProperties()) {
        if (candidate.getName()
                .startsWith(IdentityApplicationConstants.Authenticator.SAML2SSO.DESTINATION_URL_PREFIX)) {
            destinationUrlCount++;
            if (StringUtils.equals(ssoUrl, candidate.getValue())) {
                ssoUrlAlreadyPresent = true;
            }
        }
    }
    if (!ssoUrlAlreadyPresent) {
        // No destination property matches the default SSO URL; append it with the next index.
        propertiesList.add(buildDestinationURLProperty(ssoUrl, destinationUrlCount + 1));
    }
}
/**
 * Creates a destination-URL property named with the multi-valued index suffix
 * (e.g. {@code <prefix><separator><index>}).
 *
 * @param destinationURL the URL value to store
 * @param index          1-based index appended to the property name
 * @return the populated destination-URL property
 */
private Property buildDestinationURLProperty(String destinationURL, int index) {
    String propertyName = IdentityApplicationConstants.Authenticator.SAML2SSO.DESTINATION_URL_PREFIX
            + IdentityApplicationConstants.MULTIVALUED_PROPERTY_CHARACTER + index;
    Property destinationProperty = new Property();
    destinationProperty.setName(propertyName);
    destinationProperty.setValue(destinationURL);
    return destinationProperty;
}
/**
 * Resolves a federated authenticator property value.
 * <p>
 * Returns the stored property when it exists AND tenant-qualified URLs are
 * disabled; otherwise returns a new property carrying the calculated URL
 * (in tenant-qualified mode the calculated URL always wins over the DB value).
 *
 * @param epUrl          calculated endpoint URL to use when no stored value applies
 * @param fedAuthnConfig stored authenticator config whose properties are consulted
 * @param propertyName   name of the property being resolved
 * @return the resolved property (never null)
 */
private Property resolveFedAuthnProperty(String epUrl, FederatedAuthenticatorConfig fedAuthnConfig,
String propertyName) {
    Property storedProperty =
            IdentityApplicationManagementUtil.getProperty(fedAuthnConfig.getProperties(), propertyName);
    if (storedProperty != null && !IdentityTenantUtil.isTenantQualifiedUrlsEnabled()) {
        // Non tenant-qualified mode: honor the value persisted in the database.
        return storedProperty;
    }
    // Tenant-qualified mode (or no stored value): use the calculated endpoint URL.
    Property calculatedProperty = new Property();
    calculatedProperty.setName(propertyName);
    calculatedProperty.setValue(epUrl);
    return calculatedProperty;
}
/**
 * Add Resident Identity provider for a given tenant.
 *
 * @param identityProvider <code>IdentityProvider</code>
 * @param tenantDomain Tenant domain whose resident IdP is requested
 * @throws IdentityProviderManagementException Error when adding Resident Identity Provider
 */
@Override
public void addResidentIdP(IdentityProvider identityProvider, String tenantDomain)
        throws IdentityProviderManagementException {

    // invoking the pre listeners; any listener can veto the operation by returning false.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreAddResidentIdP(identityProvider, tenantDomain)) {
            return;
        }
    }
    if (identityProvider.getFederatedAuthenticatorConfigs() == null) {
        identityProvider.setFederatedAuthenticatorConfigs(new FederatedAuthenticatorConfig[0]);
    }
    // Locate (or lazily create) the SAML2 SSO authenticator config of the resident IdP.
    FederatedAuthenticatorConfig saml2SSOResidentAuthenticatorConfig = IdentityApplicationManagementUtil
            .getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
    if (saml2SSOResidentAuthenticatorConfig == null) {
        saml2SSOResidentAuthenticatorConfig = new FederatedAuthenticatorConfig();
        saml2SSOResidentAuthenticatorConfig.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
    }
    if (saml2SSOResidentAuthenticatorConfig.getProperties() == null) {
        saml2SSOResidentAuthenticatorConfig.setProperties(new Property[0]);
    }
    // Ensure the IdP entity id property exists; add it with the resident default if missing.
    boolean idPEntityIdAvailable = false;
    for (Property property : saml2SSOResidentAuthenticatorConfig.getProperties()) {
        if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.getName())) {
            idPEntityIdAvailable = true;
        }
    }
    if (!idPEntityIdAvailable) {
        Property property = new Property();
        property.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
        property.setValue(IdPManagementUtil.getResidentIdPEntityId());
        if (saml2SSOResidentAuthenticatorConfig.getProperties().length > 0) {
            // FIX: Arrays.asList returns a fixed-size list, so the previous add() threw
            // UnsupportedOperationException, and the (Property[]) cast on the untyped
            // toArray() would have thrown ClassCastException. Use a mutable copy and a
            // typed toArray instead.
            List<Property> properties =
                    new ArrayList<>(Arrays.asList(saml2SSOResidentAuthenticatorConfig.getProperties()));
            properties.add(property);
            saml2SSOResidentAuthenticatorConfig.setProperties(properties.toArray(new Property[0]));
        } else {
            saml2SSOResidentAuthenticatorConfig.setProperties(new Property[]{property});
        }
    }
    // SAML metadata validity period: numeric and positive, otherwise fall back to the default.
    Property samlMetadataValidityPeriodProperty = new Property();
    String samlMetadataValidityPeriod = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
            SAML_METADATA_VALIDITY_PERIOD);
    if (StringUtils.isBlank(samlMetadataValidityPeriod)) {
        samlMetadataValidityPeriod = IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_VALIDITY_PERIOD_DEFAULT;
    } else if (!StringUtils.isNumeric(samlMetadataValidityPeriod) ||
            Integer.parseInt(samlMetadataValidityPeriod) <= 0) {
        log.warn("SAMLMetadataValidityPeriod in identity.xml should be a numeric value " +
                "hence defaulting to value: " + IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_VALIDITY_PERIOD_DEFAULT + "m");
        samlMetadataValidityPeriod = IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_VALIDITY_PERIOD_DEFAULT;
    }
    samlMetadataValidityPeriodProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
            SAML_METADATA_VALIDITY_PERIOD);
    samlMetadataValidityPeriodProperty.setValue(samlMetadataValidityPeriod);
    // SAML metadata signing flag (defaulted when unset in identity.xml).
    Property samlMetadataSigningEnabledProperty = new Property();
    String samlMetadataSigningEnabled = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
            SAML_METADATA_SIGNING_ENABLED);
    if (StringUtils.isBlank(samlMetadataSigningEnabled)) {
        log.warn("SAMLMetadataSigningEnabled in identity.xml should be a boolean value");
        samlMetadataSigningEnabled = IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_SIGNING_ENABLED_DEFAULT;
    }
    samlMetadataSigningEnabledProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
            SAML_METADATA_SIGNING_ENABLED);
    samlMetadataSigningEnabledProperty.setValue(samlMetadataSigningEnabled);
    // SAML authn-request signing flag (defaulted when unset in identity.xml).
    Property samlAuthnRequestSigningProperty = new Property();
    String samlAuthnRequestSigningEnabled = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
            SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED);
    if (StringUtils.isBlank(samlAuthnRequestSigningEnabled)) {
        log.warn("samlAuthnRequestSigningEnabled in identity.xml should be a boolean value");
        samlAuthnRequestSigningEnabled = IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_AUTHN_REQUESTS_SIGNING_DEFAULT;
    }
    samlAuthnRequestSigningProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
            SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED);
    samlAuthnRequestSigningProperty.setValue(samlAuthnRequestSigningEnabled);
    List<Property> propertyList =
            new ArrayList<>(Arrays.asList(saml2SSOResidentAuthenticatorConfig.getProperties()));
    propertyList.add(samlMetadataValidityPeriodProperty);
    propertyList.add(samlMetadataSigningEnabledProperty);
    propertyList.add(samlAuthnRequestSigningProperty);
    Property[] properties = new Property[propertyList.size()];
    properties = propertyList.toArray(properties);
    saml2SSOResidentAuthenticatorConfig.setProperties(properties);
    // OIDC authenticator config carrying the OIDC entity id of the resident IdP.
    Property oidcProperty = new Property();
    oidcProperty.setName(OPENID_IDP_ENTITY_ID);
    oidcProperty.setValue(getOIDCResidentIdPEntityId());
    FederatedAuthenticatorConfig oidcAuthenticationConfig = new FederatedAuthenticatorConfig();
    oidcAuthenticationConfig.setProperties(new Property[]{oidcProperty});
    oidcAuthenticationConfig.setName(IdentityApplicationConstants.Authenticator.OIDC.NAME);
    // Passive STS authenticator config carrying the resident IdP entity id.
    Property passiveStsProperty = new Property();
    passiveStsProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_ENTITY_ID);
    passiveStsProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
    FederatedAuthenticatorConfig passiveStsAuthenticationConfig = new FederatedAuthenticatorConfig();
    passiveStsAuthenticationConfig.setProperties(new Property[]{passiveStsProperty});
    passiveStsAuthenticationConfig.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
    FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs = {saml2SSOResidentAuthenticatorConfig,
            passiveStsAuthenticationConfig, oidcAuthenticationConfig};
    identityProvider.setFederatedAuthenticatorConfigs(IdentityApplicationManagementUtil
            .concatArrays(identityProvider.getFederatedAuthenticatorConfigs(), federatedAuthenticatorConfigs));
    // Resident-IdP level properties: remember-me and session idle timeouts, validated as
    // positive numeric values with identity.xml-independent defaults.
    IdentityProviderProperty[] idpProperties = new IdentityProviderProperty[2];
    IdentityProviderProperty rememberMeTimeoutProperty = new IdentityProviderProperty();
    String rememberMeTimeout = IdentityUtil.getProperty(IdentityConstants.ServerConfig.REMEMBER_ME_TIME_OUT);
    if (StringUtils.isBlank(rememberMeTimeout) || !StringUtils.isNumeric(rememberMeTimeout) ||
            Integer.parseInt(rememberMeTimeout) <= 0) {
        log.warn("RememberMeTimeout in identity.xml should be a numeric value");
        rememberMeTimeout = IdentityApplicationConstants.REMEMBER_ME_TIME_OUT_DEFAULT;
    }
    rememberMeTimeoutProperty.setName(IdentityApplicationConstants.REMEMBER_ME_TIME_OUT);
    rememberMeTimeoutProperty.setValue(rememberMeTimeout);
    IdentityProviderProperty sessionIdletimeOutProperty = new IdentityProviderProperty();
    String idleTimeout = IdentityUtil.getProperty(IdentityConstants.ServerConfig.SESSION_IDLE_TIMEOUT);
    if (StringUtils.isBlank(idleTimeout) || !StringUtils.isNumeric(idleTimeout) ||
            Integer.parseInt(idleTimeout) <= 0) {
        log.warn("SessionIdleTimeout in identity.xml should be a numeric value");
        idleTimeout = IdentityApplicationConstants.SESSION_IDLE_TIME_OUT_DEFAULT;
    }
    sessionIdletimeOutProperty.setName(IdentityApplicationConstants.SESSION_IDLE_TIME_OUT);
    sessionIdletimeOutProperty.setValue(idleTimeout);
    idpProperties[0] = rememberMeTimeoutProperty;
    idpProperties[1] = sessionIdletimeOutProperty;
    identityProvider.setIdpProperties(idpProperties);
    dao.addIdP(identityProvider, IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
    // invoking the post listeners
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostAddResidentIdP(identityProvider, tenantDomain)) {
            return;
        }
    }
}
/**
 * Update Resident Identity provider for a given tenant.
 *
 * @param identityProvider <code>IdentityProvider</code>
 * @param tenantDomain Tenant domain whose resident IdP is requested
 * @throws IdentityProviderManagementException Error when updating Resident Identity Provider
 */
@Override
public void updateResidentIdP(IdentityProvider identityProvider, String tenantDomain)
        throws IdentityProviderManagementException {

    IdentityProvider residentIdp = dao.getIdPByName(null, IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME,
            IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
    // Incoming property values keyed by name; consumed while merging with the stored set.
    Map<String, String> configurationDetails = new HashMap<>();
    for (IdentityProviderProperty property : identityProvider.getIdpProperties()) {
        configurationDetails.put(property.getName(), property.getValue());
    }
    IdentityProviderProperty[] identityMgtProperties = residentIdp.getIdpProperties();
    List<IdentityProviderProperty> newProperties = new ArrayList<>();
    // Existing properties keep their value unless an override was supplied.
    for (IdentityProviderProperty identityMgtProperty : identityMgtProperties) {
        IdentityProviderProperty prop = new IdentityProviderProperty();
        String key = identityMgtProperty.getName();
        prop.setName(key);
        if (configurationDetails.containsKey(key)) {
            prop.setValue(configurationDetails.get(key));
        } else {
            prop.setValue(identityMgtProperty.getValue());
        }
        newProperties.add(prop);
        configurationDetails.remove(key);
    }
    // Whatever remains in the map is a brand-new property.
    for (Map.Entry<String, String> entry : configurationDetails.entrySet()) {
        IdentityProviderProperty prop = new IdentityProviderProperty();
        prop.setName(entry.getKey());
        prop.setValue(entry.getValue());
        newProperties.add(prop);
    }
    identityProvider.setIdpProperties(newProperties.toArray(new IdentityProviderProperty[newProperties.size()]));
    // Validate the merged resident IdP properties before persisting.
    for (IdentityProviderProperty idpProp : identityProvider.getIdpProperties()) {
        if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.SESSION_IDLE_TIME_OUT)) {
            if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
                    Integer.parseInt(idpProp.getValue().trim()) <= 0) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.SESSION_IDLE_TIME_OUT
                        + " of ResidentIdP should be a numeric value greater than 0 ");
            }
        } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.REMEMBER_ME_TIME_OUT)) {
            if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
                    Integer.parseInt(idpProp.getValue().trim()) <= 0) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.REMEMBER_ME_TIME_OUT
                        + " of ResidentIdP should be a numeric value greater than 0 ");
            }
        } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_VALIDITY_PERIOD)) {
            if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
                    Integer.parseInt(idpProp.getValue().trim()) <= 0) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
                        SAML_METADATA_VALIDITY_PERIOD +
                        " of ResidentIdP should be a numeric value greater than 0 ");
            }
        } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_SIGNING_ENABLED)) {
            if (StringUtils.isBlank(idpProp.getValue())) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
                        SAML_METADATA_SIGNING_ENABLED + " of ResidentIdP should be a boolean value ");
            }
        } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED)) {
            // FIX: this branch was previously nested inside the SAML_METADATA_SIGNING_ENABLED
            // branch, making it unreachable (a property name cannot equal both constants).
            // It now participates in the outer else-if chain.
            if (StringUtils.isBlank(idpProp.getValue())) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
                        SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED + " of ResidentIdP should be a boolean value ");
            }
        }
    }
    // invoking the pre listeners
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreUpdateResidentIdP(identityProvider, tenantDomain)) {
            return;
        }
    }
    if (identityProvider.getFederatedAuthenticatorConfigs() == null) {
        identityProvider.setFederatedAuthenticatorConfigs(new FederatedAuthenticatorConfig[0]);
    }
    IdentityProvider currentIdP = IdentityProviderManager.getInstance().getIdPByName(
            IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME, tenantDomain, true);
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    validateUpdateOfIdPEntityId(currentIdP.getFederatedAuthenticatorConfigs(),
            identityProvider.getFederatedAuthenticatorConfigs(), tenantId, tenantDomain);
    dao.updateIdP(identityProvider, currentIdP, tenantId, tenantDomain);
    // invoking the post listeners
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostUpdateResidentIdP(identityProvider, tenantDomain)) {
            return;
        }
    }
}
/**
 * Retrieves registered Identity providers for a given tenant.
 *
 * @param tenantDomain Tenant domain whose IdP names are requested
 * @return Set of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
 * identifiers of each IdP
 * @throws IdentityProviderManagementException Error when getting list of Identity Providers
 */
@Override
public List<IdentityProvider> getIdPs(String tenantDomain)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    return dao.getIdPs(null, tenantId, tenantDomain);
}
/**
 * Get all basic identity provider information.
 *
 * @param limit        limit per page.
 * @param offset       offset value.
 * @param filter       filter value for IdP search.
 * @param sortOrder    order of IdP ASC/DESC.
 * @param sortBy       the column value need to sort.
 * @param tenantDomain tenant domain whose IdP names are requested.
 * @return Identity Provider's Basic Information array {@link IdpSearchResult}.
 * @throws IdentityProviderManagementException Server/client related error when getting list of Identity Providers.
 * @deprecated use {@link #getIdPs(Integer, Integer, String, String, String, String, List)}
 */
@Override
public IdpSearchResult getIdPs(Integer limit, Integer offset, String filter, String sortOrder, String sortBy,
                               String tenantDomain)
        throws IdentityProviderManagementException {

    // Delegate to the attribute-aware overload with no additional required attributes.
    return getIdPs(limit, offset, filter, sortOrder, sortBy, tenantDomain, new ArrayList<>());
}
/**
 * Get all identity provider's Basic information along with additionally requested information depending on the
 * requiredAttributes.
 *
 * @param limit              Limit per page.
 * @param offset             Offset value.
 * @param filter             Filter value for IdP search.
 * @param sortOrder          Order of IdP ASC/DESC.
 * @param sortBy             The column value need to sort.
 * @param tenantDomain       TenantDomain of the user.
 * @param requiredAttributes Required attributes which needs to be return.
 * @return Identity Provider's Basic Information array along with requested attribute
 * information{@link IdpSearchResult}.
 * @throws IdentityProviderManagementException Server/client related error when getting list of Identity Providers.
 */
@Override
public IdpSearchResult getIdPs(Integer limit, Integer offset, String filter, String sortOrder, String sortBy,
                               String tenantDomain, List<String> requiredAttributes)
        throws IdentityProviderManagementException {

    // Parse the filter once; the resulting expression nodes drive both the count and page queries.
    List<ExpressionNode> expressionNodes = getExpressionNodes(filter);
    IdpSearchResult idpSearchResult = new IdpSearchResult();
    setParameters(limit, offset, sortOrder, sortBy, filter, idpSearchResult);
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    idpSearchResult.setTotalIDPCount(dao.getTotalIdPCount(tenantId, expressionNodes));
    idpSearchResult.setIdpList(dao.getPaginatedIdPsSearch(tenantId, expressionNodes,
            idpSearchResult.getLimit(), idpSearchResult.getOffSet(), idpSearchResult.getSortOrder(),
            idpSearchResult.getSortBy(), requiredAttributes));
    return idpSearchResult;
}
/**
 * Get the count of identity providers matching the given filter.
 *
 * @param filter       filter value for IdP search.
 * @param tenantDomain tenant domain whose IdP names are requested.
 * @return filtered idp count.
 * @throws IdentityProviderManagementException Error while getting Identity Providers count.
 */
@Override
public int getTotalIdPCount(String filter, String tenantDomain) throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    List<ExpressionNode> expressionNodes = getExpressionNodes(filter);
    return dao.getTotalIdPCount(tenantId, expressionNodes);
}
/**
 * Get the filter node as a list.
 *
 * @param filter value of the filter.
 * @return node tree.
 * @throws IdentityProviderManagementClientException Error when validate filters.
 */
private List<ExpressionNode> getExpressionNodes(String filter) throws IdentityProviderManagementClientException {

    // Filter example : name sw "te" and name ew "st" and isEnabled eq "true".
    List<ExpressionNode> expressionNodes = new ArrayList<>();
    if (StringUtils.isBlank(filter)) {
        // Nothing to parse; an empty node list means "no filtering".
        return expressionNodes;
    }
    try {
        Node rootNode = new FilterTreeBuilder(filter).buildTree();
        setExpressionNodeList(rootNode, expressionNodes);
    } catch (IOException | IdentityException e) {
        String message = "Error occurred while validate filter, filter: " + filter;
        diagnosticLog.error(message + ". Error message: " + e.getMessage());
        throw IdPManagementUtil
                .handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP, message, e);
    }
    return expressionNodes;
}
/**
 * Flattens the parsed filter tree into a list of expression nodes, normalising boolean
 * values of the 'isEnabled' attribute on the way.
 *
 * @param node       filter node.
 * @param expression list of expression.
 * @throws IdentityProviderManagementClientException Error when passing invalid filter.
 */
private void setExpressionNodeList(Node node, List<ExpressionNode> expression)
        throws IdentityProviderManagementClientException {

    if (node instanceof ExpressionNode) {
        ExpressionNode expressionNode = (ExpressionNode) node;
        if (StringUtils.isNotBlank(expressionNode.getAttributeValue())
                && expressionNode.getAttributeValue().contains(IdPManagementConstants.IDP_IS_ENABLED)) {
            // FIX: previously "true".contains(value) / "false".contains(value) accepted any
            // substring (e.g. "ru" or a blank value) as a valid boolean. Require an exact match.
            if (StringUtils.equals("true", expressionNode.getValue())) {
                expressionNode.setValue(IdPManagementConstants.IS_TRUE_VALUE);
            } else if (StringUtils.equals("false", expressionNode.getValue())) {
                expressionNode.setValue(IdPManagementConstants.IS_FALSE_VALUE);
            } else {
                // FIX: added the missing space before "is passed" in the client-facing message.
                String message = "Invalid value: " + expressionNode.getValue() + " is passed for " +
                        "'isEnabled' attribute in the filter. It should be 'true' or 'false'";
                throw IdPManagementUtil
                        .handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                                message);
            }
        }
        expression.add(expressionNode);
    } else if (node instanceof OperationNode) {
        // Logical operation node (and/or): recurse into both operands.
        setExpressionNodeList(node.getLeftNode(), expression);
        setExpressionNodeList(node.getRightNode(), expression);
    }
}
/**
 * Set the passing parameters as result.
 *
 * @param limit     page limit.
 * @param offset    offset value.
 * @param sortOrder order of IdP(ASC/DESC).
 * @param sortBy    the column value need to sort.
 * @param filter    filter value for IdP search.
 * @param result    result object.
 * @throws IdentityProviderManagementClientException Error while set offset.
 */
private void setParameters(Integer limit, Integer offset, String sortOrder, String sortBy, String filter,
                           IdpSearchResult result) throws IdentityProviderManagementClientException {

    // FIX: the parameter order now matches the only call site, which passes
    // (limit, offset, sortOrder, sortBy, filter, result). The previous declaration
    // (filter, sortOrder, sortBy) silently assigned the caller's sortOrder to filter,
    // sortBy to sortOrder and filter to sortBy — all Strings, so it compiled cleanly.
    result.setLimit(validateLimit(limit));
    result.setOffSet(validateOffset(offset));
    result.setSortBy(validateSortBy(sortBy));
    result.setSortOrder(validateSortOrder(sortOrder));
    result.setFilter(filter);
}
/**
 * Validate sortBy.
 *
 * @param sortBy sortBy attribute.
 * @return Validated sortOrder and sortBy.
 */
private String validateSortBy(String sortBy) {

    // Blank input falls back to the default sort column.
    if (StringUtils.isBlank(sortBy)) {
        if (log.isDebugEnabled()) {
            log.debug("sortBy attribute is empty. Therefore we set the default sortBy attribute. sortBy" +
                    IdPManagementConstants.DEFAULT_SORT_BY);
        }
        diagnosticLog.info("sortBy attribute is empty. Therefore we set the default sortBy attribute. sortBy" +
                IdPManagementConstants.DEFAULT_SORT_BY);
        return IdPManagementConstants.DEFAULT_SORT_BY;
    }
    // Map the externally visible attribute names onto the DB column identifiers.
    if (IdPManagementConstants.IDP_NAME.equals(sortBy)) {
        return IdPManagementConstants.NAME;
    }
    if (IdPManagementConstants.IDP_HOME_REALM_ID.equals(sortBy)) {
        return IdPManagementConstants.HOME_REALM_ID;
    }
    // Unknown attribute: fall back to the default and log it.
    if (log.isDebugEnabled()) {
        log.debug("sortBy attribute is incorrect. Therefore we set the default sortBy attribute. " +
                "sortBy: " + IdPManagementConstants.DEFAULT_SORT_BY);
    }
    diagnosticLog.info("sortBy attribute is incorrect. Therefore we set the default sortBy attribute. " +
            "sortBy: " + IdPManagementConstants.DEFAULT_SORT_BY);
    return IdPManagementConstants.DEFAULT_SORT_BY;
}
/**
 * Validate sortOrder.
 *
 * @param sortOrder sortOrder ASC/DESC.
 * @return Validated sortOrder and sortBy.
 */
private String validateSortOrder(String sortOrder) {

    if (StringUtils.isBlank(sortOrder)) {
        sortOrder = IdPManagementConstants.DEFAULT_SORT_ORDER;
        if (log.isDebugEnabled()) {
            log.debug("sortOrder is empty. Therefore we set the default sortOrder value as ASC. SortOrder: " +
                    sortOrder);
        }
        diagnosticLog.info("sortOrder is empty. Therefore we set the default sortOrder value as ASC. SortOrder: " +
                sortOrder);
    } else if (!sortOrder.equals(IdPManagementConstants.DESC_SORT_ORDER)
            && !sortOrder.equals(IdPManagementConstants.ASC_SORT_ORDER)) {
        // Removed the two no-op branches that reassigned DESC/ASC to themselves; recognised
        // values pass through untouched, only unknown values fall back to the default.
        sortOrder = IdPManagementConstants.DEFAULT_SORT_ORDER;
        if (log.isDebugEnabled()) {
            log.debug("sortOrder is incorrect. Therefore we set the default sortOrder value as ASC. SortOrder: "
                    + sortOrder);
        }
        diagnosticLog.info("sortOrder is incorrect. Therefore we set the default sortOrder value as ASC." +
                " SortOrder: " + sortOrder);
    }
    return sortOrder;
}
/**
 * Validate limit.
 *
 * @param limit given limit value.
 * @return validated limit and offset value.
 */
private int validateLimit(Integer limit) throws IdentityProviderManagementClientException {

    int resolvedLimit;
    if (limit == null) {
        if (log.isDebugEnabled()) {
            log.debug("Given limit is null. Therefore we get the default limit from " +
                    "identity.xml.");
        }
        resolvedLimit = IdentityUtil.getDefaultItemsPerPage();
        diagnosticLog.info("Given limit is null. Setting default limit as: " + resolvedLimit);
    } else {
        resolvedLimit = limit;
    }
    // Negative limits are a client error.
    if (resolvedLimit < 0) {
        String message = "Given limit: " + resolvedLimit + " is a negative value.";
        diagnosticLog.error(message);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                message);
    }
    // Cap the limit at the server-wide maximum page size.
    int maximumItemsPerPage = IdentityUtil.getMaximumItemPerPage();
    if (resolvedLimit > maximumItemsPerPage) {
        if (log.isDebugEnabled()) {
            log.debug("Given limit exceed the maximum limit. Therefore we get the default limit from " +
                    "identity.xml. limit: " + maximumItemsPerPage);
        }
        resolvedLimit = maximumItemsPerPage;
        diagnosticLog.info("Given limit exceed the maximum limit. Therefore setting the default maximum value: "
                + resolvedLimit);
    }
    return resolvedLimit;
}
/**
 * Validate offset.
 *
 * @param offset given offset value.
 * @return validated limit and offset value.
 * @throws IdentityProviderManagementClientException Error while set offset
 */
private int validateOffset(Integer offset) throws IdentityProviderManagementClientException {

    if (offset == null) {
        // Return first page offset.
        return 0;
    }
    if (offset < 0) {
        // FIX: corrected the grammar of the client-facing message
        // ("should not negative" -> "should not be negative").
        String message = "Invalid offset applied. Offset should not be negative. offSet: " +
                offset;
        diagnosticLog.error(message);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                message);
    }
    return offset;
}
/**
 * Retrieves registered Identity providers for a given tenant, narrowed down by the given filter.
 *
 * @param tenantDomain Tenant domain whose IdP names are requested
 * @param filter       Filter expression applied to the IdP search (evaluated by the DAO layer)
 * @return Set of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
 * identifiers of each IdP
 * @throws IdentityProviderManagementException Error when getting list of Identity Providers
 */
@Override
public List<IdentityProvider> getIdPsSearch(String tenantDomain, String filter)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    return dao.getIdPsSearch(null, tenantId, tenantDomain, filter);
}
/**
 * Retrieves registered Enabled Identity providers for a given tenant.
 *
 * @param tenantDomain Tenant domain whose IdP names are requested
 * @return Set of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
 * identifiers of each IdP
 * @throws IdentityProviderManagementException Error when getting list of Identity Providers
 */
@Override
public List<IdentityProvider> getEnabledIdPs(String tenantDomain)
        throws IdentityProviderManagementException {

    // Fetch every registered IdP and keep only the enabled ones.
    return getIdPs(tenantDomain).stream()
            .filter(IdentityProvider::isEnable)
            .collect(Collectors.toList());
}
/**
 * Retrieves Identity provider information for a given tenant by Identity Provider name.
 *
 * @param idPName             Unique name of the Identity provider whose information is requested
 * @param tenantDomain        Tenant domain whose information is requested
 * @param ignoreFileBasedIdps Whether to skip the file-based IdP lookup fallback
 * @return <code>IdentityProvider</code> information; when not found in the DB and file-based
 * IdPs are not ignored, falls back to the file-based store and finally to the default IdP config
 * @throws IdentityProviderManagementException Error when getting Identity Provider information by IdP name
 */
@Override
public IdentityProvider getIdPByName(String idPName, String tenantDomain,
                                     boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {

    diagnosticLog.info("Retrieving IDP by name for IDP name: " + idPName);
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(idPName)) {
        String msg = "Invalid argument: Identity Provider Name value is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
    if (!ignoreFileBasedIdps) {
        // DB miss: try the file-based IdP store by name.
        if (identityProvider == null) {
            identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
        }
        // Still not found: fall back to the default file-based IdP configuration.
        if (identityProvider == null) {
            identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
                    IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
        }
    }
    return identityProvider;
}
/**
 * Retrieves Identity provider information for a given tenant by Identity Provider id.
 *
 * @param id                  Numeric id of the Identity provider (as a string)
 * @param tenantDomain        Tenant domain whose information is requested
 * @param ignoreFileBasedIdps Whether to skip the file-based IdP lookup fallback
 * @return <code>IdentityProvider</code> information, or a file-based/default fallback when not
 * found in the DB and file-based IdPs are not ignored
 * @throws IdentityProviderManagementException Error when getting Identity Provider information by id
 */
@Override
public IdentityProvider getIdPById(String id, String tenantDomain,
                                   boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {

    diagnosticLog.info("Retrieving IDP by ID: " + id);
    if (StringUtils.isEmpty(id)) {
        String msg = "Invalid argument: Identity Provider ID value is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    Integer intId;
    IdentityProvider identityProvider = null;
    try {
        intId = Integer.parseInt(id);
        identityProvider = dao.getIdPById(null, intId, tenantId, tenantDomain);
    } catch (NumberFormatException e) {
        // Ignore this. A non-numeric id leaves identityProvider null; the file-based
        // lookup below still gets a chance to resolve the raw string as an IdP name.
    }
    if (!ignoreFileBasedIdps) {
        // NOTE(review): the file-based fallback looks the raw id string up as a NAME —
        // presumably intentional for file-configured IdPs; confirm with callers.
        if (identityProvider == null) {
            identityProvider = new FileBasedIdPMgtDAO().getIdPByName(id, tenantDomain);
        }
        if (identityProvider == null) {
            identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
                    IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
        }
    }
    return identityProvider;
}
/**
 * Retrieves the Identity provider of a given tenant by its resource id.
 *
 * @param resourceId          Resource id of the Identity provider
 * @param tenantDomain        Tenant domain whose information is requested
 * @param ignoreFileBasedIdps Unused here; DB lookup only
 * @return <code>IdentityProvider</code> matching the resource id
 * @throws IdentityProviderManagementException Error when getting the Identity Provider
 */
@Override
public IdentityProvider getIdPByResourceId(String resourceId, String tenantDomain, boolean
        ignoreFileBasedIdps) throws IdentityProviderManagementException {

    validateGetIdPInputValues(resourceId);
    return dao.getIdPByResourceId(resourceId, IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
}
/**
 * Retrieves the name of the Identity provider identified by the given resource id.
 *
 * @param resourceId Resource id of the Identity provider
 * @return Name of the matching Identity provider
 * @throws IdentityProviderManagementException Error when getting the Identity Provider name
 */
@Override
public String getIdPNameByResourceId(String resourceId) throws IdentityProviderManagementException {

    validateGetIdPInputValues(resourceId);
    return dao.getIdPNameByResourceId(resourceId);
}
/**
 * Retrieves an enabled Identity provider by name, optionally ignoring file-based IdPs.
 *
 * @param idPName             Unique name of the Identity provider whose information is requested
 * @param tenantDomain        Tenant domain whose information is requested
 * @param ignoreFileBasedIdps Whether to skip the file-based IdP lookup fallback
 * @return The matching <code>IdentityProvider</code> when found and enabled, otherwise null
 * @throws IdentityProviderManagementException Error when getting the Identity Provider
 */
@Override
public IdentityProvider getEnabledIdPByName(String idPName, String tenantDomain,
                                            boolean ignoreFileBasedIdps)
        throws IdentityProviderManagementException {

    IdentityProvider identityProvider = getIdPByName(idPName, tenantDomain, ignoreFileBasedIdps);
    // Disabled or missing IdPs are reported as absent.
    return (identityProvider != null && identityProvider.isEnable()) ? identityProvider : null;
}
/**
 * Retrieves Identity provider information about a given tenant by Identity Provider name
 *
 * @param idPName      Unique name of the Identity provider of whose information is requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP name
 */
@Override
public IdentityProvider getIdPByName(String idPName, String tenantDomain)
        throws IdentityProviderManagementException {

    // Delegate to the three-argument overload, including file-based IdPs in the lookup.
    return getIdPByName(idPName, tenantDomain, false);
}
/**
 * Retrieves Identity provider information about a given tenant by Identity Provider id.
 *
 * @param id           Numeric id of the Identity provider (as a string)
 * @param tenantDomain Tenant domain whose information is requested
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider information by id
 */
@Override
public IdentityProvider getIdPById(String id, String tenantDomain) throws IdentityProviderManagementException {

    // Delegate to the three-argument overload, including file-based IdPs in the lookup.
    return getIdPById(id, tenantDomain, false);
}
/**
 * Retrieves the Identity provider that carries the given authenticator property value.
 *
 * @param property            IDP authenticator property (E.g.: IdPEntityId)
 * @param value               Value associated with given Property
 * @param tenantDomain        Tenant domain whose information is requested
 * @param ignoreFileBasedIdps Whether to skip the file-based IdP lookup fallback
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by authenticator property value
 */
@Override
public IdentityProvider getIdPByAuthenticatorPropertyValue(String property, String value, String tenantDomain,
                                                           boolean ignoreFileBasedIdps)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value)) {
        String msg = "Invalid argument: Authenticator property or property value is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider identityProvider =
            dao.getIdPByAuthenticatorPropertyValue(null, property, value, tenantId, tenantDomain);
    if (identityProvider != null || ignoreFileBasedIdps) {
        return identityProvider;
    }
    // DB miss: fall back to the file-based IdP store.
    return new FileBasedIdPMgtDAO().getIdPByAuthenticatorPropertyValue(property, value, tenantDomain);
}
/**
 * Retrieves the Identity provider that carries the given property value on the named authenticator.
 *
 * @param property            IDP authenticator property (E.g.: IdPEntityId)
 * @param value               Value associated with given Property
 * @param tenantDomain        Tenant domain whose information is requested
 * @param authenticator       Name of the authenticator the property belongs to
 * @param ignoreFileBasedIdps Whether to skip the file-based IdP lookup fallback
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by authenticator property value
 */
public IdentityProvider getIdPByAuthenticatorPropertyValue(String property, String value, String tenantDomain,
                                                           String authenticator, boolean ignoreFileBasedIdps)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value) || StringUtils.isEmpty(authenticator)) {
        String msg = "Invalid argument: Authenticator property, property value or authenticator name is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider identityProvider =
            dao.getIdPByAuthenticatorPropertyValue(null, property, value, authenticator, tenantId, tenantDomain);
    if (identityProvider != null || ignoreFileBasedIdps) {
        return identityProvider;
    }
    // DB miss: fall back to the file-based IdP store.
    return new FileBasedIdPMgtDAO()
            .getIdPByAuthenticatorPropertyValue(property, value, tenantDomain, authenticator);
}
/**
 * Retrieves Enabled Identity provider information about a given tenant by Identity Provider name.
 *
 * @param idPName      Unique name of the Identity provider of whose information is requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return <code>IdentityProvider</code> Identity Provider information, or null when the IdP
 * is missing or disabled
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP name
 */
@Override
public IdentityProvider getEnabledIdPByName(String idPName, String tenantDomain)
        throws IdentityProviderManagementException {

    IdentityProvider identityProvider = getIdPByName(idPName, tenantDomain);
    // Disabled or missing IdPs are reported as absent.
    return (identityProvider != null && identityProvider.isEnable()) ? identityProvider : null;
}
/**
 * Retrieves Identity provider information about a given tenant by realm identifier.
 *
 * @param realmId      Unique realm identifier of the Identity provider of whose information is
 *                     requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return <code>IdentityProvider</code> matching the realm id, checking the DB first and the
 * file-based store second
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP home realm identifier
 */
@Override
public IdentityProvider getIdPByRealmId(String realmId, String tenantDomain)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(realmId)) {
        String msg = "Invalid argument: Identity Provider Home Realm Identifier value is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider identityProvider = dao.getIdPByRealmId(realmId, tenantId, tenantDomain);
    if (identityProvider != null) {
        return identityProvider;
    }
    // DB miss: fall back to the file-based IdP store.
    return new FileBasedIdPMgtDAO().getIdPByRealmId(realmId, tenantDomain);
}
/**
* Retrieves Enabled Identity provider information about a given tenant by realm identifier
*
* @param realmId Unique realm identifier of the Identity provider of whose information is
* requested
* @param tenantDomain Tenant domain whose information is requested
* @throws IdentityProviderManagementException Error when getting Identity Provider
* information by IdP home realm identifier
*/
@Override
public IdentityProvider getEnabledIdPByRealmId(String realmId, String tenantDomain)
throws IdentityProviderManagementException {
IdentityProvider idp = getIdPByRealmId(realmId, tenantDomain);
if (idp != null && idp.isEnable()) {
return idp;
}
return null;
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique Name of the IdP to which the given IdP claim URIs need to be mapped
* @param tenantDomain The tenant domain of whose local claim URIs to be mapped
* @param idPClaimURIs IdP claim URIs which need to be mapped to tenant's local claim URIs
* @throws IdentityProviderManagementException Error when getting claim mappings
*/
@Override
public Set<ClaimMapping> getMappedLocalClaims(String idPName, String tenantDomain,
List<String> idPClaimURIs) throws
IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
diagnosticLog.error(msg);
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
ClaimConfig claimConfiguration = identityProvider.getClaimConfig();
if (claimConfiguration != null) {
ClaimMapping[] claimMappings = claimConfiguration.getClaimMappings();
if (claimMappings != null && claimMappings.length > 0 && idPClaimURIs != null) {
Set<ClaimMapping> returnSet = new HashSet<ClaimMapping>();
for (String idpClaim : idPClaimURIs) {
for (ClaimMapping claimMapping : claimMappings) {
if (claimMapping.getRemoteClaim().getClaimUri().equals(idpClaim)) {
returnSet.add(claimMapping);
break;
}
}
}
return returnSet;
}
}
return new HashSet<ClaimMapping>();
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique Name of the IdP to which the given IdP claim URIs need to be mapped
* @param tenantDomain The tenant domain of whose local claim URIs to be mapped
* @param idPClaimURIs IdP claim URIs which need to be mapped to tenant's local claim URIs
* @throws IdentityProviderManagementException Error when getting claim mappings
*/
@Override
public Map<String, String> getMappedLocalClaimsMap(String idPName, String tenantDomain,
List<String> idPClaimURIs) throws
IdentityProviderManagementException {
Set<ClaimMapping> claimMappings = getMappedLocalClaims(idPName, tenantDomain, idPClaimURIs);
Map<String, String> returnMap = new HashMap<String, String>();
for (ClaimMapping claimMapping : claimMappings) {
returnMap.put(claimMapping.getRemoteClaim().getClaimUri(), claimMapping.getLocalClaim()
.getClaimUri());
}
return returnMap;
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique Name of the IdP to which the given local claim URIs need to be mapped
* @param tenantDomain The tenant domain of whose local claim URIs to be mapped
* @param localClaimURIs Local claim URIs which need to be mapped to IdP's claim URIs
* @throws IdentityProviderManagementException Error when getting claim mappings
*/
@Override
public Set<ClaimMapping> getMappedIdPClaims(String idPName, String tenantDomain,
List<String> localClaimURIs) throws
IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
ClaimConfig claimConfiguration = identityProvider.getClaimConfig();
if (claimConfiguration != null) {
ClaimMapping[] claimMappings = claimConfiguration.getClaimMappings();
if (claimMappings != null && claimMappings.length > 0 && localClaimURIs != null) {
Set<ClaimMapping> returnSet = new HashSet<ClaimMapping>();
for (String localClaimURI : localClaimURIs) {
for (ClaimMapping claimMapping : claimMappings) {
if (claimMapping.getLocalClaim().getClaimUri().equals(localClaimURI)) {
returnSet.add(claimMapping);
break;
}
}
}
return returnSet;
}
}
return new HashSet<ClaimMapping>();
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique Name of the IdP to which the given local claim URIs need to be mapped
* @param tenantDomain The tenant domain of whose local claim URIs to be mapped
* @param localClaimURIs Local claim URIs which need to be mapped to IdP's claim URIs
* @throws IdentityProviderManagementException Error when getting claim mappings
*/
@Override
public Map<String, String> getMappedIdPClaimsMap(String idPName, String tenantDomain,
List<String> localClaimURIs) throws
IdentityProviderManagementException {
Set<ClaimMapping> claimMappings = getMappedIdPClaims(idPName, tenantDomain, localClaimURIs);
Map<String, String> returnMap = new HashMap<String, String>();
for (ClaimMapping claimMapping : claimMappings) {
returnMap.put(claimMapping.getLocalClaim().getClaimUri(), claimMapping.getRemoteClaim()
.getClaimUri());
}
return returnMap;
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique name of the IdP to which the given IdP roles need to be mapped
* @param tenantDomain The tenant domain of whose local roles to be mapped
* @param idPRoles IdP roles which need to be mapped to local roles
* @throws IdentityProviderManagementException Error when getting role mappings
*/
@Override
public Set<RoleMapping> getMappedLocalRoles(String idPName, String tenantDomain,
String[] idPRoles) throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
PermissionsAndRoleConfig roleConfiguration = identityProvider.getPermissionAndRoleConfig();
if (roleConfiguration != null) {
RoleMapping[] roleMappings = roleConfiguration.getRoleMappings();
if (roleMappings != null && roleMappings.length > 0 && idPRoles != null) {
Set<RoleMapping> returnSet = new HashSet<RoleMapping>();
for (String idPRole : idPRoles) {
for (RoleMapping roleMapping : roleMappings) {
if (roleMapping.getRemoteRole().equals(idPRole)) {
returnSet.add(roleMapping);
break;
}
}
}
return returnSet;
}
}
return new HashSet<RoleMapping>();
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique name of the IdP to which the given IdP roles need to be mapped
* @param tenantDomain The tenant domain of whose local roles to be mapped
* @param idPRoles IdP roles which need to be mapped to local roles
* @throws IdentityProviderManagementException Error when getting role mappings
*/
@Override
public Map<String, LocalRole> getMappedLocalRolesMap(String idPName, String tenantDomain,
String[] idPRoles) throws IdentityProviderManagementException {
Set<RoleMapping> roleMappings = getMappedLocalRoles(idPName, tenantDomain, idPRoles);
Map<String, LocalRole> returnMap = new HashMap<String, LocalRole>();
for (RoleMapping roleMapping : roleMappings) {
returnMap.put(roleMapping.getRemoteRole(), roleMapping.getLocalRole());
}
return returnMap;
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique name of the IdP to which the given local roles need to be mapped
* @param tenantDomain The tenant domain of whose local roles need to be mapped
* @param localRoles Local roles which need to be mapped to IdP roles
* @throws IdentityProviderManagementException Error when getting role mappings
*/
@Override
public Set<RoleMapping> getMappedIdPRoles(String idPName, String tenantDomain,
LocalRole[] localRoles) throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(idPName)) {
String msg = "Invalid argument: Identity Provider Name value is empty";
throw new IdentityProviderManagementException(msg);
}
IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
if (identityProvider == null) {
identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
}
if (identityProvider == null) {
identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
}
PermissionsAndRoleConfig roleConfiguration = identityProvider.getPermissionAndRoleConfig();
if (roleConfiguration != null) {
RoleMapping[] roleMappings = roleConfiguration.getRoleMappings();
if (roleMappings != null && roleMappings.length > 0 && localRoles != null) {
Set<RoleMapping> returnSet = new HashSet<RoleMapping>();
for (LocalRole localRole : localRoles) {
for (RoleMapping roleMapping : roleMappings) {
if (roleMapping.getLocalRole().equals(localRole)) {
returnSet.add(roleMapping);
break;
}
}
}
return returnSet;
}
}
return new HashSet<RoleMapping>();
}
/**
* Retrieves Identity provider information about a given tenant
*
* @param idPName Unique name of the IdP to which the given local roles need to be mapped
* @param tenantDomain The tenant domain of whose local roles need to be mapped
* @param localRoles Local roles which need to be mapped to IdP roles
* @throws IdentityProviderManagementException Error when getting role mappings
*/
@Override
public Map<LocalRole, String> getMappedIdPRolesMap(String idPName, String tenantDomain,
LocalRole[] localRoles) throws
IdentityProviderManagementException {
Set<RoleMapping> roleMappings = getMappedIdPRoles(idPName, tenantDomain, localRoles);
Map<LocalRole, String> returnMap = new HashMap<LocalRole, String>();
for (RoleMapping roleMapping : roleMappings) {
returnMap.put(roleMapping.getLocalRole(), roleMapping.getRemoteRole());
}
return returnMap;
}
    /**
     * If metadata is supplied as an authenticator property (a property whose name contains
     * {@code IdPManagementConstants.META_DATA}), converts it via the registered
     * {@code MetadataConverter}s: persists the raw metadata string, derives a new
     * {@code FederatedAuthenticatorConfig} from it, carries the SP entity id over into the
     * derived properties, and sets any certificate extracted from the metadata on the IdP.
     *
     * @param tenantId         id of the tenant the Identity Provider belongs to.
     * @param identityProvider Identity Provider whose authenticator configs may carry metadata.
     * @throws IdentityProviderManagementException when no converter is registered, the metadata
     *                                             cannot be converted, or the SP entity id is empty.
     */
    private void handleMetadata(int tenantId, IdentityProvider identityProvider)
            throws IdentityProviderManagementException {
        if (IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().isEmpty()) {
            throw new IdentityProviderManagementException("Metadata Converter is not set");
        }
        FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs =
                identityProvider.getFederatedAuthenticatorConfigs();
        for (FederatedAuthenticatorConfig federatedAuthenticatorConfig : federatedAuthenticatorConfigs) {
            Property[] properties = federatedAuthenticatorConfig.getProperties();
            if (ArrayUtils.isNotEmpty(properties)) {
                for (Property property : properties) {
                    if (property != null) {
                        // Only properties whose name marks them as metadata are converted.
                        if (StringUtils.isNotBlank(property.getName()) &&
                                property.getName().contains(IdPManagementConstants.META_DATA)) {
                            for (MetadataConverter metadataConverter : IdpMgtServiceComponentHolder.getInstance()
                                    .getMetadataConverters()) {
                                if (metadataConverter.canHandle(property)) {
                                    try {
                                        // Persist the raw metadata string when a metadata file exists for this IdP.
                                        if (isMetadataFileExist(identityProvider.getIdentityProviderName(),
                                                property.getValue())) {
                                            try {
                                                metadataConverter.saveMetadataString(tenantId,
                                                        identityProvider.getIdentityProviderName(),
                                                        federatedAuthenticatorConfig.getName(), property.getValue());
                                            } catch (IdentityProviderManagementException e) {
                                                // NOTE(review): the original cause 'e' is dropped here, and the
                                                // message assumes a missing SAML2SSOMetadataConverter — confirm
                                                // whether the cause should be chained into the server exception.
                                                String data =
                                                        "Couldn't save metadata in registry.SAML2SSOMetadataConverter" +
                                                                " is not set. ";
                                                throw IdPManagementUtil.handleServerException(
                                                        IdPManagementConstants.ErrorMessage.ERROR_CODE_ADD_IDP, data);
                                            }
                                        }
                                        StringBuilder certificate = new StringBuilder();
                                        try {
                                            // Derive the authenticator config (and any certificate) from metadata.
                                            FederatedAuthenticatorConfig metaFederated = metadataConverter
                                                    .getFederatedAuthenticatorConfig(properties, certificate);
                                            String spName = "";
                                            // Locate the SP entity id among the supplied properties (last one wins).
                                            for (Property value : properties) {
                                                if (value != null && IdentityApplicationConstants.Authenticator
                                                        .SAML2SSO.SP_ENTITY_ID.equals(value.getName())) {
                                                    spName = value.getValue();
                                                }
                                            }
                                            if (spName.equals("")) {
                                                throw new IdentityProviderManagementException("SP name can't be empty");
                                            }
                                            // Carry the SP entity id over into the converted config's properties.
                                            if (metaFederated != null &&
                                                    ArrayUtils.isNotEmpty(metaFederated.getProperties())) {
                                                for (int y = 0; y < metaFederated.getProperties().length; y++) {
                                                    if (metaFederated.getProperties()[y] != null &&
                                                            IdentityApplicationConstants.Authenticator.SAML2SSO
                                                                    .SP_ENTITY_ID.equals
                                                                    (metaFederated.getProperties()[y].getName())) {
                                                        metaFederated.getProperties()[y].setValue(spName);
                                                        break;
                                                    }
                                                }
                                            }
                                            // Replace the authenticator's properties with the converted ones.
                                            if (metaFederated != null && ArrayUtils.isNotEmpty(metaFederated.getProperties())) {
                                                federatedAuthenticatorConfig
                                                        .setProperties(metaFederated.getProperties());
                                            } else {
                                                throw new IdentityProviderManagementException(
                                                        "Error setting metadata using file");
                                            }
                                        } catch (IdentityProviderManagementException ex) {
                                            throw new IdentityProviderManagementException("Error converting metadata",
                                                    ex);
                                        }
                                        if (certificate.toString().length() > 0) {
                                            identityProvider.setCertificate(certificate.toString());
                                        }
                                    } catch (XMLStreamException e) {
                                        throw new IdentityProviderManagementException(
                                                "Error while configuring metadata", e);
                                    }
                                    // Only the first converter that can handle the property is used.
                                    break;
                                }
                            }
                        }
                    }
                }
            }
        }
    }
/**
* Adds an Identity Provider to the given tenant
*
* @param identityProvider new Identity Provider information
* @throws IdentityProviderManagementException Error when adding Identity Provider
* information
* @deprecated use {@link IdentityProviderManager#addIdPWithResourceId(IdentityProvider, String)} instead.
*/
@Deprecated
@Override
public void addIdP(IdentityProvider identityProvider, String tenantDomain)
throws IdentityProviderManagementException {
addIdPWithResourceId(identityProvider, tenantDomain);
}
    /**
     * Adds an Identity Provider to the given tenant and returns the provider as persisted.
     *
     * @param identityProvider new Identity Provider information.
     * @param tenantDomain     tenant domain to add the Identity Provider to.
     * @return the Identity Provider re-read from the DAO after the add, or {@code null}
     *         when an enabled pre/post listener vetoes the operation.
     * @throws IdentityProviderManagementException Error when adding Identity Provider
     *                                             information.
     */
    @Override
    public IdentityProvider addIdPWithResourceId(IdentityProvider identityProvider, String tenantDomain)
            throws IdentityProviderManagementException {
        diagnosticLog.info("Adding IDP with resource ID: " + identityProvider.getResourceId());
        // Validate the IdP name/tenant and outbound provisioning role configuration up front.
        validateAddIdPInputValues(identityProvider.getIdentityProviderName(), tenantDomain);
        validateOutboundProvisioningRoles(identityProvider,tenantDomain);
        // Invoking the pre listeners; any enabled listener returning false aborts the add.
        Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
        for (IdentityProviderMgtListener listener : listeners) {
            if (listener.isEnable() && !listener.doPreAddIdP(identityProvider, tenantDomain)) {
                diagnosticLog.info("'doPreAddIdP' invocation failed in listener: " + listener.getClass().getName());
                return null;
            }
        }
        int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
        if (isPermissionAndRoleConfigExist(identityProvider)) {
            verifyAndUpdateRoleConfiguration(tenantDomain, tenantId, identityProvider.getPermissionAndRoleConfig());
        }
        // Reject duplicate SAML entity ids / issuer names within the tenant before persisting.
        validateIdPEntityId(identityProvider.getFederatedAuthenticatorConfigs(), tenantId, tenantDomain);
        validateIdPIssuerName(identityProvider, tenantId, tenantDomain);
        // Derive authenticator config / certificate from any supplied metadata.
        handleMetadata(tenantId, identityProvider);
        String resourceId = dao.addIdP(identityProvider, tenantId, tenantDomain);
        // Re-read so the returned object reflects exactly what was persisted.
        identityProvider = dao.getIdPByResourceId(resourceId, tenantId, tenantDomain);
        // invoking the post listeners
        for (IdentityProviderMgtListener listener : listeners) {
            if (listener.isEnable() && !listener.doPostAddIdP(identityProvider, tenantDomain)) {
                diagnosticLog.info("'doPostAddIdP' invocation failed in listener: " + listener.getClass().getName())
;
                return null;
            }
        }
        return identityProvider;
    }
/**
* Deletes an Identity Provider from a given tenant
*
* @param idPName Name of the IdP to be deleted
* @throws IdentityProviderManagementException Error when deleting Identity Provider
* information
* @deprecated use {@link IdentityProviderManager#deleteIdPByResourceId(String, String)} instead.
*/
@Deprecated
@Override
public void deleteIdP(String idPName, String tenantDomain) throws IdentityProviderManagementException {
diagnosticLog.info("Deleting IDP with name: " + idPName);
// Invoking the pre listeners.
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreDeleteIdP(idPName, tenantDomain)) {
diagnosticLog.info("'doPreDeleteIdP' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
if (StringUtils.isEmpty(idPName)) {
String data = "IdP name is empty.";
diagnosticLog.error("IDP name cannot be empty.");
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_NAME_INVALID, data);
}
IdentityProvider identityProvider = this.getIdPByName(idPName, tenantDomain, true);
if (identityProvider == null) {
diagnosticLog.info("Could not find a valid IDP for the given name: " + idPName);
return;
}
deleteIDP(identityProvider.getResourceId(), idPName, tenantDomain);
// Invoking the post listeners.
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostDeleteIdP(idPName, tenantDomain)) {
diagnosticLog.info("'doPostDeleteIdP' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
}
/**
* Delete all Identity Providers from a given tenant.
*
* @param tenantDomain Domain of the tenant
* @throws IdentityProviderManagementException
*/
@Override
public void deleteIdPs(String tenantDomain) throws IdentityProviderManagementException {
diagnosticLog.info("Deleting all IDPs in tenant domain: " + tenantDomain);
// Invoking the pre listeners.
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreDeleteIdPs(tenantDomain)) {
diagnosticLog.info("'doPreDeleteIdPs' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
// Delete metadata strings of each IDP
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
List<IdentityProvider> identityProviders = getIdPs(tenantDomain);
for (IdentityProvider identityProvider : identityProviders) {
deleteMetadataStrings(identityProvider.getIdentityProviderName(), tenantId);
}
dao.deleteIdPs(tenantId);
// Invoking the post listeners.
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostDeleteIdPs(tenantDomain)) {
diagnosticLog.info("'doPostDeleteIdPs' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
}
/**
* Deletes an Identity Provider from a given tenant.
*
* @param resourceId Resource ID of the IdP to be deleted
* @throws IdentityProviderManagementException Error when deleting Identity Provider
* information
*/
@Override
public void deleteIdPByResourceId(String resourceId, String tenantDomain) throws
IdentityProviderManagementException {
diagnosticLog.info("Deleting IDP by resource ID: " + resourceId);
// Invoking the pre listeners.
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreDeleteIdPByResourceId(resourceId, tenantDomain)) {
diagnosticLog.info("'doPreDeleteIdPByResourceId' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
IdentityProvider identityProvider = getIdPByResourceId(resourceId, tenantDomain, true);
if (identityProvider == null) {
diagnosticLog.info("Could not find a valid IDP for the given resource ID: " + resourceId);
return;
}
deleteIDP(resourceId, identityProvider.getIdentityProviderName(), tenantDomain);
// Invoking the post listeners.
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() &&
!listener.doPostDeleteIdPByResourceId(resourceId, identityProvider, tenantDomain)) {
diagnosticLog.info("'doPostDeleteIdPByResourceId' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
}
/**
* Delete metadata strings of a given IDP.
*
* @param idpName Identity Provider name
* @param tenantId Id of the tenant
* @throws IdentityProviderManagementException
*/
private void deleteMetadataStrings(String idpName, int tenantId) throws IdentityProviderManagementException {
for (MetadataConverter metadataConverter : IdpMgtServiceComponentHolder.getInstance().getMetadataConverters()) {
if (metadataConverter.canDelete(tenantId, idpName)) {
metadataConverter.deleteMetadataString(tenantId, idpName);
}
}
}
    /**
     * Deletes an IDP after first removing its stored metadata strings.
     *
     * @param resourceId   resource id of the IdP row to delete.
     * @param idpName      name of the IDP (used to locate its metadata strings).
     * @param tenantDomain tenant domain the IdP belongs to.
     * @throws IdentityProviderManagementException when metadata cleanup or the DAO delete fails.
     */
    private void deleteIDP(String resourceId, String idpName, String tenantDomain) throws
            IdentityProviderManagementException {
        int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
        // Delete metadata strings of the IDP first so no orphaned metadata remains.
        deleteMetadataStrings(idpName, tenantId);
        dao.deleteIdPByResourceId(resourceId, tenantId, tenantDomain);
    }
/**
* Force delete an Identity Provider from a given tenant. This will remove any associations this Identity
* Provider has with any Service Providers in authentication steps or provisioning.
*
* @param idpName name of IDP to be deleted
* @param tenantDomain tenantDomain to which the IDP belongs to
* @deprecated use {@link IdentityProviderManager#forceDeleteIdpByResourceId(String, String)} instead.
*/
@Deprecated
public void forceDeleteIdp(String idpName, String tenantDomain) throws IdentityProviderManagementException {
diagnosticLog.info("Force-deleting IDP with name: " + idpName);
// Invoking the pre listeners.
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreDeleteIdP(idpName, tenantDomain)) {
diagnosticLog.info("'doPreDeleteIdP' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
IdentityProvider identityProvider = this
.getIdPByName(idpName, tenantDomain, true);
if (identityProvider == null) {
diagnosticLog.info("Could not find a valid IDP for the given name: " + idpName);
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_NAME_DOES_NOT_EXIST, idpName);
}
forceDeleteIDP(identityProvider.getResourceId(), idpName, tenantDomain);
// Invoking the post listeners.
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostDeleteIdP(idpName, tenantDomain)) {
diagnosticLog.info("'doPostDeleteIdP' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
}
/**
* Force delete an Identity Provider from a given tenant. This will remove any associations this Identity
* Provider has with any Service Providers in authentication steps or provisioning.
*
* @param resourceId resource ID of IDP to be deleted
* @param tenantDomain tenantDomain to which the IDP belongs to
*/
public void forceDeleteIdpByResourceId(String resourceId, String tenantDomain) throws
IdentityProviderManagementException {
diagnosticLog.info("Force-deleting IDP with resource ID: " + resourceId);
// Invoking the pre listeners
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreDeleteIdPByResourceId(resourceId, tenantDomain)) {
diagnosticLog.info("'doPreDeleteIdPByResourceId' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
IdentityProvider identityProvider = getIdPByResourceId(resourceId, tenantDomain, true);
if (identityProvider == null) {
diagnosticLog.info("Could not find a valid IDP for the given resource ID: " + resourceId);
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_DOES_NOT_EXIST, resourceId);
}
forceDeleteIDP(resourceId, identityProvider.getIdentityProviderName(), tenantDomain);
// Invoking the post listeners
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostDeleteIdPByResourceId(resourceId, identityProvider,
tenantDomain)) {
diagnosticLog.info("'doPostDeleteIdPByResourceId' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
}
private void forceDeleteIDP(String resourceId, String idpName, String tenantDomain) throws
IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
for (MetadataConverter metadataConverter : IdpMgtServiceComponentHolder.getInstance().getMetadataConverters()) {
if (metadataConverter.canDelete(tenantId, idpName)) {
metadataConverter.deleteMetadataString(tenantId, idpName);
}
}
dao.forceDeleteIdPByResourceId(resourceId, tenantId, tenantDomain);
}
/**
* Updates a given Identity Provider information
*
* @param oldIdPName existing Identity Provider name
* @param newIdentityProvider new IdP information
* @throws IdentityProviderManagementException Error when updating Identity Provider
* information
* @deprecated use {@link IdentityProviderManager#updateIdPByResourceId(String, IdentityProvider, String)} instead.
*/
@Deprecated
@Override
public void updateIdP(String oldIdPName, IdentityProvider newIdentityProvider,
String tenantDomain) throws IdentityProviderManagementException {
diagnosticLog.info("Updating IDP with name: " + oldIdPName);
// Invoking the pre listeners.
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreUpdateIdP(oldIdPName, newIdentityProvider,
tenantDomain)) {
diagnosticLog.info("'doPreUpdateIdP' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
IdentityProvider currentIdentityProvider = this
.getIdPByName(oldIdPName, tenantDomain, true);
if (currentIdentityProvider == null) {
diagnosticLog.info("Could not find a valid IDP with the given name: " + oldIdPName);
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_NAME_DOES_NOT_EXIST, oldIdPName);
}
updateIDP(currentIdentityProvider, newIdentityProvider, IdentityTenantUtil.getTenantId(tenantDomain),
tenantDomain);
// Invoking the post listeners.
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostUpdateIdP(oldIdPName, newIdentityProvider, tenantDomain)) {
diagnosticLog.info("'doPostUpdateIdP' invocation failed in listener: " +
listener.getClass().getName());
return;
}
}
}
/**
* Updates a given Identity Provider information
*
* @param resourceId existing Identity Provider resourceId
* @param newIdentityProvider new IdP information
* @param tenantDomain tenant domain of IDP.
* @throws IdentityProviderManagementException Error when updating Identity Provider
* information
*/
@Override
public IdentityProvider updateIdPByResourceId(String resourceId, IdentityProvider
newIdentityProvider, String tenantDomain) throws IdentityProviderManagementException {
diagnosticLog.info("Updating IDP with resource ID: " + resourceId);
// Invoking the pre listeners.
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreUpdateIdPByResourceId(resourceId, newIdentityProvider,
tenantDomain)) {
diagnosticLog.info("'doPreUpdateIdPByResourceId' invocation failed in listener: " +
listener.getClass().getName());
return null;
}
}
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
IdentityProvider currentIdentityProvider = this
.getIdPByResourceId(resourceId, tenantDomain, true);
validateUpdateIdPInputValues(currentIdentityProvider, resourceId, newIdentityProvider, tenantDomain);
updateIDP(currentIdentityProvider, newIdentityProvider, tenantId, tenantDomain);
// Invoking the post listeners.
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostUpdateIdPByResourceId(resourceId, currentIdentityProvider,
newIdentityProvider, tenantDomain)) {
diagnosticLog.info("'doPostUpdateIdPByResourceId' invocation failed in listener: " +
listener.getClass().getName());
return null;
}
}
return dao.getIdPByResourceId(resourceId, tenantId, tenantDomain);
}
    /**
     * Applies a validated update of an Identity Provider: re-verifies the role
     * configuration, checks entity-id/issuer uniqueness against the current values,
     * re-derives metadata-based configuration, and persists via the DAO.
     *
     * @param currentIdentityProvider the IdP as currently stored.
     * @param newIdentityProvider     the desired new IdP state.
     * @param tenantId                id of the tenant the IdP belongs to.
     * @param tenantDomain            tenant domain the IdP belongs to.
     * @throws IdentityProviderManagementException when validation, metadata handling, or the
     *                                             DAO update fails.
     */
    private void updateIDP(IdentityProvider currentIdentityProvider, IdentityProvider newIdentityProvider, int tenantId,
                           String tenantDomain) throws IdentityProviderManagementException {
        if (isPermissionAndRoleConfigExist(newIdentityProvider)) {
            verifyAndUpdateRoleConfiguration(tenantDomain, tenantId, newIdentityProvider.getPermissionAndRoleConfig());
        }
        // Uniqueness is checked against the current configs so an unchanged entity id
        // does not collide with itself.
        validateUpdateOfIdPEntityId(currentIdentityProvider.getFederatedAuthenticatorConfigs(),
                newIdentityProvider.getFederatedAuthenticatorConfigs(),
                tenantId, tenantDomain);
        validateIdPIssuerName(currentIdentityProvider, newIdentityProvider, tenantId, tenantDomain);
        handleMetadata(tenantId, newIdentityProvider);
        dao.updateIdP(newIdentityProvider, currentIdentityProvider, tenantId, tenantDomain);
    }
/**
* Get the authenticators registered in the system.
*
* @return <code>FederatedAuthenticatorConfig</code> array.
* @throws IdentityProviderManagementException Error when getting authenticators registered
* in the system
*/
@Override
public FederatedAuthenticatorConfig[] getAllFederatedAuthenticators()
throws IdentityProviderManagementException {
List<FederatedAuthenticatorConfig> appConfig = ApplicationAuthenticatorService
.getInstance().getFederatedAuthenticators();
if (CollectionUtils.isNotEmpty(appConfig)) {
return appConfig.toArray(new FederatedAuthenticatorConfig[appConfig.size()]);
}
return new FederatedAuthenticatorConfig[0];
}
/**
* Get the Provisioning Connectors registered in the system.
*
* @return <code>ProvisioningConnectorConfig</code> array.
* @throws IdentityProviderManagementException
*/
@Override
public ProvisioningConnectorConfig[] getAllProvisioningConnectors()
throws IdentityProviderManagementException {
List<ProvisioningConnectorConfig> connectorConfigs = ProvisioningConnectorService
.getInstance().getProvisioningConnectorConfigs();
if (connectorConfigs != null && connectorConfigs.size() > 0) {
return connectorConfigs.toArray(new ProvisioningConnectorConfig[0]);
}
return null;
}
    /**
     * Checks whether a SAML federated authenticator in the given configs declares an IdP
     * entity id that is already registered for another provider in the tenant.
     *
     * @param federatedAuthenticatorConfigs authenticator configs of the IdP being added.
     * @param tenantId                      id of the tenant.
     * @param tenantDomain                  tenant domain (used only in the error message).
     * @return always {@code true}; a duplicate entity id is reported by throwing rather than
     *         by returning {@code false}.
     * @throws IdentityProviderManagementException when the entity id is already registered
     *                                             in the tenant.
     */
    private boolean validateIdPEntityId(FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs,
                                        int tenantId, String tenantDomain) throws IdentityProviderManagementException {
        if (federatedAuthenticatorConfigs != null) {
            for (FederatedAuthenticatorConfig authConfig : federatedAuthenticatorConfigs) {
                // Only SAML2 SSO authenticators carry an IdP entity id property.
                if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME.equals(authConfig.getName()) ||
                        IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(authConfig.getName())) {
                    Property[] properties = authConfig.getProperties();
                    if (properties != null) {
                        for (Property property : properties) {
                            if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(
                                    property.getName())) {
                                if (dao.isIdPAvailableForAuthenticatorProperty(authConfig.getName(),
                                        IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID,
                                        property.getValue(), tenantId)) {
                                    String msg =
                                            "An Identity Provider Entity ID has already been registered with the " +
                                                    "name '" + property.getValue() + "' for tenant '" + tenantDomain +
                                                    "'";
                                    diagnosticLog.error(msg);
                                    throw new IdentityProviderManagementException(msg);
                                }
                                // The first entity-id property found decides the outcome.
                                return true;
                            }
                        }
                    }
                }
            }
        }
        return true;
    }
/**
 * Validates the SAML IdP Entity ID on an IdP update. First extracts the entity id from the
 * currently stored SAML authenticator config; then, for the new config, passes validation when
 * the entity id is unchanged, and otherwise checks that the new value is not already registered
 * by another IdP in the tenant. Returns true when no SAML authenticator / property is present.
 *
 * @param currentFederatedAuthConfigs Authenticator configs currently stored for the IdP.
 * @param newFederatedAuthConfigs     Authenticator configs of the updated IdP.
 * @param tenantId                    Tenant id.
 * @param tenantDomain                Tenant domain (used in the error message only).
 * @return true when validation passes.
 * @throws IdentityProviderManagementException If the new entity id collides with another IdP.
 */
private boolean validateUpdateOfIdPEntityId(FederatedAuthenticatorConfig[] currentFederatedAuthConfigs,
                                            FederatedAuthenticatorConfig[] newFederatedAuthConfigs,
                                            int tenantId, String tenantDomain)
        throws IdentityProviderManagementException {

    // Phase 1: find the entity id currently stored (first SAML authenticator wins).
    String currentIdentityProviderEntityId = null;
    if (currentFederatedAuthConfigs != null) {
        for (FederatedAuthenticatorConfig fedAuthnConfig : currentFederatedAuthConfigs) {
            if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME
                    .equals(fedAuthnConfig.getName()) ||
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(fedAuthnConfig.getName())) {
                Property[] properties = fedAuthnConfig.getProperties();
                if (properties != null) {
                    for (Property property : properties) {
                        if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals
                                (property.getName())) {
                            currentIdentityProviderEntityId = property.getValue();
                            break;
                        }
                    }
                }
                break;
            }
        }
    }

    // Phase 2: compare against the entity id in the updated config.
    if (newFederatedAuthConfigs != null) {
        for (FederatedAuthenticatorConfig fedAuthnConfig : newFederatedAuthConfigs) {
            if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME
                    .equals(fedAuthnConfig.getName()) ||
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(fedAuthnConfig.getName())) {
                Property[] properties = fedAuthnConfig.getProperties();
                if (properties != null) {
                    for (Property property : properties) {
                        if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.
                                getName())) {
                            // Unchanged entity id: nothing to re-validate.
                            if (currentIdentityProviderEntityId != null && currentIdentityProviderEntityId.equals
                                    (property.getValue())) {
                                return true;
                            } else {
                                // Changed entity id: must not already belong to another IdP in this tenant.
                                if (dao.isIdPAvailableForAuthenticatorProperty(fedAuthnConfig.getName(),
                                        IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID,
                                        property.getValue(), tenantId)) {
                                    String msg = "An Identity Provider Entity ID has already been registered " +
                                            "with the name '" +
                                            property.getValue() + "' for tenant '" + tenantDomain + "'";
                                    diagnosticLog.error(msg);
                                    throw new IdentityProviderManagementException(msg);
                                }
                                return true;
                            }
                        }
                    }
                }
                break;
            }
        }
    }
    return true;
}
/**
 * Returns the OIDC resident IdP entity id (the ID token issuer), falling back to
 * "localhost" when "OAuth.OpenIDConnect.IDTokenIssuerID" is not configured.
 */
private String getOIDCResidentIdPEntityId() {

    String configuredIssuer = IdentityUtil.getProperty("OAuth.OpenIDConnect.IDTokenIssuerID");
    return StringUtils.isBlank(configuredIssuer) ? "localhost" : configuredIssuer;
}
/**
 * Builds and returns the SAML metadata string of the resident IdP of the given tenant.
 * Locates the resident IdP's SAML2 SSO authenticator config and hands it to the first
 * registered {@code MetadataConverter} that can handle it.
 *
 * @param tenantDomain Tenant domain whose resident IdP metadata is requested.
 * @return Metadata string, or null when there is no SAML config or no converter can handle it.
 * @throws IdentityProviderManagementException If no converters are registered or conversion fails.
 */
public String getResidentIDPMetadata(String tenantDomain) throws IdentityProviderManagementException {

    if (IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().isEmpty()) {
        throw new IdentityProviderManagementException(
                "Error receiving Metadata object for tenant: " + tenantDomain);
    }

    IdentityProvider residentIdentityProvider = this.getResidentIdP(tenantDomain);
    FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs =
            residentIdentityProvider.getFederatedAuthenticatorConfigs();
    // Find the SAML2 SSO authenticator config of the resident IdP, if any.
    FederatedAuthenticatorConfig samlFederatedAuthenticatorConfig = null;
    for (int i = 0; i < federatedAuthenticatorConfigs.length; i++) {
        if (federatedAuthenticatorConfigs[i].getName()
                .equals(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME)) {
            samlFederatedAuthenticatorConfig = federatedAuthenticatorConfigs[i];
            break;
        }
    }
    if (samlFederatedAuthenticatorConfig != null) {
        try {
            // First converter that can handle the SAML config wins.
            for (int t = 0; t < IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().size(); t++) {
                MetadataConverter converter = IdpMgtServiceComponentHolder.getInstance().getMetadataConverters()
                        .get(t);
                if (converter.canHandle(samlFederatedAuthenticatorConfig)) {
                    return converter.getMetadataString(samlFederatedAuthenticatorConfig);
                }
            }
        } catch (IdentityProviderSAMLException e) {
            // NOTE(review): only e.getMessage() is passed (as the second String argument), so the
            // original stack trace is lost — confirm whether the (String, Throwable) constructor
            // was intended here.
            throw new IdentityProviderManagementException(
                    "Error in retrieving metadata string for tenant:" + tenantDomain, e.getMessage());
        }
    }
    return null;
}
/**
 * Returns the applications connected to the identity provider identified by the given resource id,
 * as a paginated result. Validates the resource id (and that the IdP exists) and normalizes the
 * limit/offset before delegating to the DAO.
 *
 * @param resourceId   Identity Provider resource ID.
 * @param limit        Page size (normalized by validateLimit).
 * @param offset       Page start offset (normalized by validateOffset).
 * @param tenantDomain Tenant domain of the IdP.
 * @return Paginated connected applications result.
 * @throws IdentityProviderManagementException If the resource id is invalid or retrieval fails.
 */
@Override
public ConnectedAppsResult getConnectedApplications(String resourceId, Integer limit, Integer offset, String
        tenantDomain) throws IdentityProviderManagementException {

    validateResourceId(resourceId, tenantDomain);
    limit = validateLimit(limit);
    offset = validateOffset(offset);
    return dao.getConnectedApplications(resourceId, limit, offset);
}
/**
 * Validates an IdP resource id for the connected-applications lookup: it must be non-empty and
 * must resolve to an existing IdP in the tenant (file-based IdPs are ignored in the lookup).
 *
 * @param resourceId   Identity Provider resource ID.
 * @param tenantDomain Tenant domain of the IdP.
 * @throws IdentityProviderManagementException If the id is empty or no matching IdP exists.
 */
private void validateResourceId(String resourceId, String tenantDomain) throws IdentityProviderManagementException {

    if (StringUtils.isEmpty(resourceId)) {
        String data = "Invalid argument: Identity Provider resource ID value is empty";
        diagnosticLog.error(data);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_RETRIEVE_IDP_CONNECTED_APPS, data);
    }

    if (getIdPByResourceId(resourceId, tenantDomain, true) == null) {
        diagnosticLog.error("Could not find a valid IDP with given resource ID: " + resourceId);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_DOES_NOT_EXIST, resourceId);
    }
}
/**
 * Resolves the absolute public url for a service endpoint. In legacy mode (tenant-qualified
 * urls disabled) a non-blank url from the file configuration wins; otherwise the url is built
 * from the default context via {@link ServiceURLBuilder}.
 *
 * @param defaultUrlContext Default url context path.
 * @param urlFromConfig     Url picked from the file configuration (may be blank).
 * @return Absolute public url of the service, or the configured url in legacy mode.
 * @throws IdentityProviderManagementServerException When building the absolute public url fails.
 */
private String resolveAbsoluteURL(String defaultUrlContext, String urlFromConfig) throws IdentityProviderManagementServerException {

    boolean legacyMode = !IdentityTenantUtil.isTenantQualifiedUrlsEnabled();
    if (legacyMode && StringUtils.isNotBlank(urlFromConfig)) {
        if (log.isDebugEnabled()) {
            log.debug("Resolved URL:" + urlFromConfig + " from file configuration for default url context: " +
                    defaultUrlContext);
        }
        return urlFromConfig;
    }

    try {
        return ServiceURLBuilder.create().addPath(defaultUrlContext).build().getAbsolutePublicURL();
    } catch (URLBuilderException e) {
        throw IdentityProviderManagementException.error(IdentityProviderManagementServerException.class,
                "Error while building URL: " + defaultUrlContext, e);
    }
}
/**
 * In legacy mode (tenant-qualified urls disabled), prefixes the tenant path segment
 * ("/t/&lt;domain&gt;") to the given url for non-super tenants. The url is returned unchanged in
 * tenant-qualified mode, for the super tenant, or when the url turns out to be malformed
 * (the error is logged, not propagated).
 */
private String addTenantPathParamInLegacyMode(String resolvedUrl, String tenantDomain) {

    boolean legacyMode = !IdentityTenantUtil.isTenantQualifiedUrlsEnabled();
    boolean nonSuperTenant = StringUtils.isNotBlank(tenantDomain) &&
            !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain);
    if (legacyMode && nonSuperTenant) {
        try {
            return getTenantUrl(resolvedUrl, tenantDomain);
        } catch (URISyntaxException e) {
            // Best effort: fall through and return the original url untouched.
            log.error(String.format("%s endpoint is malformed.", resolvedUrl), e);
        }
    }
    return resolvedUrl;
}
/**
 * Rebuilds the given url with the tenant path segment ("/t/&lt;tenantDomain&gt;") prepended to its
 * path, preserving scheme, authority, query and fragment.
 *
 * @throws URISyntaxException If the input url is not a valid URI.
 */
private String getTenantUrl(String url, String tenantDomain) throws URISyntaxException {

    URI original = new URI(url);
    String tenantQualifiedPath = "/t/" + tenantDomain + original.getPath();
    return new URI(original.getScheme(), original.getUserInfo(), original.getHost(), original.getPort(),
            tenantQualifiedPath, original.getQuery(), original.getFragment()).toString();
}
/**
 * Filters the IdP role mappings down to the valid ones and writes the result back into the
 * given config (in-place mutation). A mapping is kept only when both sides are present, the
 * local role exists in the tenant's user store, and — when groups/roles separation is enabled —
 * the local role is an actual role (Internal/Application/Workflow domain), not a group.
 *
 * @param tenantDomain      Tenant domain (for error reporting).
 * @param tenantId          Tenant id used to resolve the user realm.
 * @param roleConfiguration Role config to verify; its mappings and idpRoles are replaced.
 * @throws IdentityProviderManagementException If the UserStoreManager cannot be retrieved.
 */
private void verifyAndUpdateRoleConfiguration(String tenantDomain, int tenantId,
                                              PermissionsAndRoleConfig roleConfiguration)
        throws IdentityProviderManagementException {

    List<RoleMapping> validRoleMappings = new ArrayList<>();
    List<String> validIdPRoles = new ArrayList<>();

    for (RoleMapping mapping : roleConfiguration.getRoleMappings()) {
        try {
            // Skip incomplete mappings silently.
            if (mapping.getRemoteRole() == null || mapping.getLocalRole() == null || StringUtils
                    .isBlank(mapping.getLocalRole().getLocalRoleName())) {
                continue;
            }

            UserStoreManager usm = IdPManagementServiceComponent.getRealmService().getTenantUserRealm(tenantId)
                    .getUserStoreManager();
            String role = mapping.getLocalRole().getLocalRoleName();
            if (StringUtils.isNotBlank(mapping.getLocalRole().getUserStoreId())) {
                // Qualify the role with its user store domain before existence checks.
                role = IdentityUtil.addDomainToName(role, mapping.getLocalRole().getUserStoreId());
            }

            if (IdentityUtil.isGroupsVsRolesSeparationImprovementsEnabled()) {
                // Only roles are allowed for role mapping.
                if (isGroup(role)) {
                    if (log.isDebugEnabled()) {
                        log.debug("Groups including: " + role + ", are not allowed for the identity " +
                                "provider role mapping.");
                    }
                    diagnosticLog.info("Groups including: " + role + ", are not allowed for the identity " +
                            "provider role mapping.");
                    continue;
                }
            }
            // Remove invalid mappings if local role does not exists.
            if (usm.isExistingRole(role)) {
                validRoleMappings.add(mapping);
                validIdPRoles.add(mapping.getRemoteRole());
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("Invalid local role name: " + role + " for the federated role: " + mapping
                            .getRemoteRole());
                }
                diagnosticLog.info("Invalid local role name: " + role + " for the federated role: " + mapping
                        .getRemoteRole());
            }
        } catch (UserStoreException e) {
            diagnosticLog.error("Error occurred while retrieving UserStoreManager for tenant " + tenantDomain);
            throw new IdentityProviderManagementException(
                    "Error occurred while retrieving UserStoreManager for tenant " + tenantDomain, e);
        }
    }

    // Replace the config contents with the filtered results.
    roleConfiguration.setRoleMappings(validRoleMappings.toArray(new RoleMapping[0]));
    roleConfiguration.setIdpRoles(validIdPRoles.toArray(new String[0]));
}
/**
 * Validate input parameters for the getIdPByResourceId function.
 * The resource id must be a non-empty string; no existence check is performed here.
 *
 * @param resourceId Identity Provider resource ID.
 * @throws IdentityProviderManagementException If the resource id is empty.
 */
private void validateGetIdPInputValues(String resourceId) throws IdentityProviderManagementException {

    if (StringUtils.isEmpty(resourceId)) {
        String data = "Invalid argument: Identity Provider resource ID value is empty";
        diagnosticLog.error(data);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_GET_REQUEST_INVALID, data);
    }
}
/**
 * Validate input parameters for the addIdPWithResourceId function: the IdP name must not
 * already be taken by a database-backed IdP in the tenant, nor by a file-based IdP (except
 * for the reserved shared-IdP name prefix, which is blocked earlier at the service layer).
 *
 * @param idpName      Identity Provider name.
 * @param tenantDomain Tenant domain of IDP.
 * @throws IdentityProviderManagementException If an IdP with the same name already exists.
 */
private void validateAddIdPInputValues(String idpName, String tenantDomain) throws
        IdentityProviderManagementException {

    // Database-backed IdPs: name must be unique within the tenant.
    if (IdentityProviderManager.getInstance().getIdPByName(idpName, tenantDomain, true) != null) {
        diagnosticLog.error("An IDP with given name: " + idpName + " already exists.");
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_ALREADY_EXISTS, idpName);
    }

    // File-based IdPs: same uniqueness rule, but names with the shared prefix are exempt here.
    if (IdPManagementServiceComponent.getFileBasedIdPs().containsKey(idpName)
            && !idpName.startsWith(IdPManagementConstants.SHARED_IDP_PREFIX)) {
        //If an IDP with name starting with "SHARED_" is added from UI, It's blocked at the service class
        // before calling this method
        diagnosticLog.error("An IDP with given name: " + idpName + " already exists.");
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_ALREADY_EXISTS, idpName);
    }
}
/**
 * Validate input parameters for the updateIdPByResourceId function.
 *
 * @param currentIdentityProvider Old Identity Provider Information.
 * @param resourceId              Identity Provider's resource ID.
 * @param newIdentityProvider     Updated Identity Provider Information to be validated.
 * @param tenantDomain            Tenant domain of IDP.
 * @throws IdentityProviderManagementException IdentityProviderManagementException
 */
private void validateUpdateIdPInputValues(IdentityProvider currentIdentityProvider, String resourceId,
                                          IdentityProvider newIdentityProvider, String tenantDomain)
        throws IdentityProviderManagementException {

    // The IdP being updated must exist.
    if (currentIdentityProvider == null) {
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_DOES_NOT_EXIST, resourceId);
    }

    // The new name may only match an existing IdP when it is this very IdP (same resource id).
    boolean isNewIdPNameExists = false;
    IdentityProvider retrievedIdentityProvider =
            getIdPByName(newIdentityProvider.getIdentityProviderName(), tenantDomain, true);
    if (retrievedIdentityProvider != null) {
        isNewIdPNameExists = !StringUtils.equals(retrievedIdentityProvider.getResourceId(), currentIdentityProvider
                .getResourceId());
    }
    // File-based IdP names are reserved and cannot be taken over by an update.
    if (isNewIdPNameExists || IdPManagementServiceComponent.getFileBasedIdPs()
            .containsKey(newIdentityProvider.getIdentityProviderName())) {
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_ALREADY_EXISTS, newIdentityProvider.getIdentityProviderName());
    }

    // Validate whether there are any duplicate properties in the ProvisioningConnectorConfig.
    validateOutboundProvisioningConnectorProperties(newIdentityProvider);
}
/**
 * Validate whether there are any duplicate properties in the ProvisioningConnectorConfig of an
 * IdentityProvider. Each connector is checked independently; connectors with fewer than two
 * properties cannot contain duplicates and are skipped.
 *
 * @param newIdentityProvider IdentityProvider object.
 * @throws IdentityProviderManagementException If duplicate properties found in ProvisioningConnectorConfig.
 */
private void validateOutboundProvisioningConnectorProperties(IdentityProvider newIdentityProvider)
        throws IdentityProviderManagementException {

    ProvisioningConnectorConfig[] provisioningConnectorConfigs =
            newIdentityProvider.getProvisioningConnectorConfigs();
    if (!ArrayUtils.isEmpty(provisioningConnectorConfigs)) {
        for (ProvisioningConnectorConfig connectorConfig : provisioningConnectorConfigs) {
            Property[] properties = connectorConfig.getProvisioningProperties();
            // Fewer than two properties can never duplicate; move on to the NEXT connector.
            // (Previously this used 'break', which silently skipped validating all remaining
            // connectors once one sparse connector was encountered.)
            if (ArrayUtils.isEmpty(properties) || properties.length < 2) {
                continue;
            }
            // A Set add() returning false signals a duplicate property (by Property equality).
            Set<Property> connectorProperties = new HashSet<>();
            for (Property property : properties) {
                if (!connectorProperties.add(property)) {
                    throw IdPManagementUtil.handleClientException(
                            IdPManagementConstants.ErrorMessage.DUPLICATE_OUTBOUND_CONNECTOR_PROPERTIES,
                            newIdentityProvider.getIdentityProviderName());
                }
            }
        }
    }
}
/**
 * Check whether a PermissionAndRoleConfig with role mappings is configured for the IDP.
 *
 * @param identityProvider Identity Provider information.
 * @return true when both the config and its role mappings are present.
 */
private boolean isPermissionAndRoleConfigExist(IdentityProvider identityProvider) {

    PermissionsAndRoleConfig roleConfig = identityProvider.getPermissionAndRoleConfig();
    return roleConfig != null && roleConfig.getRoleMappings() != null;
}
/**
 * Check whether a metadata file is configured for the IDP, i.e. both the IdP name and the
 * metadata string are non-empty.
 *
 * @param idpName  Identity Provider name.
 * @param metadata Metadata string.
 * @return true when both values are non-empty.
 */
private boolean isMetadataFileExist(String idpName, String metadata) {

    return !(StringUtils.isEmpty(idpName) || StringUtils.isEmpty(metadata));
}
/**
 * Returns the identity provider that has the given metadata property value in the tenant, or
 * null when no IdP name can be resolved for the property. Property name and value must both be
 * non-empty.
 *
 * @param property           Metadata property name.
 * @param value              Metadata property value.
 * @param tenantDomain       Tenant domain.
 * @param ignoreFileBasedIdps Whether to skip file-based IdPs in the lookup.
 * @return Matching IdentityProvider, or null when not found.
 * @throws IdentityProviderManagementException If the arguments are invalid or retrieval fails.
 */
@Override
public IdentityProvider getIdPByMetadataProperty(String property, String value, String tenantDomain,
                                                 boolean ignoreFileBasedIdps)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value)) {
        String msg = "Invalid argument: IDP metadata property or property value is empty";
        diagnosticLog.info(msg);
        throw new IdentityProviderManagementException(msg);
    }

    // First resolve the IdP name from the metadata property, then load the full IdP by name.
    String idPName = getIDPNameByMetadataProperty(null, property, value, tenantId, tenantDomain,
            ignoreFileBasedIdps);

    if (idPName == null) {
        if (log.isDebugEnabled()) {
            log.debug("IDP Name not found for metadata property name: " + property + " value: " + value +
                    ". Returning null without continuing.");
        }
        diagnosticLog.info("IDP Name not found for metadata property name: " + property + " value: " + value +
                ". Returning null without continuing.");
        return null;
    }

    return getIdPByName(idPName, tenantDomain, ignoreFileBasedIdps);
}
/**
 * Method to validate the uniqueness of the IDP Issuer Name.
 * Ideally used when adding a IDP. Scans the IdP properties for a non-blank
 * {@code IDP_ISSUER_NAME} and fails when that issuer is already registered by another IdP
 * in the tenant (file-based IdPs included in the lookup).
 *
 * @param identityProvider Identity Provider being added.
 * @param tenantId         Tenant id.
 * @param tenantDomain     Tenant domain.
 * @return Returns true if valid.
 * @throws IdentityProviderManagementException IdentityProviderManagementException.
 */
private boolean validateIdPIssuerName(IdentityProvider identityProvider, int tenantId, String tenantDomain)
        throws IdentityProviderManagementException {

    IdentityProviderProperty[] identityProviderProperties = identityProvider.getIdpProperties();
    if (!ArrayUtils.isEmpty(identityProviderProperties)) {
        for (IdentityProviderProperty prop : identityProviderProperties) {
            if (prop != null && IdentityApplicationConstants.IDP_ISSUER_NAME.equals(prop.getName())
                    && StringUtils.isNotBlank(prop.getValue())) {

                // Every issuer-name property is checked; any collision aborts the add.
                String idpWithIssuer = getIDPNameByMetadataProperty(null,
                        IdentityApplicationConstants.IDP_ISSUER_NAME, prop.getValue(), tenantId, tenantDomain,
                        false);
                if (StringUtils.isNotEmpty(idpWithIssuer)) {
                    String msg = "The provided IDP Issuer Name '" + prop.getValue() + "' has already been " +
                            "registered with the IDP '" + idpWithIssuer + "'.";
                    diagnosticLog.error(msg);
                    throw new IdentityProviderManagementClientException(msg);
                }
            }
        }
    }

    return true;
}
/**
 * Method to validate the uniqueness of the IDP Issuer Name.
 * Ideally used when updating a IDP.
 * If the provided two IDP configs have the same Issuer Name validation is passed.
 *
 * @param currentIdP   Existing Identity Provider config.
 * @param newIdP       Updated Identity Provider config.
 * @param tenantId     Tenant id.
 * @param tenantDomain Tenant domain.
 * @return Returns true if valid.
 * @throws IdentityProviderManagementException IdentityProviderManagementException.
 */
private boolean validateIdPIssuerName(IdentityProvider currentIdP, IdentityProvider newIdP, int tenantId,
                                      String tenantDomain)
        throws IdentityProviderManagementException {

    // The two extraction loops were duplicated inline; factored into a shared helper.
    String newIdPIssuerName = resolveIdPIssuerName(newIdP);
    String currentIdPIssuerName = resolveIdPIssuerName(currentIdP);

    // Only re-check uniqueness when the issuer name is actually being changed.
    if (StringUtils.isNotBlank(newIdPIssuerName) && !StringUtils.equals(newIdPIssuerName, currentIdPIssuerName)) {
        String idpWithIssuer = getIDPNameByMetadataProperty(null,
                IdentityApplicationConstants.IDP_ISSUER_NAME, newIdPIssuerName, tenantId, tenantDomain,
                false);
        if (StringUtils.isNotEmpty(idpWithIssuer)) {
            String msg = "The provided IDP Issuer Name '" + newIdPIssuerName + "' has already been " +
                    "registered with the IDP '" + idpWithIssuer + "'.";
            diagnosticLog.error(msg);
            throw new IdentityProviderManagementClientException(msg);
        }
    }

    return true;
}

/**
 * Extracts the configured IDP Issuer Name property value from the given IdP, or {@code null}
 * when absent. When the property appears more than once, the last non-blank value wins
 * (matching the behavior of the original inline loops).
 */
private String resolveIdPIssuerName(IdentityProvider identityProvider) {

    String issuerName = null;
    IdentityProviderProperty[] idpProperties = identityProvider.getIdpProperties();
    if (!ArrayUtils.isEmpty(idpProperties)) {
        for (IdentityProviderProperty prop : idpProperties) {
            if (prop != null && IdentityApplicationConstants.IDP_ISSUER_NAME.equals(prop.getName())
                    && StringUtils.isNotBlank(prop.getValue())) {
                issuerName = prop.getValue();
            }
        }
    }
    return issuerName;
}
/**
 * Resolves the name of the IdP that holds the given metadata property value, looking first in
 * the database and — unless told to skip them — falling back to file-based IdPs.
 *
 * @param dbConnection Database connection. NOTE(review): this parameter is ignored; the DAO is
 *                     always called with {@code null} — confirm whether passing it through was
 *                     intended.
 * @param property     Metadata property name.
 * @param value        Metadata property value.
 * @param tenantId     Tenant id.
 * @param tenantDomain Tenant domain.
 * @param ignoreFileBasedIdps Whether to skip the file-based IdP fallback.
 * @return IdP name, or null when no IdP matches.
 * @throws IdentityProviderManagementException If the lookup fails.
 */
private String getIDPNameByMetadataProperty(Connection dbConnection, String property, String value, int tenantId,
                                            String tenantDomain, boolean ignoreFileBasedIdps)
        throws IdentityProviderManagementException {

    String idPName = dao.getIdPNameByMetadataProperty(null, property, value, tenantId, tenantDomain);
    if (idPName == null && !ignoreFileBasedIdps) {
        if (log.isDebugEnabled()) {
            log.debug("Attempting to retrieve IDP Name from filebased IDPs for IDP metadata " +
                    "property name: " + property + " value: " + value);
        }
        idPName = new FileBasedIdPMgtDAO().getIdPNameByMetadataProperty(property, value);
    }

    return idPName;
}
/**
 * Validate whether the outbound provisioning roles does exist.
 * A blank provisioning-role string means nothing to validate. The role string is a
 * comma-separated list; each role must exist per the RoleManagementService, except that a
 * NotImplementedException from the existence check is tolerated (validation is skipped).
 *
 * @param identityProvider IdentityProvider.
 * @param tenantDomain     Tenant Domain.
 * @throws IdentityProviderManagementException If an error occurred while checking for role existence.
 */
private void validateOutboundProvisioningRoles(IdentityProvider identityProvider, String tenantDomain)
        throws IdentityProviderManagementException {

    String provisioningRole = identityProvider.getProvisioningRole();
    if (StringUtils.isBlank(provisioningRole)) {
        return;
    }
    String[] outboundProvisioningRoles = StringUtils.split(provisioningRole, ",");

    try {
        RoleManagementService roleManagementService =
                IdpMgtServiceComponentHolder.getInstance().getRoleManagementService();
        for (String roleName : outboundProvisioningRoles) {
            try {
                // Every listed role must exist; a missing role is a client error.
                if (!roleManagementService.isExistingRoleName(roleName, tenantDomain)) {
                    throw IdPManagementUtil.handleClientException(
                            IdPManagementConstants.ErrorMessage.ERROR_CODE_NOT_EXISTING_OUTBOUND_PROVISIONING_ROLE,
                            null);
                }
            } catch (NotImplementedException e) {
                // Deliberate best-effort: services without isExistingRoleName skip this validation.
                if (log.isDebugEnabled()) {
                    log.debug("isExistingRoleName is not implemented in the RoleManagementService. " +
                            "Therefore, proceeding without validating outbound provisioning role existence.");
                }
            }
        }
    } catch (IdentityRoleManagementException e) {
        throw IdPManagementUtil.handleServerException(
                IdPManagementConstants.ErrorMessage.ERROR_CODE_VALIDATING_OUTBOUND_PROVISIONING_ROLES, null, e);
    }
}
/**
 * Returns true when the given local role name denotes a group, i.e. it is NOT prefixed
 * (case-insensitively) with one of the role domains: Internal, Application or Workflow.
 */
private boolean isGroup(String localRoleName) {

    // noneMatch replaces the previous '!Stream.anyMatch(...)' — same logic, clearer intent.
    return Stream.of(INTERNAL_DOMAIN, APPLICATION_DOMAIN, WORKFLOW_DOMAIN)
            .noneMatch(domain -> localRoleName.toUpperCase()
                    .startsWith((domain + UserCoreConstants.DOMAIN_SEPARATOR).toUpperCase()));
}
/**
 * Get tenant context using tenant domain. Produces the tenant-aware url segment
 * ("t/&lt;domain&gt;/") for non-super tenants; the super tenant gets an empty string.
 *
 * @param tenantDomain Tenant domain of the tenant
 * @return Tenant context segment, or "" for the super tenant
 */
private String getTenantContextFromTenantDomain(String tenantDomain) {

    if (MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equalsIgnoreCase(tenantDomain)) {
        return "";
    }
    return MultitenantConstants.TENANT_AWARE_URL_PREFIX + "/" + tenantDomain + "/";
}
}
|
components/idp-mgt/org.wso2.carbon.idp.mgt/src/main/java/org/wso2/carbon/idp/mgt/IdentityProviderManager.java
|
/*
* Copyright (c) 2014 WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.idp.mgt;
import org.apache.axiom.om.util.Base64;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.core.util.KeyStoreManager;
import org.wso2.carbon.identity.application.common.ApplicationAuthenticatorService;
import org.wso2.carbon.identity.application.common.ProvisioningConnectorService;
import org.wso2.carbon.identity.application.common.model.ClaimConfig;
import org.wso2.carbon.identity.application.common.model.ClaimMapping;
import org.wso2.carbon.identity.application.common.model.FederatedAuthenticatorConfig;
import org.wso2.carbon.identity.application.common.model.IdentityProvider;
import org.wso2.carbon.identity.application.common.model.IdentityProviderProperty;
import org.wso2.carbon.identity.application.common.model.LocalRole;
import org.wso2.carbon.identity.application.common.model.PermissionsAndRoleConfig;
import org.wso2.carbon.identity.application.common.model.Property;
import org.wso2.carbon.identity.application.common.model.ProvisioningConnectorConfig;
import org.wso2.carbon.identity.application.common.model.RoleMapping;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationConstants;
import org.wso2.carbon.identity.application.common.util.IdentityApplicationManagementUtil;
import org.wso2.carbon.identity.base.IdentityConstants;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.ServiceURLBuilder;
import org.wso2.carbon.identity.core.URLBuilderException;
import org.wso2.carbon.identity.core.model.ExpressionNode;
import org.wso2.carbon.identity.core.model.FilterTreeBuilder;
import org.wso2.carbon.identity.core.model.Node;
import org.wso2.carbon.identity.core.model.OperationNode;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.role.mgt.core.IdentityRoleManagementException;
import org.wso2.carbon.identity.role.mgt.core.RoleManagementService;
import org.wso2.carbon.idp.mgt.dao.CacheBackedIdPMgtDAO;
import org.wso2.carbon.idp.mgt.dao.FileBasedIdPMgtDAO;
import org.wso2.carbon.idp.mgt.dao.IdPManagementDAO;
import org.wso2.carbon.idp.mgt.internal.IdPManagementServiceComponent;
import org.wso2.carbon.idp.mgt.internal.IdpMgtServiceComponentHolder;
import org.wso2.carbon.idp.mgt.listener.IdentityProviderMgtListener;
import org.wso2.carbon.idp.mgt.model.ConnectedAppsResult;
import org.wso2.carbon.idp.mgt.model.IdpSearchResult;
import org.wso2.carbon.idp.mgt.util.IdPManagementConstants;
import org.wso2.carbon.idp.mgt.util.IdPManagementUtil;
import org.wso2.carbon.idp.mgt.util.MetadataConverter;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.api.UserStoreManager;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.KeyStore;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.xml.stream.XMLStreamException;
import static org.wso2.carbon.user.core.UserCoreConstants.INTERNAL_DOMAIN;
import static org.wso2.carbon.user.core.UserCoreConstants.WORKFLOW_DOMAIN;
import static org.wso2.carbon.user.mgt.UserMgtConstants.APPLICATION_DOMAIN;
public class IdentityProviderManager implements IdpManager {
private static final Log log = LogFactory.getLog(IdentityProviderManager.class);
private static final Log diagnosticLog = LogFactory.getLog("diagnostics");
private static final String OPENID_IDP_ENTITY_ID = "IdPEntityId";
private static CacheBackedIdPMgtDAO dao = new CacheBackedIdPMgtDAO(new IdPManagementDAO());
private static volatile IdentityProviderManager instance = new IdentityProviderManager();
// Private constructor: this class is a singleton accessed via getInstance().
private IdentityProviderManager() {
}
/**
 * Returns the singleton {@code IdentityProviderManager} instance.
 *
 * @return the shared IdentityProviderManager instance
 */
public static IdentityProviderManager getInstance() {
    return instance;
}
/**
* Retrieves resident Identity provider for a given tenant.
*
* @param tenantDomain Tenant domain whose resident IdP is requested
* @return <code>LocalIdentityProvider</code>
* @throws IdentityProviderManagementException Error when getting Resident Identity Providers
*/
@Override
public IdentityProvider getResidentIdP(String tenantDomain)
throws IdentityProviderManagementException {
IdPManagementUtil.setTenantSpecifiers(tenantDomain);
String openIdUrl;
String oauth1RequestTokenUrl;
String oauth1AuthorizeUrl;
String oauth1AccessTokenUrl;
String oauth2AuthzEPUrl;
String oauth2TokenEPUrl;
String oauth2RevokeEPUrl;
String oauth2IntrospectEpUrl;
String oauth2UserInfoEPUrl;
String oidcCheckSessionEPUrl;
String oidcLogoutEPUrl;
String oIDCWebFingerEPUrl;
String oAuth2DCREPUrl;
String oAuth2JWKSPage;
String oIDCDiscoveryEPUrl;
String passiveStsUrl;
String stsUrl;
String scimUsersEndpoint;
String scimGroupsEndpoint;
String scim2UsersEndpoint;
String scim2GroupsEndpoint;
openIdUrl = IdentityUtil.getProperty(IdentityConstants.ServerConfig.OPENID_SERVER_URL);
oauth1RequestTokenUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_REQUEST_TOKEN_URL);
oauth1AuthorizeUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_AUTHORIZE_URL);
oauth1AccessTokenUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH1_ACCESSTOKEN_URL);
oauth2AuthzEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_AUTHZ_EP_URL);
oauth2TokenEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_TOKEN_EP_URL);
oauth2UserInfoEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_USERINFO_EP_URL);
oidcCheckSessionEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_CHECK_SESSION_EP_URL);
oidcLogoutEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_LOGOUT_EP_URL);
passiveStsUrl = IdentityUtil.getProperty(IdentityConstants.STS.PSTS_IDENTITY_PROVIDER_URL);
stsUrl = IdentityUtil.getProperty(IdentityConstants.STS.STS_IDENTITY_PROVIDER_URL);
scimUsersEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM.USER_EP_URL);
scimGroupsEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM.GROUP_EP_URL);
scim2UsersEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM2.USER_EP_URL);
scim2GroupsEndpoint = IdentityUtil.getProperty(IdentityConstants.SCIM2.GROUP_EP_URL);
oauth2RevokeEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_REVOKE_EP_URL);
oauth2IntrospectEpUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_INTROSPECT_EP_URL);
oIDCWebFingerEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_WEB_FINGER_EP_URL);
oAuth2DCREPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_DCR_EP_URL);
oAuth2JWKSPage = IdentityUtil.getProperty(IdentityConstants.OAuth.OAUTH2_JWKS_EP_URL);
oIDCDiscoveryEPUrl = IdentityUtil.getProperty(IdentityConstants.OAuth.OIDC_DISCOVERY_EP_URL);
if (StringUtils.isBlank(openIdUrl)) {
openIdUrl = IdentityUtil.getServerURL(IdentityConstants.OpenId.OPENID, true, true);
}
if (StringUtils.isBlank(oauth1RequestTokenUrl)) {
oauth1RequestTokenUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.REQUEST_TOKEN, true, true);
}
if (StringUtils.isBlank(oauth1AuthorizeUrl)) {
oauth1AuthorizeUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.AUTHORIZE_URL, true, true);
}
if (StringUtils.isBlank(oauth1AccessTokenUrl)) {
oauth1AccessTokenUrl = IdentityUtil.getServerURL(IdentityConstants.OAuth.ACCESS_TOKEN, true, true);
}
oauth2AuthzEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.AUTHORIZE, oauth2AuthzEPUrl);
oauth2TokenEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.TOKEN, oauth2TokenEPUrl);
oauth2RevokeEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.REVOKE, oauth2RevokeEPUrl);
oauth2IntrospectEpUrl = resolveAbsoluteURL(IdentityConstants.OAuth.INTROSPECT, oauth2IntrospectEpUrl);
oauth2IntrospectEpUrl = addTenantPathParamInLegacyMode(oauth2IntrospectEpUrl, tenantDomain);
oauth2UserInfoEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.USERINFO, oauth2UserInfoEPUrl);
oidcCheckSessionEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.CHECK_SESSION, oidcCheckSessionEPUrl);
oidcLogoutEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.LOGOUT, oidcLogoutEPUrl);
oAuth2DCREPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.DCR, oAuth2DCREPUrl);
oAuth2DCREPUrl = addTenantPathParamInLegacyMode(oAuth2DCREPUrl, tenantDomain);
oAuth2JWKSPage = resolveAbsoluteURL(IdentityConstants.OAuth.JWKS, oAuth2JWKSPage);
oAuth2JWKSPage = addTenantPathParamInLegacyMode(oAuth2JWKSPage, tenantDomain);
oIDCDiscoveryEPUrl = resolveAbsoluteURL(IdentityConstants.OAuth.DISCOVERY, oIDCDiscoveryEPUrl);
oIDCDiscoveryEPUrl = addTenantPathParamInLegacyMode(oIDCDiscoveryEPUrl, tenantDomain);
passiveStsUrl = resolveAbsoluteURL(IdentityConstants.STS.PASSIVE_STS, passiveStsUrl);
// If sts url is configured in file, change it according to tenant domain. If not configured, add a default url
if (StringUtils.isNotBlank(stsUrl)) {
stsUrl = stsUrl.replace(IdentityConstants.STS.WSO2_CARBON_STS, getTenantContextFromTenantDomain(tenantDomain) +
IdentityConstants.STS.WSO2_CARBON_STS);
} else {
stsUrl = IdentityUtil.getServerURL("services/" + getTenantContextFromTenantDomain(tenantDomain) +
IdentityConstants.STS.WSO2_CARBON_STS, true, true);
}
if (StringUtils.isBlank(scimUsersEndpoint)) {
scimUsersEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM.USER_EP, true, false);
}
if (StringUtils.isBlank(scimGroupsEndpoint)) {
scimGroupsEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM.GROUP_EP, true, false);
}
if (StringUtils.isBlank(scim2UsersEndpoint)) {
scim2UsersEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM2.USER_EP, true, false);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
scim2UsersEndpoint = getTenantUrl(scim2UsersEndpoint, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("SCIM 2.0 Users endpoint is malformed");
}
if (StringUtils.isBlank(scim2GroupsEndpoint)) {
scim2GroupsEndpoint = IdentityUtil.getServerURL(IdentityConstants.SCIM2.GROUP_EP, true, false);
}
try {
if (StringUtils.isNotBlank(tenantDomain) && !MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals
(tenantDomain)) {
scim2GroupsEndpoint = getTenantUrl(scim2GroupsEndpoint, tenantDomain);
}
} catch (URISyntaxException e) {
log.error("SCIM 2.0 Groups endpoint is malformed");
}
IdentityProvider identityProvider = dao.getIdPByName(null,
IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME,
IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
if (identityProvider == null) {
String message = "Could not find Resident Identity Provider for tenant " + tenantDomain;
throw new IdentityProviderManagementException(message);
}
int tenantId = -1;
try {
tenantId = IdPManagementServiceComponent.getRealmService().getTenantManager().getTenantId(tenantDomain);
} catch (UserStoreException e) {
throw new IdentityProviderManagementException(
"Exception occurred while retrieving Tenant ID from Tenant Domain " + tenantDomain, e);
}
X509Certificate cert = null;
try {
IdentityTenantUtil.initializeRegistry(tenantId);
PrivilegedCarbonContext.startTenantFlow();
PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext();
carbonContext.setTenantDomain(tenantDomain, true);
KeyStoreManager keyStoreManager = KeyStoreManager.getInstance(tenantId);
if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
// derive key store name
String ksName = tenantDomain.trim().replace(".", "-");
// derive JKS name
String jksName = ksName + ".jks";
KeyStore keyStore = keyStoreManager.getKeyStore(jksName);
cert = (X509Certificate) keyStore.getCertificate(tenantDomain);
} else {
cert = keyStoreManager.getDefaultPrimaryCertificate();
}
} catch (Exception e) {
String msg = "Error retrieving primary certificate for tenant : " + tenantDomain;
throw new IdentityProviderManagementException(msg, e);
} finally {
PrivilegedCarbonContext.endTenantFlow();
}
if (cert == null) {
throw new IdentityProviderManagementException(
"Cannot find the primary certificate for tenant " + tenantDomain);
}
try {
identityProvider.setCertificate(Base64.encode(cert.getEncoded()));
} catch (CertificateEncodingException e) {
String msg = "Error occurred while encoding primary certificate for tenant domain " + tenantDomain;
throw new IdentityProviderManagementException(msg, e);
}
List<FederatedAuthenticatorConfig> fedAuthnCofigs = new ArrayList<FederatedAuthenticatorConfig>();
List<Property> propertiesList = null;
FederatedAuthenticatorConfig openIdFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.OpenID.NAME);
if (openIdFedAuthn == null) {
openIdFedAuthn = new FederatedAuthenticatorConfig();
openIdFedAuthn.setName(IdentityApplicationConstants.Authenticator.OpenID.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(openIdFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(openIdFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.OpenID.OPEN_ID_URL) == null) {
Property openIdUrlProp = new Property();
openIdUrlProp.setName(IdentityApplicationConstants.Authenticator.OpenID.OPEN_ID_URL);
openIdUrlProp.setValue(openIdUrl);
propertiesList.add(openIdUrlProp);
}
openIdFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(openIdFedAuthn);
// SAML2 related endpoints.
FederatedAuthenticatorConfig saml2SSOFedAuthn = buildSAMLProperties(identityProvider, tenantDomain);
fedAuthnCofigs.add(saml2SSOFedAuthn);
FederatedAuthenticatorConfig oauth1FedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.OAuth10A.NAME);
if (oauth1FedAuthn == null) {
oauth1FedAuthn = new FederatedAuthenticatorConfig();
oauth1FedAuthn.setName(IdentityApplicationConstants.OAuth10A.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(oauth1FedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_REQUEST_TOKEN_URL) == null) {
Property oauth1ReqTokUrlProp = new Property();
oauth1ReqTokUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_REQUEST_TOKEN_URL);
oauth1ReqTokUrlProp.setValue(oauth1RequestTokenUrl);
propertiesList.add(oauth1ReqTokUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_AUTHORIZE_URL) == null) {
Property oauth1AuthzUrlProp = new Property();
oauth1AuthzUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_AUTHORIZE_URL);
oauth1AuthzUrlProp.setValue(oauth1AuthorizeUrl);
propertiesList.add(oauth1AuthzUrlProp);
}
if (IdentityApplicationManagementUtil.getProperty(oauth1FedAuthn.getProperties(),
IdentityApplicationConstants.OAuth10A.OAUTH1_ACCESS_TOKEN_URL) == null) {
Property oauth1AccessTokUrlProp = new Property();
oauth1AccessTokUrlProp.setName(IdentityApplicationConstants.OAuth10A.OAUTH1_ACCESS_TOKEN_URL);
oauth1AccessTokUrlProp.setValue(oauth1AccessTokenUrl);
propertiesList.add(oauth1AccessTokUrlProp);
}
oauth1FedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(oauth1FedAuthn);
FederatedAuthenticatorConfig oidcFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.OIDC.NAME);
if (oidcFedAuthn == null) {
oidcFedAuthn = new FederatedAuthenticatorConfig();
oidcFedAuthn.setName(IdentityApplicationConstants.Authenticator.OIDC.NAME);
}
propertiesList = new ArrayList<Property>();
Property idPEntityIdProp = resolveFedAuthnProperty(getOIDCResidentIdPEntityId(), oidcFedAuthn,
OPENID_IDP_ENTITY_ID);
propertiesList.add(idPEntityIdProp);
Property authzUrlProp = resolveFedAuthnProperty(oauth2AuthzEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_AUTHZ_URL);
propertiesList.add(authzUrlProp);
Property tokenUrlProp = resolveFedAuthnProperty(oauth2TokenEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_TOKEN_URL);
propertiesList.add(tokenUrlProp);
Property revokeUrlProp = resolveFedAuthnProperty(oauth2RevokeEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_REVOKE_URL);
propertiesList.add(revokeUrlProp);
Property instropsectUrlProp = resolveFedAuthnProperty(oauth2IntrospectEpUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_INTROSPECT_URL);
propertiesList.add(instropsectUrlProp);
Property userInfoUrlProp = resolveFedAuthnProperty(oauth2UserInfoEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_USER_INFO_EP_URL);
propertiesList.add(userInfoUrlProp);
Property checkSessionUrlProp = resolveFedAuthnProperty(oidcCheckSessionEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OIDC_CHECK_SESSION_URL);
propertiesList.add(checkSessionUrlProp);
Property logoutUrlProp = resolveFedAuthnProperty(oidcLogoutEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OIDC_LOGOUT_URL);
propertiesList.add(logoutUrlProp);
Property dcrUrlProp = resolveFedAuthnProperty(oAuth2DCREPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_DCR_EP_URL);
propertiesList.add(dcrUrlProp);
Property webFingerUrlProp = resolveFedAuthnProperty(oIDCWebFingerEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OIDC_WEB_FINGER_EP_URL);
propertiesList.add(webFingerUrlProp);
Property jwksUrlProp = resolveFedAuthnProperty(oAuth2JWKSPage, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OAUTH2_JWKS_EP_URL);
propertiesList.add(jwksUrlProp);
Property discoveryUrlProp = resolveFedAuthnProperty(oIDCDiscoveryEPUrl, oidcFedAuthn,
IdentityApplicationConstants.Authenticator.OIDC.OIDC_DISCOVERY_EP_URL);
propertiesList.add(discoveryUrlProp);
oidcFedAuthn.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(oidcFedAuthn);
FederatedAuthenticatorConfig passiveSTSFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
if (passiveSTSFedAuthn == null) {
passiveSTSFedAuthn = new FederatedAuthenticatorConfig();
passiveSTSFedAuthn.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
}
propertiesList = new ArrayList<>();
Property passiveSTSUrlProperty = IdentityApplicationManagementUtil.getProperty(passiveSTSFedAuthn
.getProperties(), IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL);
if (passiveSTSUrlProperty == null) {
passiveSTSUrlProperty = new Property();
passiveSTSUrlProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL);
}
passiveSTSUrlProperty.setValue(passiveStsUrl);
propertiesList.add(passiveSTSUrlProperty);
Property stsIdPEntityIdProperty = IdentityApplicationManagementUtil.getProperty(passiveSTSFedAuthn
.getProperties(), IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_ENTITY_ID);
if (stsIdPEntityIdProperty == null) {
stsIdPEntityIdProperty = new Property();
stsIdPEntityIdProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS
.IDENTITY_PROVIDER_ENTITY_ID);
stsIdPEntityIdProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
}
propertiesList.add(stsIdPEntityIdProperty);
for (Property property : passiveSTSFedAuthn.getProperties()) {
if (property != null && !IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_URL
.equals(property.getName()) && !IdentityApplicationConstants.Authenticator.PassiveSTS
.IDENTITY_PROVIDER_ENTITY_ID.equals(property.getName())) {
propertiesList.add(property);
}
}
passiveSTSFedAuthn
.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(passiveSTSFedAuthn);
FederatedAuthenticatorConfig stsFedAuthn = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.WSTrust.NAME);
if (stsFedAuthn == null) {
stsFedAuthn = new FederatedAuthenticatorConfig();
stsFedAuthn.setName(IdentityApplicationConstants.Authenticator.WSTrust.NAME);
}
propertiesList = new ArrayList<Property>(Arrays.asList(stsFedAuthn.getProperties()));
if (IdentityApplicationManagementUtil.getProperty(stsFedAuthn.getProperties(),
IdentityApplicationConstants.Authenticator.WSTrust.IDENTITY_PROVIDER_URL) == null) {
Property stsUrlProp = new Property();
stsUrlProp.setName(IdentityApplicationConstants.Authenticator.WSTrust.IDENTITY_PROVIDER_URL);
stsUrlProp.setValue(stsUrl);
propertiesList.add(stsUrlProp);
}
stsFedAuthn
.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(stsFedAuthn);
List<IdentityProviderProperty> identityProviderProperties = new ArrayList<IdentityProviderProperty>();
FederatedAuthenticatorConfig sessionTimeoutConfig = new FederatedAuthenticatorConfig();
sessionTimeoutConfig.setName(IdentityApplicationConstants.NAME);
propertiesList = new ArrayList<Property>(Arrays.asList(sessionTimeoutConfig.getProperties()));
Property cleanUpPeriodProp = new Property();
cleanUpPeriodProp.setName(IdentityApplicationConstants.CLEAN_UP_PERIOD);
String cleanUpPeriod = IdentityUtil.getProperty(IdentityConstants.ServerConfig.CLEAN_UP_PERIOD);
if (StringUtils.isBlank(cleanUpPeriod)) {
cleanUpPeriod = IdentityApplicationConstants.CLEAN_UP_PERIOD_DEFAULT;
} else if (!StringUtils.isNumeric(cleanUpPeriod)) {
log.warn("PersistanceCleanUpPeriod in identity.xml should be a numeric value");
cleanUpPeriod = IdentityApplicationConstants.CLEAN_UP_PERIOD_DEFAULT;
}
cleanUpPeriodProp.setValue(cleanUpPeriod);
propertiesList.add(cleanUpPeriodProp);
sessionTimeoutConfig.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
fedAuthnCofigs.add(sessionTimeoutConfig);
identityProvider.setFederatedAuthenticatorConfigs(fedAuthnCofigs
.toArray(new FederatedAuthenticatorConfig[fedAuthnCofigs.size()]));
ProvisioningConnectorConfig scimProvConn = IdentityApplicationManagementUtil
.getProvisioningConnector(identityProvider.getProvisioningConnectorConfigs(),
"scim");
if (scimProvConn == null) {
scimProvConn = new ProvisioningConnectorConfig();
scimProvConn.setName("scim");
}
propertiesList = new ArrayList<>(Arrays.asList(scimProvConn.getProvisioningProperties()));
Property scimUserEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM.USERS_EP_URL);
if (scimUserEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM.USERS_EP_URL);
property.setValue(scimUsersEndpoint);
propertiesList.add(property);
} else if (!scimUsersEndpoint.equalsIgnoreCase(scimUserEndpointProperty.getValue())) {
scimUserEndpointProperty.setValue(scimUsersEndpoint);
}
Property scimGroupEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM.GROUPS_EP_URL);
if (scimGroupEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM.GROUPS_EP_URL);
property.setValue(scimGroupsEndpoint);
propertiesList.add(property);
} else if (!scimGroupsEndpoint.equalsIgnoreCase(scimGroupEndpointProperty.getValue())) {
scimGroupEndpointProperty.setValue(scimGroupsEndpoint);
}
Property scim2UserEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM2.USERS_EP_URL);
if (scim2UserEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM2.USERS_EP_URL);
property.setValue(scim2UsersEndpoint);
propertiesList.add(property);
} else if (!scim2UsersEndpoint.equalsIgnoreCase(scim2UserEndpointProperty.getValue())) {
scim2UserEndpointProperty.setValue(scim2UsersEndpoint);
}
Property scim2GroupEndpointProperty = IdentityApplicationManagementUtil.getProperty(scimProvConn
.getProvisioningProperties(), IdentityApplicationConstants.SCIM2.GROUPS_EP_URL);
if (scim2GroupEndpointProperty == null) {
Property property = new Property();
property.setName(IdentityApplicationConstants.SCIM2.GROUPS_EP_URL);
property.setValue(scim2GroupsEndpoint);
propertiesList.add(property);
} else if (!scim2GroupsEndpoint.equalsIgnoreCase(scim2GroupEndpointProperty.getValue())) {
scim2GroupEndpointProperty.setValue(scim2GroupsEndpoint);
}
scimProvConn.setProvisioningProperties(propertiesList.toArray(new Property[propertiesList.size()]));
identityProvider.setProvisioningConnectorConfigs(new ProvisioningConnectorConfig[]{scimProvConn});
return identityProvider;
}
private String buildSAMLUrl(String urlFromConfigFile, String tenantDomain, String defaultContext,
boolean appendTenantDomainInLegacyMode) throws IdentityProviderManagementException {
String url = urlFromConfigFile;
if (StringUtils.isBlank(url)) {
// Now we need to build the URL based on the default context.
try {
url = ServiceURLBuilder.create().addPath(defaultContext).build().getAbsolutePublicURL();
} catch (URLBuilderException ex) {
throw new IdentityProviderManagementException("Error while building URL for context: "
+ defaultContext + " for tenantDomain: " + tenantDomain, ex);
}
}
//Should not append the tenant domain as a query parameter if it is super tenant.
if (appendTenantDomainInLegacyMode && isNotSuperTenant(tenantDomain)) {
Map<String, String[]> queryParams = new HashMap<>();
queryParams.put(MultitenantConstants.TENANT_DOMAIN, new String[] {tenantDomain});
try {
url = IdentityUtil.buildQueryUrl(url, queryParams);
} catch (UnsupportedEncodingException e) {
throw new IdentityProviderManagementException("Error while building URL for context: "
+ defaultContext + " for tenantDomain: " + tenantDomain, e);
}
}
return resolveAbsoluteURL(defaultContext, url);
}
private boolean isNotSuperTenant(String tenantDomain) {
return !StringUtils.equals(tenantDomain, MultitenantConstants.SUPER_TENANT_DOMAIN_NAME);
}
private FederatedAuthenticatorConfig buildSAMLProperties(IdentityProvider identityProvider, String tenantDomain)
throws IdentityProviderManagementException {
String samlSSOUrl = buildSAMLUrl(IdentityUtil.getProperty(IdentityConstants.ServerConfig.SSO_IDP_URL),
tenantDomain, IdPManagementConstants.SAMLSSO, true);
String samlLogoutUrl = buildSAMLUrl(IdentityUtil.getProperty(IdentityConstants.ServerConfig.SSO_IDP_URL),
tenantDomain, IdPManagementConstants.SAMLSSO, true);
String samlECPUrl = buildSAMLUrl(IdentityUtil.getProperty(IdentityConstants.ServerConfig.SAML_ECP_URL),
tenantDomain, IdPManagementConstants.SAML_ECP_URL, true);
String samlArtifactUrl = buildSAMLUrl(IdentityUtil.getProperty(IdentityConstants.ServerConfig.SSO_ARTIFACT_URL),
tenantDomain, IdPManagementConstants.SSO_ARTIFACT_URL, false);
FederatedAuthenticatorConfig samlFederatedAuthConfig = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
if (samlFederatedAuthConfig == null) {
samlFederatedAuthConfig = new FederatedAuthenticatorConfig();
samlFederatedAuthConfig.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
}
List<Property> propertiesList = new ArrayList<>();
Property samlSSOUrlProperty = resolveFedAuthnProperty(samlSSOUrl, samlFederatedAuthConfig,
IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL);
propertiesList.add(samlSSOUrlProperty);
Property samlLogoutUrlProperty = resolveFedAuthnProperty(samlLogoutUrl, samlFederatedAuthConfig,
IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL);
propertiesList.add(samlLogoutUrlProperty);
Property samlECPUrlProperty = resolveFedAuthnProperty(samlECPUrl, samlFederatedAuthConfig,
IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL);
propertiesList.add(samlECPUrlProperty);
Property samlArtifactUrlProperty = resolveFedAuthnProperty(samlArtifactUrl, samlFederatedAuthConfig,
IdentityApplicationConstants.Authenticator.SAML2SSO.ARTIFACT_RESOLVE_URL);
propertiesList.add(samlArtifactUrlProperty);
Property idPEntityIdProperty =
IdentityApplicationManagementUtil.getProperty(samlFederatedAuthConfig.getProperties(),
IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
if (idPEntityIdProperty == null) {
idPEntityIdProperty = new Property();
idPEntityIdProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
idPEntityIdProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
}
propertiesList.add(idPEntityIdProperty);
if (IdentityTenantUtil.isTenantQualifiedUrlsEnabled()) {
// Add SSO URL as a destination URL if not already available.
addSSOUrlAsDestinationUrl(samlFederatedAuthConfig, samlSSOUrl, propertiesList);
}
for (Property property : samlFederatedAuthConfig.getProperties()) {
if (property != null &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.SSO_URL.equals(property.getName()) &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.LOGOUT_REQ_URL.equals(property.getName()) &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.ECP_URL.equals(property.getName()) &&
!IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.getName())) {
propertiesList.add(property);
}
}
Property samlMetadataValidityPeriodProperty =
IdentityApplicationManagementUtil.getProperty(samlFederatedAuthConfig.
getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD);
if (samlMetadataValidityPeriodProperty == null) {
samlMetadataValidityPeriodProperty = new Property();
samlMetadataValidityPeriodProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD);
samlMetadataValidityPeriodProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT);
}
propertiesList.add(samlMetadataValidityPeriodProperty);
Property samlMetadataSigningEnabledProperty =
IdentityApplicationManagementUtil.getProperty(samlFederatedAuthConfig.
getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED);
if (samlMetadataSigningEnabledProperty == null) {
samlMetadataSigningEnabledProperty = new Property();
samlMetadataSigningEnabledProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED);
samlMetadataSigningEnabledProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED_DEFAULT);
}
propertiesList.add(samlMetadataSigningEnabledProperty);
Property samlAuthnRequestSigningProperty =
IdentityApplicationManagementUtil.getProperty(samlFederatedAuthConfig.
getProperties(), IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED);
if (samlAuthnRequestSigningProperty == null) {
samlAuthnRequestSigningProperty = new Property();
samlAuthnRequestSigningProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED);
samlAuthnRequestSigningProperty.setValue(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_AUTHN_REQUESTS_SIGNING_DEFAULT);
}
propertiesList.add(samlAuthnRequestSigningProperty);
samlFederatedAuthConfig.setProperties(propertiesList.toArray(new Property[propertiesList.size()]));
return samlFederatedAuthConfig;
}
private void addSSOUrlAsDestinationUrl(FederatedAuthenticatorConfig federatedAuthenticatorConfig,
String ssoUrl,
List<Property> propertiesList) {
// First find the available configured destination URLs.
List<Property> destinationURLs = Arrays.stream(federatedAuthenticatorConfig.getProperties())
.filter(property -> property.getName()
.startsWith(IdentityApplicationConstants.Authenticator.SAML2SSO.DESTINATION_URL_PREFIX))
.collect(Collectors.toList());
// Check whether the SSO URL is already available as a destination URL
boolean isSAMLSSOUrlNotPresentAsDestination = destinationURLs.stream()
.noneMatch(x -> StringUtils.equals(ssoUrl, x.getValue()));
if (isSAMLSSOUrlNotPresentAsDestination) {
// There are no destination properties matching the default SSO URL.
int propertyNameIndex = destinationURLs.size() + 1;
Property destinationURLProperty = buildDestinationURLProperty(ssoUrl, propertyNameIndex);
propertiesList.add(destinationURLProperty);
}
}
private Property buildDestinationURLProperty(String destinationURL, int index) {
Property destinationURLProperty = new Property();
destinationURLProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.DESTINATION_URL_PREFIX +
IdentityApplicationConstants.MULTIVALUED_PROPERTY_CHARACTER + index);
destinationURLProperty.setValue(destinationURL);
return destinationURLProperty;
}
private Property resolveFedAuthnProperty(String epUrl, FederatedAuthenticatorConfig fedAuthnConfig,
String propertyName) {
Property property =
IdentityApplicationManagementUtil.getProperty(fedAuthnConfig.getProperties(), propertyName);
if (property == null || IdentityTenantUtil.isTenantQualifiedUrlsEnabled()) {
// In tenant qualified mode we have to always give send the calculated URL and not the value stored in DB.
property = new Property();
property.setName(propertyName);
// Set the calculated SAML endpoint URL.
property.setValue(epUrl);
}
return property;
}
/**
* Add Resident Identity provider for a given tenant.
*
* @param identityProvider <code>IdentityProvider</code>
* @param tenantDomain Tenant domain whose resident IdP is requested
* @throws IdentityProviderManagementException Error when adding Resident Identity Provider
*/
@Override
public void addResidentIdP(IdentityProvider identityProvider, String tenantDomain)
throws IdentityProviderManagementException {
// invoking the pre listeners
Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPreAddResidentIdP(identityProvider, tenantDomain)) {
return;
}
}
if (identityProvider.getFederatedAuthenticatorConfigs() == null) {
identityProvider.setFederatedAuthenticatorConfigs(new FederatedAuthenticatorConfig[0]);
}
FederatedAuthenticatorConfig saml2SSOResidentAuthenticatorConfig = IdentityApplicationManagementUtil
.getFederatedAuthenticator(identityProvider.getFederatedAuthenticatorConfigs(),
IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
if (saml2SSOResidentAuthenticatorConfig == null) {
saml2SSOResidentAuthenticatorConfig = new FederatedAuthenticatorConfig();
saml2SSOResidentAuthenticatorConfig.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME);
}
if (saml2SSOResidentAuthenticatorConfig.getProperties() == null) {
saml2SSOResidentAuthenticatorConfig.setProperties(new Property[0]);
}
boolean idPEntityIdAvailable = false;
for (Property property : saml2SSOResidentAuthenticatorConfig.getProperties()) {
if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.getName())) {
idPEntityIdAvailable = true;
}
}
if (!idPEntityIdAvailable) {
Property property = new Property();
property.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID);
property.setValue(IdPManagementUtil.getResidentIdPEntityId());
if (saml2SSOResidentAuthenticatorConfig.getProperties().length > 0) {
List<Property> properties = Arrays.asList(saml2SSOResidentAuthenticatorConfig.getProperties());
properties.add(property);
saml2SSOResidentAuthenticatorConfig.setProperties((Property[]) properties.toArray());
} else {
saml2SSOResidentAuthenticatorConfig.setProperties(new Property[]{property});
}
}
Property samlMetadataValidityPeriodProperty = new Property();
String samlMetadataValidityPeriod = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
SAML_METADATA_VALIDITY_PERIOD);
if (StringUtils.isBlank(samlMetadataValidityPeriod)) {
samlMetadataValidityPeriod = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT;
} else if (!StringUtils.isNumeric(samlMetadataValidityPeriod) ||
Integer.parseInt(samlMetadataValidityPeriod) <= 0) {
log.warn("SAMLMetadataValidityPeriod in identity.xml should be a numeric value " +
"hence defaulting to value: " + IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT + "m");
samlMetadataValidityPeriod = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD_DEFAULT;
}
samlMetadataValidityPeriodProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_VALIDITY_PERIOD);
samlMetadataValidityPeriodProperty.setValue(samlMetadataValidityPeriod);
Property samlMetadataSigningEnabledProperty = new Property();
String samlMetadataSigningEnabled = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
SAML_METADATA_SIGNING_ENABLED);
if (StringUtils.isBlank(samlMetadataSigningEnabled)) {
log.warn("SAMLMetadataSigningEnabled in identity.xml should be a boolean value");
samlMetadataSigningEnabled = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED_DEFAULT;
}
samlMetadataSigningEnabledProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_SIGNING_ENABLED);
samlMetadataSigningEnabledProperty.setValue(samlMetadataSigningEnabled);
Property samlAuthnRequestSigningProperty = new Property();
String samlAuthnRequestSigningEnabled = IdentityUtil.getProperty(IdentityConstants.ServerConfig.
SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED);
if (StringUtils.isBlank(samlAuthnRequestSigningEnabled)) {
log.warn("samlAuthnRequestSigningEnabled in identity.xml should be a boolean value");
samlAuthnRequestSigningEnabled = IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_AUTHN_REQUESTS_SIGNING_DEFAULT;
}
samlAuthnRequestSigningProperty.setName(IdentityApplicationConstants.Authenticator.SAML2SSO.
SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED);
samlAuthnRequestSigningProperty.setValue(samlAuthnRequestSigningEnabled);
List<Property> propertyList =
new ArrayList<>(Arrays.asList(saml2SSOResidentAuthenticatorConfig.getProperties()));
propertyList.add(samlMetadataValidityPeriodProperty);
propertyList.add(samlMetadataSigningEnabledProperty);
propertyList.add(samlAuthnRequestSigningProperty);
Property[] properties = new Property[propertyList.size()];
properties = propertyList.toArray(properties);
saml2SSOResidentAuthenticatorConfig.setProperties(properties);
Property oidcProperty = new Property();
oidcProperty.setName(OPENID_IDP_ENTITY_ID);
oidcProperty.setValue(getOIDCResidentIdPEntityId());
FederatedAuthenticatorConfig oidcAuthenticationConfig = new FederatedAuthenticatorConfig();
oidcAuthenticationConfig.setProperties(new Property[]{oidcProperty});
oidcAuthenticationConfig.setName(IdentityApplicationConstants.Authenticator.OIDC.NAME);
Property passiveStsProperty = new Property();
passiveStsProperty.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.IDENTITY_PROVIDER_ENTITY_ID);
passiveStsProperty.setValue(IdPManagementUtil.getResidentIdPEntityId());
FederatedAuthenticatorConfig passiveStsAuthenticationConfig = new FederatedAuthenticatorConfig();
passiveStsAuthenticationConfig.setProperties(new Property[]{passiveStsProperty});
passiveStsAuthenticationConfig.setName(IdentityApplicationConstants.Authenticator.PassiveSTS.NAME);
FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs = {saml2SSOResidentAuthenticatorConfig,
passiveStsAuthenticationConfig, oidcAuthenticationConfig};
identityProvider.setFederatedAuthenticatorConfigs(IdentityApplicationManagementUtil
.concatArrays(identityProvider.getFederatedAuthenticatorConfigs(), federatedAuthenticatorConfigs));
IdentityProviderProperty[] idpProperties = new IdentityProviderProperty[2];
IdentityProviderProperty rememberMeTimeoutProperty = new IdentityProviderProperty();
String rememberMeTimeout = IdentityUtil.getProperty(IdentityConstants.ServerConfig.REMEMBER_ME_TIME_OUT);
if (StringUtils.isBlank(rememberMeTimeout) || !StringUtils.isNumeric(rememberMeTimeout) ||
Integer.parseInt(rememberMeTimeout) <= 0) {
log.warn("RememberMeTimeout in identity.xml should be a numeric value");
rememberMeTimeout = IdentityApplicationConstants.REMEMBER_ME_TIME_OUT_DEFAULT;
}
rememberMeTimeoutProperty.setName(IdentityApplicationConstants.REMEMBER_ME_TIME_OUT);
rememberMeTimeoutProperty.setValue(rememberMeTimeout);
IdentityProviderProperty sessionIdletimeOutProperty = new IdentityProviderProperty();
String idleTimeout = IdentityUtil.getProperty(IdentityConstants.ServerConfig.SESSION_IDLE_TIMEOUT);
if (StringUtils.isBlank(idleTimeout) || !StringUtils.isNumeric(idleTimeout) ||
Integer.parseInt(idleTimeout) <= 0) {
log.warn("SessionIdleTimeout in identity.xml should be a numeric value");
idleTimeout = IdentityApplicationConstants.SESSION_IDLE_TIME_OUT_DEFAULT;
}
sessionIdletimeOutProperty.setName(IdentityApplicationConstants.SESSION_IDLE_TIME_OUT);
sessionIdletimeOutProperty.setValue(idleTimeout);
idpProperties[0] = rememberMeTimeoutProperty;
idpProperties[1] = sessionIdletimeOutProperty;
identityProvider.setIdpProperties(idpProperties);
dao.addIdP(identityProvider, IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
// invoking the post listeners
for (IdentityProviderMgtListener listener : listeners) {
if (listener.isEnable() && !listener.doPostAddResidentIdP(identityProvider, tenantDomain)) {
return;
}
}
}
/**
 * Update the Resident Identity Provider for a given tenant.
 *
 * @param identityProvider <code>IdentityProvider</code> carrying the updated configuration
 * @param tenantDomain     Tenant domain whose resident IdP should be updated
 * @throws IdentityProviderManagementException Error when updating Resident Identity Provider,
 *                                             or when a well-known resident IdP property fails validation
 */
@Override
public void updateResidentIdP(IdentityProvider identityProvider, String tenantDomain)
        throws IdentityProviderManagementException {

    IdentityProvider residentIdp = dao.getIdPByName(null, IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME,
            IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
    // Merge the incoming properties onto the stored resident IdP properties: every stored property keeps
    // its value unless the caller supplied a replacement, and brand-new properties are appended.
    Map<String, String> configurationDetails = new HashMap<>();
    for (IdentityProviderProperty property : identityProvider.getIdpProperties()) {
        configurationDetails.put(property.getName(), property.getValue());
    }
    IdentityProviderProperty[] identityMgtProperties = residentIdp.getIdpProperties();
    List<IdentityProviderProperty> newProperties = new ArrayList<>();
    for (IdentityProviderProperty identityMgtProperty : identityMgtProperties) {
        IdentityProviderProperty prop = new IdentityProviderProperty();
        String key = identityMgtProperty.getName();
        prop.setName(key);
        if (configurationDetails.containsKey(key)) {
            prop.setValue(configurationDetails.get(key));
        } else {
            prop.setValue(identityMgtProperty.getValue());
        }
        newProperties.add(prop);
        configurationDetails.remove(key);
    }
    // Whatever remains was not present in the stored property set; add as new properties.
    for (Map.Entry<String, String> entry : configurationDetails.entrySet()) {
        IdentityProviderProperty prop = new IdentityProviderProperty();
        prop.setName(entry.getKey());
        prop.setValue(entry.getValue());
        newProperties.add(prop);
    }
    identityProvider.setIdpProperties(newProperties.toArray(new IdentityProviderProperty[newProperties.size()]));
    // Validate the well-known resident IdP properties before persisting.
    for (IdentityProviderProperty idpProp : identityProvider.getIdpProperties()) {
        if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.SESSION_IDLE_TIME_OUT)) {
            if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
                    Integer.parseInt(idpProp.getValue().trim()) <= 0) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.SESSION_IDLE_TIME_OUT
                        + " of ResidentIdP should be a numeric value greater than 0 ");
            }
        } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.REMEMBER_ME_TIME_OUT)) {
            if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
                    Integer.parseInt(idpProp.getValue().trim()) <= 0) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.REMEMBER_ME_TIME_OUT
                        + " of ResidentIdP should be a numeric value greater than 0 ");
            }
        } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_VALIDITY_PERIOD)) {
            if (StringUtils.isBlank(idpProp.getValue()) || !StringUtils.isNumeric(idpProp.getValue()) ||
                    Integer.parseInt(idpProp.getValue().trim()) <= 0) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
                        SAML_METADATA_VALIDITY_PERIOD +
                        " of ResidentIdP should be a numeric value greater than 0 ");
            }
        } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_SIGNING_ENABLED)) {
            if (StringUtils.isBlank(idpProp.getValue())) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
                        SAML_METADATA_SIGNING_ENABLED + " of ResidentIdP should be a boolean value ");
            }
        // BUG FIX: this branch was previously nested inside the SAML_METADATA_SIGNING_ENABLED branch,
        // making it unreachable (the property name cannot match both constants at once).
        } else if (StringUtils.equals(idpProp.getName(), IdentityApplicationConstants.Authenticator.SAML2SSO.
                SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED)) {
            if (StringUtils.isBlank(idpProp.getValue())) {
                throw new IdentityProviderManagementException(IdentityApplicationConstants.Authenticator.SAML2SSO.
                        SAML_METADATA_AUTHN_REQUESTS_SIGNING_ENABLED + " of ResidentIdP should be a boolean value ");
            }
        }
    }
    // Invoke the registered pre-update listeners; abort silently if any enabled listener vetoes the update.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreUpdateResidentIdP(identityProvider, tenantDomain)) {
            return;
        }
    }
    if (identityProvider.getFederatedAuthenticatorConfigs() == null) {
        identityProvider.setFederatedAuthenticatorConfigs(new FederatedAuthenticatorConfig[0]);
    }
    IdentityProvider currentIdP = IdentityProviderManager.getInstance().getIdPByName(
            IdentityApplicationConstants.RESIDENT_IDP_RESERVED_NAME, tenantDomain, true);
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    validateUpdateOfIdPEntityId(currentIdP.getFederatedAuthenticatorConfigs(),
            identityProvider.getFederatedAuthenticatorConfigs(), tenantId, tenantDomain);
    dao.updateIdP(identityProvider, currentIdP, tenantId, tenantDomain);
    // Invoke the registered post-update listeners.
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostUpdateResidentIdP(identityProvider, tenantDomain)) {
            return;
        }
    }
}
/**
 * Retrieves the registered Identity Providers for a given tenant.
 *
 * @param tenantDomain Tenant domain whose IdP names are requested
 * @return List of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
 * identifiers of each IdP
 * @throws IdentityProviderManagementException Error when getting list of Identity Providers
 */
@Override
public List<IdentityProvider> getIdPs(String tenantDomain)
throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
return dao.getIdPs(null, tenantId, tenantDomain);
}
/**
 * Get all basic identity provider information, paginated.
 *
 * @param limit        limit per page.
 * @param offset       offset value.
 * @param filter       filter value for IdP search.
 * @param sortOrder    order of IdP ASC/DESC.
 * @param sortBy       the column value need to sort.
 * @param tenantDomain tenant domain whose IdP names are requested.
 * @return Identity Provider's Basic Information array {@link IdpSearchResult}.
 * @throws IdentityProviderManagementException Server/client related error when getting list of Identity Providers.
 * @deprecated use {@link #getIdPs(Integer, Integer, String, String, String, String, List)}
 */
@Override
public IdpSearchResult getIdPs(Integer limit, Integer offset, String filter, String sortOrder, String sortBy,
                               String tenantDomain)
        throws IdentityProviderManagementException {

    // Delegate to the attribute-aware overload with no additional required attributes.
    List<String> noRequiredAttributes = new ArrayList<>();
    return getIdPs(limit, offset, filter, sortOrder, sortBy, tenantDomain, noRequiredAttributes);
}
/**
 * Get all identity providers' basic information along with additionally requested information depending on the
 * requiredAttributes.
 *
 * @param limit              Limit per page.
 * @param offset             Offset value.
 * @param filter             Filter value for IdP search.
 * @param sortOrder          Order of IdP ASC/DESC.
 * @param sortBy             The column value need to sort.
 * @param tenantDomain       TenantDomain of the user.
 * @param requiredAttributes Required attributes which needs to be return.
 * @return Identity Provider's Basic Information array along with requested attribute
 * information{@link IdpSearchResult}.
 * @throws IdentityProviderManagementException Server/client related error when getting list of Identity Providers.
 */
@Override
public IdpSearchResult getIdPs(Integer limit, Integer offset, String filter, String sortOrder, String sortBy,
                               String tenantDomain, List<String> requiredAttributes)
        throws IdentityProviderManagementException {

    IdpSearchResult result = new IdpSearchResult();
    List<ExpressionNode> expressionNodes = getExpressionNodes(filter);
    // BUG FIX: arguments were previously passed as (sortOrder, sortBy, filter), which does not match the
    // declared order setParameters(limit, offset, filter, sortOrder, sortBy, result) and shuffled the
    // three String values (filter became sortOrder, etc.).
    setParameters(limit, offset, filter, sortOrder, sortBy, result);
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    result.setTotalIDPCount(dao.getTotalIdPCount(tenantId, expressionNodes));
    result.setIdpList(dao.getPaginatedIdPsSearch(tenantId, expressionNodes, result.getLimit(), result.getOffSet(),
            result.getSortOrder(), result.getSortBy(), requiredAttributes));
    return result;
}
/**
 * Get the count of identity providers matching the given filter.
 *
 * @param filter       filter value for IdP search.
 * @param tenantDomain tenant domain whose IdP count is requested.
 * @return filtered idp count.
 * @throws IdentityProviderManagementException Error while getting Identity Providers count.
 */
@Override
public int getTotalIdPCount(String filter, String tenantDomain) throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
List<ExpressionNode> expressionNodes = getExpressionNodes(filter);
return dao.getTotalIdPCount(tenantId, expressionNodes);
}
/**
 * Parse the given filter string into a flat list of expression nodes.
 *
 * @param filter value of the filter, e.g. {@code name sw "te" and isEnabled eq "true"}.
 * @return flattened expression-node list (empty when the filter is blank).
 * @throws IdentityProviderManagementClientException Error when the filter cannot be parsed.
 */
private List<ExpressionNode> getExpressionNodes(String filter) throws IdentityProviderManagementClientException {

    // Filter example : name sw "te" and name ew "st" and isEnabled eq "true".
    List<ExpressionNode> nodes = new ArrayList<>();
    if (StringUtils.isBlank(filter)) {
        return nodes;
    }
    try {
        Node rootNode = new FilterTreeBuilder(filter).buildTree();
        setExpressionNodeList(rootNode, nodes);
    } catch (IOException | IdentityException e) {
        String message = "Error occurred while validate filter, filter: " + filter;
        diagnosticLog.error(message + ". Error message: " + e.getMessage());
        throw IdPManagementUtil
                .handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP, message, e);
    }
    return nodes;
}
/**
 * Flatten the filter tree into a list of expression nodes, normalizing boolean values for the
 * {@code isEnabled} attribute into the DB representation.
 *
 * @param node       filter node (expression leaf or operation branch).
 * @param expression accumulator list of expression nodes.
 * @throws IdentityProviderManagementClientException When an invalid boolean value is passed for isEnabled.
 */
private void setExpressionNodeList(Node node, List<ExpressionNode> expression)
        throws IdentityProviderManagementClientException {

    if (node instanceof ExpressionNode) {
        ExpressionNode expressionNode = (ExpressionNode) node;
        if (StringUtils.isNotBlank(expressionNode.getAttributeValue())
                && expressionNode.getAttributeValue().contains(IdPManagementConstants.IDP_IS_ENABLED)) {
            // BUG FIX: previously "true".contains(value) / "false".contains(value) was used, which
            // accepted any substring (e.g. "ru", or even the empty string) as a valid boolean.
            if ("true".equals(expressionNode.getValue())) {
                expressionNode.setValue(IdPManagementConstants.IS_TRUE_VALUE);
            } else if ("false".equals(expressionNode.getValue())) {
                expressionNode.setValue(IdPManagementConstants.IS_FALSE_VALUE);
            } else {
                // Fixed a missing space in the message ("...valueis passed...").
                String message = "Invalid value: " + expressionNode.getValue() + " is passed for " +
                        "'isEnabled' attribute in the filter. It should be 'true' or 'false'";
                throw IdPManagementUtil
                        .handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                                message);
            }
        }
        expression.add(expressionNode);
    } else if (node instanceof OperationNode) {
        // Branch node: recurse into both sides, left first to preserve filter order.
        setExpressionNodeList(node.getLeftNode(), expression);
        setExpressionNodeList(node.getRightNode(), expression);
    }
}
/**
 * Populate the given {@link IdpSearchResult} with the validated paging and sorting parameters.
 *
 * @param limit     page limit.
 * @param offset    offset value.
 * @param filter    filter value for IdP search.
 * @param sortOrder order of IdP (ASC/DESC).
 * @param sortBy    the column value need to sort.
 * @param result    result object to populate.
 * @throws IdentityProviderManagementClientException Error while validating the limit or offset.
 */
private void setParameters(Integer limit, Integer offset, String filter, String sortOrder, String sortBy,
                           IdpSearchResult result) throws IdentityProviderManagementClientException {

    // Validate limit first, then offset, so the failure order matches the parameter order.
    int validatedLimit = validateLimit(limit);
    int validatedOffset = validateOffset(offset);
    result.setLimit(validatedLimit);
    result.setOffSet(validatedOffset);
    result.setSortBy(validateSortBy(sortBy));
    result.setSortOrder(validateSortOrder(sortOrder));
    result.setFilter(filter);
}
/**
 * Map the requested sortBy attribute to the corresponding DB column, falling back to the default
 * when the attribute is blank or unrecognized.
 *
 * @param sortBy sortBy attribute.
 * @return validated sortBy column.
 */
private String validateSortBy(String sortBy) {

    if (StringUtils.isBlank(sortBy)) {
        if (log.isDebugEnabled()) {
            log.debug("sortBy attribute is empty. Therefore we set the default sortBy attribute. sortBy" +
                    IdPManagementConstants.DEFAULT_SORT_BY);
        }
        diagnosticLog.info("sortBy attribute is empty. Therefore we set the default sortBy attribute. sortBy" +
                IdPManagementConstants.DEFAULT_SORT_BY);
        return IdPManagementConstants.DEFAULT_SORT_BY;
    }
    if (IdPManagementConstants.IDP_NAME.equals(sortBy)) {
        return IdPManagementConstants.NAME;
    }
    if (IdPManagementConstants.IDP_HOME_REALM_ID.equals(sortBy)) {
        return IdPManagementConstants.HOME_REALM_ID;
    }
    // Unknown attribute: fall back to the default column.
    if (log.isDebugEnabled()) {
        log.debug("sortBy attribute is incorrect. Therefore we set the default sortBy attribute. " +
                "sortBy: " + IdPManagementConstants.DEFAULT_SORT_BY);
    }
    diagnosticLog.info("sortBy attribute is incorrect. Therefore we set the default sortBy attribute. " +
            "sortBy: " + IdPManagementConstants.DEFAULT_SORT_BY);
    return IdPManagementConstants.DEFAULT_SORT_BY;
}
/**
 * Validate the requested sort order, falling back to the default (ASC) when it is blank or
 * not one of ASC/DESC.
 *
 * @param sortOrder sortOrder ASC/DESC.
 * @return validated sortOrder.
 */
private String validateSortOrder(String sortOrder) {

    if (StringUtils.isBlank(sortOrder)) {
        String defaultOrder = IdPManagementConstants.DEFAULT_SORT_ORDER;
        if (log.isDebugEnabled()) {
            log.debug("sortOrder is empty. Therefore we set the default sortOrder value as ASC. SortOrder: " +
                    defaultOrder);
        }
        diagnosticLog.info("sortOrder is empty. Therefore we set the default sortOrder value as ASC. SortOrder: " +
                defaultOrder);
        return defaultOrder;
    }
    if (IdPManagementConstants.DESC_SORT_ORDER.equals(sortOrder)) {
        return IdPManagementConstants.DESC_SORT_ORDER;
    }
    if (IdPManagementConstants.ASC_SORT_ORDER.equals(sortOrder)) {
        return IdPManagementConstants.ASC_SORT_ORDER;
    }
    // Unrecognized value: fall back to the default order.
    String defaultOrder = IdPManagementConstants.DEFAULT_SORT_ORDER;
    if (log.isDebugEnabled()) {
        log.debug("sortOrder is incorrect. Therefore we set the default sortOrder value as ASC. SortOrder: "
                + defaultOrder);
    }
    diagnosticLog.info("sortOrder is incorrect. Therefore we set the default sortOrder value as ASC." +
            " SortOrder: " + defaultOrder);
    return defaultOrder;
}
/**
 * Validate the page limit: substitute the configured default when null, reject negatives, and cap
 * at the configured maximum items per page.
 *
 * @param limit given limit value (may be null).
 * @return validated limit value.
 * @throws IdentityProviderManagementClientException When the given limit is negative.
 */
private int validateLimit(Integer limit) throws IdentityProviderManagementClientException {

    int resolvedLimit;
    if (limit == null) {
        if (log.isDebugEnabled()) {
            log.debug("Given limit is null. Therefore we get the default limit from " +
                    "identity.xml.");
        }
        resolvedLimit = IdentityUtil.getDefaultItemsPerPage();
        diagnosticLog.info("Given limit is null. Setting default limit as: " + resolvedLimit);
    } else {
        resolvedLimit = limit;
    }
    if (resolvedLimit < 0) {
        String message = "Given limit: " + resolvedLimit + " is a negative value.";
        diagnosticLog.error(message);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                message);
    }
    int maximumItemsPerPage = IdentityUtil.getMaximumItemPerPage();
    if (resolvedLimit > maximumItemsPerPage) {
        if (log.isDebugEnabled()) {
            log.debug("Given limit exceed the maximum limit. Therefore we get the default limit from " +
                    "identity.xml. limit: " + maximumItemsPerPage);
        }
        resolvedLimit = maximumItemsPerPage;
        diagnosticLog.info("Given limit exceed the maximum limit. Therefore setting the default maximum value: "
                + resolvedLimit);
    }
    return resolvedLimit;
}
/**
 * Validate the page offset: a null offset means the first page; negative offsets are rejected.
 *
 * @param offset given offset value (may be null).
 * @return validated offset value.
 * @throws IdentityProviderManagementClientException When the given offset is negative.
 */
private int validateOffset(Integer offset) throws IdentityProviderManagementClientException {

    // Null offset means the first page.
    int resolvedOffset = (offset == null) ? 0 : offset;
    if (resolvedOffset < 0) {
        // Fixed grammar of the error message ("should not negative" -> "should not be negative").
        String message = "Invalid offset applied. Offset should not be negative. offSet: " +
                resolvedOffset;
        diagnosticLog.error(message);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage.ERROR_CODE_RETRIEVE_IDP,
                message);
    }
    return resolvedOffset;
}
/**
 * Retrieves the registered Identity Providers for a given tenant, matching the given filter.
 *
 * @param tenantDomain Tenant domain whose IdP names are requested
 * @param filter       Filter expression applied to the IdP search
 * @return List of <code>IdentityProvider</code>. IdP names, primary IdP and home realm
 * identifiers of each IdP
 * @throws IdentityProviderManagementException Error when getting list of Identity Providers
 */
@Override
public List<IdentityProvider> getIdPsSearch(String tenantDomain, String filter)
throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
return dao.getIdPsSearch(null, tenantId, tenantDomain, filter);
}
/**
 * Retrieves the registered, enabled Identity Providers for a given tenant.
 *
 * @param tenantDomain Tenant domain whose IdP names are requested
 * @return List of enabled <code>IdentityProvider</code> instances
 * @throws IdentityProviderManagementException Error when getting list of Identity Providers
 */
@Override
public List<IdentityProvider> getEnabledIdPs(String tenantDomain)
        throws IdentityProviderManagementException {

    List<IdentityProvider> enabledIdPs = new ArrayList<IdentityProvider>();
    for (IdentityProvider idp : getIdPs(tenantDomain)) {
        if (idp.isEnable()) {
            enabledIdPs.add(idp);
        }
    }
    return enabledIdPs;
}
/**
 * Retrieve an Identity Provider by name, optionally falling back to file-based IdP configurations.
 *
 * @param idPName             Unique name of the Identity Provider
 * @param tenantDomain        Tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps When true, only the database-backed IdPs are consulted
 * @return the matching <code>IdentityProvider</code>, or null when none is found
 * @throws IdentityProviderManagementException When the IdP name is empty or retrieval fails
 */
@Override
public IdentityProvider getIdPByName(String idPName, String tenantDomain,
                                     boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {

    diagnosticLog.info("Retrieving IDP by name for IDP name: " + idPName);
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(idPName)) {
        String msg = "Invalid argument: Identity Provider Name value is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider idp = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
    if (ignoreFileBasedIdps) {
        return idp;
    }
    // Fall back to file-based IdP configurations, then to the default IdP config.
    if (idp == null) {
        idp = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
    }
    if (idp == null) {
        idp = IdPManagementServiceComponent.getFileBasedIdPs().get(
                IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
    }
    return idp;
}
/**
 * Retrieve an Identity Provider by its numeric ID, optionally falling back to file-based
 * IdP configurations (where the given id is treated as a name).
 *
 * @param id                  Numeric ID of the Identity Provider, as a string
 * @param tenantDomain        Tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps When true, only the database-backed IdPs are consulted
 * @return the matching <code>IdentityProvider</code>, or null when none is found
 * @throws IdentityProviderManagementException When the ID is empty or retrieval fails
 */
@Override
public IdentityProvider getIdPById(String id, String tenantDomain,
                                   boolean ignoreFileBasedIdps) throws IdentityProviderManagementException {

    diagnosticLog.info("Retrieving IDP by ID: " + id);
    if (StringUtils.isEmpty(id)) {
        String msg = "Invalid argument: Identity Provider ID value is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    IdentityProvider idp = null;
    try {
        int numericId = Integer.parseInt(id);
        idp = dao.getIdPById(null, numericId, tenantId, tenantDomain);
    } catch (NumberFormatException ignored) {
        // Non-numeric id: leave idp null and fall through to the file-based lookup below.
    }
    if (ignoreFileBasedIdps) {
        return idp;
    }
    if (idp == null) {
        idp = new FileBasedIdPMgtDAO().getIdPByName(id, tenantDomain);
    }
    if (idp == null) {
        idp = IdPManagementServiceComponent.getFileBasedIdPs().get(
                IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
    }
    return idp;
}
/**
 * Retrieve an Identity Provider by its resource ID.
 *
 * @param resourceId          Resource ID of the Identity Provider
 * @param tenantDomain        Tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps NOTE(review): this flag is not consulted in this method — confirm intended
 * @return the matching <code>IdentityProvider</code>
 * @throws IdentityProviderManagementException When the resource ID is invalid or retrieval fails
 */
@Override
public IdentityProvider getIdPByResourceId(String resourceId, String tenantDomain, boolean
        ignoreFileBasedIdps) throws IdentityProviderManagementException {

    validateGetIdPInputValues(resourceId);
    return dao.getIdPByResourceId(resourceId, IdentityTenantUtil.getTenantId(tenantDomain), tenantDomain);
}
/**
 * Retrieve the Identity Provider name for a given resource ID.
 *
 * @param resourceId Resource ID of the Identity Provider
 * @return Name of the matching Identity Provider
 * @throws IdentityProviderManagementException When the resource ID is invalid or retrieval fails
 */
@Override
public String getIdPNameByResourceId(String resourceId) throws IdentityProviderManagementException {

    validateGetIdPInputValues(resourceId);
    String idPName = dao.getIdPNameByResourceId(resourceId);
    return idPName;
}
/**
 * Retrieve an Identity Provider by name, returning it only when it is enabled.
 *
 * @param idPName             Unique name of the Identity Provider
 * @param tenantDomain        Tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps When true, only the database-backed IdPs are consulted
 * @return the matching enabled <code>IdentityProvider</code>, or null when absent or disabled
 * @throws IdentityProviderManagementException Error when getting the Identity Provider
 */
@Override
public IdentityProvider getEnabledIdPByName(String idPName, String tenantDomain,
                                            boolean ignoreFileBasedIdps)
        throws IdentityProviderManagementException {

    IdentityProvider idp = getIdPByName(idPName, tenantDomain, ignoreFileBasedIdps);
    if (idp == null || !idp.isEnable()) {
        return null;
    }
    return idp;
}
/**
 * Retrieves Identity Provider information for a given tenant by Identity Provider name.
 * File-based IdP configurations are consulted as a fallback.
 *
 * @param idPName      Unique name of the Identity Provider whose information is requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP name
 */
@Override
public IdentityProvider getIdPByName(String idPName, String tenantDomain)
        throws IdentityProviderManagementException {

    // Delegate with ignoreFileBasedIdps = false so file-based IdPs act as a fallback.
    IdentityProvider identityProvider = getIdPByName(idPName, tenantDomain, false);
    return identityProvider;
}
/**
 * Retrieves Identity Provider information for a given tenant by IdP ID.
 * File-based IdP configurations are consulted as a fallback.
 *
 * @param id           ID of the Identity Provider, as a string
 * @param tenantDomain Tenant domain whose information is requested
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting the Identity Provider
 */
@Override
public IdentityProvider getIdPById(String id, String tenantDomain) throws IdentityProviderManagementException {

    // Delegate with ignoreFileBasedIdps = false so file-based IdPs act as a fallback.
    IdentityProvider identityProvider = getIdPById(id, tenantDomain, false);
    return identityProvider;
}
/**
 * Retrieve an Identity Provider by an authenticator property value.
 *
 * @param property            IDP authenticator property (E.g.: IdPEntityId)
 * @param value               Value associated with given Property
 * @param tenantDomain        Tenant domain whose IdP is requested
 * @param ignoreFileBasedIdps When true, the file-based IdP fallback is skipped
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by authenticator property value
 */
@Override
public IdentityProvider getIdPByAuthenticatorPropertyValue(String property, String value, String tenantDomain,
                                                           boolean ignoreFileBasedIdps)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value)) {
        String msg = "Invalid argument: Authenticator property or property value is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider idp = dao.getIdPByAuthenticatorPropertyValue(
            null, property, value, tenantId, tenantDomain);
    if (idp != null || ignoreFileBasedIdps) {
        return idp;
    }
    // Not found in the database: fall back to file-based IdP configurations.
    return new FileBasedIdPMgtDAO().getIdPByAuthenticatorPropertyValue(property, value, tenantDomain);
}
/**
 * Retrieve an Identity Provider by an authenticator property value, scoped to a specific authenticator.
 *
 * @param property            IDP authenticator property (E.g.: IdPEntityId)
 * @param value               Value associated with given Property
 * @param tenantDomain        Tenant domain whose IdP is requested
 * @param authenticator       Name of the authenticator the property belongs to
 * @param ignoreFileBasedIdps When true, the file-based IdP fallback is skipped
 * @return <code>IdentityProvider</code> Identity Provider information
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by authenticator property value
 */
public IdentityProvider getIdPByAuthenticatorPropertyValue(String property, String value, String tenantDomain,
                                                           String authenticator, boolean ignoreFileBasedIdps)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value) || StringUtils.isEmpty(authenticator)) {
        String msg = "Invalid argument: Authenticator property, property value or authenticator name is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider idp = dao.getIdPByAuthenticatorPropertyValue(
            null, property, value, authenticator, tenantId, tenantDomain);
    if (idp != null || ignoreFileBasedIdps) {
        return idp;
    }
    // Not found in the database: fall back to file-based IdP configurations.
    return new FileBasedIdPMgtDAO()
            .getIdPByAuthenticatorPropertyValue(property, value, tenantDomain, authenticator);
}
/**
 * Retrieves an enabled Identity Provider for a given tenant by Identity Provider name.
 *
 * @param idPName      Unique name of the Identity Provider whose information is requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return the matching <code>IdentityProvider</code> when it exists and is enabled; null otherwise
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP name
 */
@Override
public IdentityProvider getEnabledIdPByName(String idPName, String tenantDomain)
        throws IdentityProviderManagementException {

    IdentityProvider idp = getIdPByName(idPName, tenantDomain);
    return (idp != null && idp.isEnable()) ? idp : null;
}
/**
 * Retrieves Identity Provider information for a given tenant by home realm identifier.
 * File-based IdP configurations are consulted as a fallback.
 *
 * @param realmId      Unique realm identifier of the Identity Provider whose information is requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return the matching <code>IdentityProvider</code>, or null when none is found
 * @throws IdentityProviderManagementException When the realm identifier is empty or retrieval fails
 */
@Override
public IdentityProvider getIdPByRealmId(String realmId, String tenantDomain)
        throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(realmId)) {
        String msg = "Invalid argument: Identity Provider Home Realm Identifier value is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider idp = dao.getIdPByRealmId(realmId, tenantId, tenantDomain);
    return (idp != null) ? idp : new FileBasedIdPMgtDAO().getIdPByRealmId(realmId, tenantDomain);
}
/**
 * Retrieves an enabled Identity Provider for a given tenant by home realm identifier.
 *
 * @param realmId      Unique realm identifier of the Identity Provider whose information is requested
 * @param tenantDomain Tenant domain whose information is requested
 * @return the matching <code>IdentityProvider</code> when it exists and is enabled; null otherwise
 * @throws IdentityProviderManagementException Error when getting Identity Provider
 *                                             information by IdP home realm identifier
 */
@Override
public IdentityProvider getEnabledIdPByRealmId(String realmId, String tenantDomain)
        throws IdentityProviderManagementException {

    IdentityProvider idp = getIdPByRealmId(realmId, tenantDomain);
    return (idp != null && idp.isEnable()) ? idp : null;
}
/**
 * Resolve the claim mappings whose remote (IdP-side) claim URI is in the given list, for the
 * named Identity Provider of the given tenant.
 *
 * @param idPName      Unique Name of the IdP to which the given IdP claim URIs need to be mapped
 * @param tenantDomain The tenant domain of whose local claim URIs to be mapped
 * @param idPClaimURIs IdP claim URIs which need to be mapped to tenant's local claim URIs
 * @return the matching claim mappings (empty set when none match or no claim config exists)
 * @throws IdentityProviderManagementException Error when getting claim mappings
 */
@Override
public Set<ClaimMapping> getMappedLocalClaims(String idPName, String tenantDomain,
                                              List<String> idPClaimURIs) throws
        IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(idPName)) {
        String msg = "Invalid argument: Identity Provider Name value is empty";
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider idp = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
    if (idp == null) {
        idp = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
    }
    if (idp == null) {
        idp = IdPManagementServiceComponent.getFileBasedIdPs().get(
                IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
    }
    Set<ClaimMapping> mappedClaims = new HashSet<ClaimMapping>();
    ClaimConfig claimConfig = idp.getClaimConfig();
    if (claimConfig == null || idPClaimURIs == null) {
        return mappedClaims;
    }
    ClaimMapping[] claimMappings = claimConfig.getClaimMappings();
    if (claimMappings == null || claimMappings.length == 0) {
        return mappedClaims;
    }
    for (String idpClaim : idPClaimURIs) {
        for (ClaimMapping claimMapping : claimMappings) {
            if (claimMapping.getRemoteClaim().getClaimUri().equals(idpClaim)) {
                mappedClaims.add(claimMapping);
                break;
            }
        }
    }
    return mappedClaims;
}
/**
 * Resolve the claim mappings whose remote (IdP-side) claim URI is in the given list, returned
 * as a map from remote claim URI to local claim URI.
 *
 * @param idPName      Unique Name of the IdP to which the given IdP claim URIs need to be mapped
 * @param tenantDomain The tenant domain of whose local claim URIs to be mapped
 * @param idPClaimURIs IdP claim URIs which need to be mapped to tenant's local claim URIs
 * @return map of remote claim URI to local claim URI
 * @throws IdentityProviderManagementException Error when getting claim mappings
 */
@Override
public Map<String, String> getMappedLocalClaimsMap(String idPName, String tenantDomain,
                                                   List<String> idPClaimURIs) throws
        IdentityProviderManagementException {

    Map<String, String> remoteToLocal = new HashMap<String, String>();
    for (ClaimMapping mapping : getMappedLocalClaims(idPName, tenantDomain, idPClaimURIs)) {
        remoteToLocal.put(mapping.getRemoteClaim().getClaimUri(), mapping.getLocalClaim().getClaimUri());
    }
    return remoteToLocal;
}
/**
 * Resolve the claim mappings whose local claim URI is in the given list, for the named
 * Identity Provider of the given tenant.
 *
 * @param idPName       Unique Name of the IdP to which the given local claim URIs need to be mapped
 * @param tenantDomain  The tenant domain of whose local claim URIs to be mapped
 * @param localClaimURIs Local claim URIs which need to be mapped to IdP's claim URIs
 * @return the matching claim mappings (empty set when none match or no claim config exists)
 * @throws IdentityProviderManagementException Error when getting claim mappings
 */
@Override
public Set<ClaimMapping> getMappedIdPClaims(String idPName, String tenantDomain,
                                            List<String> localClaimURIs) throws
        IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(idPName)) {
        String msg = "Invalid argument: Identity Provider Name value is empty";
        // Consistency fix: record the failure in the diagnostic log before throwing,
        // matching the sibling getMappedLocalClaims method.
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
    if (identityProvider == null) {
        identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
    }
    if (identityProvider == null) {
        identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
                IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
    }
    ClaimConfig claimConfiguration = identityProvider.getClaimConfig();
    if (claimConfiguration != null) {
        ClaimMapping[] claimMappings = claimConfiguration.getClaimMappings();
        if (claimMappings != null && claimMappings.length > 0 && localClaimURIs != null) {
            Set<ClaimMapping> returnSet = new HashSet<ClaimMapping>();
            for (String localClaimURI : localClaimURIs) {
                for (ClaimMapping claimMapping : claimMappings) {
                    if (claimMapping.getLocalClaim().getClaimUri().equals(localClaimURI)) {
                        returnSet.add(claimMapping);
                        break;
                    }
                }
            }
            return returnSet;
        }
    }
    return new HashSet<ClaimMapping>();
}
/**
 * Resolve the claim mappings whose local claim URI is in the given list, returned as a map
 * from local claim URI to remote (IdP-side) claim URI.
 *
 * @param idPName        Unique Name of the IdP to which the given local claim URIs need to be mapped
 * @param tenantDomain   The tenant domain of whose local claim URIs to be mapped
 * @param localClaimURIs Local claim URIs which need to be mapped to IdP's claim URIs
 * @return map of local claim URI to remote claim URI
 * @throws IdentityProviderManagementException Error when getting claim mappings
 */
@Override
public Map<String, String> getMappedIdPClaimsMap(String idPName, String tenantDomain,
                                                 List<String> localClaimURIs) throws
        IdentityProviderManagementException {

    Map<String, String> localToRemote = new HashMap<String, String>();
    for (ClaimMapping mapping : getMappedIdPClaims(idPName, tenantDomain, localClaimURIs)) {
        localToRemote.put(mapping.getLocalClaim().getClaimUri(), mapping.getRemoteClaim().getClaimUri());
    }
    return localToRemote;
}
/**
 * Resolve the role mappings whose remote (IdP-side) role is in the given array, for the named
 * Identity Provider of the given tenant.
 *
 * @param idPName      Unique name of the IdP to which the given IdP roles need to be mapped
 * @param tenantDomain The tenant domain of whose local roles to be mapped
 * @param idPRoles     IdP roles which need to be mapped to local roles
 * @return the matching role mappings (empty set when none match or no role config exists)
 * @throws IdentityProviderManagementException Error when getting role mappings
 */
@Override
public Set<RoleMapping> getMappedLocalRoles(String idPName, String tenantDomain,
                                            String[] idPRoles) throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(idPName)) {
        String msg = "Invalid argument: Identity Provider Name value is empty";
        // Consistency fix: record the failure in the diagnostic log before throwing,
        // matching the sibling getMappedLocalClaims method.
        diagnosticLog.error(msg);
        throw new IdentityProviderManagementException(msg);
    }
    IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
    if (identityProvider == null) {
        identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
    }
    if (identityProvider == null) {
        identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
                IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
    }
    PermissionsAndRoleConfig roleConfiguration = identityProvider.getPermissionAndRoleConfig();
    if (roleConfiguration != null) {
        RoleMapping[] roleMappings = roleConfiguration.getRoleMappings();
        if (roleMappings != null && roleMappings.length > 0 && idPRoles != null) {
            Set<RoleMapping> returnSet = new HashSet<RoleMapping>();
            for (String idPRole : idPRoles) {
                for (RoleMapping roleMapping : roleMappings) {
                    if (roleMapping.getRemoteRole().equals(idPRole)) {
                        returnSet.add(roleMapping);
                        break;
                    }
                }
            }
            return returnSet;
        }
    }
    return new HashSet<RoleMapping>();
}
/**
 * Resolves the given IdP roles to local roles and returns the result as a map keyed by the
 * IdP (remote) role name.
 *
 * @param idPName      Unique name of the IdP to which the given IdP roles need to be mapped
 * @param tenantDomain The tenant domain of whose local roles to be mapped
 * @param idPRoles     IdP roles which need to be mapped to local roles
 * @return map of IdP role name to the corresponding local role; empty when nothing matches
 * @throws IdentityProviderManagementException Error when getting role mappings
 */
@Override
public Map<String, LocalRole> getMappedLocalRolesMap(String idPName, String tenantDomain,
                                                     String[] idPRoles) throws IdentityProviderManagementException {

    Map<String, LocalRole> idPRoleToLocalRole = new HashMap<String, LocalRole>();
    // Delegate the lookup to getMappedLocalRoles and flatten each mapping into a map entry.
    for (RoleMapping mapping : getMappedLocalRoles(idPName, tenantDomain, idPRoles)) {
        idPRoleToLocalRole.put(mapping.getRemoteRole(), mapping.getLocalRole());
    }
    return idPRoleToLocalRole;
}
/**
 * Returns the role mappings of the given IdP whose local role matches one of the supplied
 * local roles.
 *
 * @param idPName      Unique name of the IdP to which the given local roles need to be mapped
 * @param tenantDomain The tenant domain of whose local roles need to be mapped
 * @param localRoles   Local roles which need to be mapped to IdP roles
 * @return set of matching role mappings; empty set when the IdP has no matching mappings
 * @throws IdentityProviderManagementException when the IdP name is empty, the IdP cannot be
 *                                             resolved, or on errors while getting role mappings
 */
@Override
public Set<RoleMapping> getMappedIdPRoles(String idPName, String tenantDomain,
                                          LocalRole[] localRoles) throws IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    if (StringUtils.isEmpty(idPName)) {
        String msg = "Invalid argument: Identity Provider Name value is empty";
        throw new IdentityProviderManagementException(msg);
    }
    // Resolution order: database -> file based IdP definitions -> default file based IdP config.
    IdentityProvider identityProvider = dao.getIdPByName(null, idPName, tenantId, tenantDomain);
    if (identityProvider == null) {
        identityProvider = new FileBasedIdPMgtDAO().getIdPByName(idPName, tenantDomain);
    }
    if (identityProvider == null) {
        identityProvider = IdPManagementServiceComponent.getFileBasedIdPs().get(
                IdentityApplicationConstants.DEFAULT_IDP_CONFIG);
    }
    // Guard against an NPE when even the default file based configuration is unavailable.
    if (identityProvider == null) {
        throw new IdentityProviderManagementException(
                "Could not find an Identity Provider with the name: " + idPName);
    }
    PermissionsAndRoleConfig roleConfiguration = identityProvider.getPermissionAndRoleConfig();
    if (roleConfiguration != null) {
        RoleMapping[] roleMappings = roleConfiguration.getRoleMappings();
        if (roleMappings != null && roleMappings.length > 0 && localRoles != null) {
            Set<RoleMapping> returnSet = new HashSet<RoleMapping>();
            for (LocalRole localRole : localRoles) {
                // First mapping whose local role matches wins; remaining mappings are skipped.
                for (RoleMapping roleMapping : roleMappings) {
                    if (roleMapping.getLocalRole().equals(localRole)) {
                        returnSet.add(roleMapping);
                        break;
                    }
                }
            }
            return returnSet;
        }
    }
    return new HashSet<RoleMapping>();
}
/**
 * Resolves the given local roles to IdP roles and returns the result as a map keyed by the
 * local role.
 *
 * @param idPName      Unique name of the IdP to which the given local roles need to be mapped
 * @param tenantDomain The tenant domain of whose local roles need to be mapped
 * @param localRoles   Local roles which need to be mapped to IdP roles
 * @return map of local role to the corresponding IdP (remote) role name; empty when nothing matches
 * @throws IdentityProviderManagementException Error when getting role mappings
 */
@Override
public Map<LocalRole, String> getMappedIdPRolesMap(String idPName, String tenantDomain,
                                                   LocalRole[] localRoles) throws
        IdentityProviderManagementException {

    Map<LocalRole, String> localRoleToIdPRole = new HashMap<LocalRole, String>();
    // Delegate the lookup to getMappedIdPRoles and flatten each mapping into a map entry.
    for (RoleMapping mapping : getMappedIdPRoles(idPName, tenantDomain, localRoles)) {
        localRoleToIdPRole.put(mapping.getLocalRole(), mapping.getRemoteRole());
    }
    return localRoleToIdPRole;
}
/**
 * If a metadata property is present on any federated authenticator of the IdP, converts that
 * metadata into a FederatedAuthenticatorConfig (via the registered MetadataConverters) and
 * replaces the authenticator's properties with the converted ones. Also stores the metadata
 * string in the registry and, when the converter extracts a certificate, sets it on the IdP.
 *
 * @param tenantId         tenant the IdP belongs to
 * @param identityProvider IdP whose authenticator metadata should be processed (mutated in place)
 * @throws IdentityProviderManagementException when no converter is registered, the metadata
 *                                             cannot be saved/converted, or the SP entity id is missing
 */
private void handleMetadata(int tenantId, IdentityProvider identityProvider)
        throws IdentityProviderManagementException {

    // Without at least one registered converter the metadata property cannot be processed at all.
    if (IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().isEmpty()) {
        throw new IdentityProviderManagementException("Metadata Converter is not set");
    }
    FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs =
            identityProvider.getFederatedAuthenticatorConfigs();

    for (FederatedAuthenticatorConfig federatedAuthenticatorConfig : federatedAuthenticatorConfigs) {
        Property[] properties = federatedAuthenticatorConfig.getProperties();
        if (ArrayUtils.isNotEmpty(properties)) {

            for (Property property : properties) {
                if (property != null) {
                    // Only properties whose name contains the META_DATA marker carry metadata.
                    if (StringUtils.isNotBlank(property.getName()) &&
                            property.getName().contains(IdPManagementConstants.META_DATA)) {
                        // Find the first converter that can handle this metadata property.
                        for (MetadataConverter metadataConverter : IdpMgtServiceComponentHolder.getInstance()
                                .getMetadataConverters()) {

                            if (metadataConverter.canHandle(property)) {

                                try {
                                    // Persist the raw metadata string if a metadata file exists for this IdP.
                                    if (isMetadataFileExist(identityProvider.getIdentityProviderName(),
                                            property.getValue())) {
                                        try {
                                            metadataConverter.saveMetadataString(tenantId,
                                                    identityProvider.getIdentityProviderName(),
                                                    federatedAuthenticatorConfig.getName(), property.getValue());
                                        } catch (IdentityProviderManagementException e) {
                                            String data =
                                                    "Couldn't save metadata in registry.SAML2SSOMetadataConverter" +
                                                            " is not set. ";
                                            throw IdPManagementUtil.handleServerException(
                                                    IdPManagementConstants.ErrorMessage.ERROR_CODE_ADD_IDP, data);
                                        }
                                    }
                                    // The converter appends any extracted certificate into this builder.
                                    StringBuilder certificate = new StringBuilder();
                                    try {
                                        FederatedAuthenticatorConfig metaFederated = metadataConverter
                                                .getFederatedAuthenticatorConfig(properties, certificate);
                                        // Carry the SP entity id from the original properties over to
                                        // the converted configuration (the converter does not keep it).
                                        String spName = "";
                                        for (Property value : properties) {
                                            if (value != null && IdentityApplicationConstants.Authenticator
                                                    .SAML2SSO.SP_ENTITY_ID.equals(value.getName())) {
                                                spName = value.getValue();
                                            }
                                        }
                                        if (spName.equals("")) {
                                            throw new IdentityProviderManagementException("SP name can't be empty");
                                        }

                                        if (metaFederated != null &&
                                                ArrayUtils.isNotEmpty(metaFederated.getProperties())) {
                                            for (int y = 0; y < metaFederated.getProperties().length; y++) {
                                                if (metaFederated.getProperties()[y] != null &&
                                                        IdentityApplicationConstants.Authenticator.SAML2SSO
                                                                .SP_ENTITY_ID.equals
                                                                (metaFederated.getProperties()[y].getName())) {
                                                    metaFederated.getProperties()[y].setValue(spName);
                                                    break;
                                                }
                                            }
                                        }

                                        // Replace the authenticator's properties with the converted set;
                                        // an empty conversion result is treated as a failure.
                                        if (metaFederated != null && ArrayUtils.isNotEmpty(metaFederated.getProperties())) {
                                            federatedAuthenticatorConfig
                                                    .setProperties(metaFederated.getProperties());
                                        } else {
                                            throw new IdentityProviderManagementException(
                                                    "Error setting metadata using file");
                                        }
                                    } catch (IdentityProviderManagementException ex) {
                                        throw new IdentityProviderManagementException("Error converting metadata",
                                                ex);
                                    }
                                    // A non-empty builder means the converter extracted a certificate.
                                    if (certificate.toString().length() > 0) {
                                        identityProvider.setCertificate(certificate.toString());
                                    }
                                } catch (XMLStreamException e) {
                                    throw new IdentityProviderManagementException(
                                            "Error while configuring metadata", e);
                                }
                                // Only the first capable converter is applied; stop scanning converters.
                                break;
                            }
                        }
                    }
                }
            }
        }
    }
}
/**
 * Adds an Identity Provider to the given tenant.
 *
 * @param identityProvider new Identity Provider information
 * @param tenantDomain     tenant domain the IdP is added to
 * @throws IdentityProviderManagementException Error when adding Identity Provider
 *                                             information
 * @deprecated use {@link IdentityProviderManager#addIdPWithResourceId(IdentityProvider, String)} instead.
 */
@Deprecated
@Override
public void addIdP(IdentityProvider identityProvider, String tenantDomain)
        throws IdentityProviderManagementException {

    // Deprecated entry point; delegates to the resource-id aware variant (its return value is discarded).
    addIdPWithResourceId(identityProvider, tenantDomain);
}
/**
 * Adds an Identity Provider to the given tenant and returns the stored IdP.
 * <p>
 * Pre/post {@code IdentityProviderMgtListener}s are invoked around the addition; if any enabled
 * listener vetoes the operation (returns {@code false}) the method returns {@code null}.
 *
 * @param identityProvider new Identity Provider information
 * @param tenantDomain     tenant domain the IdP is added to
 * @return the persisted Identity Provider (re-read from the DAO), or {@code null} when a
 * listener aborted the operation
 * @throws IdentityProviderManagementException Error when adding Identity Provider
 *                                             information
 */
@Override
public IdentityProvider addIdPWithResourceId(IdentityProvider identityProvider, String tenantDomain)
        throws IdentityProviderManagementException {

    diagnosticLog.info("Adding IDP with resource ID: " + identityProvider.getResourceId());
    validateAddIdPInputValues(identityProvider.getIdentityProviderName(), tenantDomain);
    validateOutboundProvisioningRoles(identityProvider,tenantDomain);

    // Invoking the pre listeners.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreAddIdP(identityProvider, tenantDomain)) {
            diagnosticLog.info("'doPreAddIdP' invocation failed in listener: " + listener.getClass().getName());
            return null;
        }
    }

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    // Normalize/verify the role mapping configuration before persisting, when present.
    if (isPermissionAndRoleConfigExist(identityProvider)) {
        verifyAndUpdateRoleConfiguration(tenantDomain, tenantId, identityProvider.getPermissionAndRoleConfig());
    }

    // Reject duplicate SAML entity IDs / issuer names, then process any metadata properties.
    validateIdPEntityId(identityProvider.getFederatedAuthenticatorConfigs(), tenantId, tenantDomain);
    validateIdPIssuerName(identityProvider, tenantId, tenantDomain);
    handleMetadata(tenantId, identityProvider);
    String resourceId = dao.addIdP(identityProvider, tenantId, tenantDomain);
    // Re-read the IdP so the returned object reflects what was actually stored.
    identityProvider = dao.getIdPByResourceId(resourceId, tenantId, tenantDomain);

    // invoking the post listeners
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostAddIdP(identityProvider, tenantDomain)) {
            diagnosticLog.info("'doPostAddIdP' invocation failed in listener: " + listener.getClass().getName());
            return null;
        }
    }
    return identityProvider;
}
/**
 * Deletes an Identity Provider from a given tenant.
 * <p>
 * Pre/post {@code IdentityProviderMgtListener}s are invoked around the deletion; if any enabled
 * listener vetoes the operation (returns {@code false}) the method returns silently. Deleting a
 * non-existent IdP is a silent no-op.
 *
 * @param idPName      Name of the IdP to be deleted
 * @param tenantDomain tenant domain owning the IdP
 * @throws IdentityProviderManagementException Error when deleting Identity Provider
 *                                             information
 * @deprecated use {@link IdentityProviderManager#deleteIdPByResourceId(String, String)} instead.
 */
@Deprecated
@Override
public void deleteIdP(String idPName, String tenantDomain) throws IdentityProviderManagementException {

    diagnosticLog.info("Deleting IDP with name: " + idPName);
    // Invoking the pre listeners.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreDeleteIdP(idPName, tenantDomain)) {
            diagnosticLog.info("'doPreDeleteIdP' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
    if (StringUtils.isEmpty(idPName)) {
        String data = "IdP name is empty.";
        diagnosticLog.error("IDP name cannot be empty.");
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_NAME_INVALID, data);
    }

    // Resolve the IdP to obtain its resource ID; a missing IdP is a silent no-op.
    IdentityProvider identityProvider = this.getIdPByName(idPName, tenantDomain, true);
    if (identityProvider == null) {
        diagnosticLog.info("Could not find a valid IDP for the given name: " + idPName);
        return;
    }
    deleteIDP(identityProvider.getResourceId(), idPName, tenantDomain);

    // Invoking the post listeners.
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostDeleteIdP(idPName, tenantDomain)) {
            diagnosticLog.info("'doPostDeleteIdP' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
}
/**
 * Delete all Identity Providers from a given tenant.
 * <p>
 * Pre/post {@code IdentityProviderMgtListener}s are invoked around the deletion; if any enabled
 * listener vetoes the operation (returns {@code false}) the method returns silently.
 *
 * @param tenantDomain Domain of the tenant
 * @throws IdentityProviderManagementException on errors while deleting IdPs or their metadata
 */
@Override
public void deleteIdPs(String tenantDomain) throws IdentityProviderManagementException {

    diagnosticLog.info("Deleting all IDPs in tenant domain: " + tenantDomain);
    // Invoking the pre listeners.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreDeleteIdPs(tenantDomain)) {
            diagnosticLog.info("'doPreDeleteIdPs' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }

    // Delete metadata strings of each IDP before removing the IdPs themselves from the DB.
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    List<IdentityProvider> identityProviders = getIdPs(tenantDomain);
    for (IdentityProvider identityProvider : identityProviders) {
        deleteMetadataStrings(identityProvider.getIdentityProviderName(), tenantId);
    }

    dao.deleteIdPs(tenantId);

    // Invoking the post listeners.
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostDeleteIdPs(tenantDomain)) {
            diagnosticLog.info("'doPostDeleteIdPs' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
}
/**
 * Deletes an Identity Provider from a given tenant, addressed by its resource ID.
 * <p>
 * Pre/post {@code IdentityProviderMgtListener}s are invoked around the deletion; if any enabled
 * listener vetoes the operation (returns {@code false}) the method returns silently. Deleting a
 * non-existent IdP is a silent no-op.
 *
 * @param resourceId   Resource ID of the IdP to be deleted
 * @param tenantDomain tenant domain owning the IdP
 * @throws IdentityProviderManagementException Error when deleting Identity Provider
 *                                             information
 */
@Override
public void deleteIdPByResourceId(String resourceId, String tenantDomain) throws
        IdentityProviderManagementException {

    diagnosticLog.info("Deleting IDP by resource ID: " + resourceId);
    // Invoking the pre listeners.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreDeleteIdPByResourceId(resourceId, tenantDomain)) {
            diagnosticLog.info("'doPreDeleteIdPByResourceId' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
    // Resolve the IdP to obtain its name (needed for metadata cleanup); missing IdP is a no-op.
    IdentityProvider identityProvider = getIdPByResourceId(resourceId, tenantDomain, true);
    if (identityProvider == null) {
        diagnosticLog.info("Could not find a valid IDP for the given resource ID: " + resourceId);
        return;
    }
    deleteIDP(resourceId, identityProvider.getIdentityProviderName(), tenantDomain);

    // Invoking the post listeners.
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() &&
                !listener.doPostDeleteIdPByResourceId(resourceId, identityProvider, tenantDomain)) {
            diagnosticLog.info("'doPostDeleteIdPByResourceId' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
}
/**
 * Deletes any stored metadata strings of a given IDP, using every registered converter that
 * reports it can delete metadata for this IDP/tenant combination.
 *
 * @param idpName  Identity Provider name
 * @param tenantId Id of the tenant
 * @throws IdentityProviderManagementException on converter failures while deleting metadata
 */
private void deleteMetadataStrings(String idpName, int tenantId) throws IdentityProviderManagementException {

    List<MetadataConverter> converters =
            IdpMgtServiceComponentHolder.getInstance().getMetadataConverters();
    for (MetadataConverter converter : converters) {
        if (converter.canDelete(tenantId, idpName)) {
            converter.deleteMetadataString(tenantId, idpName);
        }
    }
}
/**
 * Delete an IDP: removes its stored metadata strings first, then deletes the IdP record
 * through the DAO.
 *
 * @param resourceId   Resource Id
 * @param idpName      Name of the IDP (used to locate stored metadata)
 * @param tenantDomain Tenant Domain
 * @throws IdentityProviderManagementException on metadata or DAO failures
 */
private void deleteIDP(String resourceId, String idpName, String tenantDomain) throws
        IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);

    // Delete metadata strings of the IDP before the record itself is removed.
    deleteMetadataStrings(idpName, tenantId);

    dao.deleteIdPByResourceId(resourceId, tenantId, tenantDomain);
}
/**
 * Force delete an Identity Provider from a given tenant. This will remove any associations this Identity
 * Provider has with any Service Providers in authentication steps or provisioning.
 * <p>
 * Pre/post {@code IdentityProviderMgtListener}s are invoked around the deletion; if any enabled
 * listener vetoes the operation (returns {@code false}) the method returns silently. Unlike
 * {@code deleteIdP}, a missing IdP raises a client exception here.
 *
 * @param idpName      name of IDP to be deleted
 * @param tenantDomain tenantDomain to which the IDP belongs to
 * @throws IdentityProviderManagementException when the IdP does not exist or deletion fails
 * @deprecated use {@link IdentityProviderManager#forceDeleteIdpByResourceId(String, String)} instead.
 */
@Deprecated
public void forceDeleteIdp(String idpName, String tenantDomain) throws IdentityProviderManagementException {

    diagnosticLog.info("Force-deleting IDP with name: " + idpName);
    // Invoking the pre listeners.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreDeleteIdP(idpName, tenantDomain)) {
            diagnosticLog.info("'doPreDeleteIdP' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
    // Resolve the IdP to obtain its resource ID; fail fast when it does not exist.
    IdentityProvider identityProvider = this
            .getIdPByName(idpName, tenantDomain, true);
    if (identityProvider == null) {
        diagnosticLog.info("Could not find a valid IDP for the given name: " + idpName);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_NAME_DOES_NOT_EXIST, idpName);
    }
    forceDeleteIDP(identityProvider.getResourceId(), idpName, tenantDomain);

    // Invoking the post listeners.
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostDeleteIdP(idpName, tenantDomain)) {
            diagnosticLog.info("'doPostDeleteIdP' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
}
/**
 * Force delete an Identity Provider from a given tenant. This will remove any associations this Identity
 * Provider has with any Service Providers in authentication steps or provisioning.
 * <p>
 * Pre/post {@code IdentityProviderMgtListener}s are invoked around the deletion; if any enabled
 * listener vetoes the operation (returns {@code false}) the method returns silently. A missing
 * IdP raises a client exception.
 *
 * @param resourceId   resource ID of IDP to be deleted
 * @param tenantDomain tenantDomain to which the IDP belongs to
 * @throws IdentityProviderManagementException when the IdP does not exist or deletion fails
 */
public void forceDeleteIdpByResourceId(String resourceId, String tenantDomain) throws
        IdentityProviderManagementException {

    diagnosticLog.info("Force-deleting IDP with resource ID: " + resourceId);
    // Invoking the pre listeners
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreDeleteIdPByResourceId(resourceId, tenantDomain)) {
            diagnosticLog.info("'doPreDeleteIdPByResourceId' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
    // Resolve the IdP to obtain its name (needed for metadata cleanup); fail fast when missing.
    IdentityProvider identityProvider = getIdPByResourceId(resourceId, tenantDomain, true);
    if (identityProvider == null) {
        diagnosticLog.info("Could not find a valid IDP for the given resource ID: " + resourceId);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_DOES_NOT_EXIST, resourceId);
    }
    forceDeleteIDP(resourceId, identityProvider.getIdentityProviderName(), tenantDomain);

    // Invoking the post listeners
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostDeleteIdPByResourceId(resourceId, identityProvider,
                tenantDomain)) {
            diagnosticLog.info("'doPostDeleteIdPByResourceId' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
}
/**
 * Removes any stored metadata strings for the IDP and then force-deletes it through the DAO,
 * which also clears its associations with Service Providers.
 *
 * @param resourceId   resource ID of the IDP to delete
 * @param idpName      name of the IDP (used to locate stored metadata)
 * @param tenantDomain tenant domain owning the IDP
 * @throws IdentityProviderManagementException on metadata or DAO failures
 */
private void forceDeleteIDP(String resourceId, String idpName, String tenantDomain) throws
        IdentityProviderManagementException {

    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    // Reuse the shared helper instead of duplicating the metadata-deletion loop inline.
    deleteMetadataStrings(idpName, tenantId);
    dao.forceDeleteIdPByResourceId(resourceId, tenantId, tenantDomain);
}
/**
 * Updates a given Identity Provider information.
 * <p>
 * Pre/post {@code IdentityProviderMgtListener}s are invoked around the update; if any enabled
 * listener vetoes the operation (returns {@code false}) the method returns silently. A missing
 * IdP raises a client exception.
 *
 * @param oldIdPName         existing Identity Provider name
 * @param newIdentityProvider new IdP information
 * @param tenantDomain       tenant domain owning the IdP
 * @throws IdentityProviderManagementException Error when updating Identity Provider
 *                                             information
 * @deprecated use {@link IdentityProviderManager#updateIdPByResourceId(String, IdentityProvider, String)} instead.
 */
@Deprecated
@Override
public void updateIdP(String oldIdPName, IdentityProvider newIdentityProvider,
                      String tenantDomain) throws IdentityProviderManagementException {

    diagnosticLog.info("Updating IDP with name: " + oldIdPName);
    // Invoking the pre listeners.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreUpdateIdP(oldIdPName, newIdentityProvider,
                tenantDomain)) {
            diagnosticLog.info("'doPreUpdateIdP' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
    // The current IdP state is needed for validation and for the DAO's differential update.
    IdentityProvider currentIdentityProvider = this
            .getIdPByName(oldIdPName, tenantDomain, true);
    if (currentIdentityProvider == null) {
        diagnosticLog.info("Could not find a valid IDP with the given name: " + oldIdPName);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_NAME_DOES_NOT_EXIST, oldIdPName);
    }
    updateIDP(currentIdentityProvider, newIdentityProvider, IdentityTenantUtil.getTenantId(tenantDomain),
            tenantDomain);

    // Invoking the post listeners.
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostUpdateIdP(oldIdPName, newIdentityProvider, tenantDomain)) {
            diagnosticLog.info("'doPostUpdateIdP' invocation failed in listener: " +
                    listener.getClass().getName());
            return;
        }
    }
}
/**
 * Updates a given Identity Provider information, addressed by its resource ID.
 * <p>
 * Pre/post {@code IdentityProviderMgtListener}s are invoked around the update; if any enabled
 * listener vetoes the operation (returns {@code false}) the method returns {@code null}.
 *
 * @param resourceId          existing Identity Provider resourceId
 * @param newIdentityProvider new IdP information
 * @param tenantDomain        tenant domain of IDP.
 * @return the updated Identity Provider re-read from the DAO, or {@code null} when a listener
 * aborted the operation
 * @throws IdentityProviderManagementException Error when updating Identity Provider
 *                                             information
 */
@Override
public IdentityProvider updateIdPByResourceId(String resourceId, IdentityProvider
        newIdentityProvider, String tenantDomain) throws IdentityProviderManagementException {

    diagnosticLog.info("Updating IDP with resource ID: " + resourceId);
    // Invoking the pre listeners.
    Collection<IdentityProviderMgtListener> listeners = IdPManagementServiceComponent.getIdpMgtListeners();
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPreUpdateIdPByResourceId(resourceId, newIdentityProvider,
                tenantDomain)) {
            diagnosticLog.info("'doPreUpdateIdPByResourceId' invocation failed in listener: " +
                    listener.getClass().getName());
            return null;
        }
    }
    int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
    // The current IdP state is needed for validation and for the DAO's differential update.
    IdentityProvider currentIdentityProvider = this
            .getIdPByResourceId(resourceId, tenantDomain, true);
    validateUpdateIdPInputValues(currentIdentityProvider, resourceId, newIdentityProvider, tenantDomain);
    updateIDP(currentIdentityProvider, newIdentityProvider, tenantId, tenantDomain);

    // Invoking the post listeners.
    for (IdentityProviderMgtListener listener : listeners) {
        if (listener.isEnable() && !listener.doPostUpdateIdPByResourceId(resourceId, currentIdentityProvider,
                newIdentityProvider, tenantDomain)) {
            diagnosticLog.info("'doPostUpdateIdPByResourceId' invocation failed in listener: " +
                    listener.getClass().getName());
            return null;
        }
    }
    // Return the persisted state rather than the caller-supplied object.
    return dao.getIdPByResourceId(resourceId, tenantId, tenantDomain);
}
/**
 * Shared update routine: verifies role-mapping configuration (when present), validates entity id
 * and issuer-name changes against existing IdPs, processes metadata properties, then persists the
 * update through the DAO.
 *
 * @param currentIdentityProvider existing persisted IdP state
 * @param newIdentityProvider     new IdP state to store
 * @param tenantId                tenant id owning the IdP
 * @param tenantDomain            tenant domain owning the IdP
 * @throws IdentityProviderManagementException when validation fails or persisting the update fails
 */
private void updateIDP(IdentityProvider currentIdentityProvider, IdentityProvider newIdentityProvider, int tenantId,
                       String tenantDomain) throws IdentityProviderManagementException {

    if (isPermissionAndRoleConfigExist(newIdentityProvider)) {
        verifyAndUpdateRoleConfiguration(tenantDomain, tenantId, newIdentityProvider.getPermissionAndRoleConfig());
    }

    validateUpdateOfIdPEntityId(currentIdentityProvider.getFederatedAuthenticatorConfigs(),
            newIdentityProvider.getFederatedAuthenticatorConfigs(),
            tenantId, tenantDomain);
    validateIdPIssuerName(currentIdentityProvider, newIdentityProvider, tenantId, tenantDomain);
    handleMetadata(tenantId, newIdentityProvider);
    dao.updateIdP(newIdentityProvider, currentIdentityProvider, tenantId, tenantDomain);
}
/**
 * Get the authenticators registered in the system.
 *
 * @return <code>FederatedAuthenticatorConfig</code> array; empty when none are registered.
 * @throws IdentityProviderManagementException Error when getting authenticators registered
 *                                             in the system
 */
@Override
public FederatedAuthenticatorConfig[] getAllFederatedAuthenticators()
        throws IdentityProviderManagementException {

    List<FederatedAuthenticatorConfig> authenticators = ApplicationAuthenticatorService
            .getInstance().getFederatedAuthenticators();
    if (CollectionUtils.isEmpty(authenticators)) {
        return new FederatedAuthenticatorConfig[0];
    }
    return authenticators.toArray(new FederatedAuthenticatorConfig[0]);
}
/**
 * Get the Provisioning Connectors registered in the system.
 *
 * @return <code>ProvisioningConnectorConfig</code> array, or {@code null} when none are
 * registered. NOTE(review): the null return is preserved for backward compatibility, although
 * {@link #getAllFederatedAuthenticators()} returns an empty array in the analogous case.
 * @throws IdentityProviderManagementException on errors while retrieving connector configurations
 */
@Override
public ProvisioningConnectorConfig[] getAllProvisioningConnectors()
        throws IdentityProviderManagementException {

    List<ProvisioningConnectorConfig> connectorConfigs = ProvisioningConnectorService
            .getInstance().getProvisioningConnectorConfigs();
    // Use the same CollectionUtils emptiness check as getAllFederatedAuthenticators() for
    // consistency; behavior (including the null return on empty) is unchanged.
    if (CollectionUtils.isNotEmpty(connectorConfigs)) {
        return connectorConfigs.toArray(new ProvisioningConnectorConfig[0]);
    }
    return null;
}
/**
 * Validates that the SAML IdP Entity ID configured on the given federated authenticators is not
 * already registered by another IdP in the tenant.
 * <p>
 * Only the first SAML authenticator's first IDP_ENTITY_ID property is checked; the method
 * returns {@code true} whenever no duplicate is found (including when there is nothing to check).
 *
 * @param federatedAuthenticatorConfigs authenticator configs of the IdP being added
 * @param tenantId                      tenant id the IdP is added to
 * @param tenantDomain                  tenant domain (used in the error message)
 * @return always {@code true} when validation passes
 * @throws IdentityProviderManagementException when the entity ID is already registered in the tenant
 */
private boolean validateIdPEntityId(FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs,
                                    int tenantId, String tenantDomain) throws IdentityProviderManagementException {

    if (federatedAuthenticatorConfigs != null) {
        for (FederatedAuthenticatorConfig authConfig : federatedAuthenticatorConfigs) {
            // Both the current and legacy SAML authenticator names are recognised.
            if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME.equals(authConfig.getName()) ||
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(authConfig.getName())) {
                Property[] properties = authConfig.getProperties();
                if (properties != null) {
                    for (Property property : properties) {
                        if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(
                                property.getName())) {
                            if (dao.isIdPAvailableForAuthenticatorProperty(authConfig.getName(),
                                    IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID,
                                    property.getValue(), tenantId)) {
                                String msg =
                                        "An Identity Provider Entity ID has already been registered with the " +
                                                "name '" + property.getValue() + "' for tenant '" + tenantDomain +
                                                "'";
                                diagnosticLog.error(msg);
                                throw new IdentityProviderManagementException(msg);
                            }
                            // First IDP_ENTITY_ID property found decides the outcome.
                            return true;
                        }
                    }
                }
            }
        }
    }
    return true;
}
/**
 * Validates an update of the SAML IdP Entity ID: when the new configuration changes the entity ID,
 * the new value must not already be registered by another IdP in the tenant. An unchanged entity
 * ID is always accepted.
 *
 * @param currentFederatedAuthConfigs authenticator configs currently stored for the IdP
 * @param newFederatedAuthConfigs     authenticator configs being stored by the update
 * @param tenantId                    tenant id owning the IdP
 * @param tenantDomain                tenant domain (used in the error message)
 * @return always {@code true} when validation passes
 * @throws IdentityProviderManagementException when the new entity ID collides with another IdP
 */
private boolean validateUpdateOfIdPEntityId(FederatedAuthenticatorConfig[] currentFederatedAuthConfigs,
                                            FederatedAuthenticatorConfig[] newFederatedAuthConfigs,
                                            int tenantId, String tenantDomain)
        throws IdentityProviderManagementException {

    // Extract the entity ID from the currently stored SAML authenticator (if any).
    String currentIdentityProviderEntityId = null;
    if (currentFederatedAuthConfigs != null) {
        for (FederatedAuthenticatorConfig fedAuthnConfig : currentFederatedAuthConfigs) {
            // Both the current and legacy SAML authenticator names are recognised.
            if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME
                    .equals(fedAuthnConfig.getName()) ||
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(fedAuthnConfig.getName())) {
                Property[] properties = fedAuthnConfig.getProperties();
                if (properties != null) {
                    for (Property property : properties) {
                        if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals
                                (property.getName())) {
                            currentIdentityProviderEntityId = property.getValue();
                            break;
                        }
                    }
                }
                break;
            }
        }
    }

    // Compare against the entity ID in the new configuration and check uniqueness when it changed.
    if (newFederatedAuthConfigs != null) {
        for (FederatedAuthenticatorConfig fedAuthnConfig : newFederatedAuthConfigs) {
            if (IdentityApplicationConstants.Authenticator.SAML2SSO.FED_AUTH_NAME
                    .equals(fedAuthnConfig.getName()) ||
                    IdentityApplicationConstants.Authenticator.SAML2SSO.NAME.equals(fedAuthnConfig.getName())) {
                Property[] properties = fedAuthnConfig.getProperties();
                if (properties != null) {
                    for (Property property : properties) {
                        if (IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID.equals(property.
                                getName())) {
                            if (currentIdentityProviderEntityId != null && currentIdentityProviderEntityId.equals
                                    (property.getValue())) {
                                // Entity ID unchanged: nothing further to validate.
                                return true;
                            } else {
                                if (dao.isIdPAvailableForAuthenticatorProperty(fedAuthnConfig.getName(),
                                        IdentityApplicationConstants.Authenticator.SAML2SSO.IDP_ENTITY_ID,
                                        property.getValue(), tenantId)) {
                                    String msg = "An Identity Provider Entity ID has already been registered " +
                                            "with the name '" +
                                            property.getValue() + "' for tenant '" + tenantDomain + "'";
                                    diagnosticLog.error(msg);
                                    throw new IdentityProviderManagementException(msg);
                                }
                                return true;
                            }
                        }
                    }
                }
                break;
            }
        }
    }
    return true;
}
/**
 * Returns the OIDC resident IdP entity id, read from the "OAuth.OpenIDConnect.IDTokenIssuerID"
 * identity configuration property, falling back to "localhost" when it is blank or unset.
 */
private String getOIDCResidentIdPEntityId() {

    String entityId = IdentityUtil.getProperty("OAuth.OpenIDConnect.IDTokenIssuerID");
    return StringUtils.isBlank(entityId) ? "localhost" : entityId;
}
/**
 * Returns the SAML metadata string of the tenant's resident IdP, produced by the first registered
 * MetadataConverter that can handle the resident IdP's SAML authenticator configuration.
 *
 * @param tenantDomain tenant whose resident IdP metadata is requested
 * @return the metadata string, or {@code null} when the resident IdP has no SAML authenticator
 * or no converter can handle it
 * @throws IdentityProviderManagementException when no converters are registered or metadata
 *                                             generation fails
 */
public String getResidentIDPMetadata(String tenantDomain) throws IdentityProviderManagementException {

    if (IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().isEmpty()) {
        throw new IdentityProviderManagementException(
                "Error receiving Metadata object for tenant: " + tenantDomain);
    }

    IdentityProvider residentIdentityProvider = this.getResidentIdP(tenantDomain);
    FederatedAuthenticatorConfig[] federatedAuthenticatorConfigs =
            residentIdentityProvider.getFederatedAuthenticatorConfigs();
    // Locate the resident IdP's SAML authenticator configuration.
    FederatedAuthenticatorConfig samlFederatedAuthenticatorConfig = null;
    for (int i = 0; i < federatedAuthenticatorConfigs.length; i++) {
        if (federatedAuthenticatorConfigs[i].getName()
                .equals(IdentityApplicationConstants.Authenticator.SAML2SSO.NAME)) {
            samlFederatedAuthenticatorConfig = federatedAuthenticatorConfigs[i];
            break;
        }
    }
    if (samlFederatedAuthenticatorConfig != null) {
        try {
            // Ask each converter in registration order; the first capable one produces the metadata.
            for (int t = 0; t < IdpMgtServiceComponentHolder.getInstance().getMetadataConverters().size(); t++) {

                MetadataConverter converter = IdpMgtServiceComponentHolder.getInstance().getMetadataConverters()
                        .get(t);
                if (converter.canHandle(samlFederatedAuthenticatorConfig)) {

                    return converter.getMetadataString(samlFederatedAuthenticatorConfig);

                }
            }
        } catch (IdentityProviderSAMLException e) {
            throw new IdentityProviderManagementException(
                    "Error in retrieving metadata string for tenant:" + tenantDomain, e.getMessage());
        }
    }

    return null;
}
/**
 * Returns the applications connected to the given IdP, paginated by the supplied limit/offset
 * (both are normalized through validateLimit/validateOffset before the DAO call).
 *
 * @param resourceId   resource ID of the IdP
 * @param limit        requested page size (may be null; normalized by validateLimit)
 * @param offset       requested page offset (may be null; normalized by validateOffset)
 * @param tenantDomain tenant domain owning the IdP
 * @throws IdentityProviderManagementException when the resource ID is empty or unknown
 */
@Override
public ConnectedAppsResult getConnectedApplications(String resourceId, Integer limit, Integer offset, String
        tenantDomain) throws IdentityProviderManagementException {

    validateResourceId(resourceId, tenantDomain);
    // Java evaluates arguments left-to-right, so limit is validated before offset, as before.
    return dao.getConnectedApplications(resourceId, validateLimit(limit), validateOffset(offset));
}
/**
 * Ensures the given IdP resource ID is non-empty and refers to an existing IdP in the tenant.
 *
 * @param resourceId   resource ID to validate
 * @param tenantDomain tenant domain to resolve the IdP in
 * @throws IdentityProviderManagementException when the resource ID is empty or no IdP matches it
 */
private void validateResourceId(String resourceId, String tenantDomain) throws IdentityProviderManagementException {

    if (StringUtils.isEmpty(resourceId)) {
        String data = "Invalid argument: Identity Provider resource ID value is empty";
        diagnosticLog.error(data);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_RETRIEVE_IDP_CONNECTED_APPS, data);
    }

    IdentityProvider identityProvider = getIdPByResourceId(resourceId, tenantDomain, true);
    if (identityProvider == null) {
        diagnosticLog.error("Could not find a valid IDP with given resource ID: " + resourceId);
        throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
                .ERROR_CODE_IDP_DOES_NOT_EXIST, resourceId);
    }
}
/**
 * Resolves the public service url given the default context and the url picked from the configuration based on
 * the 'tenant_context.enable_tenant_qualified_urls' mode set in deployment.toml.
 *
 * @param defaultUrlContext Default url context path.
 * @param urlFromConfig     Url picked from the file configuration.
 * @return Absolute public url of the service if 'enable_tenant_qualified_urls' is 'true', else returns the url
 * from the file config.
 * @throws IdentityProviderManagementServerException When fail to build the absolute public url.
 */
private String resolveAbsoluteURL(String defaultUrlContext, String urlFromConfig) throws IdentityProviderManagementServerException {

    // In legacy (non tenant-qualified) mode a configured file URL takes precedence.
    if (!IdentityTenantUtil.isTenantQualifiedUrlsEnabled() && StringUtils.isNotBlank(urlFromConfig)) {
        if (log.isDebugEnabled()) {
            log.debug("Resolved URL:" + urlFromConfig + " from file configuration for default url context: " +
                    defaultUrlContext);
        }
        return urlFromConfig;
    }

    try {
        // Otherwise build the absolute public URL from the server's service URL configuration.
        return ServiceURLBuilder.create().addPath(defaultUrlContext).build().getAbsolutePublicURL();
    } catch (URLBuilderException e) {
        throw IdentityProviderManagementException.error(IdentityProviderManagementServerException.class,
                "Error while building URL: " + defaultUrlContext, e);
    }
}
private String addTenantPathParamInLegacyMode(String resolvedUrl, String tenantDomain) {
try {
if (!IdentityTenantUtil.isTenantQualifiedUrlsEnabled() && StringUtils.isNotBlank(tenantDomain) &&
!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equals(tenantDomain)) {
resolvedUrl = getTenantUrl(resolvedUrl, tenantDomain);
}
} catch (URISyntaxException e) {
log.error(String.format("%s endpoint is malformed.", resolvedUrl), e);
}
return resolvedUrl;
}
private String getTenantUrl(String url, String tenantDomain) throws URISyntaxException {
URI uri = new URI(url);
URI uriModified = new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), ("/t/" +
tenantDomain + uri.getPath()), uri.getQuery(), uri.getFragment());
return uriModified.toString();
}
    /**
     * Validates the role mappings in the given configuration against the tenant's user store and
     * rewrites the configuration in place so that it contains only the valid entries.
     * A mapping is dropped when either side is missing, when the local role does not exist in the
     * user store, or (when groups-vs-roles separation is enabled) when the local name is a group.
     *
     * @param tenantDomain      Tenant domain (used in error reporting).
     * @param tenantId          Tenant id used to look up the user realm.
     * @param roleConfiguration Configuration whose mappings are filtered and updated in place.
     * @throws IdentityProviderManagementException When the UserStoreManager cannot be retrieved.
     */
    private void verifyAndUpdateRoleConfiguration(String tenantDomain, int tenantId,
                                                  PermissionsAndRoleConfig roleConfiguration)
            throws IdentityProviderManagementException {
        List<RoleMapping> validRoleMappings = new ArrayList<>();
        List<String> validIdPRoles = new ArrayList<>();
        for (RoleMapping mapping : roleConfiguration.getRoleMappings()) {
            try {
                // Drop incomplete mappings: both a remote role and a non-blank local role are required.
                if (mapping.getRemoteRole() == null || mapping.getLocalRole() == null || StringUtils
                        .isBlank(mapping.getLocalRole().getLocalRoleName())) {
                    continue;
                }
                UserStoreManager usm = IdPManagementServiceComponent.getRealmService().getTenantUserRealm(tenantId)
                        .getUserStoreManager();
                String role = mapping.getLocalRole().getLocalRoleName();
                // Qualify the role with its user store domain when one is configured.
                if (StringUtils.isNotBlank(mapping.getLocalRole().getUserStoreId())) {
                    role = IdentityUtil.addDomainToName(role, mapping.getLocalRole().getUserStoreId());
                }
                if (IdentityUtil.isGroupsVsRolesSeparationImprovementsEnabled()) {
                    // Only roles are allowed for role mapping.
                    if (isGroup(role)) {
                        if (log.isDebugEnabled()) {
                            log.debug("Groups including: " + role + ", are not allowed for the identity " +
                                    "provider role mapping.");
                        }
                        diagnosticLog.info("Groups including: " + role + ", are not allowed for the identity " +
                                "provider role mapping.");
                        continue;
                    }
                }
                // Remove invalid mappings if local role does not exists.
                if (usm.isExistingRole(role)) {
                    validRoleMappings.add(mapping);
                    validIdPRoles.add(mapping.getRemoteRole());
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("Invalid local role name: " + role + " for the federated role: " + mapping
                                .getRemoteRole());
                    }
                    diagnosticLog.info("Invalid local role name: " + role + " for the federated role: " + mapping
                            .getRemoteRole());
                }
            } catch (UserStoreException e) {
                diagnosticLog.error("Error occurred while retrieving UserStoreManager for tenant " + tenantDomain);
                throw new IdentityProviderManagementException(
                        "Error occurred while retrieving UserStoreManager for tenant " + tenantDomain, e);
            }
        }
        // Replace the configuration's contents with only the entries that passed validation.
        roleConfiguration.setRoleMappings(validRoleMappings.toArray(new RoleMapping[0]));
        roleConfiguration.setIdpRoles(validIdPRoles.toArray(new String[0]));
    }
/**
* Validate input parameters for the getIdPByResourceId function.
*
* @param resourceId Identity Provider resource ID.
* @throws IdentityProviderManagementException IdentityProviderManagementException
*/
private void validateGetIdPInputValues(String resourceId) throws IdentityProviderManagementException {
if (StringUtils.isEmpty(resourceId)) {
String data = "Invalid argument: Identity Provider resource ID value is empty";
diagnosticLog.error(data);
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_GET_REQUEST_INVALID, data);
}
}
/**
* Validate input parameters for the addIdPWithResourceId function.
*
* @param idpName Identity Provider name.
* @param tenantDomain Tenant domain of IDP.
* @throws IdentityProviderManagementException IdentityProviderManagementException
*/
private void validateAddIdPInputValues(String idpName, String tenantDomain) throws
IdentityProviderManagementException {
if (IdentityProviderManager.getInstance().getIdPByName(idpName, tenantDomain, true) != null) {
diagnosticLog.error("An IDP with given name: " + idpName + " already exists.");
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_ALREADY_EXISTS, idpName);
}
if (IdPManagementServiceComponent.getFileBasedIdPs().containsKey(idpName)
&& !idpName.startsWith(IdPManagementConstants.SHARED_IDP_PREFIX)) {
//If an IDP with name starting with "SHARED_" is added from UI, It's blocked at the service class
// before calling this method
diagnosticLog.error("An IDP with given name: " + idpName + " already exists.");
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_ALREADY_EXISTS, idpName);
}
}
/**
* Validate input parameters for the updateIdPByResourceId function.
*
* @param currentIdentityProvider Old Identity Provider Information.
* @param resourceId Identity Provider's resource ID.
* @param newIdentityProvider
* @param tenantDomain Tenant domain of IDP.
* @throws IdentityProviderManagementException IdentityProviderManagementException
*/
private void validateUpdateIdPInputValues(IdentityProvider currentIdentityProvider, String resourceId,
IdentityProvider newIdentityProvider, String tenantDomain)
throws IdentityProviderManagementException {
if (currentIdentityProvider == null) {
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_DOES_NOT_EXIST, resourceId);
}
boolean isNewIdPNameExists = false;
IdentityProvider retrievedIdentityProvider =
getIdPByName(newIdentityProvider.getIdentityProviderName(), tenantDomain, true);
if (retrievedIdentityProvider != null) {
isNewIdPNameExists = !StringUtils.equals(retrievedIdentityProvider.getResourceId(), currentIdentityProvider
.getResourceId());
}
if (isNewIdPNameExists || IdPManagementServiceComponent.getFileBasedIdPs()
.containsKey(newIdentityProvider.getIdentityProviderName())) {
throw IdPManagementUtil.handleClientException(IdPManagementConstants.ErrorMessage
.ERROR_CODE_IDP_ALREADY_EXISTS, newIdentityProvider.getIdentityProviderName());
}
// Validate whether there are any duplicate properties in the ProvisioningConnectorConfig.
validateOutboundProvisioningConnectorProperties(newIdentityProvider);
}
    /**
     * Validate whether there are any duplicate properties in the ProvisioningConnectorConfig of an IdentityProvider.
     *
     * @param newIdentityProvider IdentityProvider object.
     * @throws IdentityProviderManagementException If duplicate properties found in ProvisioningConnectorConfig.
     */
    private void validateOutboundProvisioningConnectorProperties(IdentityProvider newIdentityProvider)
            throws IdentityProviderManagementException {
        ProvisioningConnectorConfig[] provisioningConnectorConfigs =
                newIdentityProvider.getProvisioningConnectorConfigs();
        if (!ArrayUtils.isEmpty(provisioningConnectorConfigs)) {
            for (ProvisioningConnectorConfig connectorConfig : provisioningConnectorConfigs) {
                Property[] properties = connectorConfig.getProvisioningProperties();
                // If no properties have specified, validation needs to stop.
                // NOTE(review): this 'break' aborts validation for ALL remaining connector configs as
                // soon as one config has fewer than two properties. If each connector is meant to be
                // validated independently, 'continue' may have been intended — confirm before changing.
                if (ArrayUtils.isEmpty(properties) || properties.length < 2) {
                    break;
                }
                // A failed Set.add signals a duplicate — assumes Property defines value-based
                // equals/hashCode; TODO confirm against the Property class.
                Set<Property> connectorProperties = new HashSet<>();
                for (Property property : properties) {
                    if (!connectorProperties.add(property)) {
                        throw IdPManagementUtil.handleClientException(
                                IdPManagementConstants.ErrorMessage.DUPLICATE_OUTBOUND_CONNECTOR_PROPERTIES,
                                newIdentityProvider.getIdentityProviderName());
                    }
                }
            }
        }
    }
/**
* Check whether PermissionAndRoleConfig is configured for the IDP.
*
* @param identityProvider Identity Provider information.
* @return whether config exists.
*/
private boolean isPermissionAndRoleConfigExist(IdentityProvider identityProvider) {
return identityProvider.getPermissionAndRoleConfig() != null
&& identityProvider.getPermissionAndRoleConfig().getRoleMappings() != null;
}
/**
* Check whether metadata file is configured for the IDP.
*
* @param idpName Identity Provider name.
* @param metadata Metadata string.
* @return whether metadata exists.
*/
private boolean isMetadataFileExist(String idpName, String metadata) {
return StringUtils.isNotEmpty(idpName) && StringUtils.isNotEmpty(metadata);
}
@Override
public IdentityProvider getIdPByMetadataProperty(String property, String value, String tenantDomain,
boolean ignoreFileBasedIdps)
throws IdentityProviderManagementException {
int tenantId = IdentityTenantUtil.getTenantId(tenantDomain);
if (StringUtils.isEmpty(property) || StringUtils.isEmpty(value)) {
String msg = "Invalid argument: IDP metadata property or property value is empty";
diagnosticLog.info(msg);
throw new IdentityProviderManagementException(msg);
}
String idPName = getIDPNameByMetadataProperty(null, property, value, tenantId, tenantDomain,
ignoreFileBasedIdps);
if (idPName == null) {
if (log.isDebugEnabled()) {
log.debug("IDP Name not found for metadata property name: " + property + " value: " + value +
". Returning null without continuing.");
}
diagnosticLog.info("IDP Name not found for metadata property name: " + property + " value: " + value +
". Returning null without continuing.");
return null;
}
return getIdPByName(idPName, tenantDomain, ignoreFileBasedIdps);
}
/**
* Method to validate the uniqueness of the IDP Issuer Name.
* Ideally used when adding a IDP.
*
* @param identityProvider Identity Provider being added.
* @param tenantId Tenant id.
* @param tenantDomain Tenant domain.
* @return Returns true if valid.
* @throws IdentityProviderManagementException IdentityProviderManagementException.
*/
private boolean validateIdPIssuerName(IdentityProvider identityProvider, int tenantId, String tenantDomain)
throws IdentityProviderManagementException {
IdentityProviderProperty[] identityProviderProperties = identityProvider.getIdpProperties();
if (!ArrayUtils.isEmpty(identityProviderProperties)) {
for (IdentityProviderProperty prop : identityProviderProperties) {
if (prop != null && IdentityApplicationConstants.IDP_ISSUER_NAME.equals(prop.getName())
&& StringUtils.isNotBlank(prop.getValue())) {
String idpWithIssuer = getIDPNameByMetadataProperty(null,
IdentityApplicationConstants.IDP_ISSUER_NAME, prop.getValue(), tenantId, tenantDomain,
false);
if (StringUtils.isNotEmpty(idpWithIssuer)) {
String msg = "The provided IDP Issuer Name '" + prop.getValue() + "' has already been " +
"registered with the IDP '" + idpWithIssuer + "'.";
diagnosticLog.error(msg);
throw new IdentityProviderManagementClientException(msg);
}
}
}
}
return true;
}
/**
* Method to validate the uniqueness of the IDP Issuer Name.
* Ideally used when updating a IDP.
* If the provided two IDP configs have the same Issuer Name validation is passed.
*
* @param currentIdP Existing Identity Provider config.
* @param newIdP Updated Identity Provider config.
* @param tenantId Tenant id.
* @param tenantDomain Tenant domain.
* @return Returns true if valid.
* @throws IdentityProviderManagementException IdentityProviderManagementException.
*/
private boolean validateIdPIssuerName(IdentityProvider currentIdP, IdentityProvider newIdP, int tenantId,
String tenantDomain)
throws IdentityProviderManagementException {
String newIdPIssuerName = null;
IdentityProviderProperty[] identityProviderProperties = newIdP.getIdpProperties();
if (!ArrayUtils.isEmpty(identityProviderProperties)) {
for (IdentityProviderProperty prop : identityProviderProperties) {
if (prop != null && IdentityApplicationConstants.IDP_ISSUER_NAME.equals(prop.getName())
&& StringUtils.isNotBlank(prop.getValue())) {
newIdPIssuerName = prop.getValue();
}
}
}
String currentIdPIssuerName = null;
identityProviderProperties = currentIdP.getIdpProperties();
if (!ArrayUtils.isEmpty(identityProviderProperties)) {
for (IdentityProviderProperty prop : identityProviderProperties) {
if (prop != null && IdentityApplicationConstants.IDP_ISSUER_NAME.equals(prop.getName())
&& StringUtils.isNotBlank(prop.getValue())) {
currentIdPIssuerName = prop.getValue();
}
}
}
if (StringUtils.isNotBlank(newIdPIssuerName) && !StringUtils.equals(newIdPIssuerName, currentIdPIssuerName)) {
String idpWithIssuer = getIDPNameByMetadataProperty(null,
IdentityApplicationConstants.IDP_ISSUER_NAME, newIdPIssuerName, tenantId, tenantDomain,
false);
if (StringUtils.isNotEmpty(idpWithIssuer)) {
String msg = "The provided IDP Issuer Name '" + newIdPIssuerName + "' has already been " +
"registered with the IDP '" + idpWithIssuer + "'.";
diagnosticLog.error(msg);
throw new IdentityProviderManagementClientException(msg);
}
}
return true;
}
private String getIDPNameByMetadataProperty(Connection dbConnection, String property, String value, int tenantId,
String tenantDomain, boolean ignoreFileBasedIdps)
throws IdentityProviderManagementException {
String idPName = dao.getIdPNameByMetadataProperty(null, property, value, tenantId, tenantDomain);
if (idPName == null && !ignoreFileBasedIdps) {
if (log.isDebugEnabled()) {
log.debug("Attempting to retrieve IDP Name from filebased IDPs for IDP metadata " +
"property name: " + property + " value: " + value);
}
idPName = new FileBasedIdPMgtDAO().getIdPNameByMetadataProperty(property, value);
}
return idPName;
}
/**
* Validate whether the outbound provisioning roles does exist.
*
* @param identityProvider IdentityProvider.
* @param tenantDomain Tenant Domain.
* @throws IdentityProviderManagementException If an error occurred while checking for role existence.
*/
private void validateOutboundProvisioningRoles(IdentityProvider identityProvider, String tenantDomain)
throws IdentityProviderManagementException {
String provisioningRole = identityProvider.getProvisioningRole();
if (StringUtils.isBlank(provisioningRole)) {
return;
}
String[] outboundProvisioningRoles = StringUtils.split(provisioningRole, ",");
try {
RoleManagementService roleManagementService =
IdpMgtServiceComponentHolder.getInstance().getRoleManagementService();
for (String roleName : outboundProvisioningRoles) {
try {
if (!roleManagementService.isExistingRoleName(roleName, tenantDomain)) {
throw IdPManagementUtil.handleClientException(
IdPManagementConstants.ErrorMessage.ERROR_CODE_NOT_EXISTING_OUTBOUND_PROVISIONING_ROLE,
null);
}
} catch (NotImplementedException e) {
if (log.isDebugEnabled()) {
log.debug("isExistingRoleName is not implemented in the RoleManagementService. " +
"Therefore, proceeding without validating outbound provisioning role existence.");
}
}
}
} catch (IdentityRoleManagementException e) {
throw IdPManagementUtil.handleServerException(
IdPManagementConstants.ErrorMessage.ERROR_CODE_VALIDATING_OUTBOUND_PROVISIONING_ROLES, null, e);
}
}
private boolean isGroup(String localRoleName) {
return !Stream.of(INTERNAL_DOMAIN, APPLICATION_DOMAIN, WORKFLOW_DOMAIN).anyMatch(domain -> localRoleName
.toUpperCase().startsWith((domain + UserCoreConstants.DOMAIN_SEPARATOR).toUpperCase()));
}
/**
* Get tenant context using tenant domain.
*
* @param tenantDomain tenant domain of the tenant
* @return tenant context
*/
private String getTenantContextFromTenantDomain(String tenantDomain) {
if (!MultitenantConstants.SUPER_TENANT_DOMAIN_NAME.equalsIgnoreCase(tenantDomain)) {
return MultitenantConstants.TENANT_AWARE_URL_PREFIX + "/" + tenantDomain + "/";
} else {
return "";
}
}
}
|
Refactored the comments
|
components/idp-mgt/org.wso2.carbon.idp.mgt/src/main/java/org/wso2/carbon/idp/mgt/IdentityProviderManager.java
|
Refactored the comments
|
|
Java
|
apache-2.0
|
a1b15a20a825ce2ead241f4adf620c1d7dc51827
| 0
|
babble/babble,babble/babble,babble/babble,babble/babble,babble/babble,babble/babble
|
// JSBuiltInFunctions.java
package ed.js.engine;
import java.io.*;
import java.util.*;
import java.lang.reflect.*;
import com.twmacinta.util.*;
import ed.log.*;
import ed.js.*;
import ed.js.func.*;
import ed.io.*;
import ed.net.*;
import ed.util.*;
import ed.security.*;
public class JSBuiltInFunctions {
public static class jsassert extends JSFunctionCalls1 {
public Object call( Scope scope , Object foo , Object extra[] ){
if ( JSInternalFunctions.JS_evalToBool( foo ) )
return Boolean.TRUE;
if ( extra != null && extra.length > 0 && extra[0] != null )
throw new JSException( "assert failed : " + extra[0] );
throw new JSException( "assert failed" );
}
}
static Class _getClass( String name )
throws Exception {
final int colon = name.indexOf( ":" );
if ( colon < 0 )
return Class.forName( name );
String base = name.substring( 0 , colon );
Class c = Class.forName( base );
String inner = "$" + name.substring( colon + 1 );
for ( Class child : c.getClasses() ){
if ( child.getName().endsWith( inner ) )
return child;
}
throw new JSException( "can't find inner class [" + inner + "] on [" + c.getName() + "]" );
}
public static class javaCreate extends JSFunctionCalls1 {
public Object call( Scope scope , Object clazzNameJS , Object extra[] ){
String clazzName = clazzNameJS.toString();
if ( ! Security.isCoreJS() )
throw new JSException( "you can't do create a :" + clazzName );
Class clazz = null;
try {
clazz = _getClass( clazzName );
}
catch ( Exception e ){
throw new JSException( "can't find class for [" + clazzName + "]" );
}
Constructor[] allCons = clazz.getConstructors();
Arrays.sort( allCons , Scope._consLengthComparator );
for ( int i=0; i<allCons.length; i++ ){
Object params[] = Scope.doParamsMatch( allCons[i].getParameterTypes() , extra );
if ( params != null ){
try {
return allCons[i].newInstance( params );
}
catch ( Exception e ){
throw new JSException( "can' instantiate" , e );
}
}
}
throw new RuntimeException( "can't find valid constructor" );
}
}
public static class javaStatic extends JSFunctionCalls2 {
public Object call( Scope scope , Object clazzNameJS , Object methodNameJS , Object extra[] ){
String clazzName = clazzNameJS.toString();
if ( ! Security.isCoreJS() )
throw new JSException( "you can't use a :" + clazzName );
Class clazz = null;
try {
clazz = _getClass( clazzName );
}
catch ( Exception e ){
throw new JSException( "can't find class for [" + clazzName + "]" );
}
Method[] all = clazz.getMethods();
Arrays.sort( all , Scope._methodLengthComparator );
for ( int i=0; i<all.length; i++ ){
Method m = all[i];
if ( ( m.getModifiers() & Modifier.STATIC ) == 0 )
continue;
if ( ! m.getName().equals( methodNameJS.toString() ) )
continue;
Object params[] = Scope.doParamsMatch( m.getParameterTypes() , extra );
if ( params != null ){
try {
return m.invoke( null , params );
}
catch ( Exception e ){
throw new JSException( "can' call" , e );
}
}
}
throw new RuntimeException( "can't find valid method" );
}
}
    /**
     * javaStaticProp( className , fieldName ) — reads a public static Java field by name from JS.
     * Only core-JS code may call this (Security.isCoreJS()).
     */
    public static class javaStaticProp extends JSFunctionCalls2 {
        public Object call( Scope scope , Object clazzNameJS , Object fieldNameJS , Object extra[] ){
            String clazzName = clazzNameJS.toString();
            if ( ! Security.isCoreJS() )
                throw new JSException( "you can't use a :" + clazzName );
            Class clazz = null;
            try {
                clazz = _getClass( clazzName );
            }
            // Re-throw _getClass's own JSException (e.g. inner class not found) unchanged.
            catch ( JSException e ){
                throw e;
            }
            catch ( Exception e ){
                throw new JSException( "can't find class for [" + clazzName + "]" );
            }
            try {
                // get(null): static field, so no instance is required.
                return clazz.getField( fieldNameJS.toString() ).get( null );
            }
            catch ( NoSuchFieldException n ){
                throw new JSException( "can't find field [" + fieldNameJS + "] from [" + clazz.getName() + "]" );
            }
            catch ( Throwable t ){
                throw new JSException( "can't get field [" + fieldNameJS + "] from [" + clazz.getName() + "] b/c " + t );
            }
        }
    }
public static class print extends JSFunctionCalls1 {
print(){
this( true );
}
print( boolean newLine ){
super();
_newLine = newLine;
}
public Object call( Scope scope , Object foo , Object extra[] ){
if ( _newLine )
System.out.println( foo );
else
System.out.print( foo );
return null;
}
final boolean _newLine;
}
    // JS Object constructor: calling Object() yields a fresh, empty JSObjectBase.
    public static JSFunction NewObject = new JSFunctionCalls0(){
        public Object call( Scope scope , Object extra[] ){
            return new JSObjectBase();
        }
        protected void init(){
            /**
             * Copies all properties from the source to the destination object.
             * Not in JavaScript spec! Please refer to Prototype docs!
             */
            set( "extend", new JSFunctionCalls2(){
                public Object call( Scope s , Object dest , Object src , Object [] extra ){
                    JSObject jdest, jsrc;
                    jdest = (JSObject)dest;
                    jsrc = (JSObject)src;
                    // Shallow copy: each key of src overwrites the same key on dest.
                    for(String key : jsrc.keySet()){
                        jdest.set(key, jsrc.get(key));
                    }
                    return dest;
                }
            });
        }
    };
public static class NewDate extends JSFunctionCalls1 {
public Object call( Scope scope , Object t , Object extra[] ){
if ( t == null )
return new JSDate();
if ( ! ( t instanceof Number ) )
return new JSDate();
return new JSDate( ((Number)t).longValue() );
}
}
public static class CrID extends JSFunctionCalls1 {
public Object call( Scope scope , Object idString , Object extra[] ){
if ( idString == null )
return ed.db.ObjectId.get();
return new ed.db.ObjectId( idString.toString() );
}
}
public static class sleep extends JSFunctionCalls1 {
public Object call( Scope scope , Object timeObj , Object extra[] ){
if ( ! ( timeObj instanceof Number ) )
return false;
try {
Thread.sleep( ((Number)timeObj).longValue() );
}
catch ( Exception e ){
return false;
}
return true;
}
}
public static class isXXX extends JSFunctionCalls1 {
isXXX( Class c ){
_c = c;
}
public Object call( Scope scope , Object o , Object extra[] ){
return _c.isInstance( o );
}
final Class _c;
}
public static class isXXXs extends JSFunctionCalls1 {
isXXXs( Class ... c ){
_c = c;
}
public Object call( Scope scope , Object o , Object extra[] ){
for ( int i=0; i<_c.length; i++ )
if ( _c[i].isInstance( o ) )
return true;
return false;
}
final Class _c[];
}
    /**
     * sysexec( commandLine [, stdin [, envObject]] ) — runs an external command from JS.
     * Only core-JS code may call this; the child runs with the scope's root directory as cwd.
     * Returns an object with "out" and "err" string fields, or null for a null command.
     */
    public static class sysexec extends JSFunctionCalls1 {

        // adds quotes as needed: re-joins whitespace-split tokens that were enclosed in
        // double quotes and strips the quote characters themselves.
        static String[] fix( String s ){
            String base[] = s.split( "\\s+" );
            List<String> fixed = new ArrayList();
            boolean changed = false;
            for ( int i=0; i<base.length; i++ ){
                if ( ! base[i].startsWith( "\"" ) ){
                    fixed.add( base[i] );
                    continue;
                }
                // Token opens a quote: scan forward to the token that closes it.
                int end = i;
                while( end < base.length && ! base[end].endsWith( "\"" ) )
                    end++;
                String foo = base[i++].substring( 1 );
                for ( ; i<=end && i < base.length; i++ )
                    foo += " " + base[i];
                i--;
                if ( foo.endsWith( "\"" ) )
                    foo = foo.substring( 0 , foo.length() - 1 );
                fixed.add( foo );
                changed = true;
            }
            if ( changed ){
                // NOTE(review): debug print left in the production path — confirm it is intentional.
                System.out.println( fixed );
                base = new String[fixed.size()];
                for ( int i=0; i<fixed.size(); i++ )
                    base[i] = fixed.get(i);
            }
            return base;
        }

        public Object call( Scope scope , Object o , Object extra[] ){
            if ( o == null )
                return null;
            if ( ! Security.isCoreJS() )
                throw new JSException( "can't exec this" );
            File root = scope.getRoot();
            if ( root == null )
                throw new JSException( "no root" );
            String cmd[] = fix( o.toString() );
            String env[] = new String[]{};
            String toSend = null;
            // Optional second argument: string piped to the child's stdin.
            if ( extra != null && extra.length > 0 )
                toSend = extra[0].toString();
            // Optional third argument: JS object whose key/value pairs become the environment.
            if ( extra != null && extra.length > 1 && extra[1] instanceof JSObject ){
                JSObject foo = (JSObject)extra[1];
                env = new String[ foo.keySet().size() ];
                int pos = 0;
                for ( String name : foo.keySet() ){
                    Object val = foo.get( name );
                    if ( val == null )
                        val = "";
                    env[pos++] = name + "=" + val.toString();
                }
            }
            try {
                Process p = Runtime.getRuntime().exec( cmd , env , root );
                if ( toSend != null ){
                    OutputStream out = p.getOutputStream();
                    // NOTE(review): uses the platform default charset — confirm that matches the child's expectation.
                    out.write( toSend.getBytes() );
                    out.close();
                }
                JSObject res = new JSObjectBase();
                res.set( "out" , StreamUtil.readFully( p.getInputStream() ) );
                res.set( "err" , StreamUtil.readFully( p.getErrorStream() ) );
                return res;
            }
            catch ( Throwable t ){
                throw new JSException( t.toString() , t );
            }
        }
    }
    /**
     * fork( function , args... ) — wraps the given JS function in a new Thread.
     * The anonymous Thread subclass exposes returnData(), which joins the thread and returns
     * the function's result. NOTE(review): the thread is returned without start() being called —
     * starting it appears to be the caller's responsibility; confirm against JS-side usage.
     */
    public static class fork extends JSFunctionCalls1 {
        public Object call( final Scope scope , final Object funcJS , final Object extra[] ){
            if ( ! ( funcJS instanceof JSFunction ) )
                throw new JSException( "fork has to take a function" );
            final JSFunction func = (JSFunction)funcJS;
            final Thread t = new Thread( "fork" ){
                public void run(){
                    try {
                        _result = func.call( scope , extra );
                    }
                    catch ( Throwable t ){
                        // Prefer the scope's installed logger; otherwise dump the failure to stderr.
                        if ( scope.get( "log" ) != null )
                            ((Logger)scope.get( "log" ) ).error( "error in fork" , t );
                        else
                            t.printStackTrace();
                    }
                }
                // Blocks until the forked function finishes, then yields its return value.
                public Object returnData()
                    throws InterruptedException {
                    join();
                    return _result;
                }
                private Object _result;
            };
            return t;
        }
    }
    // Scope holding every built-in global exposed to JS code; all entries are registered
    // read-only (third argument true).
    static Scope _myScope = new Scope( "Built-Ins" , null );
    static {
        // Process / console helpers.
        _myScope.put( "sysexec" , new sysexec() , true );
        _myScope.put( "print" , new print() , true );
        _myScope.put( "printnoln" , new print( false ) , true );
        _myScope.put( "SYSOUT" , new print() , true );
        _myScope.put( "sleep" , new sleep() , true );
        _myScope.put( "fork" , new fork() , true );
        // Core JS constructors.
        _myScope.put( "Object" , NewObject , true );
        _myScope.put( "Array" , JSArray._cons , true );
        _myScope.put( "Date" , JSDate._cons , true );
        _myScope.put( "String" , JSString._cons , true );
        _myScope.put( "RegExp" , JSRegex._cons , true );
        _myScope.put( "XMLHttpRequest" , XMLHttpRequest._cons , true );
        _myScope.put( "Function" , JSInternalFunctions.FunctionCons , true );
        _myScope.put( "Math" , JSMath.getInstance() , true );
        // One CrID instance serves all the id-constructor aliases.
        CrID crid = new CrID();
        _myScope.put( "CrID" , crid , true );
        _myScope.put( "ObjID" , crid , true );
        _myScope.put( "ObjId" , crid , true );
        _myScope.put( "ObjectID" , crid , true );
        _myScope.put( "ObjectId" , crid , true );
        _myScope.put( "Base64" , new ed.util.Base64() , true );
        _myScope.put( "download" , HttpDownload.DOWNLOAD , true );
        _myScope.put( "JSCaptcha" , new JSCaptcha() , true );
        // parseBool: truthy iff the string form starts with 't' or 'T'.
        _myScope.put( "parseBool" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object b , Object extra[] ){
                    if ( b == null )
                        return false;
                    String s = b.toString();
                    if ( s.length() == 0 )
                        return false;
                    char c = s.charAt( 0 );
                    return c == 't' || c == 'T';
                }
            } , true );
        _myScope.put( "Number" , JSNumber.CONS , true );
        _myScope.put( "parseNumber" , JSNumber.CONS , true );
        // parseFloat: NaN for null or unparseable input.
        _myScope.put( "parseFloat" ,
                      new JSFunctionCalls1(){
                          public Object call( Scope scope , Object a , Object extra[] ){
                              if ( a == null )
                                  return Double.NaN;
                              try {
                                  return Double.parseDouble( a.toString() );
                              }
                              catch ( Exception e ){}
                              return Double.NaN;
                          }
                      }
                      , true );
        // parseInt: optional second argument is the radix (defaults to 10); NaN on failure.
        _myScope.put( "parseInt" ,
                      new JSFunctionCalls2(){
                          public Object call( Scope scope , Object a , Object b , Object extra[] ){
                              if ( a == null )
                                  return Double.NaN;
                              if ( a instanceof Number )
                                  return ((Number)a).intValue();
                              String s = a.toString();
                              try {
                                  if ( b != null && b instanceof Number ){
                                      return StringParseUtil.parseIntRadix( s , ((Number)b).intValue() );
                                  }
                                  return StringParseUtil.parseIntRadix( s , 10 );
                              }
                              catch ( Exception e ){}
                              return Double.NaN;
                          }
                      }
                      , true );
        _myScope.put( "NaN" , Double.NaN , true );
        // md5: hex digest of the argument's string form; the shared MD5 instance is
        // synchronized because it is stateful.
        _myScope.put( "md5" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object b , Object extra[] ){
                    synchronized ( _myMd5 ){
                        _myMd5.Init();
                        _myMd5.Update( b.toString() );
                        return new JSString( _myMd5.asHex() );
                    }
                }
                private final MD5 _myMd5 = new MD5();
            } , true );
        // Type predicates.
        _myScope.put( "isArray" , new isXXX( JSArray.class ) , true );
        _myScope.put( "isBool" , new isXXX( Boolean.class ) , true );
        _myScope.put( "isNumber" , new isXXX( Number.class ) , true );
        _myScope.put( "isObject" , new isXXX( JSObject.class ) , true );
        _myScope.put( "isDate" , new isXXX( JSDate.class ) , true );
        _myScope.put( "isFunction" , new isXXX( JSFunction.class ) , true );
        _myScope.put( "isString" , new isXXXs( String.class , JSString.class ) , true );
        // Single-character predicates (see getChar for the coercion rules).
        _myScope.put( "isAlpha" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object o , Object extra[] ){
                    char c = getChar( o );
                    return Character.isLetter( c );
                }
            } , true );
        _myScope.put( "isSpace" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object o , Object extra[] ){
                    char c = getChar( o );
                    return Character.isWhitespace( c );
                }
            } , true );
        _myScope.put( "isDigit" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object o , Object extra[] ){
                    char c = getChar( o );
                    return Character.isDigit( c );
                }
            } , true );
        // Java interop (all core-JS gated, see the class implementations above).
        _myScope.put( "assert" , new jsassert() , true );
        _myScope.put( "javaCreate" , new javaCreate() , true );
        _myScope.put( "javaStatic" , new javaStatic() , true );
        _myScope.put( "javaStaticProp" , new javaStaticProp() , true );
        // escape: URL-encode, but keep spaces as %20 rather than '+'.
        _myScope.put( "escape" , new JSFunctionCalls1(){
                public Object call( Scope scope , Object o , Object extra[] ){
                    return java.net.URLEncoder.encode( o.toString() ).replaceAll( "\\+" , "%20" );
                }
            } , true );
        JSON.init( _myScope );
        // mail stuff till i'm done
        _myScope.put( "JAVAXMAILTO" , javax.mail.Message.RecipientType.TO , true );
    }
private static char getChar( Object o ){
if ( o instanceof Character )
return (Character)o;
if ( o instanceof JSString )
o = o.toString();
if ( o instanceof String ){
String s = (String)o;
if ( s.length() == 1 )
return s.charAt( 0 );
}
return 0;
}
}
|
src/main/ed/js/engine/JSBuiltInFunctions.java
|
// JSBuiltInFunctions.java
package ed.js.engine;
import java.io.*;
import java.util.*;
import java.lang.reflect.*;
import com.twmacinta.util.*;
import ed.log.*;
import ed.js.*;
import ed.js.func.*;
import ed.io.*;
import ed.net.*;
import ed.util.*;
import ed.security.*;
public class JSBuiltInFunctions {
public static class jsassert extends JSFunctionCalls1 {
public Object call( Scope scope , Object foo , Object extra[] ){
if ( JSInternalFunctions.JS_evalToBool( foo ) )
return Boolean.TRUE;
if ( extra != null && extra.length > 0 && extra[0] != null )
throw new JSException( "assert failed : " + extra[0] );
throw new JSException( "assert failed" );
}
}
static Class _getClass( String name )
throws Exception {
final int colon = name.indexOf( ":" );
if ( colon < 0 )
return Class.forName( name );
String base = name.substring( 0 , colon );
Class c = Class.forName( base );
String inner = "$" + name.substring( colon + 1 );
for ( Class child : c.getClasses() ){
if ( child.getName().endsWith( inner ) )
return child;
}
throw new JSException( "can't find inner class [" + inner + "] on [" + c.getName() + "]" );
}
public static class javaCreate extends JSFunctionCalls1 {
public Object call( Scope scope , Object clazzNameJS , Object extra[] ){
String clazzName = clazzNameJS.toString();
if ( ! Security.isCoreJS() )
throw new JSException( "you can't do create a :" + clazzName );
Class clazz = null;
try {
clazz = _getClass( clazzName );
}
catch ( Exception e ){
throw new JSException( "can't find class for [" + clazzName + "]" );
}
Constructor[] allCons = clazz.getConstructors();
Arrays.sort( allCons , Scope._consLengthComparator );
for ( int i=0; i<allCons.length; i++ ){
Object params[] = Scope.doParamsMatch( allCons[i].getParameterTypes() , extra );
if ( params != null ){
try {
return allCons[i].newInstance( params );
}
catch ( Exception e ){
throw new JSException( "can' instantiate" , e );
}
}
}
throw new RuntimeException( "can't find valid constructor" );
}
}
public static class javaStatic extends JSFunctionCalls2 {
public Object call( Scope scope , Object clazzNameJS , Object methodNameJS , Object extra[] ){
String clazzName = clazzNameJS.toString();
if ( ! Security.isCoreJS() )
throw new JSException( "you can't use a :" + clazzName );
Class clazz = null;
try {
clazz = _getClass( clazzName );
}
catch ( Exception e ){
throw new JSException( "can't find class for [" + clazzName + "]" );
}
Method[] all = clazz.getMethods();
Arrays.sort( all , Scope._methodLengthComparator );
for ( int i=0; i<all.length; i++ ){
Method m = all[i];
if ( ( m.getModifiers() & Modifier.STATIC ) == 0 )
continue;
if ( ! m.getName().equals( methodNameJS.toString() ) )
continue;
Object params[] = Scope.doParamsMatch( m.getParameterTypes() , extra );
if ( params != null ){
try {
return m.invoke( null , params );
}
catch ( Exception e ){
throw new JSException( "can' call" , e );
}
}
}
throw new RuntimeException( "can't find valid method" );
}
}
public static class javaStaticProp extends JSFunctionCalls2 {
public Object call( Scope scope , Object clazzNameJS , Object fieldNameJS , Object extra[] ){
String clazzName = clazzNameJS.toString();
if ( ! Security.isCoreJS() )
throw new JSException( "you can't use a :" + clazzName );
Class clazz = null;
try {
clazz = _getClass( clazzName );
}
catch ( JSException e ){
throw e;
}
catch ( Exception e ){
throw new JSException( "can't find class for [" + clazzName + "]" );
}
try {
return clazz.getField( fieldNameJS.toString() ).get( null );
}
catch ( NoSuchFieldException n ){
throw new JSException( "can't find field [" + fieldNameJS + "] from [" + clazz.getName() + "]" );
}
catch ( Throwable t ){
throw new JSException( "can't get field [" + fieldNameJS + "] from [" + clazz.getName() + "] b/c " + t );
}
}
}
public static class print extends JSFunctionCalls1 {
    // Whether a trailing newline is emitted after each value.
    final boolean _newLine;

    /** Default: behaves like println. */
    print(){
        this( true );
    }

    /** @param newLine true for println semantics, false for print */
    print( boolean newLine ){
        super();
        _newLine = newLine;
    }

    /** Writes the argument to stdout and yields null (JS undefined). */
    public Object call( Scope scope , Object value , Object extra[] ){
        if ( _newLine ){
            System.out.println( value );
        }
        else {
            System.out.print( value );
        }
        return null;
    }
}
/** The JS Object constructor: Object() yields a fresh empty object. */
public static JSFunction NewObject = new JSFunctionCalls0(){
    public Object call( Scope scope , Object extra[] ){
        return new JSObjectBase();
    }
    protected void init(){
        // Object.extend( dest , src ): shallow-copies every key of src onto
        // dest and returns dest.
        set( "extend", new JSFunctionCalls2(){
            public Object call( Scope s , Object dest , Object src , Object [] extra ){
                JSObject target = (JSObject)dest;
                JSObject source = (JSObject)src;
                for ( String key : source.keySet() )
                    target.set( key , source.get( key ) );
                return dest;
            }
        });
    }
};
public static class NewDate extends JSFunctionCalls1 {
    /**
     * Date( t ): wraps t as epoch millis when t is numeric; for anything else
     * (including null — which fails the instanceof test too) returns "now".
     */
    public Object call( Scope scope , Object t , Object extra[] ){
        return ( t instanceof Number )
            ? new JSDate( ((Number)t).longValue() )
            : new JSDate();
    }
}
public static class CrID extends JSFunctionCalls1 {
    /**
     * ObjectId( s ): parses s (via toString) as a database ObjectId, or
     * generates a fresh one when no argument is given.
     */
    public Object call( Scope scope , Object idString , Object extra[] ){
        return ( idString == null )
            ? ed.db.ObjectId.get()
            : new ed.db.ObjectId( idString.toString() );
    }
}
public static class sleep extends JSFunctionCalls1 {
    /**
     * sleep( millis ): blocks the calling thread. Returns true on success,
     * false for a non-numeric argument or an interrupted sleep (the
     * interruption is swallowed, matching the engine's best-effort style).
     */
    public Object call( Scope scope , Object timeObj , Object extra[] ){
        if ( ! ( timeObj instanceof Number ) )
            return false;
        long millis = ((Number)timeObj).longValue();
        try {
            Thread.sleep( millis );
            return true;
        }
        catch ( Exception interrupted ){
            return false;
        }
    }
}
/** Predicate builtin: true iff the argument is an instance of one class. */
public static class isXXX extends JSFunctionCalls1 {
    final Class _c;

    isXXX( Class c ){
        _c = c;
    }

    public Object call( Scope scope , Object o , Object extra[] ){
        // isInstance handles null (returns false) like the instanceof operator.
        return Boolean.valueOf( _c.isInstance( o ) );
    }
}
/** Predicate builtin: true iff the argument is an instance of ANY given class. */
public static class isXXXs extends JSFunctionCalls1 {
    final Class _c[];

    isXXXs( Class ... c ){
        _c = c;
    }

    public Object call( Scope scope , Object o , Object extra[] ){
        for ( Class candidate : _c ){
            if ( candidate.isInstance( o ) )
                return true;
        }
        return false;
    }
}
public static class sysexec extends JSFunctionCalls1 {

    /**
     * Splits a command line on whitespace, re-joining double-quoted runs of
     * tokens into single arguments (the surrounding quotes are stripped).
     */
    static String[] fix( String s ){
        String base[] = s.split( "\\s+" );
        List<String> fixed = new ArrayList<String>(); // was a raw ArrayList
        boolean changed = false;
        for ( int i=0; i<base.length; i++ ){
            if ( ! base[i].startsWith( "\"" ) ){
                fixed.add( base[i] );
                continue;
            }
            // Find the token that closes the quote, then glue tokens i..end together.
            int end = i;
            while( end < base.length && ! base[end].endsWith( "\"" ) )
                end++;
            String joined = base[i++].substring( 1 ); // drop the opening quote
            for ( ; i<=end && i < base.length; i++ )
                joined += " " + base[i];
            i--; // outer loop's i++ moves past the quoted run
            if ( joined.endsWith( "\"" ) )
                joined = joined.substring( 0 , joined.length() - 1 );
            fixed.add( joined );
            changed = true;
        }
        if ( changed ){
            System.out.println( fixed ); // NOTE(review): debug leftover on stdout — consider removing
            base = fixed.toArray( new String[ fixed.size() ] );
        }
        return base;
    }

    /**
     * sysexec( cmd [, stdin [, env ] ] ): runs cmd with the scope's root as
     * working directory and returns a JS object with "out" and "err" holding
     * the captured stdout/stderr. Restricted to core (trusted) JS.
     */
    public Object call( Scope scope , Object o , Object extra[] ){
        if ( o == null )
            return null;
        if ( ! Security.isCoreJS() )
            throw new JSException( "can't exec this" );

        File root = scope.getRoot();
        if ( root == null )
            throw new JSException( "no root" );

        String cmd[] = fix( o.toString() );
        String env[] = new String[]{};

        // Optional first extra: data piped to the child's stdin.
        String toSend = null;
        if ( extra != null && extra.length > 0 )
            toSend = extra[0].toString();

        // Optional second extra: a JS object of NAME=value environment entries.
        if ( extra != null && extra.length > 1 && extra[1] instanceof JSObject ){
            JSObject envObj = (JSObject)extra[1];
            env = new String[ envObj.keySet().size() ];
            int pos = 0;
            for ( String name : envObj.keySet() ){
                Object val = envObj.get( name );
                if ( val == null )
                    val = "";
                env[pos++] = name + "=" + val.toString();
            }
        }

        try {
            Process p = Runtime.getRuntime().exec( cmd , env , root );
            if ( toSend != null ){
                OutputStream out = p.getOutputStream();
                // NOTE(review): platform default charset — confirm UTF-8 isn't required here
                out.write( toSend.getBytes() );
                out.close();
            }
            JSObject res = new JSObjectBase();
            res.set( "out" , StreamUtil.readFully( p.getInputStream() ) );
            res.set( "err" , StreamUtil.readFully( p.getErrorStream() ) );
            return res;
        }
        catch ( Throwable t ){
            throw new JSException( t.toString() , t );
        }
    }
}
public static class fork extends JSFunctionCalls1 {
    /**
     * fork( func , args... ): wraps func in a new Thread and returns it.
     * NOTE(review): the thread is returned without start() being invoked here —
     * presumably the JS caller starts it; confirm against callers.
     * The anonymous subclass adds returnData(), which joins the thread and
     * yields the function's result (null if the call threw).
     */
    public Object call( final Scope scope , final Object funcJS , final Object extra[] ){
        if ( ! ( funcJS instanceof JSFunction ) )
            throw new JSException( "fork has to take a function" );
        final JSFunction func = (JSFunction)funcJS;
        final Thread t = new Thread( "fork" ){
                public void run(){
                    try {
                        // Runs the JS function in this thread against the captured scope.
                        _result = func.call( scope , extra );
                    }
                    catch ( Throwable t ){
                        // Prefer the scope's logger when one is installed; else stderr.
                        if ( scope.get( "log" ) != null )
                            ((Logger)scope.get( "log" ) ).error( "error in fork" , t );
                        else
                            t.printStackTrace();
                    }
                }
                /** Blocks until the thread finishes, then returns the function's result. */
                public Object returnData()
                    throws InterruptedException {
                    join();
                    return _result;
                }
                // Result of func.call, written by run() and read after join().
                private Object _result;
            };
        return t;
    }
}
// Shared scope holding every built-in global; installed as an ancestor of user scopes.
static Scope _myScope = new Scope( "Built-Ins" , null );

static {
    // --- process / output / thread helpers ---
    _myScope.put( "sysexec" , new sysexec() , true );
    _myScope.put( "print" , new print() , true );
    _myScope.put( "printnoln" , new print( false ) , true );
    _myScope.put( "SYSOUT" , new print() , true );
    _myScope.put( "sleep" , new sleep() , true );
    _myScope.put( "fork" , new fork() , true );

    // --- core JS constructors ---
    _myScope.put( "Object" , NewObject , true );
    _myScope.put( "Array" , JSArray._cons , true );
    _myScope.put( "Date" , JSDate._cons , true );
    _myScope.put( "String" , JSString._cons , true );
    _myScope.put( "RegExp" , JSRegex._cons , true );
    _myScope.put( "XMLHttpRequest" , XMLHttpRequest._cons , true );
    _myScope.put( "Function" , JSInternalFunctions.FunctionCons , true );
    _myScope.put( "Math" , JSMath.getInstance() , true );

    // ObjectId constructor, registered under every spelling callers use.
    CrID crid = new CrID();
    _myScope.put( "CrID" , crid , true );
    _myScope.put( "ObjID" , crid , true );
    _myScope.put( "ObjId" , crid , true );
    _myScope.put( "ObjectID" , crid , true );
    _myScope.put( "ObjectId" , crid , true );

    _myScope.put( "Base64" , new ed.util.Base64() , true );
    _myScope.put( "download" , HttpDownload.DOWNLOAD , true );
    _myScope.put( "JSCaptcha" , new JSCaptcha() , true );

    // parseBool: true iff the string form starts with 't' or 'T'.
    _myScope.put( "parseBool" , new JSFunctionCalls1(){
            public Object call( Scope scope , Object b , Object extra[] ){
                if ( b == null )
                    return false;
                String s = b.toString();
                if ( s.length() == 0 )
                    return false;
                char c = s.charAt( 0 );
                return c == 't' || c == 'T';
            }
        } , true );

    // --- numeric parsing ---
    _myScope.put( "Number" , JSNumber.CONS , true );
    _myScope.put( "parseNumber" , JSNumber.CONS , true );

    // parseFloat: Double.parseDouble with NaN on any failure.
    _myScope.put( "parseFloat" ,
                  new JSFunctionCalls1(){
                      public Object call( Scope scope , Object a , Object extra[] ){
                          if ( a == null )
                              return Double.NaN;
                          try {
                              return Double.parseDouble( a.toString() );
                          }
                          catch ( Exception e ){}
                          return Double.NaN;
                      }
                  }
                  , true );

    // parseInt( s [, radix] ): radix defaults to 10; NaN on failure.
    _myScope.put( "parseInt" ,
                  new JSFunctionCalls2(){
                      public Object call( Scope scope , Object a , Object b , Object extra[] ){
                          if ( a == null )
                              return Double.NaN;
                          // Numbers short-circuit: just truncate.
                          if ( a instanceof Number )
                              return ((Number)a).intValue();
                          String s = a.toString();
                          try {
                              if ( b != null && b instanceof Number ){
                                  return StringParseUtil.parseIntRadix( s , ((Number)b).intValue() );
                              }
                              return StringParseUtil.parseIntRadix( s , 10 );
                          }
                          catch ( Exception e ){}
                          return Double.NaN;
                      }
                  }
                  , true );

    _myScope.put( "NaN" , Double.NaN , true );

    // md5( s ): hex digest; the shared MD5 instance is guarded by its own lock.
    _myScope.put( "md5" , new JSFunctionCalls1(){
            public Object call( Scope scope , Object b , Object extra[] ){
                synchronized ( _myMd5 ){
                    _myMd5.Init();
                    _myMd5.Update( b.toString() );
                    return new JSString( _myMd5.asHex() );
                }
            }
            private final MD5 _myMd5 = new MD5();
        } , true );

    // --- type predicates ---
    _myScope.put( "isArray" , new isXXX( JSArray.class ) , true );
    _myScope.put( "isBool" , new isXXX( Boolean.class ) , true );
    _myScope.put( "isNumber" , new isXXX( Number.class ) , true );
    _myScope.put( "isObject" , new isXXX( JSObject.class ) , true );
    _myScope.put( "isDate" , new isXXX( JSDate.class ) , true );
    _myScope.put( "isFunction" , new isXXX( JSFunction.class ) , true );
    _myScope.put( "isString" , new isXXXs( String.class , JSString.class ) , true );

    // --- single-character predicates (see getChar below for coercion rules) ---
    _myScope.put( "isAlpha" , new JSFunctionCalls1(){
            public Object call( Scope scope , Object o , Object extra[] ){
                char c = getChar( o );
                return Character.isLetter( c );
            }
        } , true );
    _myScope.put( "isSpace" , new JSFunctionCalls1(){
            public Object call( Scope scope , Object o , Object extra[] ){
                char c = getChar( o );
                return Character.isWhitespace( c );
            }
        } , true );
    _myScope.put( "isDigit" , new JSFunctionCalls1(){
            public Object call( Scope scope , Object o , Object extra[] ){
                char c = getChar( o );
                return Character.isDigit( c );
            }
        } , true );

    // --- java interop + misc ---
    _myScope.put( "assert" , new jsassert() , true );
    _myScope.put( "javaCreate" , new javaCreate() , true );
    _myScope.put( "javaStatic" , new javaStatic() , true );
    _myScope.put( "javaStaticProp" , new javaStaticProp() , true );

    // escape: URL-encode, but keep spaces as %20 rather than '+'.
    _myScope.put( "escape" , new JSFunctionCalls1(){
            public Object call( Scope scope , Object o , Object extra[] ){
                return java.net.URLEncoder.encode( o.toString() ).replaceAll( "\\+" , "%20" );
            }
        } , true );

    JSON.init( _myScope );

    // mail stuff till i'm done
    _myScope.put( "JAVAXMAILTO" , javax.mail.Message.RecipientType.TO , true );
}
/**
 * Coerces an argument to a single char: a Character passes through, a
 * JSString/String of length 1 yields its only char, anything else yields
 * the NUL character (0) as a "no char" sentinel.
 */
private static char getChar( Object obj ){
    if ( obj instanceof Character )
        return ((Character)obj).charValue();
    // Normalize JSString to a plain String, then accept only length-1 strings.
    Object value = ( obj instanceof JSString ) ? obj.toString() : obj;
    if ( value instanceof String && ((String)value).length() == 1 )
        return ((String)value).charAt( 0 );
    return 0;
}
}
|
Comment.
|
src/main/ed/js/engine/JSBuiltInFunctions.java
|
Comment.
|
|
Java
|
apache-2.0
|
01daaf581546050a2da02c1881f026c3f9e0858f
| 0
|
kmels/bitcoinj,GroestlCoin/groestlcoinj,bitcoinj/bitcoinj,natzei/bitcoinj,yenliangl/bitcoinj,yenliangl/bitcoinj,GroestlCoin/groestlcoinj,schildbach/bitcoinj,kmels/bitcoinj,GroestlCoin/groestlcoinj,peterdettman/bitcoinj,bitcoinj/bitcoinj,schildbach/bitcoinj,natzei/bitcoinj,peterdettman/bitcoinj
|
/*
* Copyright 2011 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.core;
import com.google.common.base.Objects;
import org.bitcoinj.script.*;
import org.bitcoinj.wallet.Wallet;
import org.slf4j.*;
import javax.annotation.*;
import java.io.*;
import java.util.Arrays;
import java.util.List;
import static com.google.common.base.Preconditions.*;
/**
* <p>A TransactionOutput message contains a scriptPubKey that controls who is able to spend its value. It is a sub-part
* of the Transaction message.</p>
*
* <p>Instances of this class are not safe for use by multiple threads.</p>
*/
public class TransactionOutput extends ChildMessage {
    private static final Logger log = LoggerFactory.getLogger(TransactionOutput.class);

    // The output's value is kept as a native type in order to save class instances.
    private long value;

    // A transaction output has a script used for authenticating that the redeemer is allowed to spend
    // this output.
    private byte[] scriptBytes;

    // The script bytes are parsed and turned into a Script on demand.
    private Script scriptPubKey;

    // These fields are not Bitcoin serialized. They are used for tracking purposes in our wallet
    // only. If set to true, this output is counted towards our balance. If false and spentBy is null the tx output
    // was owned by us and was sent to somebody else. If false and spentBy is set it means this output was owned by
    // us and used in one of our own transactions (eg, because it is a change output).
    private boolean availableForSpending;
    @Nullable private TransactionInput spentBy;

    private int scriptLen;

    /**
     * Deserializes a transaction output message. This is usually part of a transaction message.
     */
    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, byte[] payload,
                             int offset) throws ProtocolException {
        super(params, payload, offset);
        setParent(parent);
        availableForSpending = true;
    }

    /**
     * Deserializes a transaction output message. This is usually part of a transaction message.
     *
     * @param params NetworkParameters object.
     * @param payload Bitcoin protocol formatted byte array containing message content.
     * @param offset The location of the first payload byte within the array.
     * @param serializer the serializer to use for this message.
     * @throws ProtocolException
     */
    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, byte[] payload, int offset, MessageSerializer serializer) throws ProtocolException {
        super(params, payload, offset, parent, serializer, UNKNOWN_LENGTH);
        availableForSpending = true;
    }

    /**
     * Creates an output that sends 'value' to the given address (public key hash). The amount should be created with
     * something like {@link Coin#valueOf(int, int)}. Typically you would use
     * {@link Transaction#addOutput(Coin, Address)} instead of creating a TransactionOutput directly.
     */
    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, Coin value, Address to) {
        this(params, parent, value, ScriptBuilder.createOutputScript(to).getProgram());
    }

    /**
     * Creates an output that sends 'value' to the given public key using a simple CHECKSIG script (no addresses). The
     * amount should be created with something like {@link Coin#valueOf(int, int)}. Typically you would use
     * {@link Transaction#addOutput(Coin, ECKey)} instead of creating an output directly.
     */
    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, Coin value, ECKey to) {
        this(params, parent, value, ScriptBuilder.createOutputScript(to).getProgram());
    }

    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, Coin value, byte[] scriptBytes) {
        super(params);
        // Negative values obviously make no sense, except for -1 which is used as a sentinel value when calculating
        // SIGHASH_SINGLE signatures, so unfortunately we have to allow that here.
        checkArgument(value.signum() >= 0 || value.equals(Coin.NEGATIVE_SATOSHI), "Negative values not allowed");
        checkArgument(!params.hasMaxMoney() || value.compareTo(params.getMaxMoney()) <= 0, "Values larger than MAX_MONEY not allowed");
        this.value = value.value;
        this.scriptBytes = scriptBytes;
        setParent(parent);
        availableForSpending = true;
        length = 8 + VarInt.sizeOf(scriptBytes.length) + scriptBytes.length;
    }

    public Script getScriptPubKey() throws ScriptException {
        if (scriptPubKey == null) {
            scriptPubKey = new Script(scriptBytes);
        }
        return scriptPubKey;
    }

    @Nullable
    @Deprecated
    public LegacyAddress getAddressFromP2PKHScript(NetworkParameters params) throws ScriptException {
        if (ScriptPattern.isPayToPubKeyHash(getScriptPubKey()))
            return LegacyAddress.fromPubKeyHash(params,
                    ScriptPattern.extractHashFromPayToPubKeyHash(getScriptPubKey()));
        return null;
    }

    @Nullable
    @Deprecated
    public LegacyAddress getAddressFromP2SH(NetworkParameters params) throws ScriptException {
        if (ScriptPattern.isPayToScriptHash(getScriptPubKey()))
            return LegacyAddress.fromScriptHash(params, ScriptPattern.extractHashFromPayToScriptHash(getScriptPubKey()));
        return null;
    }

    @Override
    protected void parse() throws ProtocolException {
        value = readInt64();
        scriptLen = (int) readVarInt();
        length = cursor - offset + scriptLen;
        scriptBytes = readBytes(scriptLen);
    }

    @Override
    protected void bitcoinSerializeToStream(OutputStream stream) throws IOException {
        checkNotNull(scriptBytes);
        Utils.int64ToByteStreamLE(value, stream);
        // TODO: Move script serialization into the Script class, where it belongs.
        stream.write(new VarInt(scriptBytes.length).encode());
        stream.write(scriptBytes);
    }

    /**
     * Returns the value of this output. This is the amount of currency that the destination address
     * receives.
     */
    public Coin getValue() {
        try {
            return Coin.valueOf(value);
        } catch (IllegalArgumentException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    /**
     * Sets the value of this output.
     */
    public void setValue(Coin value) {
        checkNotNull(value);
        unCache();
        this.value = value.value;
    }

    /**
     * Gets the index of this output in the parent transaction, or throws if this output is free standing. Iterates
     * over the parents list to discover this.
     */
    public int getIndex() {
        List<TransactionOutput> outputs = getParentTransaction().getOutputs();
        for (int i = 0; i < outputs.size(); i++) {
            if (outputs.get(i) == this)
                return i;
        }
        throw new IllegalStateException("Output linked to wrong parent transaction?");
    }

    /**
     * Will this transaction be relayable and mined by default miners?
     */
    public boolean isDust() {
        // Transactions that are OP_RETURN can't be dust regardless of their value.
        if (ScriptPattern.isOpReturn(getScriptPubKey()))
            return false;
        return getValue().isLessThan(getMinNonDustValue());
    }

    /**
     * <p>Gets the minimum value for a txout of this size to be considered non-dust by Bitcoin Core
     * (and thus relayed). See: CTxOut::IsDust() in Bitcoin Core. The assumption is that any output that would
     * consume more than a third of its value in fees is not something the Bitcoin system wants to deal with right now,
     * so we call them "dust outputs" and they're made non standard. The choice of one third is somewhat arbitrary and
     * may change in future.</p>
     *
     * <p>You probably should use {@link TransactionOutput#getMinNonDustValue()} which uses
     * a safe fee-per-kb by default.</p>
     *
     * @param feePerKb The fee required per kilobyte. Note that this is the same as Bitcoin Core's -minrelaytxfee * 3
     */
    public Coin getMinNonDustValue(Coin feePerKb) {
        // A typical output is 33 bytes (pubkey hash + opcodes) and requires an input of 148 bytes to spend so we add
        // that together to find out the total amount of data used to transfer this amount of value. Note that this
        // formula is wrong for anything that's not a P2PKH output, unfortunately, we must follow Bitcoin Core's
        // wrongness in order to ensure we're considered standard. A better formula would either estimate the
        // size of data needed to satisfy all different script types, or just hard code 33 below.
        final long size = this.unsafeBitcoinSerialize().length + 148;
        return feePerKb.multiply(size).divide(1000);
    }

    /**
     * Returns the minimum value for this output to be considered "not dust", i.e. the transaction will be relayable
     * and mined by default miners. For normal pay to address outputs, this is 2730 satoshis, the same as
     * {@link Transaction#MIN_NONDUST_OUTPUT}.
     */
    public Coin getMinNonDustValue() {
        return getMinNonDustValue(Transaction.REFERENCE_DEFAULT_MIN_TX_FEE.multiply(3));
    }

    /**
     * Sets this objects availableForSpending flag to false and the spentBy pointer to the given input.
     * If the input is null, it means this output was signed over to somebody else rather than one of our own keys.
     * @throws IllegalStateException if the transaction was already marked as spent.
     */
    public void markAsSpent(TransactionInput input) {
        checkState(availableForSpending);
        availableForSpending = false;
        spentBy = input;
        // FIX: the previous unbraced form suffered from a dangling else — the
        // "else" bound to the inner "if (log.isDebugEnabled())", so with debug
        // logging disabled and parent != null the "floating output" branch ran.
        if (log.isDebugEnabled()) {
            if (parent != null)
                log.debug("Marked {}:{} as spent by {}", getParentTransactionHash(), getIndex(), input);
            else
                log.debug("Marked floating output as spent by {}", input);
        }
    }

    /**
     * Resets the spent pointer / availableForSpending flag to null.
     */
    public void markAsUnspent() {
        // FIX: braces added — same dangling-else hazard as markAsSpent above.
        if (log.isDebugEnabled()) {
            if (parent != null)
                log.debug("Un-marked {}:{} as spent by {}", getParentTransactionHash(), getIndex(), spentBy);
            else
                log.debug("Un-marked floating output as spent by {}", spentBy);
        }
        availableForSpending = true;
        spentBy = null;
    }

    /**
     * Returns whether {@link TransactionOutput#markAsSpent(TransactionInput)} has been called on this class. A
     * {@link Wallet} will mark a transaction output as spent once it sees a transaction input that is connected to it.
     * Note that this flag can be false when an output has in fact been spent according to the rest of the network if
     * the spending transaction wasn't downloaded yet, and it can be marked as spent when in reality the rest of the
     * network believes it to be unspent if the signature or script connecting to it was not actually valid.
     */
    public boolean isAvailableForSpending() {
        return availableForSpending;
    }

    /**
     * The backing script bytes which can be turned into a Script object.
     * @return the scriptBytes
     */
    public byte[] getScriptBytes() {
        return scriptBytes;
    }

    /**
     * Returns true if this output is to a key in the wallet or to an address/script we are watching.
     */
    public boolean isMineOrWatched(TransactionBag transactionBag) {
        return isMine(transactionBag) || isWatched(transactionBag);
    }

    /**
     * Returns true if this output is to a key, or an address we have the keys for, in the wallet.
     */
    public boolean isWatched(TransactionBag transactionBag) {
        try {
            Script script = getScriptPubKey();
            return transactionBag.isWatchedScript(script);
        } catch (ScriptException e) {
            // Just means we didn't understand the output of this transaction: ignore it.
            log.debug("Could not parse tx output script: {}", e.toString());
            return false;
        }
    }

    /**
     * Returns true if this output is to a key, or an address we have the keys for, in the wallet.
     */
    public boolean isMine(TransactionBag transactionBag) {
        try {
            Script script = getScriptPubKey();
            if (ScriptPattern.isPayToPubKey(script))
                return transactionBag.isPubKeyMine(ScriptPattern.extractKeyFromPayToPubKey(script));
            else if (ScriptPattern.isPayToScriptHash(script))
                return transactionBag.isPayToScriptHashMine(ScriptPattern.extractHashFromPayToScriptHash(script));
            else if (ScriptPattern.isPayToPubKeyHash(script))
                return transactionBag.isPubKeyHashMine(ScriptPattern.extractHashFromPayToPubKeyHash(script));
            else
                return false;
        } catch (ScriptException e) {
            // Just means we didn't understand the output of this transaction: ignore it.
            log.debug("Could not parse tx {} output script: {}", parent != null ? parent.getHash() : "(no parent)", e.toString());
            return false;
        }
    }

    /**
     * Returns a human readable debug string.
     */
    @Override
    public String toString() {
        try {
            Script script = getScriptPubKey();
            StringBuilder buf = new StringBuilder("TxOut of ");
            buf.append(Coin.valueOf(value).toFriendlyString());
            if (ScriptPattern.isPayToPubKeyHash(script) || ScriptPattern.isPayToScriptHash(script))
                buf.append(" to ").append(script.getToAddress(params));
            else if (ScriptPattern.isPayToPubKey(script))
                buf.append(" to pubkey ").append(Utils.HEX.encode(ScriptPattern.extractKeyFromPayToPubKey(script)));
            else if (ScriptPattern.isSentToMultisig(script))
                buf.append(" to multisig");
            else
                buf.append(" (unknown type)");
            buf.append(" script:").append(script);
            return buf.toString();
        } catch (ScriptException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the connected input.
     */
    @Nullable
    public TransactionInput getSpentBy() {
        return spentBy;
    }

    /**
     * Returns the transaction that owns this output.
     */
    @Nullable
    public Transaction getParentTransaction() {
        return (Transaction)parent;
    }

    /**
     * Returns the transaction hash that owns this output.
     */
    @Nullable
    public Sha256Hash getParentTransactionHash() {
        return parent == null ? null : parent.getHash();
    }

    /**
     * Returns the depth in blocks of the parent tx.
     *
     * <p>If the transaction appears in the top block, the depth is one. If it's anything else (pending, dead, unknown)
     * then -1.</p>
     * @return The tx depth or -1.
     */
    public int getParentTransactionDepthInBlocks() {
        if (getParentTransaction() != null) {
            TransactionConfidence confidence = getParentTransaction().getConfidence();
            if (confidence.getConfidenceType() == TransactionConfidence.ConfidenceType.BUILDING) {
                return confidence.getDepthInBlocks();
            }
        }
        return -1;
    }

    /**
     * Returns a new {@link TransactionOutPoint}, which is essentially a structure pointing to this output.
     * Requires that this output is not detached.
     */
    public TransactionOutPoint getOutPointFor() {
        return new TransactionOutPoint(params, getIndex(), getParentTransaction());
    }

    /** Returns a copy of the output detached from its containing transaction, if need be. */
    public TransactionOutput duplicateDetached() {
        return new TransactionOutput(params, null, Coin.valueOf(value), Arrays.copyOf(scriptBytes, scriptBytes.length));
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        TransactionOutput other = (TransactionOutput) o;
        // NOTE(review): equals ignores parent when either side is detached, but
        // hashCode below includes parent — equal objects can hash differently;
        // confirm whether these are ever used as hash keys before relying on it.
        return value == other.value && (parent == null || (parent == other.parent && getIndex() == other.getIndex()))
                && Arrays.equals(scriptBytes, other.scriptBytes);
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(value, parent, Arrays.hashCode(scriptBytes));
    }
}
|
core/src/main/java/org/bitcoinj/core/TransactionOutput.java
|
/*
* Copyright 2011 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.core;
import com.google.common.base.Objects;
import org.bitcoinj.script.*;
import org.bitcoinj.wallet.Wallet;
import org.slf4j.*;
import javax.annotation.*;
import java.io.*;
import java.util.Arrays;
import java.util.List;
import static com.google.common.base.Preconditions.*;
/**
* <p>A TransactionOutput message contains a scriptPubKey that controls who is able to spend its value. It is a sub-part
* of the Transaction message.</p>
*
* <p>Instances of this class are not safe for use by multiple threads.</p>
*/
public class TransactionOutput extends ChildMessage {
private static final Logger log = LoggerFactory.getLogger(TransactionOutput.class);
// The output's value is kept as a native type in order to save class instances.
private long value;
// A transaction output has a script used for authenticating that the redeemer is allowed to spend
// this output.
private byte[] scriptBytes;
// The script bytes are parsed and turned into a Script on demand.
private Script scriptPubKey;
// These fields are not Bitcoin serialized. They are used for tracking purposes in our wallet
// only. If set to true, this output is counted towards our balance. If false and spentBy is null the tx output
// was owned by us and was sent to somebody else. If false and spentBy is set it means this output was owned by
// us and used in one of our own transactions (eg, because it is a change output).
private boolean availableForSpending;
@Nullable private TransactionInput spentBy;
private int scriptLen;
/**
* Deserializes a transaction output message. This is usually part of a transaction message.
*/
public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, byte[] payload,
int offset) throws ProtocolException {
super(params, payload, offset);
setParent(parent);
availableForSpending = true;
}
/**
* Deserializes a transaction output message. This is usually part of a transaction message.
*
* @param params NetworkParameters object.
* @param payload Bitcoin protocol formatted byte array containing message content.
* @param offset The location of the first payload byte within the array.
* @param serializer the serializer to use for this message.
* @throws ProtocolException
*/
public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, byte[] payload, int offset, MessageSerializer serializer) throws ProtocolException {
super(params, payload, offset, parent, serializer, UNKNOWN_LENGTH);
availableForSpending = true;
}
/**
* Creates an output that sends 'value' to the given address (public key hash). The amount should be created with
* something like {@link Coin#valueOf(int, int)}. Typically you would use
* {@link Transaction#addOutput(Coin, Address)} instead of creating a TransactionOutput directly.
*/
public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, Coin value, Address to) {
this(params, parent, value, ScriptBuilder.createOutputScript(to).getProgram());
}
/**
* Creates an output that sends 'value' to the given public key using a simple CHECKSIG script (no addresses). The
* amount should be created with something like {@link Coin#valueOf(int, int)}. Typically you would use
* {@link Transaction#addOutput(Coin, ECKey)} instead of creating an output directly.
*/
public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, Coin value, ECKey to) {
this(params, parent, value, ScriptBuilder.createOutputScript(to).getProgram());
}
public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, Coin value, byte[] scriptBytes) {
super(params);
// Negative values obviously make no sense, except for -1 which is used as a sentinel value when calculating
// SIGHASH_SINGLE signatures, so unfortunately we have to allow that here.
checkArgument(value.signum() >= 0 || value.equals(Coin.NEGATIVE_SATOSHI), "Negative values not allowed");
checkArgument(!params.hasMaxMoney() || value.compareTo(params.getMaxMoney()) <= 0, "Values larger than MAX_MONEY not allowed");
this.value = value.value;
this.scriptBytes = scriptBytes;
setParent(parent);
availableForSpending = true;
length = 8 + VarInt.sizeOf(scriptBytes.length) + scriptBytes.length;
}
public Script getScriptPubKey() throws ScriptException {
if (scriptPubKey == null) {
scriptPubKey = new Script(scriptBytes);
}
return scriptPubKey;
}
@Nullable
@Deprecated
public LegacyAddress getAddressFromP2PKHScript(NetworkParameters params) throws ScriptException {
if (ScriptPattern.isPayToPubKeyHash(getScriptPubKey()))
return LegacyAddress.fromPubKeyHash(params,
ScriptPattern.extractHashFromPayToPubKeyHash(getScriptPubKey()));
return null;
}
@Nullable
@Deprecated
public LegacyAddress getAddressFromP2SH(NetworkParameters params) throws ScriptException {
if (ScriptPattern.isPayToScriptHash(getScriptPubKey()))
return LegacyAddress.fromScriptHash(params, ScriptPattern.extractHashFromPayToScriptHash(getScriptPubKey()));
return null;
}
@Override
protected void parse() throws ProtocolException {
value = readInt64();
scriptLen = (int) readVarInt();
length = cursor - offset + scriptLen;
scriptBytes = readBytes(scriptLen);
}
@Override
protected void bitcoinSerializeToStream(OutputStream stream) throws IOException {
checkNotNull(scriptBytes);
Utils.int64ToByteStreamLE(value, stream);
// TODO: Move script serialization into the Script class, where it belongs.
stream.write(new VarInt(scriptBytes.length).encode());
stream.write(scriptBytes);
}
/**
* Returns the value of this output. This is the amount of currency that the destination address
* receives.
*/
public Coin getValue() {
try {
return Coin.valueOf(value);
} catch (IllegalArgumentException e) {
throw new IllegalStateException(e.getMessage(), e);
}
}
/**
* Sets the value of this output.
*/
public void setValue(Coin value) {
checkNotNull(value);
unCache();
this.value = value.value;
}
/**
* Gets the index of this output in the parent transaction, or throws if this output is free standing. Iterates
* over the parents list to discover this.
*/
public int getIndex() {
List<TransactionOutput> outputs = getParentTransaction().getOutputs();
for (int i = 0; i < outputs.size(); i++) {
if (outputs.get(i) == this)
return i;
}
throw new IllegalStateException("Output linked to wrong parent transaction?");
}
/**
* Will this transaction be relayable and mined by default miners?
*/
public boolean isDust() {
// Transactions that are OP_RETURN can't be dust regardless of their value.
if (ScriptPattern.isOpReturn(getScriptPubKey()))
return false;
return getValue().isLessThan(getMinNonDustValue());
}
/**
* <p>Gets the minimum value for a txout of this size to be considered non-dust by Bitcoin Core
* (and thus relayed). See: CTxOut::IsDust() in Bitcoin Core. The assumption is that any output that would
* consume more than a third of its value in fees is not something the Bitcoin system wants to deal with right now,
* so we call them "dust outputs" and they're made non standard. The choice of one third is somewhat arbitrary and
* may change in future.</p>
*
* <p>You probably should use {@link TransactionOutput#getMinNonDustValue()} which uses
* a safe fee-per-kb by default.</p>
*
* @param feePerKb The fee required per kilobyte. Note that this is the same as Bitcoin Core's -minrelaytxfee * 3
*/
public Coin getMinNonDustValue(Coin feePerKb) {
// A typical output is 33 bytes (pubkey hash + opcodes) and requires an input of 148 bytes to spend so we add
// that together to find out the total amount of data used to transfer this amount of value. Note that this
// formula is wrong for anything that's not a P2PKH output, unfortunately, we must follow Bitcoin Core's
// wrongness in order to ensure we're considered standard. A better formula would either estimate the
// size of data needed to satisfy all different script types, or just hard code 33 below.
final long size = this.unsafeBitcoinSerialize().length + 148;
return feePerKb.multiply(size).divide(1000);
}
/**
* Returns the minimum value for this output to be considered "not dust", i.e. the transaction will be relayable
* and mined by default miners. For normal pay to address outputs, this is 2730 satoshis, the same as
* {@link Transaction#MIN_NONDUST_OUTPUT}.
*/
public Coin getMinNonDustValue() {
return getMinNonDustValue(Transaction.REFERENCE_DEFAULT_MIN_TX_FEE.multiply(3));
}
/**
* Sets this objects availableForSpending flag to false and the spentBy pointer to the given input.
* If the input is null, it means this output was signed over to somebody else rather than one of our own keys.
* @throws IllegalStateException if the transaction was already marked as spent.
*/
public void markAsSpent(TransactionInput input) {
checkState(availableForSpending);
availableForSpending = false;
spentBy = input;
if (parent != null)
if (log.isDebugEnabled()) log.debug("Marked {}:{} as spent by {}", getParentTransactionHash(), getIndex(), input);
else
if (log.isDebugEnabled()) log.debug("Marked floating output as spent by {}", input);
}
/**
* Resets the spent pointer / availableForSpending flag to null.
*/
public void markAsUnspent() {
if (parent != null)
if (log.isDebugEnabled()) log.debug("Un-marked {}:{} as spent by {}", getParentTransactionHash(), getIndex(), spentBy);
else
if (log.isDebugEnabled()) log.debug("Un-marked floating output as spent by {}", spentBy);
availableForSpending = true;
spentBy = null;
}
/**
* Returns whether {@link TransactionOutput#markAsSpent(TransactionInput)} has been called on this class. A
* {@link Wallet} will mark a transaction output as spent once it sees a transaction input that is connected to it.
* Note that this flag can be false when an output has in fact been spent according to the rest of the network if
* the spending transaction wasn't downloaded yet, and it can be marked as spent when in reality the rest of the
* network believes it to be unspent if the signature or script connecting to it was not actually valid.
*/
    // Simple flag accessor; see markAsSpent()/markAsUnspent() for how the flag is maintained.
    public boolean isAvailableForSpending() {
        return availableForSpending;
    }
/**
* The backing script bytes which can be turned into a Script object.
* @return the scriptBytes
*/
    /**
     * The backing script bytes which can be turned into a Script object.
     * Note: returns the internal array directly (no defensive copy) — callers must not mutate it.
     * @return the scriptBytes
     */
    public byte[] getScriptBytes() {
        return scriptBytes;
    }
/**
* Returns true if this output is to a key in the wallet or to an address/script we are watching.
*/
    /**
     * Returns true if this output is to a key in the wallet or to an address/script we are watching.
     * Convenience disjunction of {@link #isMine} and {@link #isWatched}.
     *
     * @param transactionBag the wallet-like container queried for key/script ownership
     */
    public boolean isMineOrWatched(TransactionBag transactionBag) {
        return isMine(transactionBag) || isWatched(transactionBag);
    }
/**
* Returns true if this output is to a key, or an address we have the keys for, in the wallet.
*/
public boolean isWatched(TransactionBag transactionBag) {
try {
Script script = getScriptPubKey();
return transactionBag.isWatchedScript(script);
} catch (ScriptException e) {
// Just means we didn't understand the output of this transaction: ignore it.
log.debug("Could not parse tx output script: {}", e.toString());
return false;
}
}
/**
* Returns true if this output is to a key, or an address we have the keys for, in the wallet.
*/
public boolean isMine(TransactionBag transactionBag) {
try {
Script script = getScriptPubKey();
if (ScriptPattern.isPayToPubKey(script)) {
return transactionBag.isPubKeyMine(ScriptPattern.extractKeyFromPayToPubKey(script));
} if (ScriptPattern.isPayToScriptHash(script)) {
return transactionBag.isPayToScriptHashMine(ScriptPattern.extractHashFromPayToScriptHash(script));
} else {
byte[] pubkeyHash = script.getPubKeyHash();
return transactionBag.isPubKeyHashMine(pubkeyHash);
}
} catch (ScriptException e) {
// Just means we didn't understand the output of this transaction: ignore it.
log.debug("Could not parse tx {} output script: {}", parent != null ? parent.getHash() : "(no parent)", e.toString());
return false;
}
}
/**
* Returns a human readable debug string.
*/
@Override
public String toString() {
try {
Script script = getScriptPubKey();
StringBuilder buf = new StringBuilder("TxOut of ");
buf.append(Coin.valueOf(value).toFriendlyString());
if (ScriptPattern.isPayToPubKeyHash(script) || ScriptPattern.isPayToScriptHash(script))
buf.append(" to ").append(script.getToAddress(params));
else if (ScriptPattern.isPayToPubKey(script))
buf.append(" to pubkey ").append(Utils.HEX.encode(ScriptPattern.extractKeyFromPayToPubKey(script)));
else if (ScriptPattern.isSentToMultisig(script))
buf.append(" to multisig");
else
buf.append(" (unknown type)");
buf.append(" script:").append(script);
return buf.toString();
} catch (ScriptException e) {
throw new RuntimeException(e);
}
}
/**
* Returns the connected input.
*/
    /**
     * Returns the connected input, or null if this output is unspent (or was marked spent
     * by a transaction we don't own — see {@link #markAsSpent}).
     */
    @Nullable
    public TransactionInput getSpentBy() {
        return spentBy;
    }
/**
* Returns the transaction that owns this output.
*/
    /**
     * Returns the transaction that owns this output, or null if this is a free-standing
     * (detached) output. The parent field is stored as a supertype, hence the cast.
     */
    @Nullable
    public Transaction getParentTransaction() {
        return (Transaction)parent;
    }
/**
* Returns the transaction hash that owns this output.
*/
@Nullable
public Sha256Hash getParentTransactionHash() {
return parent == null ? null : parent.getHash();
}
/**
* Returns the depth in blocks of the parent tx.
*
* <p>If the transaction appears in the top block, the depth is one. If it's anything else (pending, dead, unknown)
* then -1.</p>
* @return The tx depth or -1.
*/
public int getParentTransactionDepthInBlocks() {
if (getParentTransaction() != null) {
TransactionConfidence confidence = getParentTransaction().getConfidence();
if (confidence.getConfidenceType() == TransactionConfidence.ConfidenceType.BUILDING) {
return confidence.getDepthInBlocks();
}
}
return -1;
}
/**
* Returns a new {@link TransactionOutPoint}, which is essentially a structure pointing to this output.
* Requires that this output is not detached.
*/
    /**
     * Returns a new {@link TransactionOutPoint}, which is essentially a structure pointing to this output.
     * Requires that this output is not detached (getIndex() throws otherwise).
     */
    public TransactionOutPoint getOutPointFor() {
        return new TransactionOutPoint(params, getIndex(), getParentTransaction());
    }
/** Returns a copy of the output detached from its containing transaction, if need be. */
public TransactionOutput duplicateDetached() {
return new TransactionOutput(params, null, Coin.valueOf(value), Arrays.copyOf(scriptBytes, scriptBytes.length));
}
    /**
     * Equality is defined by value, script bytes, and — only when this output has a parent —
     * identity of parent plus position within it. Two detached outputs with equal value and
     * script therefore compare equal.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        TransactionOutput other = (TransactionOutput) o;
        return value == other.value && (parent == null || (parent == other.parent && getIndex() == other.getIndex()))
            && Arrays.equals(scriptBytes, other.scriptBytes);
    }
    /**
     * Hash derived from value, parent reference and script bytes, consistent with equals()
     * (Guava's Objects.hashCode).
     */
    @Override
    public int hashCode() {
        return Objects.hashCode(value, parent, Arrays.hashCode(scriptBytes));
    }
}
|
TransactionOutput: Fix incomplete isMine() implementation.
|
core/src/main/java/org/bitcoinj/core/TransactionOutput.java
|
TransactionOutput: Fix incomplete isMine() implementation.
|
|
Java
|
apache-2.0
|
b4f9ce763954447709fd1c28b5ef2a6f10e81b4b
| 0
|
GoogleCloudPlatform/processing-logs-using-dataflow,GoogleCloudPlatform/processing-logs-using-dataflow,GoogleCloudPlatform/processing-logs-using-dataflow
|
package com.google.cloud.solutions;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.PipelineResult;
import com.google.cloud.dataflow.sdk.io.BigQueryIO;
import com.google.cloud.dataflow.sdk.io.TextIO;
import com.google.cloud.dataflow.sdk.options.DataflowPipelineOptions;
import com.google.cloud.dataflow.sdk.options.DataflowWorkerLoggingOptions;
import com.google.cloud.dataflow.sdk.options.Description;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.transforms.*;
import com.google.cloud.dataflow.sdk.transforms.windowing.FixedWindows;
import com.google.cloud.dataflow.sdk.transforms.windowing.Window;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.PCollection;
import com.google.cloud.dataflow.sdk.values.PCollectionList;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class LogAnalyticsPipeline {
    private static final Logger LOG = LoggerFactory.getLogger(LogAnalyticsPipeline.class);

    @SuppressWarnings("unused")
    public interface LogAnalyticsPipelineOptions extends DataflowPipelineOptions {
        @Description("Log sources configuration file")
        String getConfigFile();
        void setConfigFile(String configFile);
    }

    /**
     * Loads pipeline settings from a java.util.Properties file.
     * On I/O failure the stack trace is printed and an empty Properties object is returned.
     */
    private static Properties parseConfigFile(String configFileName) {
        Properties p = new Properties();
        LOG.debug("loading properties from " + configFileName);
        // try-with-resources guarantees the stream is closed on every path,
        // replacing the nested close()-in-finally of the previous version.
        try (InputStream configFile = new FileInputStream(configFileName)) {
            p.load(configFile);
            LOG.debug("loaded " + p.size() + " properties");
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        return p;
    }

    /**
     * Parses a raw CloudLogging JSON entry into a LogEntry, optionally propagating
     * the entry's own timestamp as the Dataflow element timestamp.
     */
    private static class EmitLogEntryFn extends DoFn<String,LogEntry> {
        private boolean withTimestamp;
        private String regexPattern;

        public EmitLogEntryFn(boolean withTimestamp, String regexPattern) {
            this.withTimestamp = withTimestamp;
            this.regexPattern = regexPattern;
        }

        @Override
        public void processElement(ProcessContext c) {
            if (withTimestamp) {
                Instant timestamp = getTimestampFromEntry(c.element());
                LogEntry logEntry = parseEntry(c.element());
                c.outputWithTimestamp(logEntry, timestamp);
            }
            else {
                LogEntry logEntry = parseEntry(c.element());
                c.output(logEntry);
            }
        }

        /** Extracts metadata.timestamp (an ISO-8601 string) from the JSON entry. */
        private Instant getTimestampFromEntry(String entry) {
            JsonParser parser = new JsonParser();
            JsonElement rootElement = parser.parse(entry);
            JsonObject metadata = rootElement
                .getAsJsonObject()
                .getAsJsonObject("metadata");
            // BUGFIX: "timestamp" is a JSON string, not an object. The previous code
            // called getAsString() on a JsonObject, which always throws
            // UnsupportedOperationException in Gson.
            String timestamp = metadata.get("timestamp").getAsString();
            DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
            return fmt.parseDateTime(timestamp).toInstant();
        }

        /** Parses structPayload.log with the configured regex into a LogEntry. */
        private LogEntry parseEntry(String entry) {
            JsonParser parser = new JsonParser();
            JsonElement rootElement = parser.parse(entry);
            JsonObject structPayload = rootElement
                .getAsJsonObject()
                .getAsJsonObject("structPayload");
            // BUGFIX: "log" is a JSON string field; read it via get(...).getAsString()
            // rather than getAsJsonObject(...).getAsString(), which always throws.
            String logLine = structPayload.get("log").getAsString();
            Pattern p = Pattern.compile(this.regexPattern);
            Matcher m = p.matcher(logLine);
            // BUGFIX: Matcher.group(name) throws IllegalStateException unless a match
            // attempt (find/matches) has been made first.
            if (!m.find()) {
                throw new IllegalStateException("Log entry does not match pattern: " + logLine);
            }
            DateTimeFormatter fmt = DateTimeFormat.forPattern("yyy/MM/dd - HH:mm:ss");
            Instant timestamp = fmt.parseDateTime(m.group("timestamp")).toInstant();
            int httpStatusCode = Integer.parseInt(m.group("httpStatusCode"));
            double responseTime = 0;
            // BUGFIX: compare string contents with equals(), not reference identity (==);
            // the previous comparisons were effectively always false.
            String resolution = m.group("resolution");
            if ("µs".equals(resolution)) {
                responseTime = Double.valueOf(m.group("responseTime")) / 1000000;
            }
            else if ("ms".equals(resolution)) {
                responseTime = Double.valueOf(m.group("responseTime")) / 1000;
            }
            String source = m.group("source");
            String httpMethod = m.group("httpMethod");
            String destination = m.group("destination");
            return new LogEntry(timestamp, httpStatusCode, responseTime, source, httpMethod, destination);
        }
    }

    /** Converts a LogEntry into a BigQuery TableRow, one column per field. */
    private static class LogEntryTableRowFn extends DoFn<LogEntry, TableRow> {
        @Override
        public void processElement(ProcessContext c) {
            LogEntry e = c.element();
            TableRow row = new TableRow()
                .set("timestamp", e.getTimestamp().toDateTime())
                .set("httpStatusCode", e.getHttpStatusCode())
                .set("responseTime", e.getResponseTime())
                .set("source", e.getSource())
                .set("httpMethod", e.getHttpMethod())
                .set("destination", e.getDestination());
            c.output(row);
        }
    }

    /** Converts a (destination, responseTime) aggregate pair into a TableRow. */
    private static class AggregateTableRowFn extends DoFn<KV<String,Double>, TableRow> {
        @Override
        public void processElement(ProcessContext c) {
            KV<String,Double> e = c.element();
            TableRow row = new TableRow()
                .set("destination", e.getKey())
                .set("responseTime", e.getValue());
            c.output(row);
        }
    }

    /**
     * Builds a TableSchema from a "name:TYPE,name:TYPE,..." string as read from
     * the .properties configuration.
     */
    private static TableSchema createTableSchema(String schema) {
        String[] fieldTypePairs = schema.split(",");
        List<TableFieldSchema> fields = new ArrayList<>();
        for (String entry : fieldTypePairs) {
            String[] fieldAndType = entry.split(":");
            fields.add(new TableFieldSchema().setName(fieldAndType[0]).setType(fieldAndType[1]));
        }
        return new TableSchema().setFields(fields);
    }

    public static void main(String[] args) {
        // Setup LogAnalyticsPipelineOptions
        PipelineOptionsFactory.register(LogAnalyticsPipelineOptions.class);
        LogAnalyticsPipelineOptions options = PipelineOptionsFactory
            .fromArgs(args)
            .withValidation()
            .as(LogAnalyticsPipelineOptions.class);
        // Set DEBUG log level for all workers
        options.setDefaultWorkerLogLevel(DataflowWorkerLoggingOptions.Level.DEBUG);
        // Get .properties file containing pipeline options
        Properties props = parseConfigFile(options.getConfigFile());
        String logRegexPattern = props.getProperty("logRegexPattern");
        // Create pipeline
        Pipeline p = Pipeline.create(options);
        // PCollection from
        // - Cloud Storage source
        // - Extract individual LogEntry objects from each PubSub CloudLogging message (structPayload.log)
        // - Change windowing from Global to 1 day fixed windows
        PCollection<LogEntry> homeLogsDaily = p
            .apply(TextIO.Read.named("homeLogsRead").from(props.getProperty("homeLogSource")))
            .apply(ParDo.named("homeLogsToLogEntry").of(new EmitLogEntryFn(true, logRegexPattern)))
            .apply(Window.named("homeLogEntryToDaily").<LogEntry>into(FixedWindows.of(Duration.standardDays(1))));
        // PCollection from
        // - Cloud Storage source
        // - Extract individual LogEntry objects from CloudLogging message (structPayload.log)
        // - Change windowing from "all" to 1 day fixed windows
        PCollection<LogEntry> browseLogsDaily = p
            .apply(TextIO.Read.named("browseLogsRead").from(props.getProperty("browseLogSource")))
            .apply(ParDo.named("browseLogsToLogEntry").of(new EmitLogEntryFn(true, logRegexPattern)))
            .apply(Window.named("browseLogEntryToDaily").<LogEntry>into(FixedWindows.of(Duration.standardDays(1))));
        // PCollection from
        // - Cloud Storage source
        // - Extract individual LogEntry objects from CloudLogging message (structPayload.log)
        // - Change windowing from "all" to 1 day fixed windows
        PCollection<LogEntry> locateLogsDaily = p
            .apply(TextIO.Read.named("locateLogsRead").from(props.getProperty("locateLogSource")))
            .apply(ParDo.named("locateLogsToLogEntry").of(new EmitLogEntryFn(true, logRegexPattern)))
            .apply(Window.named("locateLogEntryToDaily").<LogEntry>into(FixedWindows.of(Duration.standardDays(1))));
        // Create a single PCollection by flattening three (windowed) PCollections
        PCollection<LogEntry> logCollection = PCollectionList
            .of(homeLogsDaily)
            .and(browseLogsDaily)
            .and(locateLogsDaily)
            .apply(Flatten.<LogEntry>pCollections());
        // Create new PCollection containing LogEntry->TableRow
        PCollection<TableRow> logsAsTableRows = logCollection
            .apply(ParDo.named("logEntryToTableRow").of(new LogEntryTableRowFn()));
        // Output TableRow PCollection into BigQuery
        // - Append all writes to existing rows
        // - Create the table if it does not exist already
        TableSchema allLogsTableSchema = createTableSchema(props.getProperty("allLogsTableSchema"));
        logsAsTableRows.apply(BigQueryIO.Write
            .named("allLogsToBigQuery")
            .to(props.getProperty("allLogsTableName"))
            .withSchema(allLogsTableSchema)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED));
        // Create new PCollection
        // - Contains "destination,responseTime" key-value pairs
        // - Used for running simple aggregations
        PCollection<KV<String,Double>> destResponseTimeCollection = logCollection
            .apply(ParDo.named("logEntryToDestRespTime").of(new DoFn<LogEntry, KV<String, Double>>() {
                @Override
                public void processElement(ProcessContext processContext) throws Exception {
                    LogEntry l = processContext.element();
                    processContext.output(KV.of(l.getDestination(), l.getResponseTime()));
                }
            }));
        // Create new PCollection
        // - First, find the max responseTime for each destination
        // - Convert the result into PCollection of TableRow
        PCollection<TableRow> destMaxRespTimeRows = destResponseTimeCollection
            .apply(Combine.<String,Double,Double>perKey(new Max.MaxDoubleFn()))
            .apply(ParDo.named("maxRespTimeToTableRow").of(new AggregateTableRowFn()));
        // Output destination->maxResponseTime PCollection to BigQuery
        TableSchema maxRespTimeTableSchema = createTableSchema(props.getProperty("maxRespTimeTableSchema"));
        destMaxRespTimeRows.apply(BigQueryIO.Write
            .named("maxRespTimeToBigQuery")
            .to(props.getProperty("maxRespTimeTableName"))
            .withSchema(maxRespTimeTableSchema)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED));
        // Create new PCollection
        // - First, compute the mean responseTime for each destination
        // - Convert the result into PCollection of TableRow
        PCollection<TableRow> destMeanRespTimeRows = destResponseTimeCollection
            .apply(Mean.<String,Double>perKey())
            .apply(ParDo.named("meanRespTimeToTableRow").of(new AggregateTableRowFn()));
        // Output destination->meanResponseTime PCollection to BigQuery
        TableSchema meanRespTimeTableSchema = createTableSchema(props.getProperty("meanRespTimeTableSchema"));
        destMeanRespTimeRows.apply(BigQueryIO.Write
            .named("meanRespTimeToBigQuery")
            .to(props.getProperty("meanRespTimeTableName"))
            .withSchema(meanRespTimeTableSchema)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED));
        PipelineResult r = p.run();
        LOG.info(r.toString());
    }
}
|
dataflow/src/main/java/com/google/cloud/solutions/LogAnalyticsPipeline.java
|
package com.google.cloud.solutions;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.PipelineResult;
import com.google.cloud.dataflow.sdk.io.BigQueryIO;
import com.google.cloud.dataflow.sdk.io.TextIO;
import com.google.cloud.dataflow.sdk.options.DataflowPipelineOptions;
import com.google.cloud.dataflow.sdk.options.DataflowWorkerLoggingOptions;
import com.google.cloud.dataflow.sdk.options.Description;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.transforms.*;
import com.google.cloud.dataflow.sdk.transforms.windowing.FixedWindows;
import com.google.cloud.dataflow.sdk.transforms.windowing.Window;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.PCollection;
import com.google.cloud.dataflow.sdk.values.PCollectionList;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class LogAnalyticsPipeline {
    private static final Logger LOG = LoggerFactory.getLogger(LogAnalyticsPipeline.class);

    @SuppressWarnings("unused")
    public interface LogAnalyticsPipelineOptions extends DataflowPipelineOptions {
        @Description("Log sources configuration file")
        String getConfigFile();
        void setConfigFile(String configFile);
    }

    /**
     * Loads pipeline settings from a java.util.Properties file.
     * On I/O failure the stack trace is printed and an empty Properties object is returned.
     */
    private static Properties parseConfigFile(String configFileName) {
        Properties p = new Properties();
        LOG.debug("loading properties from " + configFileName);
        // try-with-resources guarantees the stream is closed on every path,
        // replacing the nested close()-in-finally of the previous version.
        try (InputStream configFile = new FileInputStream(configFileName)) {
            p.load(configFile);
            LOG.debug("loaded " + p.size() + " properties");
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        return p;
    }

    /**
     * Parses a raw CloudLogging JSON entry into a LogEntry, optionally propagating
     * the entry's own timestamp as the Dataflow element timestamp.
     */
    private static class EmitLogEntryFn extends DoFn<String,LogEntry> {
        private boolean withTimestamp;
        private String regexPattern;

        public EmitLogEntryFn(boolean withTimestamp, String regexPattern) {
            this.withTimestamp = withTimestamp;
            this.regexPattern = regexPattern;
        }

        @Override
        public void processElement(ProcessContext c) {
            if (withTimestamp) {
                Instant timestamp = getTimestampFromEntry(c.element());
                LogEntry logEntry = parseEntry(c.element());
                c.outputWithTimestamp(logEntry, timestamp);
            }
            else {
                LogEntry logEntry = parseEntry(c.element());
                c.output(logEntry);
            }
        }

        /** Extracts metadata.timestamp (an ISO-8601 string) from the JSON entry. */
        private Instant getTimestampFromEntry(String entry) {
            JsonParser parser = new JsonParser();
            JsonElement rootElement = parser.parse(entry);
            JsonObject metadata = rootElement
                .getAsJsonObject()
                .getAsJsonObject("metadata");
            // BUGFIX: "timestamp" is a JSON string, not an object. The previous code
            // called getAsString() on a JsonObject, which always throws
            // UnsupportedOperationException in Gson.
            String timestamp = metadata.get("timestamp").getAsString();
            DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
            return fmt.parseDateTime(timestamp).toInstant();
        }

        /** Parses structPayload.log with the configured regex into a LogEntry. */
        private LogEntry parseEntry(String entry) {
            JsonParser parser = new JsonParser();
            JsonElement rootElement = parser.parse(entry);
            JsonObject structPayload = rootElement
                .getAsJsonObject()
                .getAsJsonObject("structPayload");
            // BUGFIX: "log" is a JSON string field; read it via get(...).getAsString()
            // rather than getAsJsonObject(...).getAsString(), which always throws.
            String logLine = structPayload.get("log").getAsString();
            Pattern p = Pattern.compile(this.regexPattern);
            Matcher m = p.matcher(logLine);
            // BUGFIX: Matcher.group(name) throws IllegalStateException unless a match
            // attempt (find/matches) has been made first.
            if (!m.find()) {
                throw new IllegalStateException("Log entry does not match pattern: " + logLine);
            }
            DateTimeFormatter fmt = DateTimeFormat.forPattern("yyy/MM/dd - HH:mm:ss");
            Instant timestamp = fmt.parseDateTime(m.group("timestamp")).toInstant();
            int httpStatusCode = Integer.parseInt(m.group("httpStatusCode"));
            double responseTime = 0;
            // BUGFIX: compare string contents with equals(), not reference identity (==);
            // the previous comparisons were effectively always false.
            String resolution = m.group("resolution");
            if ("µs".equals(resolution)) {
                responseTime = Double.valueOf(m.group("responseTime")) / 1000000;
            }
            else if ("ms".equals(resolution)) {
                responseTime = Double.valueOf(m.group("responseTime")) / 1000;
            }
            String source = m.group("source");
            String httpMethod = m.group("httpMethod");
            String destination = m.group("destination");
            return new LogEntry(timestamp, httpStatusCode, responseTime, source, httpMethod, destination);
        }
    }

    /** Converts a LogEntry into a BigQuery TableRow, one column per field. */
    private static class LogEntryTableRowFn extends DoFn<LogEntry, TableRow> {
        @Override
        public void processElement(ProcessContext c) {
            LogEntry e = c.element();
            TableRow row = new TableRow()
                .set("timestamp", e.getTimestamp().toDateTime())
                .set("httpStatusCode", e.getHttpStatusCode())
                .set("responseTime", e.getResponseTime())
                .set("source", e.getSource())
                .set("httpMethod", e.getHttpMethod())
                .set("destination", e.getDestination());
            c.output(row);
        }
    }

    /** Converts a (destination, responseTime) aggregate pair into a TableRow. */
    private static class AggregateTableRowFn extends DoFn<KV<String,Double>, TableRow> {
        @Override
        public void processElement(ProcessContext c) {
            KV<String,Double> e = c.element();
            TableRow row = new TableRow()
                .set("destination", e.getKey())
                .set("responseTime", e.getValue());
            c.output(row);
        }
    }

    /**
     * Builds a TableSchema from a "name:TYPE,name:TYPE,..." string as read from
     * the .properties configuration.
     */
    private static TableSchema createTableSchema(String schema) {
        String[] fieldTypePairs = schema.split(",");
        List<TableFieldSchema> fields = new ArrayList<>();
        for (String entry : fieldTypePairs) {
            String[] fieldAndType = entry.split(":");
            fields.add(new TableFieldSchema().setName(fieldAndType[0]).setType(fieldAndType[1]));
        }
        return new TableSchema().setFields(fields);
    }

    public static void main(String[] args) {
        // Setup LogAnalyticsPipelineOptions
        PipelineOptionsFactory.register(LogAnalyticsPipelineOptions.class);
        LogAnalyticsPipelineOptions options = PipelineOptionsFactory
            .fromArgs(args)
            .withValidation()
            .as(LogAnalyticsPipelineOptions.class);
        // Set DEBUG log level for all workers
        options.setDefaultWorkerLogLevel(DataflowWorkerLoggingOptions.Level.DEBUG);
        // Get .properties file containing pipeline options
        Properties props = parseConfigFile(options.getConfigFile());
        String logRegexPattern = props.getProperty("logRegexPattern");
        // Create pipeline
        Pipeline p = Pipeline.create(options);
        // PCollection from
        // - Cloud Storage source
        // - Extract individual LogEntry objects from each PubSub CloudLogging message (structPayload.log)
        // - Change windowing from Global to 1 day fixed windows
        PCollection<LogEntry> homeLogsDaily = p
            .apply(TextIO.Read.from(props.getProperty("homeLogSource")))
            .apply(ParDo.named("homeLogsToLogEntry").of(new EmitLogEntryFn(true, logRegexPattern)))
            .apply(Window.named("homeLogEntryToDaily").<LogEntry>into(FixedWindows.of(Duration.standardDays(1))));
        // PCollection from
        // - Cloud Storage source
        // - Extract individual LogEntry objects from CloudLogging message (structPayload.log)
        // - Change windowing from "all" to 1 day fixed windows
        PCollection<LogEntry> browseLogsDaily = p
            .apply(TextIO.Read.from(props.getProperty("browseLogSource")))
            .apply(ParDo.named("browseLogsToLogEntry").of(new EmitLogEntryFn(true, logRegexPattern)))
            .apply(Window.named("browseLogEntryToDaily").<LogEntry>into(FixedWindows.of(Duration.standardDays(1))));
        // PCollection from
        // - Cloud Storage source
        // - Extract individual LogEntry objects from CloudLogging message (structPayload.log)
        // - Change windowing from "all" to 1 day fixed windows
        PCollection<LogEntry> locateLogsDaily = p
            .apply(TextIO.Read.from(props.getProperty("locateLogSource")))
            .apply(ParDo.named("locateLogsToLogEntry").of(new EmitLogEntryFn(true, logRegexPattern)))
            .apply(Window.named("locateLogEntryToDaily").<LogEntry>into(FixedWindows.of(Duration.standardDays(1))));
        // Create a single PCollection by flattening three (windowed) PCollections
        PCollection<LogEntry> logCollection = PCollectionList
            .of(homeLogsDaily)
            .and(browseLogsDaily)
            .and(locateLogsDaily)
            .apply(Flatten.<LogEntry>pCollections());
        // Create new PCollection containing LogEntry->TableRow
        PCollection<TableRow> logsAsTableRows = logCollection
            .apply(ParDo.named("logEntryToTableRow").of(new LogEntryTableRowFn()));
        // Output TableRow PCollection into BigQuery
        // - Append all writes to existing rows
        // - Create the table if it does not exist already
        TableSchema allLogsTableSchema = createTableSchema(props.getProperty("allLogsTableSchema"));
        logsAsTableRows.apply(BigQueryIO.Write
            .named("allLogsToBigQuery")
            .to(props.getProperty("allLogsTableName"))
            .withSchema(allLogsTableSchema)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED));
        // Create new PCollection
        // - Contains "destination,responseTime" key-value pairs
        // - Used for running simple aggregations
        PCollection<KV<String,Double>> destResponseTimeCollection = logCollection
            .apply(ParDo.named("logEntryToDestRespTime").of(new DoFn<LogEntry, KV<String, Double>>() {
                @Override
                public void processElement(ProcessContext processContext) throws Exception {
                    LogEntry l = processContext.element();
                    processContext.output(KV.of(l.getDestination(), l.getResponseTime()));
                }
            }));
        // Create new PCollection
        // - First, find the max responseTime for each destination
        // - Convert the result into PCollection of TableRow
        PCollection<TableRow> destMaxRespTimeRows = destResponseTimeCollection
            .apply(Combine.<String,Double,Double>perKey(new Max.MaxDoubleFn()))
            .apply(ParDo.named("maxRespTimeToTableRow").of(new AggregateTableRowFn()));
        // Output destination->maxResponseTime PCollection to BigQuery
        TableSchema maxRespTimeTableSchema = createTableSchema(props.getProperty("maxRespTimeTableSchema"));
        destMaxRespTimeRows.apply(BigQueryIO.Write
            .named("maxRespTimeToBigQuery")
            .to(props.getProperty("maxRespTimeTableName"))
            .withSchema(maxRespTimeTableSchema)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED));
        // Create new PCollection
        // - First, compute the mean responseTime for each destination
        // - Convert the result into PCollection of TableRow
        PCollection<TableRow> destMeanRespTimeRows = destResponseTimeCollection
            .apply(Mean.<String,Double>perKey())
            .apply(ParDo.named("meanRespTimeToTableRow").of(new AggregateTableRowFn()));
        // Output destination->meanResponseTime PCollection to BigQuery
        TableSchema meanRespTimeTableSchema = createTableSchema(props.getProperty("meanRespTimeTableSchema"));
        destMeanRespTimeRows.apply(BigQueryIO.Write
            .named("meanRespTimeToBigQuery")
            .to(props.getProperty("meanRespTimeTableName"))
            .withSchema(meanRespTimeTableSchema)
            .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED));
        PipelineResult r = p.run();
        LOG.info(r.toString());
    }
}
|
named Read.from() operations
|
dataflow/src/main/java/com/google/cloud/solutions/LogAnalyticsPipeline.java
|
named Read.from() operations
|
|
Java
|
apache-2.0
|
d4183bc0dbd844a691031d6282be1446613fc86d
| 0
|
badvision/acs-aem-commons,Adobe-Consulting-Services/acs-aem-commons,Sivaramvt/acs-aem-commons,SachinMali/acs-aem-commons,npeltier/acs-aem-commons,dherges/acs-aem-commons,TKELuke/acs-aem-commons,npeltier/acs-aem-commons,cpilsworth/acs-aem-commons,dfoerderreuther/acs-aem-commons,steffenrosi/acs-aem-commons,TKELuke/acs-aem-commons,badvision/acs-aem-commons,SachinMali/acs-aem-commons,dherges/acs-aem-commons,SachinMali/acs-aem-commons,MikePfaff/acs-aem-commons,npeltier/acs-aem-commons,cpilsworth/acs-aem-commons,schoudry/acs-aem-commons,cpilsworth/acs-aem-commons,dfoerderreuther/acs-aem-commons,zhishan-summer-2013/acs-aem-commons,TKELuke/acs-aem-commons,bstopp/acs-aem-commons,zhishan-summer-2013/acs-aem-commons,MikePfaff/acs-aem-commons,MikePfaff/acs-aem-commons,zhishan-summer-2013/acs-aem-commons,dfoerderreuther/acs-aem-commons,bstopp/acs-aem-commons,schoudry/acs-aem-commons,cpilsworth/acs-aem-commons,bstopp/acs-aem-commons,steffenrosi/acs-aem-commons,schoudry/acs-aem-commons,bstopp/acs-aem-commons,npeltier/acs-aem-commons,badvision/acs-aem-commons,steffenrosi/acs-aem-commons,badvision/acs-aem-commons,Adobe-Consulting-Services/acs-aem-commons,MikePfaff/acs-aem-commons,SachinMali/acs-aem-commons,TKELuke/acs-aem-commons,Sivaramvt/acs-aem-commons,dlh3/acs-aem-commons,dfoerderreuther/acs-aem-commons,schoudry/acs-aem-commons,steffenrosi/acs-aem-commons,dherges/acs-aem-commons,dlh3/acs-aem-commons,zhishan-summer-2013/acs-aem-commons,Sivaramvt/acs-aem-commons,Sivaramvt/acs-aem-commons,dlh3/acs-aem-commons,Adobe-Consulting-Services/acs-aem-commons,dherges/acs-aem-commons,Adobe-Consulting-Services/acs-aem-commons
|
/*
* #%L
* ACS AEM Commons Bundle
* %%
* Copyright (C) 2013 - 2014 Adobe
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.adobe.acs.commons.logging.impl;
import org.apache.felix.scr.annotations.*;
import org.apache.jackrabbit.util.ISO8601;
import org.apache.sling.commons.json.JSONArray;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.JSONObject;
import org.apache.sling.commons.osgi.PropertiesUtil;
import org.apache.sling.event.EventUtil;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventConstants;
import org.osgi.service.event.EventHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* Logs OSGi Events for any set of topics to an SLF4j Logger Category, as JSON objects.
*/
@Component(metatype = true, configurationFactory = true, policy = ConfigurationPolicy.REQUIRE,
label = "ACS AEM Commons - JSON Event Logger", description = "Logs OSGi Events for any set of topics to an SLF4j Logger Category, as JSON objects.")
@SuppressWarnings("PMD.MoreThanOneLogger")
public class JsonEventLogger implements EventHandler {
/**
* Use this logger for tracing this service instance's own lifecycle.
*/
private static final Logger log = LoggerFactory.getLogger(JsonEventLogger.class);
/** We add this timestamp property to all logged events */
private static final String PROP_TIMESTAMP = "_timestamp";
private static final String DEFAULT_LEVEL = "INFO";
/**
* A simple enum for Slf4j logging levels.
*/
private enum LogLevel {
TRACE, DEBUG, INFO, WARN, ERROR;
public static LogLevel fromProperty(String prop) {
if (prop != null) {
for (LogLevel value : values()) {
if (value.name().equalsIgnoreCase(prop)) {
return value;
}
}
}
return null;
}
}
@Property(label = "Event Topics", unbounded = PropertyUnbounded.ARRAY,
description = "This value lists the topics handled by this logger. The value is a list of strings. If the string ends with a star, all topics in this package and all subpackages match. If the string does not end with a star, this is assumed to define an exact topic.")
private static final String OSGI_TOPICS = EventConstants.EVENT_TOPIC;
@Property(label = "Event Filter", description = "LDAP-style event filter query. Leave blank to log all events to the configured topic or topics.")
private static final String OSGI_FILTER = EventConstants.EVENT_FILTER;
@Property(label = "Logger Name", description = "The Sling SLF4j Logger Name or Category to send the JSON messages to. Leave empty to disable the logger.")
private static final String OSGI_CATEGORY = "event.logger.category";
@Property(label = "Logger Level", value = DEFAULT_LEVEL, options = {
@PropertyOption(name = "TRACE", value = "Trace"),
@PropertyOption(name = "DEBUG", value = "Debug"),
@PropertyOption(name = "INFO", value = "Information"),
@PropertyOption(name = "WARN", value = "Warnings"),
@PropertyOption(name = "ERROR", value = "Error")
}, description = "Select the logging level the messages should be sent with.")
private static final String OSGI_LEVEL = "event.logger.level";
private String[] topics;
private String filter;
private String category;
private String level;
private boolean valid;
/**
* Suppress the PMD.LoggerIsNotStaticFinal check because the point is to have an
* SCR-configurable logger separate from the normal class-level log object defined
* above.
*/
@SuppressWarnings("PMD.LoggerIsNotStaticFinal")
private Logger eventLogger;
private LogLevel logLevel;
private ServiceRegistration registration;
/**
* Writes an event to the configured logger using the configured log level
* @param event an OSGi Event
*/
private void logEvent(Event event) {
log.trace("[logEvent] event={}", event);
try {
String message = constructMessage(event);
if (logLevel == LogLevel.ERROR) {
this.eventLogger.error(message);
} else if (logLevel == LogLevel.WARN) {
this.eventLogger.warn(message);
} else if (logLevel == LogLevel.INFO) {
this.eventLogger.info(message);
} else if (logLevel == LogLevel.DEBUG) {
this.eventLogger.debug(message);
} else if (logLevel == LogLevel.TRACE) {
this.eventLogger.trace(message);
}
} catch (JSONException e) {
log.error("[logEvent] failed to construct log message from event: " + event.toString(), e);
}
}
/**
* Determines if the logger category is enabled at the configured level
* @return true if the logger is enabled at the configured log level
*/
private boolean isLoggerEnabled() {
if (this.eventLogger != null && this.logLevel != null) {
if (logLevel == LogLevel.ERROR) {
return this.eventLogger.isErrorEnabled();
} else if (logLevel == LogLevel.WARN) {
return this.eventLogger.isWarnEnabled();
} else if (logLevel == LogLevel.INFO) {
return this.eventLogger.isInfoEnabled();
} else if (logLevel == LogLevel.DEBUG) {
return this.eventLogger.isDebugEnabled();
} else if (logLevel == LogLevel.TRACE) {
return this.eventLogger.isTraceEnabled();
}
}
return false;
}
/**
* Serializes an OSGi {@link org.osgi.service.event.Event} into a JSON object string
*
* @param event the event to be serialized as
* @return a serialized JSON object
* @throws org.apache.sling.commons.json.JSONException
*/
protected static String constructMessage(Event event) throws JSONException {
JSONObject obj = new JSONObject();
for (String prop : event.getPropertyNames()) {
Object val = event.getProperty(prop);
Object converted = convertValue(val);
obj.put(prop, converted == null ? val : converted);
}
obj.put(PROP_TIMESTAMP, ISO8601.format(Calendar.getInstance()));
return obj.toString();
}
/**
* Converts individual java objects to JSONObjects using reflection and recursion
* @param val an untyped Java object to try to convert
* @return {@code val} if not handled, or return a converted JSONObject, JSONArray, or String
* @throws JSONException
*/
@SuppressWarnings("unchecked")
protected static Object convertValue(Object val) throws JSONException {
if (val.getClass().isArray()) {
Object[] vals = (Object[]) val;
JSONArray array = new JSONArray();
for (Object arrayVal : vals) {
Object converted = convertValue(arrayVal);
array.put(converted == null ? arrayVal : converted);
}
return array;
} else if (val instanceof Collection) {
JSONArray array = new JSONArray();
for (Object arrayVal : (Collection<?>) val) {
Object converted = convertValue(arrayVal);
array.put(converted == null ? arrayVal : converted);
}
return array;
} else if (val instanceof Map) {
Map<?, ?> valMap = (Map<?, ?>) val;
JSONObject obj = new JSONObject();
if (valMap.isEmpty()) {
return obj;
} else if (valMap.keySet().iterator().next() instanceof String) {
for (Map.Entry<String, ?> entry : ((Map<String, ?>) valMap).entrySet()) {
Object converted = convertValue(entry.getValue());
obj.put(entry.getKey(), converted == null ? entry.getValue() : converted);
}
} else {
for (Map.Entry<?, ?> entry : valMap.entrySet()) {
Object converted = convertValue(entry.getValue());
obj.put(entry.getKey().toString(),
converted == null ? entry.getValue() : converted);
}
}
return obj;
} else if (val instanceof Calendar) {
try {
return ISO8601.format((Calendar) val);
} catch (IllegalArgumentException e) {
log.debug("[constructMessage] failed to convert Calendar to ISO8601 String: {}, {}", e.getMessage(), val);
}
} else if (val instanceof Date) {
try {
Calendar calendar = Calendar.getInstance();
calendar.setTime((Date) val);
return ISO8601.format(calendar);
} catch (IllegalArgumentException e) {
log.debug("[constructMessage] failed to convert Date to ISO8601 String: {}, {}", e.getMessage(), val);
}
}
return val;
}
//
// ---------------------------------------------------------< EventHandler methods >-----
//
/**
* {@inheritDoc}
*/
@Override
public void handleEvent(Event event) {
if (EventUtil.isLocal(event) && this.isLoggerEnabled()) {
logEvent(event);
}
}
//
// ---------------------------------------------------------< SCR methods >-------------
//
@Activate
protected void activate(ComponentContext ctx) {
log.trace("[activate] entered activate method.");
Dictionary<?, ?> props = ctx.getProperties();
this.topics = PropertiesUtil.toStringArray(props.get(OSGI_TOPICS));
this.filter = PropertiesUtil.toString(props.get(OSGI_FILTER), "").trim();
this.category = PropertiesUtil.toString(props.get(OSGI_CATEGORY), "").trim();
this.level = PropertiesUtil.toString(props.get(OSGI_LEVEL), DEFAULT_LEVEL);
this.logLevel = LogLevel.fromProperty(this.level);
this.valid = (this.topics != null && this.topics.length > 0 && !this.category.isEmpty());
if (this.valid) {
this.eventLogger = LoggerFactory.getLogger(this.category);
Dictionary<String, Object> registrationProps = new Hashtable<String, Object>();
registrationProps.put(EventConstants.EVENT_TOPIC, this.topics);
if (!this.filter.isEmpty()) {
registrationProps.put(EventConstants.EVENT_FILTER, this.filter);
}
this.registration = ctx.getBundleContext().registerService(EventHandler.class.getName(), this, registrationProps);
} else {
log.warn("Not registering invalid event handler. Check configuration.");
}
log.debug("[activate] logger state: {}", this);
}
@Deactivate
protected void deactivate() {
log.trace("[deactivate] entered deactivate method.");
if (this.registration != null) {
this.registration.unregister();
this.registration = null;
}
this.eventLogger = null;
this.logLevel = null;
}
//
// ---------------------------------------------------------< Object methods >-------------
//
@Override
public String toString() {
return "EventLogger{" +
"valid=" + valid +
", topics=" + Arrays.toString(topics) +
", filter='" + filter + '\'' +
", category='" + category + '\'' +
", level='" + level + '\'' +
", enabled=" + isLoggerEnabled() +
'}';
}
}
|
bundle/src/main/java/com/adobe/acs/commons/logging/impl/JsonEventLogger.java
|
/*
* #%L
* ACS AEM Commons Bundle
* %%
* Copyright (C) 2013 - 2014 Adobe
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.adobe.acs.commons.logging.impl;
import org.apache.felix.scr.annotations.*;
import org.apache.jackrabbit.util.ISO8601;
import org.apache.sling.commons.json.JSONArray;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.JSONObject;
import org.apache.sling.commons.osgi.PropertiesUtil;
import org.apache.sling.event.EventUtil;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventConstants;
import org.osgi.service.event.EventHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* Logs OSGi Events for any set of topics to an SLF4j Logger Category, as JSON objects.
*/
@Component(metatype = true, configurationFactory = true, policy = ConfigurationPolicy.REQUIRE,
label = "ACS AEM Commons - JSON Event Logger", description = "Logs OSGi Events for any set of topics to an SLF4j Logger Category, as JSON objects.")
public class JsonEventLogger implements EventHandler {
/**
* Use this logger for tracing this service instance's own lifecycle.
*/
private static final Logger log = LoggerFactory.getLogger(JsonEventLogger.class);
/** We add this timestamp property to all logged events */
private static final String PROP_TIMESTAMP = "_timestamp";
private static final String DEFAULT_LEVEL = "INFO";
/**
* A simple enum for Slf4j logging levels.
*/
private enum LogLevel {
TRACE, DEBUG, INFO, WARN, ERROR;
public static LogLevel fromProperty(String prop) {
if (prop != null) {
for (LogLevel value : values()) {
if (value.name().equalsIgnoreCase(prop)) {
return value;
}
}
}
return null;
}
}
@Property(label = "Event Topics", unbounded = PropertyUnbounded.ARRAY,
description = "This value lists the topics handled by this logger. The value is a list of strings. If the string ends with a star, all topics in this package and all subpackages match. If the string does not end with a star, this is assumed to define an exact topic.")
private static final String OSGI_TOPICS = EventConstants.EVENT_TOPIC;
@Property(label = "Event Filter", description = "LDAP-style event filter query. Leave blank to log all events to the configured topic or topics.")
private static final String OSGI_FILTER = EventConstants.EVENT_FILTER;
@Property(label = "Logger Name", description = "The Sling SLF4j Logger Name or Category to send the JSON messages to. Leave empty to disable the logger.")
private static final String OSGI_CATEGORY = "event.logger.category";
@Property(label = "Logger Level", value = DEFAULT_LEVEL, options = {
@PropertyOption(name = "TRACE", value = "Trace"),
@PropertyOption(name = "DEBUG", value = "Debug"),
@PropertyOption(name = "INFO", value = "Information"),
@PropertyOption(name = "WARN", value = "Warnings"),
@PropertyOption(name = "ERROR", value = "Error")
}, description = "Select the logging level the messages should be sent with.")
private static final String OSGI_LEVEL = "event.logger.level";
private String[] topics;
private String filter;
private String category;
private String level;
private boolean valid;
/**
* Suppress the PMD.LoggerIsNotStaticFinal check because the point is to have an
* SCR-configurable logger separate from the normal class-level log object defined
* above.
*/
@SuppressWarnings("PMD.LoggerIsNotStaticFinal")
private Logger eventLogger;
private LogLevel logLevel;
private ServiceRegistration registration;
/**
* Writes an event to the configured logger using the configured log level
* @param event an OSGi Event
*/
private void logEvent(Event event) {
log.trace("[logEvent] event={}", event);
try {
String message = constructMessage(event);
if (logLevel == LogLevel.ERROR) {
this.eventLogger.error(message);
} else if (logLevel == LogLevel.WARN) {
this.eventLogger.warn(message);
} else if (logLevel == LogLevel.INFO) {
this.eventLogger.info(message);
} else if (logLevel == LogLevel.DEBUG) {
this.eventLogger.debug(message);
} else if (logLevel == LogLevel.TRACE) {
this.eventLogger.trace(message);
}
} catch (JSONException e) {
log.error("[logEvent] failed to construct log message from event: " + event.toString(), e);
}
}
/**
* Determines if the logger category is enabled at the configured level
* @return true if the logger is enabled at the configured log level
*/
private boolean isLoggerEnabled() {
if (this.eventLogger != null && this.logLevel != null) {
if (logLevel == LogLevel.ERROR) {
return this.eventLogger.isErrorEnabled();
} else if (logLevel == LogLevel.WARN) {
return this.eventLogger.isWarnEnabled();
} else if (logLevel == LogLevel.INFO) {
return this.eventLogger.isInfoEnabled();
} else if (logLevel == LogLevel.DEBUG) {
return this.eventLogger.isDebugEnabled();
} else if (logLevel == LogLevel.TRACE) {
return this.eventLogger.isTraceEnabled();
}
}
return false;
}
/**
* Serializes an OSGi {@link org.osgi.service.event.Event} into a JSON object string
*
* @param event the event to be serialized as
* @return a serialized JSON object
* @throws org.apache.sling.commons.json.JSONException
*/
protected static String constructMessage(Event event) throws JSONException {
JSONObject obj = new JSONObject();
for (String prop : event.getPropertyNames()) {
Object val = event.getProperty(prop);
Object converted = convertValue(val);
obj.put(prop, converted == null ? val : converted);
}
obj.put(PROP_TIMESTAMP, ISO8601.format(Calendar.getInstance()));
return obj.toString();
}
/**
* Converts individual java objects to JSONObjects using reflection and recursion
* @param val an untyped Java object to try to convert
* @return {@code val} if not handled, or return a converted JSONObject, JSONArray, or String
* @throws JSONException
*/
@SuppressWarnings("unchecked")
protected static Object convertValue(Object val) throws JSONException {
if (val.getClass().isArray()) {
Object[] vals = (Object[]) val;
JSONArray array = new JSONArray();
for (Object arrayVal : vals) {
Object converted = convertValue(arrayVal);
array.put(converted == null ? arrayVal : converted);
}
return array;
} else if (val instanceof Collection) {
JSONArray array = new JSONArray();
for (Object arrayVal : (Collection<?>) val) {
Object converted = convertValue(arrayVal);
array.put(converted == null ? arrayVal : converted);
}
return array;
} else if (val instanceof Map) {
Map<?, ?> valMap = (Map<?, ?>) val;
JSONObject obj = new JSONObject();
if (valMap.isEmpty()) {
return obj;
} else if (valMap.keySet().iterator().next() instanceof String) {
for (Map.Entry<String, ?> entry : ((Map<String, ?>) valMap).entrySet()) {
Object converted = convertValue(entry.getValue());
obj.put(entry.getKey(), converted == null ? entry.getValue() : converted);
}
} else {
for (Map.Entry<?, ?> entry : valMap.entrySet()) {
Object converted = convertValue(entry.getValue());
obj.put(entry.getKey().toString(),
converted == null ? entry.getValue() : converted);
}
}
return obj;
} else if (val instanceof Calendar) {
try {
return ISO8601.format((Calendar) val);
} catch (IllegalArgumentException e) {
log.debug("[constructMessage] failed to convert Calendar to ISO8601 String: {}, {}", e.getMessage(), val);
}
} else if (val instanceof Date) {
try {
Calendar calendar = Calendar.getInstance();
calendar.setTime((Date) val);
return ISO8601.format(calendar);
} catch (IllegalArgumentException e) {
log.debug("[constructMessage] failed to convert Date to ISO8601 String: {}, {}", e.getMessage(), val);
}
}
return val;
}
//
// ---------------------------------------------------------< EventHandler methods >-----
//
/**
* {@inheritDoc}
*/
@Override
public void handleEvent(Event event) {
if (EventUtil.isLocal(event) && this.isLoggerEnabled()) {
logEvent(event);
}
}
//
// ---------------------------------------------------------< SCR methods >-------------
//
@Activate
protected void activate(ComponentContext ctx) {
log.trace("[activate] entered activate method.");
Dictionary<?, ?> props = ctx.getProperties();
this.topics = PropertiesUtil.toStringArray(props.get(OSGI_TOPICS));
this.filter = PropertiesUtil.toString(props.get(OSGI_FILTER), "").trim();
this.category = PropertiesUtil.toString(props.get(OSGI_CATEGORY), "").trim();
this.level = PropertiesUtil.toString(props.get(OSGI_LEVEL), DEFAULT_LEVEL);
this.logLevel = LogLevel.fromProperty(this.level);
this.valid = (this.topics != null && this.topics.length > 0 && !this.category.isEmpty());
if (this.valid) {
this.eventLogger = LoggerFactory.getLogger(this.category);
Dictionary<String, Object> registrationProps = new Hashtable<String, Object>();
registrationProps.put(EventConstants.EVENT_TOPIC, this.topics);
if (!this.filter.isEmpty()) {
registrationProps.put(EventConstants.EVENT_FILTER, this.filter);
}
this.registration = ctx.getBundleContext().registerService(EventHandler.class.getName(), this, registrationProps);
} else {
log.warn("Not registering invalid event handler. Check configuration.");
}
log.debug("[activate] logger state: {}", this);
}
@Deactivate
protected void deactivate() {
log.trace("[deactivate] entered deactivate method.");
if (this.registration != null) {
this.registration.unregister();
this.registration = null;
}
this.eventLogger = null;
this.logLevel = null;
}
//
// ---------------------------------------------------------< Object methods >-------------
//
@Override
public String toString() {
return "EventLogger{" +
"valid=" + valid +
", topics=" + Arrays.toString(topics) +
", filter='" + filter + '\'' +
", category='" + category + '\'' +
", level='" + level + '\'' +
", enabled=" + isLoggerEnabled() +
'}';
}
}
|
added a @SuppressWarnings("PMD.MoreThanOneLogger") annotation to JsonEventLogger to more properly address the fact that we specifically want more than one logger.
|
bundle/src/main/java/com/adobe/acs/commons/logging/impl/JsonEventLogger.java
|
added a @SuppressWarnings("PMD.MoreThanOneLogger") annotation to JsonEventLogger to more properly address the fact that we specifically want more than one logger.
|
|
Java
|
apache-2.0
|
27a3a8a3946df91bc2fb1bbe89a16667394088eb
| 0
|
pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus
|
/**
* Copyright 2007-2017 University Of Southern California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.isi.pegasus.planner.cluster.aggregator;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import edu.isi.pegasus.common.logging.LogManager;
import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.AggregatedJob;
import edu.isi.pegasus.planner.classes.Job;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.code.gridstart.PegasusLite;
import edu.isi.pegasus.planner.namespace.Globus;
import edu.isi.pegasus.planner.namespace.Pegasus;
import edu.isi.pegasus.planner.partitioner.graph.Graph;
import edu.isi.pegasus.planner.partitioner.graph.GraphNode;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.Iterator;
/**
 * Aggregates jobs together, to be launched by pegasus-aws-batch tool.
*
* @author Karan Vahi vahi@isi.edu
* @version $Revision$
*/
public class AWSBatch extends Abstract {
    /**
     * The logical name of the transformation that is able to run multiple
     * jobs via pegasus-aws-batch.
     */
    public static final String COLLAPSE_LOGICAL_NAME = "aws-batch";
    /**
     * The basename of the executable that is able to run multiple
     * jobs via pegasus-aws-batch.
     */
    // NOTE(review): intentionally non-final? confirm whether anything reassigns this.
    public static String EXECUTABLE_BASENAME = "pegasus-aws-batch";
    //the common files required for the pegasus-aws-batch
    /**
     * The basename of the script used to launch jobs in the AWS Batch via
     * the fetch and run example
     */
    public static final String PEGASUS_AWS_BATCH_LAUNCH_BASENAME = "pegasus-aws-batch-launch.sh";
    /**
     * The basename of the common PegasusLite shell functions file shipped
     * alongside each batch job (shared with the PegasusLite code generator).
     */
    public static final String PEGASUS_LITE_COMMON_FILE_BASENAME = PegasusLite.PEGASUS_LITE_COMMON_FILE_BASENAME;
    /**
     * The environment variable that designates the key used by fetch_and_run.sh
     * executable in batch containers
     */
    public static final String BATCH_FILE_TYPE_KEY = "BATCH_FILE_TYPE";
    /**
     * The environment variable that designates the key used by fetch_and_run.sh
     * executable for batch containers to pull the user script from s3
     */
    public static final String BATCH_FILE_S3_URL_KEY = "BATCH_FILE_S3_URL";
    /**
     * The environment variable that designates the key for AWS Batch s3 bucket
     */
    // NOTE(review): intentionally non-final? confirm whether anything reassigns this.
    public static String PEGASUS_AWS_BATCH_BUCKET_KEY = "PEGASUS_AWS_BATCH_BUCKET";
    /**
     * The default constructor. The aggregator is not usable until
     * {@link #initialize} has been invoked.
     */
    public AWSBatch(){
        super();
    }
    /**
     * Initializes the JobAggregator implementation by delegating to the
     * base class.
     *
     * @param dag the workflow that is being clustered.
     * @param bag the bag of objects that is useful for initialization.
     */
    public void initialize( ADag dag , PegasusBag bag ){
        super.initialize(dag, bag);
    }
/**
* Enables the abstract clustered job for execution and converts it to it's
* executable form. Also associates the post script that should be invoked
* for the AggregatedJob
*
* @param job the abstract clustered job
*/
public void makeAbstractAggregatedJobConcrete( AggregatedJob job ){
//PM-962 for PMC aggregated values for runtime and memory don't get mapped
//to the PMC job itself
super.makeAbstractAggregatedJobConcrete(job);
return;
}
    /**
     * Writes out the input file for the aggregated job, named
     * {@code <jobid>.in} inside the job's relative submit directory.
     *
     * @param job the aggregated job
     *
     * @return the generated input file
     */
    protected File writeOutInputFileForJobAggregator(AggregatedJob job) {
        File dir = new File( this.mDirectory, job.getRelativeSubmitDirectory() );
        return this.generateAWSBatchInputFile(job, new File( dir, job.getID() + ".in"), true );
    }
/**
* Writes out the input file for the aggregated job
*
* @param job the aggregated job
* @param stdin the name of input file for batch to be generated
* @param isClustered a boolean indicating whether the graph belongs to a
* clustered job or not.
*
* @return shareSHDirectoryPath to the input file
*/
public File generateAWSBatchInputFile(Graph job, File stdIn , boolean isClustered ) {
try {
Writer writer;
writer = new BufferedWriter(new FileWriter( stdIn ));
JsonFactory factory = new JsonFactory();
JsonGenerator generator = factory.createGenerator( writer );
generator.setPrettyPrinter(new DefaultPrettyPrinter());
//traverse throught the jobs to determine input/output files
//and merge the profiles for the jobs
int taskid = 1;
/*
"jobDefinition": "XXXX",
"jobName": "pegasus-test-job-1",
"jobQueue": "XXXX",
"executable": "pegasus-aws-batch-launch.sh" ,
"arguments": "sample_pegasus_lite.sh",
"environment": [
{
"name": "BATCH_FILE_TYPE",
"value": "script"
},
{
"name": "BATCH_FILE_S3_URL",
"value": "s3://karan-cmd-test-bucket/pegasus-aws-batch-launch.sh"
},
{
"name": "TRANSFER_INPUT_FILES",
"value": "./sample_pegasus_lite.sh"
}
]
*/
generator.writeStartObject();
generator.writeArrayFieldStart( "SubmitJob" );
for( Iterator<GraphNode> it = job.nodeIterator(); it.hasNext(); taskid++ ) {
GraphNode node = it.next();
Job constitutentJob = (Job) node.getContent();
//handle stdin
if( constitutentJob instanceof AggregatedJob ){
//slurp in contents of it's stdin
throw new RuntimeException( "Clustering of clustered jobs not supported with " + AWSBatch.COLLAPSE_LOGICAL_NAME );
}
generator.writeStartObject();
generator.writeStringField( "jobName", constitutentJob.getID() );
generator.writeStringField( "executable", constitutentJob.getRemoteExecutable());
generator.writeStringField( "arguments", constitutentJob.getArguments());
if( !constitutentJob.envVariables.isEmpty() ){
generator.writeArrayFieldStart( "environment" );
for( Iterator<String> envIT = constitutentJob.envVariables.getProfileKeyIterator(); envIT.hasNext();){
String key = envIT.next();
generator.writeStartObject();
generator.writeStringField( key, (String)constitutentJob.envVariables.get(key) );
generator.writeEndObject();
}
generator.writeEndArray();
}
generator.writeEndObject();
}
generator.writeEndArray();
generator.writeEndObject();
generator.close();
}
catch(IOException e){
mLogger.log("While writing the stdIn file " + e.getMessage(),
LogManager.ERROR_MESSAGE_LEVEL);
throw new RuntimeException( "While writing the stdIn file " + stdIn, e );
}
return stdIn;
}
    /**
     * Returns the logical name of the transformation that is used to
     * collapse the jobs.
     *
     * @return the logical name of the collapser executable ("aws-batch").
     * @see #COLLAPSE_LOGICAL_NAME
     */
    public String getClusterExecutableLFN(){
        return COLLAPSE_LOGICAL_NAME;
    }
    /**
     * Returns the executable basename of the clustering executable used.
     *
     * @return the executable basename ("pegasus-aws-batch").
     * @see #EXECUTABLE_BASENAME
     */
    public String getClusterExecutableBasename(){
        return AWSBatch.EXECUTABLE_BASENAME;
    }
    /**
     * Determines whether there is NOT an entry in the transformation catalog
     * for the job aggregator executable on a particular site. Delegates to
     * the multi-argument overload, forcing the lookup site to "local"
     * because pegasus-aws-batch always runs on the submit host.
     *
     * @param site the site at which existence check is required (ignored;
     *             "local" is always used).
     *
     * @return boolean true if an entry does not exist, false otherwise.
     */
    public boolean entryNotInTC(String site) {
        //batch always runs in site "local"
        return this.entryNotInTC( AWSBatch.TRANSFORMATION_NAMESPACE,
                                  AWSBatch.COLLAPSE_LOGICAL_NAME,
                                  AWSBatch.TRANSFORMATION_VERSION,
                                  this.getClusterExecutableBasename(),
                                  "local");
    }
/**
* Returns the arguments with which the <code>AggregatedJob</code>
* needs to be invoked with. At present any empty argument string is
* returned.
*
* @param job the <code>AggregatedJob</code> for which the arguments have
* to be constructed.
*
* @return argument string
*/
public String aggregatedJobArguments( AggregatedJob job ){
//the stdin of the job actually needs to be passed as arguments
File jobSubmitDir = new File(this.mDirectory, job.getRelativeSubmitDirectory() );
String stdin = jobSubmitDir + File.separator + job.getStdIn();
StringBuffer args = new StringBuffer();
//add --max-wall-time option PM-625
String walltime = (String) job.globusRSL.get( Globus.MAX_WALLTIME_KEY );
int divisor = 1;
if( walltime == null ){
//PM-962 fall back on pegasus profile runtime key which is in seconds
walltime = job.vdsNS.getStringValue( Pegasus.RUNTIME_KEY );
if( walltime != null ){
divisor = 60;
}
}
args.append( "--conf" ).append( " " ).
append( mProps.getPropertiesInSubmitDirectory( ) ).
append( " " );
//we log to a file based on jobname
args.append( "--log-file" ).append( " " ).append( jobSubmitDir).append( File.separator).append( job.getID() + ".log" ).append( " " );
//the job name is the prefix for the time being
args.append( "--prefix" ).append( " " ).append( job.getID() ).append( " " );
//the S3 bucket to use is picked up from the environment
String bucket = (String) job.envVariables.get( AWSBatch.PEGASUS_AWS_BATCH_BUCKET_KEY );
if( bucket == null ){
throw new RuntimeException( "Clustered job not associated with S3 bucket for AWS Batch " + job.getID() );
}
args.append( "--s3" ).append( " " ).append( bucket ).append( " " );
//add any files to be transferred from submit host
args.append( "--files" ).append( " " );
//check any credentials have to be transferred
String files = job.condorVariables.getIPFilesForTransfer();
job.condorVariables.removeIPFilesForTransfer();
if( files != null ){
args.append( files );
if( !files.endsWith( ",") ){
args.append( "," );
}
}
//add the --files option to transfer the common shell scripts required
StringBuilder shareSHDirectoryPath = new StringBuilder();
File share = mProps.getSharedDir();
if( share == null ){
throw new RuntimeException( "Property for Pegasus share directory is not set" );
}
shareSHDirectoryPath.append( share.getAbsolutePath() ).append( File.separator ).
append( "sh" ).append( File.separator );
args.append( shareSHDirectoryPath ).append( AWSBatch.PEGASUS_LITE_COMMON_FILE_BASENAME ).append( "," ).
append( shareSHDirectoryPath ).append( AWSBatch.PEGASUS_AWS_BATCH_LAUNCH_BASENAME ).append( " " );
// Construct any extra arguments specified in profiles or properties
// This should go last, otherwise we can't override any automatically-
// generated arguments
String extraArgs = job.vdsNS.getStringValue(Pegasus.CLUSTER_ARGUMENTS);
if (extraArgs != null) {
args.append(extraArgs).append(" ");
}
args.append( stdin );
return args.toString();
}
    /**
     * Setter method to indicate, failure on first constituent job should
     * result in the abort of the whole aggregated job. Ignores any value
     * passed, as AWSBatch does not handle it for time being.
     *
     * @param fail indicates whether to abort or not .
     */
    public void setAbortOnFirstJobFailure( boolean fail){
        //intentionally a no-op: not supported by the AWS Batch aggregator
    }
    /**
     * Returns a boolean indicating whether to fail the aggregated job on
     * detecting the first failure during execution of constituent jobs.
     * Always false for AWS Batch.
     *
     * Note: the method name carries a historical typo ("Frist"); it is kept
     * as-is because it overrides/implements the aggregator interface.
     *
     * @return boolean indicating whether to fail or not.
     */
    public boolean abortOnFristJobFailure(){
        return false;
    }
    /**
     * A boolean indicating whether ordering is important while traversing
     * through the aggregated job.
     *
     * @return false
     */
    public boolean topologicalOrderingRequired(){
        //ordering is not important for this aggregator
        //NOTE(review): original comment referenced PMC; presumably copied from
        //the PMC aggregator - confirm the rationale for AWS Batch.
        return false;
    }
/**
 * Looks at the profile keys associated with the job to generate the argument
 * string fragment containing the cpus required for the job.
 *
 * @param job the Job for which cpu requirements have to be determined.
 *
 * @return the arguments fragment else empty string
 */
public String getCPURequirementsArgument( Job job ){
    StringBuilder result = new StringBuilder();

    //the PMC specific profile key takes precedence over the generic cores key
    String value = job.vdsNS.getStringValue( Pegasus.PMC_REQUEST_CPUS_KEY );
    if( value == null ){
        value = job.vdsNS.getStringValue( Pegasus.CORES_KEY );
    }

    if( value != null ){
        int cpus = -1;
        //sanity check on the value; a parse failure leaves cpus at -1,
        //which triggers the complaint below
        try{
            cpus = Integer.parseInt( value );
        }
        catch( NumberFormatException e ){
            /* ignore - handled by the cpus < 0 check */
        }
        if ( cpus < 0 ){
            //throw an error for a negative or unparseable value
            complain( "Invalid Value specified for cpu count ", job.getID(), value );
        }
        //add only if we have a +ve cpu value
        if( cpus > 0 ){
            result.append( "-c " ).append( cpus ).append( " " );
        }
    }
    return result.toString();
}
/**
 * Looks at the profile keys associated with the job to generate the argument
 * string fragment containing the memory required for the job.
 *
 * @param job the Job for which memory requirements have to be determined.
 *
 * @return the arguments fragment else empty string
 */
public String getMemoryRequirementsArgument( Job job ){
    StringBuilder result = new StringBuilder();

    String value = job.vdsNS.getStringValue( Pegasus.PMC_REQUEST_MEMORY_KEY );
    //default to the generic memory profile. both profiles are in MB
    if( value == null ){
        value = job.vdsNS.getStringValue( Pegasus.MEMORY_KEY );
    }

    if( value != null ){
        double memory = -1;
        //sanity check on the value; a parse failure leaves memory at -1,
        //which triggers the complaint below
        try{
            memory = Double.parseDouble( value );
        }
        catch( NumberFormatException e ){
            /* ignore - handled by the memory < 0 check */
        }
        if ( memory < 0 ){
            //throw an error for a negative or unparseable value
            complain( "Invalid Value specified for memory ", job.getID(), value );
        }
        //add only if we have a +ve memory value; truncated to whole MB
        if( memory > 0 ){
            result.append( "-m " ).append( (long)memory ).append( " " );
        }
    }
    return result.toString();
}
/**
 * Looks at the profile keys associated with the job to generate the argument
 * string fragment containing the priority to be associated with the job.
 * Negative values are allowed.
 *
 * @param job the Job for which the priority has to be determined.
 *
 * @return the arguments fragment else empty string
 */
public String getPriorityArgument( Job job ){
    StringBuilder result = new StringBuilder();

    String value = job.vdsNS.getStringValue( Pegasus.PMC_PRIORITY_KEY );
    if( value != null ){
        int priority = 0;
        //sanity check on the value
        try{
            priority = Integer.parseInt( value );
        }
        catch( NumberFormatException e ){
            //throw an error for an unparseable value
            complain( "Invalid Value specified for job priority ", job.getID(), value );
        }
        //-ve values are allowed priorities
        result.append( "-p " ).append( priority ).append( " " );
    }
    return result.toString();
}
/**
 * Looks at the profile key for pegasus::pmc_task_arguments to determine if
 * extra arguments are required for the task.
 *
 * @param job the constituent job for which extra arguments need to be determined
 * @return the arguments if they are present, or an empty string
 */
public String getExtraArguments( Job job ) {
    final String extra = job.vdsNS.getStringValue( Pegasus.PMC_TASK_ARGUMENTS );
    //append a trailing space so the fragment can be safely concatenated
    return ( extra == null ) ? "" : extra + " ";
}
/**
 * Complains for invalid values passed in profiles by throwing a
 * RuntimeException with context about the offending job.
 *
 * @param message the string describing the error
 * @param id      id of the job
 * @param value   the offending profile value
 */
private void complain( String message, String id, String value) {
    //fail fast: an invalid profile value is a planning error
    throw new RuntimeException( message + value + " for job " + id );
}
}
|
src/edu/isi/pegasus/planner/cluster/aggregator/AWSBatch.java
|
/**
* Copyright 2007-2017 University Of Southern California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.isi.pegasus.planner.cluster.aggregator;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
import edu.isi.pegasus.common.logging.LogManager;
import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.AggregatedJob;
import edu.isi.pegasus.planner.classes.Job;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.code.gridstart.PegasusLite;
import edu.isi.pegasus.planner.namespace.Globus;
import edu.isi.pegasus.planner.namespace.Pegasus;
import edu.isi.pegasus.planner.partitioner.graph.Graph;
import edu.isi.pegasus.planner.partitioner.graph.GraphNode;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.Iterator;
/**
 * Aggregates jobs together, to be launched by the pegasus-aws-batch tool.
 *
 * @author Karan Vahi vahi@isi.edu
 * @version $Revision$
 */
public class AWSBatch extends Abstract {

    /**
     * The logical name of the transformation that is able to run multiple
     * jobs via pegasus-aws-batch.
     */
    public static final String COLLAPSE_LOGICAL_NAME = "aws-batch";

    /**
     * The basename of the executable that is able to run multiple
     * jobs via pegasus-aws-batch.
     */
    public static String EXECUTABLE_BASENAME = "pegasus-aws-batch";

    //the common files required for the pegasus-aws-batch

    /**
     * The basename of the script used to launch jobs in the AWS Batch via
     * the fetch and run example
     */
    public static final String PEGASUS_AWS_BATCH_LAUNCH_BASENAME = "pegasus-aws-batch-launch.sh";

    /**
     * The basename of the common PegasusLite shell functions file that also
     * has to be shipped to AWS Batch
     */
    public static final String PEGASUS_LITE_COMMON_FILE_BASENAME = PegasusLite.PEGASUS_LITE_COMMON_FILE_BASENAME;

    /**
     * The environment variable that designates the key used by fetch_and_run.sh
     * executable in batch containers
     */
    public static final String BATCH_FILE_TYPE_KEY = "BATCH_FILE_TYPE";

    /**
     * The environment variable that designates the key used by fetch_and_run.sh
     * executable for batch containers to pull the user script from s3
     */
    public static final String BATCH_FILE_S3_URL_KEY = "BATCH_FILE_S3_URL";

    /**
     * The environment variable that designates the key for AWS Batch s3 bucket
     */
    public static String PEGASUS_AWS_BATCH_BUCKET_KEY = "PEGASUS_AWS_BATCH_BUCKET";

    /**
     * The default constructor.
     */
    public AWSBatch(){
        super();
    }

    /**
     * Initializes the JobAggregator implementation
     *
     * @param dag the workflow that is being clustered.
     * @param bag the bag of objects that is useful for initialization.
     */
    public void initialize( ADag dag , PegasusBag bag ){
        super.initialize(dag, bag);
    }

    /**
     * Enables the abstract clustered job for execution and converts it to it's
     * executable form. Also associates the post script that should be invoked
     * for the AggregatedJob
     *
     * @param job the abstract clustered job
     */
    public void makeAbstractAggregatedJobConcrete( AggregatedJob job ){
        //PM-962 for PMC aggregated values for runtime and memory don't get mapped
        //to the PMC job itself
        super.makeAbstractAggregatedJobConcrete(job);
    }

    /**
     * Writes out the input file for the aggregated job
     *
     * @param job the aggregated job
     *
     * @return path to the input file
     */
    protected File writeOutInputFileForJobAggregator(AggregatedJob job) {
        File dir = new File( this.mDirectory, job.getRelativeSubmitDirectory() );
        return this.generateAWSBatchInputFile(job, new File( dir, job.getID() + ".in"), true );
    }

    /**
     * Generates the JSON input file describing the constituent jobs for
     * pegasus-aws-batch to submit to AWS Batch.
     *
     * @param job the graph of constituent jobs
     * @param stdIn the input file for batch to be generated
     * @param isClustered a boolean indicating whether the graph belongs to a
     * clustered job or not.
     *
     * @return path to the generated input file
     */
    public File generateAWSBatchInputFile(Graph job, File stdIn , boolean isClustered ) {
        try {
            Writer writer = new BufferedWriter(new FileWriter( stdIn ));
            JsonFactory factory = new JsonFactory();
            JsonGenerator generator = factory.createGenerator( writer );
            generator.setPrettyPrinter(new DefaultPrettyPrinter());

            //traverse through the jobs to determine input/output files
            //and merge the profiles for the jobs
            //NOTE(review): taskid is incremented per constituent job but is
            //not yet written into the generated JSON
            int taskid = 1;

            /* Sample of a generated SubmitJob entry:
            "jobDefinition": "XXXX",
            "jobName": "pegasus-test-job-1",
            "jobQueue": "XXXX",
            "executable": "pegasus-aws-batch-launch.sh" ,
            "arguments": "sample_pegasus_lite.sh",
            "environment": [
              {
                "name": "BATCH_FILE_TYPE",
                "value": "script"
              },
              {
                "name": "BATCH_FILE_S3_URL",
                "value": "s3://karan-cmd-test-bucket/pegasus-aws-batch-launch.sh"
              },
              {
                "name": "TRANSFER_INPUT_FILES",
                "value": "./sample_pegasus_lite.sh"
              }
            ]
            */
            generator.writeStartObject();
            generator.writeArrayFieldStart( "SubmitJob" );
            for( Iterator<GraphNode> it = job.nodeIterator(); it.hasNext(); taskid++ ) {
                GraphNode node = it.next();
                Job constitutentJob = (Job) node.getContent();

                if( constitutentJob instanceof AggregatedJob ){
                    //recursive clustering is not supported
                    throw new RuntimeException( "Clustering of clustered jobs not supported with " + AWSBatch.COLLAPSE_LOGICAL_NAME );
                }

                generator.writeStartObject();
                generator.writeStringField( "jobName", constitutentJob.getID() );
                generator.writeStringField( "executable", constitutentJob.getRemoteExecutable());
                generator.writeStringField( "arguments", constitutentJob.getArguments());
                if( !constitutentJob.envVariables.isEmpty() ){
                    generator.writeArrayFieldStart( "environment" );
                    for( Iterator<String> envIT = constitutentJob.envVariables.getProfileKeyIterator(); envIT.hasNext();){
                        String key = envIT.next();
                        generator.writeStartObject();
                        generator.writeStringField( key, (String)constitutentJob.envVariables.get(key) );
                        generator.writeEndObject();
                    }
                    generator.writeEndArray();
                }
                generator.writeEndObject();
            }
            generator.writeEndArray();
            generator.writeEndObject();
            //closing the generator also flushes and closes the underlying writer
            generator.close();
        }
        catch(IOException e){
            mLogger.log("While writing the stdIn file " + e.getMessage(),
                        LogManager.ERROR_MESSAGE_LEVEL);
            throw new RuntimeException( "While writing the stdIn file " + stdIn, e );
        }
        return stdIn;
    }

    /**
     * Returns the logical name of the transformation that is used to
     * collapse the jobs.
     *
     * @return the the logical name of the collapser executable.
     * @see #COLLAPSE_LOGICAL_NAME
     */
    public String getClusterExecutableLFN(){
        return COLLAPSE_LOGICAL_NAME;
    }

    /**
     * Returns the executable basename of the clustering executable used.
     *
     * @return the executable basename.
     * @see #EXECUTABLE_BASENAME
     */
    public String getClusterExecutableBasename(){
        return AWSBatch.EXECUTABLE_BASENAME;
    }

    /**
     * Determines whether there is NOT an entry in the transformation catalog
     * for the job aggregator executable on a particular site.
     *
     * @param site the site at which existence check is required.
     *
     * @return boolean true if an entry does not exists, false otherwise.
     */
    public boolean entryNotInTC(String site) {
        //batch always runs in site "local"
        return this.entryNotInTC( AWSBatch.TRANSFORMATION_NAMESPACE,
                                  AWSBatch.COLLAPSE_LOGICAL_NAME,
                                  AWSBatch.TRANSFORMATION_VERSION,
                                  this.getClusterExecutableBasename(),
                                  "local");
    }

    /**
     * Returns the arguments with which the <code>AggregatedJob</code>
     * needs to be invoked with.
     *
     * @param job the <code>AggregatedJob</code> for which the arguments have
     *            to be constructed.
     *
     * @return argument string
     */
    public String aggregatedJobArguments( AggregatedJob job ){
        //PM-1231 pegasus-aws-batch is launched from the top level submit
        //directory, so we need full paths to the log file and the input file
        File submitDir = new File( this.mDirectory, job.getRelativeSubmitDirectory() );
        //the stdin of the job actually needs to be passed as arguments
        File stdin     = new File( submitDir, job.getStdIn() );

        StringBuilder args = new StringBuilder();

        //add --max-wall-time option PM-625
        //NOTE(review): walltime is computed but never appended to the
        //arguments; preserved as-is pending implementation of PM-625
        String walltime = (String) job.globusRSL.get( Globus.MAX_WALLTIME_KEY );
        int divisor = 1;
        if( walltime == null ){
            //PM-962 fall back on pegasus profile runtime key which is in seconds
            walltime = job.vdsNS.getStringValue( Pegasus.RUNTIME_KEY );
            if( walltime != null ){
                divisor = 60;
            }
        }

        args.append( "--conf" ).append( " " ).
             append( mProps.getPropertiesInSubmitDirectory( ) ).
             append( " " );

        //we log to a file based on jobname, in the job's submit directory
        args.append( "--log-file" ).append( " " ).
             append( new File( submitDir, job.getID() + ".log" ) ).append( " " );

        //the job name is the prefix for the time being
        args.append( "--prefix" ).append( " " ).append( job.getID() ).append( " " );

        //the S3 bucket to use is picked up from the environment
        String bucket = (String) job.envVariables.get( AWSBatch.PEGASUS_AWS_BATCH_BUCKET_KEY );
        if( bucket == null ){
            throw new RuntimeException( "Clustered job not associated with S3 bucket for AWS Batch " + job.getID() );
        }
        args.append( "--s3" ).append( " " ).append( bucket ).append( " " );

        //add any files to be transferred from submit host
        args.append( "--files" ).append( " " );
        //check any credentials have to be transferred
        String files = job.condorVariables.getIPFilesForTransfer();
        job.condorVariables.removeIPFilesForTransfer();
        if( files != null ){
            args.append( files );
            if( !files.endsWith( ",") ){
                args.append( "," );
            }
        }

        //add the --files option to transfer the common shell scripts required
        StringBuilder shareSHDirectoryPath = new StringBuilder();
        File share = mProps.getSharedDir();
        if( share == null ){
            throw new RuntimeException( "Property for Pegasus share directory is not set" );
        }
        shareSHDirectoryPath.append( share.getAbsolutePath() ).append( File.separator ).
                             append( "sh" ).append( File.separator );
        args.append( shareSHDirectoryPath ).append( AWSBatch.PEGASUS_LITE_COMMON_FILE_BASENAME ).append( "," ).
             append( shareSHDirectoryPath ).append( AWSBatch.PEGASUS_AWS_BATCH_LAUNCH_BASENAME ).append( " " );

        // Construct any extra arguments specified in profiles or properties
        // This should go last, otherwise we can't override any automatically-
        // generated arguments
        String extraArgs = job.vdsNS.getStringValue(Pegasus.CLUSTER_ARGUMENTS);
        if (extraArgs != null) {
            args.append(extraArgs).append(" ");
        }

        //PM-1231 the full path to the generated input file goes last
        args.append( stdin );

        return args.toString();
    }

    /**
     * Setter method to indicate that failure on the first constituent job should
     * result in the abort of the whole aggregated job. Ignores any value
     * passed, as AWSBatch does not handle it for the time being.
     *
     * @param fail indicates whether to abort or not.
     */
    public void setAbortOnFirstJobFailure( boolean fail){
        //deliberate no-op: abort-on-first-failure is not supported for AWS Batch
    }

    /**
     * Returns a boolean indicating whether to fail the aggregated job on
     * detecting the first failure during execution of constituent jobs.
     *
     * NOTE(review): the method name contains a typo ("Frist"); it is part of
     * the public interface and is preserved for caller compatibility.
     *
     * @return boolean indicating whether to fail or not.
     */
    public boolean abortOnFristJobFailure(){
        return false;
    }

    /**
     * A boolean indicating whether ordering is important while traversing
     * through the aggregated job.
     *
     * @return false
     */
    public boolean topologicalOrderingRequired(){
        //ordering is not important while traversing the clustered job's graph
        return false;
    }

    /**
     * Looks at the profile keys associated with the job to generate the argument
     * string fragment containing the cpus required for the job.
     *
     * @param job the Job for which cpu requirements have to be determined.
     *
     * @return the arguments fragment else empty string
     */
    public String getCPURequirementsArgument( Job job ){
        StringBuilder result = new StringBuilder();

        //the PMC specific profile key takes precedence over the generic cores key
        String value = job.vdsNS.getStringValue( Pegasus.PMC_REQUEST_CPUS_KEY );
        if( value == null ){
            value = job.vdsNS.getStringValue( Pegasus.CORES_KEY );
        }

        if( value != null ){
            int cpus = -1;
            //sanity check; a parse failure leaves cpus at -1 and triggers
            //the complaint below
            try{
                cpus = Integer.parseInt( value );
            }
            catch( NumberFormatException e ){
                /* ignore - handled by the cpus < 0 check */
            }
            if ( cpus < 0 ){
                //throw an error for a negative or unparseable value
                complain( "Invalid Value specified for cpu count ", job.getID(), value );
            }
            //add only if we have a +ve cpu value
            if( cpus > 0 ){
                result.append( "-c " ).append( cpus ).append( " " );
            }
        }
        return result.toString();
    }

    /**
     * Looks at the profile keys associated with the job to generate the argument
     * string fragment containing the memory required for the job.
     *
     * @param job the Job for which memory requirements have to be determined.
     *
     * @return the arguments fragment else empty string
     */
    public String getMemoryRequirementsArgument( Job job ){
        StringBuilder result = new StringBuilder();

        String value = job.vdsNS.getStringValue( Pegasus.PMC_REQUEST_MEMORY_KEY );
        //default to the generic memory profile. both profiles are in MB
        if( value == null ){
            value = job.vdsNS.getStringValue( Pegasus.MEMORY_KEY );
        }

        if( value != null ){
            double memory = -1;
            //sanity check; a parse failure leaves memory at -1 and triggers
            //the complaint below
            try{
                memory = Double.parseDouble( value );
            }
            catch( NumberFormatException e ){
                /* ignore - handled by the memory < 0 check */
            }
            if ( memory < 0 ){
                //throw an error for a negative or unparseable value
                complain( "Invalid Value specified for memory ", job.getID(), value );
            }
            //add only if we have a +ve memory value; truncated to whole MB
            if( memory > 0 ){
                result.append( "-m " ).append( (long)memory ).append( " " );
            }
        }
        return result.toString();
    }

    /**
     * Looks at the profile keys associated with the job to generate the argument
     * string fragment containing the priority to be associated with the job.
     * Negative values are allowed.
     *
     * @param job the Job for which the priority has to be determined.
     *
     * @return the arguments fragment else empty string
     */
    public String getPriorityArgument( Job job ){
        StringBuilder result = new StringBuilder();

        String value = job.vdsNS.getStringValue( Pegasus.PMC_PRIORITY_KEY );
        if( value != null ){
            int priority = 0;
            //sanity check on the value
            try{
                priority = Integer.parseInt( value );
            }
            catch( NumberFormatException e ){
                //throw an error for an unparseable value
                complain( "Invalid Value specified for job priority ", job.getID(), value );
            }
            //-ve values are allowed priorities
            result.append( "-p " ).append( priority ).append( " " );
        }
        return result.toString();
    }

    /**
     * Looks at the profile key for pegasus::pmc_task_arguments to determine if
     * extra arguments are required for the task.
     *
     * @param job the constituent job for which extra arguments need to be determined
     * @return the arguments if they are present, or an empty string
     */
    public String getExtraArguments( Job job ) {
        final String extra = job.vdsNS.getStringValue( Pegasus.PMC_TASK_ARGUMENTS );
        //append a trailing space so the fragment can be safely concatenated
        return ( extra == null ) ? "" : extra + " ";
    }

    /**
     * Complains for invalid values passed in profiles by throwing a
     * RuntimeException with context about the offending job.
     *
     * @param message the string describing the error
     * @param id      id of the job
     * @param value   the offending profile value
     */
    private void complain( String message, String id, String value) {
        //fail fast: an invalid profile value is a planning error
        throw new RuntimeException( message + value + " for job " + id );
    }
}
|
PM-1231 we need to have full paths to the log file and the input file
|
src/edu/isi/pegasus/planner/cluster/aggregator/AWSBatch.java
|
PM-1231 we need to have full paths to the log file and the input file
|
|
Java
|
apache-2.0
|
ce50b3135874e960f643148c2a8d24fc115d3cb4
| 0
|
billho/symphony,billho/symphony,billho/symphony
|
/*
* Symphony - A modern community (forum/BBS/SNS/blog) platform written in Java.
* Copyright (C) 2012-2018, b3log.org & hacpai.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.b3log.symphony.processor;
import org.apache.commons.lang.StringUtils;
import org.b3log.latke.Keys;
import org.b3log.latke.Latkes;
import org.b3log.latke.ioc.inject.Inject;
import org.b3log.latke.model.Pagination;
import org.b3log.latke.servlet.HTTPRequestContext;
import org.b3log.latke.servlet.HTTPRequestMethod;
import org.b3log.latke.servlet.annotation.After;
import org.b3log.latke.servlet.annotation.Before;
import org.b3log.latke.servlet.annotation.RequestProcessing;
import org.b3log.latke.servlet.annotation.RequestProcessor;
import org.b3log.latke.servlet.renderer.freemarker.AbstractFreeMarkerRenderer;
import org.b3log.latke.util.Paginator;
import org.b3log.symphony.model.*;
import org.b3log.symphony.processor.advice.AnonymousViewCheck;
import org.b3log.symphony.processor.advice.PermissionGrant;
import org.b3log.symphony.processor.advice.stopwatch.StopwatchEndAdvice;
import org.b3log.symphony.processor.advice.stopwatch.StopwatchStartAdvice;
import org.b3log.symphony.service.*;
import org.b3log.symphony.util.Sessions;
import org.b3log.symphony.util.Symphonys;
import org.json.JSONObject;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Tag processor.
 * <ul>
 * <li>Shows tags wall (/tags), GET</li>
 * <li>Shows tag articles (/tag/{tagTitle}), GET</li>
 * <li>Query tags (/tags/query), GET</li>
 * </ul>
 *
 * @author <a href="http://88250.b3log.org">Liang Ding</a>
 * @author <a href="http://vanessa.b3log.org">Liyuan Li</a>
 * @version 1.7.0.13, Jun 6, 2018
 * @since 0.2.0
 */
@RequestProcessor
public class TagProcessor {

    /**
     * Tag query service.
     */
    @Inject
    private TagQueryService tagQueryService;

    /**
     * Article query service.
     */
    @Inject
    private ArticleQueryService articleQueryService;

    /**
     * Follow query service.
     */
    @Inject
    private FollowQueryService followQueryService;

    /**
     * User query service.
     */
    @Inject
    private UserQueryService userQueryService;

    /**
     * Data model service.
     */
    @Inject
    private DataModelService dataModelService;

    /**
     * Queries tags.
     *
     * @param context the specified context
     * @param request the specified request
     * @param response the specified response
     * @throws Exception exception
     */
    @RequestProcessing(value = "/tags/query", method = HTTPRequestMethod.GET)
    public void queryTags(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response)
            throws Exception {
        // anonymous visitors are not allowed to query tags
        if (null == Sessions.currentUser(request)) {
            response.setStatus(HttpServletResponse.SC_FORBIDDEN);
            return;
        }
        context.renderJSON().renderTrueResult();

        final String titlePrefix = request.getParameter("title");
        List<JSONObject> tags;

        // return at most 7 tags, optionally filtered by the given title prefix
        final int fetchSize = 7;
        if (StringUtils.isBlank(titlePrefix)) {
            tags = tagQueryService.getTags(fetchSize);
        } else {
            tags = tagQueryService.getTagsByPrefix(titlePrefix, fetchSize);
        }

        final List<String> ret = new ArrayList<>();
        for (final JSONObject tag : tags) {
            ret.add(tag.optString(Tag.TAG_TITLE));
        }
        context.renderJSONValue(Tag.TAGS, ret);
    }

    /**
     * Shows tags wall.
     *
     * @param context the specified context
     * @param request the specified request
     * @param response the specified response
     * @throws Exception exception
     */
    @RequestProcessing(value = "/tags", method = HTTPRequestMethod.GET)
    @Before(adviceClass = {StopwatchStartAdvice.class, AnonymousViewCheck.class})
    @After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
    public void showTagsWall(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response)
            throws Exception {
        final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
        context.setRenderer(renderer);
        renderer.setTemplateName("tags.ftl");
        final Map<String, Object> dataModel = renderer.getDataModel();

        // trend/cold counts are read via configured properties
        final List<JSONObject> trendTags = tagQueryService.getTrendTags(Symphonys.getInt("tagsWallTrendCnt"));
        final List<JSONObject> coldTags = tagQueryService.getColdTags(Symphonys.getInt("tagsWallColdCnt"));
        dataModel.put(Common.TREND_TAGS, trendTags);
        dataModel.put(Common.COLD_TAGS, coldTags);

        dataModelService.fillHeaderAndFooter(request, response, dataModel);
    }

    /**
     * Shows tag articles.
     *
     * @param context the specified context
     * @param request the specified request
     * @param response the specified response
     * @param tagURI the specified tag URI
     * @throws Exception exception
     */
    @RequestProcessing(value = {"/tag/{tagURI}", "/tag/{tagURI}/hot", "/tag/{tagURI}/good", "/tag/{tagURI}/reply",
            "/tag/{tagURI}/perfect"}, method = HTTPRequestMethod.GET)
    @Before(adviceClass = {StopwatchStartAdvice.class, AnonymousViewCheck.class})
    @After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
    public void showTagArticles(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response,
                                final String tagURI) throws Exception {
        final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
        context.setRenderer(renderer);
        renderer.setTemplateName("tag-articles.ftl");
        final Map<String, Object> dataModel = renderer.getDataModel();
        dataModelService.fillHeaderAndFooter(request, response, dataModel);

        final int pageNum = Paginator.getPage(request);
        int pageSize = Symphonys.getInt("indexArticlesCnt");

        final JSONObject user = userQueryService.getCurrentUser(request);
        if (null != user) {
            // logged-in users get their personal page size preference
            pageSize = user.optInt(UserExt.USER_LIST_PAGE_SIZE);

            // users that have not finished the onboarding guide are redirected to it
            if (!UserExt.finshedGuide(user)) {
                response.sendRedirect(Latkes.getServePath() + "/guide");
                return;
            }
        }

        final JSONObject tag = tagQueryService.getTagByURI(tagURI);
        if (null == tag) {
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }
        tag.put(Common.IS_RESERVED, tagQueryService.isReservedTag(tag.optString(Tag.TAG_TITLE)));
        dataModel.put(Tag.TAG, tag);

        final String tagId = tag.optString(Keys.OBJECT_ID);
        final List<JSONObject> relatedTags = tagQueryService.getRelatedTags(tagId, Symphonys.getInt("tagRelatedTagsCnt"));
        tag.put(Tag.TAG_T_RELATED_TAGS, (Object) relatedTags);

        final boolean isLoggedIn = (Boolean) dataModel.get(Common.IS_LOGGED_IN);
        if (isLoggedIn) {
            final JSONObject currentUser = (JSONObject) dataModel.get(Common.CURRENT_USER);
            final String followerId = currentUser.optString(Keys.OBJECT_ID);
            final boolean isFollowing = followQueryService.isFollowing(followerId, tagId, Follow.FOLLOWING_TYPE_C_TAG);
            dataModel.put(Common.IS_FOLLOWING, isFollowing);
        }

        final int avatarViewMode = (int) request.getAttribute(UserExt.USER_AVATAR_VIEW_MODE);

        // the URI suffix after the tag selects the sort mode for article listing
        String sortModeStr = StringUtils.substringAfter(request.getRequestURI(), "/tag/" + tagURI);
        int sortMode;
        switch (sortModeStr) {
            case "":
                sortMode = 0;
                break;
            case "/hot":
                sortMode = 1;
                break;
            case "/good":
                sortMode = 2;
                break;
            case "/reply":
                sortMode = 3;
                break;
            case "/perfect":
                sortMode = 4;
                break;
            default:
                sortMode = 0;
        }

        final List<JSONObject> articles = articleQueryService.getArticlesByTag(avatarViewMode, sortMode, tag,
                pageNum, pageSize);
        dataModel.put(Article.ARTICLES, articles);

        final JSONObject tagCreator = tagQueryService.getCreator(avatarViewMode, tagId);
        tag.put(Tag.TAG_T_CREATOR_THUMBNAIL_URL, tagCreator.optString(Tag.TAG_T_CREATOR_THUMBNAIL_URL));
        tag.put(Tag.TAG_T_CREATOR_NAME, tagCreator.optString(Tag.TAG_T_CREATOR_NAME));
        tag.put(Tag.TAG_T_CREATOR_THUMBNAIL_UPDATE_TIME, tagCreator.optLong(Tag.TAG_T_CREATOR_THUMBNAIL_UPDATE_TIME));
        tag.put(Tag.TAG_T_PARTICIPANTS, (Object) tagQueryService.getParticipants(
                avatarViewMode, tagId, Symphonys.getInt("tagParticipantsCnt")));

        final int tagRefCnt = tag.getInt(Tag.TAG_REFERENCE_CNT);
        final int pageCount = (int) Math.ceil(tagRefCnt / (double) pageSize);

        final int windowSize = Symphonys.getInt("tagArticlesWindowSize");
        final List<Integer> pageNums = Paginator.paginate(pageNum, pageSize, pageCount, windowSize);
        if (!pageNums.isEmpty()) {
            dataModel.put(Pagination.PAGINATION_FIRST_PAGE_NUM, pageNums.get(0));
            dataModel.put(Pagination.PAGINATION_LAST_PAGE_NUM, pageNums.get(pageNums.size() - 1));
        }
        dataModel.put(Pagination.PAGINATION_CURRENT_PAGE_NUM, pageNum);
        dataModel.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);
        dataModel.put(Pagination.PAGINATION_PAGE_NUMS, pageNums);

        dataModelService.fillRandomArticles(dataModel);
        dataModelService.fillSideHotArticles(dataModel);
        dataModelService.fillSideTags(dataModel);
        dataModelService.fillLatestCmts(dataModel);

        // the decoded sort-mode suffix is exposed so templates can highlight the active tab
        dataModel.put(Common.CURRENT, StringUtils.substringAfter(URLDecoder.decode(request.getRequestURI(), "UTF-8"),
                "/tag/" + tagURI));
    }
}
|
src/main/java/org/b3log/symphony/processor/TagProcessor.java
|
/*
* Symphony - A modern community (forum/BBS/SNS/blog) platform written in Java.
* Copyright (C) 2012-2018, b3log.org & hacpai.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package org.b3log.symphony.processor;
import org.apache.commons.lang.StringUtils;
import org.b3log.latke.Keys;
import org.b3log.latke.Latkes;
import org.b3log.latke.ioc.inject.Inject;
import org.b3log.latke.model.Pagination;
import org.b3log.latke.servlet.HTTPRequestContext;
import org.b3log.latke.servlet.HTTPRequestMethod;
import org.b3log.latke.servlet.annotation.After;
import org.b3log.latke.servlet.annotation.Before;
import org.b3log.latke.servlet.annotation.RequestProcessing;
import org.b3log.latke.servlet.annotation.RequestProcessor;
import org.b3log.latke.servlet.renderer.freemarker.AbstractFreeMarkerRenderer;
import org.b3log.latke.util.Paginator;
import org.b3log.symphony.model.*;
import org.b3log.symphony.processor.advice.AnonymousViewCheck;
import org.b3log.symphony.processor.advice.PermissionGrant;
import org.b3log.symphony.processor.advice.stopwatch.StopwatchEndAdvice;
import org.b3log.symphony.processor.advice.stopwatch.StopwatchStartAdvice;
import org.b3log.symphony.service.*;
import org.b3log.symphony.util.Sessions;
import org.b3log.symphony.util.Symphonys;
import org.json.JSONObject;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Tag processor.
* <ul>
* <li>Shows tags wall (/tags), GET</li>
* <li>Shows tag articles (/tag/{tagTitle}), GET</li>
* <li>Query tags (/tags/query), GET</li>
* </ul>
*
* @author <a href="http://88250.b3log.org">Liang Ding</a>
* @author <a href="http://vanessa.b3log.org">Liyuan Li</a>
* @version 1.7.0.13, Jun 6, 2018
* @since 0.2.0
*/
@RequestProcessor
public class TagProcessor {
    /**
     * Tag query service.
     */
    @Inject
    private TagQueryService tagQueryService;

    /**
     * Article query service.
     */
    @Inject
    private ArticleQueryService articleQueryService;

    /**
     * Follow query service.
     */
    @Inject
    private FollowQueryService followQueryService;

    /**
     * User query service.
     */
    @Inject
    private UserQueryService userQueryService;

    /**
     * Data model service.
     */
    @Inject
    private DataModelService dataModelService;
/**
 * Queries tags.
 *
 * @param context the specified context
 * @param request the specified request
 * @param response the specified response
 * @throws Exception exception
 */
@RequestProcessing(value = "/tags/query", method = HTTPRequestMethod.GET)
public void queryTags(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response)
        throws Exception {
    //anonymous visitors are not allowed to query tags
    if (null == Sessions.currentUser(request)) {
        response.setStatus(HttpServletResponse.SC_FORBIDDEN);

        return;
    }

    context.renderJSON().renderTrueResult();

    //look up at most 7 tags, optionally filtered by the given title prefix
    final int maxResults = 7;
    final String prefix = request.getParameter("title");
    final List<JSONObject> matchedTags = StringUtils.isBlank(prefix)
            ? tagQueryService.getTags(maxResults)
            : tagQueryService.getTagsByPrefix(prefix, maxResults);

    //project the matched tag records down to their titles
    final List<String> titles = new ArrayList<>();
    for (final JSONObject matchedTag : matchedTags) {
        titles.add(matchedTag.optString(Tag.TAG_TITLE));
    }

    context.renderJSONValue(Tag.TAGS, titles);
}
/**
 * Shows tags wall.
 *
 * @param context the specified context
 * @param request the specified request
 * @param response the specified response
 * @throws Exception exception
 */
@RequestProcessing(value = "/tags", method = HTTPRequestMethod.GET)
@Before(adviceClass = {StopwatchStartAdvice.class, AnonymousViewCheck.class})
@After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
public void showTagsWall(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response)
        throws Exception {
    final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
    context.setRenderer(renderer);
    renderer.setTemplateName("tags.ftl");
    final Map<String, Object> dataModel = renderer.getDataModel();

    // trend/cold counts are read via configured properties
    final List<JSONObject> trendTags = tagQueryService.getTrendTags(Symphonys.getInt("tagsWallTrendCnt"));
    final List<JSONObject> coldTags = tagQueryService.getColdTags(Symphonys.getInt("tagsWallColdCnt"));
    dataModel.put(Common.TREND_TAGS, trendTags);
    dataModel.put(Common.COLD_TAGS, coldTags);

    dataModelService.fillHeaderAndFooter(request, response, dataModel);
}
/**
* Shows tag articles.
*
* @param context the specified context
* @param request the specified request
* @param response the specified response
* @param tagURI the specified tag URI
* @throws Exception exception
*/
@RequestProcessing(value = {"/tag/{tagURI}", "/tag/{tagURI}/hot", "/tag/{tagURI}/good", "/tag/{tagURI}/reply",
"/tag/{tagURI}/perfect"}, method = HTTPRequestMethod.GET)
@Before(adviceClass = {StopwatchStartAdvice.class, AnonymousViewCheck.class})
@After(adviceClass = {PermissionGrant.class, StopwatchEndAdvice.class})
public void showTagArticles(final HTTPRequestContext context, final HttpServletRequest request, final HttpServletResponse response,
final String tagURI) throws Exception {
final AbstractFreeMarkerRenderer renderer = new SkinRenderer(request);
context.setRenderer(renderer);
renderer.setTemplateName("tag-articles.ftl");
final Map<String, Object> dataModel = renderer.getDataModel();
dataModelService.fillHeaderAndFooter(request, response, dataModel);
final int pageNum = Paginator.getPage(request);
int pageSize = Symphonys.getInt("indexArticlesCnt");
final JSONObject user = userQueryService.getCurrentUser(request);
if (null != user) {
pageSize = user.optInt(UserExt.USER_LIST_PAGE_SIZE);
if (!UserExt.finshedGuide(user)) {
response.sendRedirect(Latkes.getServePath() + "/guide");
return;
}
}
final JSONObject tag = tagQueryService.getTagByURI(tagURI);
if (null == tag) {
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
tag.put(Common.IS_RESERVED, tagQueryService.isReservedTag(tag.optString(Tag.TAG_TITLE)));
dataModel.put(Tag.TAG, tag);
final String tagId = tag.optString(Keys.OBJECT_ID);
final List<JSONObject> relatedTags = tagQueryService.getRelatedTags(tagId, Symphonys.getInt("tagRelatedTagsCnt"));
tag.put(Tag.TAG_T_RELATED_TAGS, (Object) relatedTags);
final boolean isLoggedIn = (Boolean) dataModel.get(Common.IS_LOGGED_IN);
if (isLoggedIn) {
final JSONObject currentUser = (JSONObject) dataModel.get(Common.CURRENT_USER);
final String followerId = currentUser.optString(Keys.OBJECT_ID);
final boolean isFollowing = followQueryService.isFollowing(followerId, tagId, Follow.FOLLOWING_TYPE_C_TAG);
dataModel.put(Common.IS_FOLLOWING, isFollowing);
}
final int avatarViewMode = (int) request.getAttribute(UserExt.USER_AVATAR_VIEW_MODE);
String sortModeStr = StringUtils.substringAfter(request.getRequestURI(), "/tag/" + tagURI);
int sortMode;
switch (sortModeStr) {
case "":
sortMode = 0;
break;
case "/hot":
sortMode = 1;
break;
case "/good":
sortMode = 2;
break;
case "/reply":
sortMode = 3;
break;
case "/perfect":
sortMode = 4;
break;
default:
sortMode = 0;
}
final List<JSONObject> articles = articleQueryService.getArticlesByTag(avatarViewMode, sortMode, tag,
pageNum, pageSize);
dataModel.put(Article.ARTICLES, articles);
final JSONObject tagCreator = tagQueryService.getCreator(avatarViewMode, tagId);
tag.put(Tag.TAG_T_CREATOR_THUMBNAIL_URL, tagCreator.optString(Tag.TAG_T_CREATOR_THUMBNAIL_URL));
tag.put(Tag.TAG_T_CREATOR_NAME, tagCreator.optString(Tag.TAG_T_CREATOR_NAME));
tag.put(Tag.TAG_T_CREATOR_THUMBNAIL_UPDATE_TIME, tagCreator.optLong(Tag.TAG_T_CREATOR_THUMBNAIL_UPDATE_TIME));
tag.put(Tag.TAG_T_PARTICIPANTS, (Object) tagQueryService.getParticipants(
avatarViewMode, tagId, Symphonys.getInt("tagParticipantsCnt")));
final int tagRefCnt = tag.getInt(Tag.TAG_REFERENCE_CNT);
final int pageCount = (int) Math.ceil(tagRefCnt / (double) pageSize);
final int windowSize = Symphonys.getInt("tagArticlesWindowSize");
final List<Integer> pageNums = Paginator.paginate(pageNum, pageSize, pageCount, windowSize);
if (!pageNums.isEmpty()) {
dataModel.put(Pagination.PAGINATION_FIRST_PAGE_NUM, pageNums.get(0));
dataModel.put(Pagination.PAGINATION_LAST_PAGE_NUM, pageNums.get(pageNums.size() - 1));
}
dataModel.put(Pagination.PAGINATION_CURRENT_PAGE_NUM, pageNum);
dataModel.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);
dataModel.put(Pagination.PAGINATION_PAGE_NUMS, pageNums);
dataModelService.fillRandomArticles(dataModel);
dataModelService.fillSideHotArticles(dataModel);
dataModelService.fillSideTags(dataModel);
dataModelService.fillLatestCmts(dataModel);
dataModel.put(Common.CURRENT, StringUtils.substringAfter(URLDecoder.decode(request.getRequestURI(), "UTF-8"),
"/tag/" + tagURI));
}
}
|
:art: Cleanup code
|
src/main/java/org/b3log/symphony/processor/TagProcessor.java
|
:art: Cleanup code
|
|
Java
|
apache-2.0
|
7ca8d76e9085e46bf6b5f1ea6737f6f82bf40e9b
| 0
|
marbon87/spring-cloud-config,shakuzen/spring-cloud-config,royclarkson/spring-cloud-config,mbenson/spring-cloud-config,spring-cloud/spring-cloud-config,marbon87/spring-cloud-config,royclarkson/spring-cloud-config,shakuzen/spring-cloud-config,fkissel/spring-cloud-config,mbenson/spring-cloud-config,fkissel/spring-cloud-config,shakuzen/spring-cloud-config,mbenson/spring-cloud-config,spring-cloud/spring-cloud-config,spring-cloud/spring-cloud-config,fkissel/spring-cloud-config,marbon87/spring-cloud-config,royclarkson/spring-cloud-config
|
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.config.monitor;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.config.server.AbstractScmEnvironmentRepository;
import org.springframework.cloud.config.server.NativeEnvironmentRepository;
import org.springframework.context.ResourceLoaderAware;
import org.springframework.context.SmartLifecycle;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.PatternMatchUtils;
import lombok.extern.apachecommons.CommonsLog;
/**
* Configuration for a file watcher that detects changes in local files related to the
* environment repository. If any files change the {@link PropertyPathEndpoint} is pinged
* with the paths of the files. This applies to the source files of a local git repository
* (i.e. a git repository with a "file:" URI) or to a native repository.
*
* @author Dave Syer
*
*/
@Configuration
@CommonsLog
@EnableScheduling
public class FileMonitorConfiguration implements SmartLifecycle, ResourceLoaderAware {
@Autowired
PropertyPathEndpoint endpoint;
@Autowired(required = false)
AbstractScmEnvironmentRepository scmRepository;
@Autowired(required = false)
NativeEnvironmentRepository nativeEnvironmentRepository;
private boolean running;
private WatchService watcher;
private Set<Path> directory;
private int phase;
private boolean autoStartup = true;
private ResourceLoader resourceLoader;
private String[] excludes = new String[] { ".*", "#*", "*#" };
@Override
public void setResourceLoader(ResourceLoader resourceLoader) {
this.resourceLoader = resourceLoader;
}
@Override
public int getPhase() {
return this.phase;
}
/**
* see {@link #getPhase()}
* @param phase the phase.
*/
public void setPhase(int phase) {
this.phase = phase;
}
@Override
public boolean isRunning() {
return this.running;
}
/**
* @see #isRunning()
* @param running true if running.
*/
public void setRunning(boolean running) {
this.running = running;
}
@Override
public boolean isAutoStartup() {
return this.autoStartup;
}
/**
* @see #isAutoStartup()
* @param autoStartup true to auto start.
*/
public void setAutoStartup(boolean autoStartup) {
this.autoStartup = autoStartup;
}
@Override
public synchronized void start() {
if (!this.running) {
this.directory = getFileRepo();
if (this.directory != null && !this.directory.isEmpty()) {
log.info("Monitoring for local config changes: " + this.directory);
try {
this.watcher = FileSystems.getDefault().newWatchService();
for (Path path : this.directory) {
walkDirectory(path);
}
}
catch (IOException e) {
}
}
else {
log.info("Not monitoring for local config changes");
}
this.running = true;
}
}
@Override
public synchronized void stop() {
if (this.running) {
if (this.watcher != null) {
try {
this.watcher.close();
}
catch (IOException e) {
log.error("Failed to close watcher for " + this.directory.toString(),
e);
}
}
this.running = false;
}
}
@Override
public void stop(Runnable callback) {
stop();
callback.run();
}
@Scheduled(fixedRateString = "${spring.cloud.config.server.monitor.fixedDelay:5000}")
public void poll() {
for (File file : filesFromEvents()) {
this.endpoint.notifyByPath(new LinkedMultiValueMap<String, String>(),
Collections.<String, Object> singletonMap("path",
file.getAbsolutePath()));
}
}
private Set<Path> getFileRepo() {
if (this.scmRepository != null
&& this.scmRepository.getUri().startsWith("file:")) {
try {
return Collections.singleton(Paths.get(this.resourceLoader
.getResource(this.scmRepository.getUri()).getURI()));
}
catch (IOException e) {
log.error("Cannot resolve URI for path: " + this.scmRepository.getUri());
}
}
if (this.nativeEnvironmentRepository != null) {
Set<Path> paths = new LinkedHashSet<>();
for (String path : this.nativeEnvironmentRepository.getSearchLocations()) {
Resource resource = this.resourceLoader.getResource(path);
if (resource.exists()) {
try {
paths.add(Paths.get(resource.getURI()));
}
catch (IOException e) {
log.error("Cannot resolve URI for path: " + path);
}
}
}
return paths;
}
return null;
}
private Set<File> filesFromEvents() {
Set<File> files = new LinkedHashSet<File>();
if (this.watcher == null) {
return files;
}
WatchKey key = this.watcher.poll();
while (key != null) {
for (WatchEvent<?> event : key.pollEvents()) {
if (event.kind() == StandardWatchEventKinds.ENTRY_CREATE
|| event.kind() == StandardWatchEventKinds.ENTRY_MODIFY) {
Path item = (Path) event.context();
File file = new File(((Path) key.watchable()).toAbsolutePath()
+ File.separator + item.getFileName());
if (file.isDirectory()) {
files.addAll(walkDirectory(file.toPath()));
}
else {
if (!file.getPath().contains(".git") && !PatternMatchUtils
.simpleMatch(this.excludes, file.getName())) {
if (log.isDebugEnabled()) {
log.debug("Watch Event: " + event.kind() + ": " + file);
}
files.add(file);
}
}
}
else if (event.kind() == StandardWatchEventKinds.OVERFLOW) {
if (log.isDebugEnabled()) {
log.debug("Watch Event: " + event.kind() + ": context: "
+ event.context());
}
if (event.context() != null && event.context() instanceof Path) {
files.addAll(walkDirectory((Path) event.context()));
}
else {
for (Path path : this.directory) {
files.addAll(walkDirectory(path));
}
}
}
else {
if (log.isDebugEnabled()) {
log.debug("Watch Event: " + event.kind() + ": context: "
+ event.context());
}
}
}
key.reset();
key = this.watcher.poll();
}
return files;
}
private Set<File> walkDirectory(Path directory) {
final Set<File> walkedFiles = new LinkedHashSet<File>();
try {
registerWatch(directory);
Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult preVisitDirectory(Path dir,
BasicFileAttributes attrs) throws IOException {
FileVisitResult fileVisitResult = super.preVisitDirectory(dir, attrs);
registerWatch(dir);
return fileVisitResult;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
throws IOException {
FileVisitResult fileVisitResult = super.visitFile(file, attrs);
walkedFiles.add(file.toFile());
return fileVisitResult;
}
});
}
catch (IOException e) {
log.error("Failed to walk directory: " + directory.toString(), e);
}
return walkedFiles;
}
private void registerWatch(Path dir) throws IOException {
if (log.isDebugEnabled()) {
log.debug("registering: " + dir + " for file creation events");
}
dir.register(this.watcher, StandardWatchEventKinds.ENTRY_CREATE,
StandardWatchEventKinds.ENTRY_MODIFY);
}
}
|
spring-cloud-config-monitor/src/main/java/org/springframework/cloud/config/monitor/FileMonitorConfiguration.java
|
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.config.monitor;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.config.server.AbstractScmEnvironmentRepository;
import org.springframework.cloud.config.server.NativeEnvironmentRepository;
import org.springframework.context.ResourceLoaderAware;
import org.springframework.context.SmartLifecycle;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.PatternMatchUtils;
import lombok.extern.apachecommons.CommonsLog;
/**
* Configuration for a file watcher that detects changes in local files related to the
* environment repository. If any files change the {@link PropertyPathEndpoint} is pinged
* with the paths of the files. This applies to the source files of a local git repository
* (i.e. a git repository with a "file:" URI) or to a native repository.
*
* @author Dave Syer
*
*/
@Configuration
@CommonsLog
@EnableScheduling
public class FileMonitorConfiguration implements SmartLifecycle, ResourceLoaderAware {
@Autowired
PropertyPathEndpoint endpoint;
@Autowired(required = false)
AbstractScmEnvironmentRepository scmRepository;
@Autowired(required = false)
NativeEnvironmentRepository nativeEnvironmentRepository;
private boolean running;
private WatchService watcher;
private Set<Path> directory;
private int phase;
private boolean autoStartup = true;
private ResourceLoader resourceLoader;
private String[] excludes = new String[] { ".*", "#*", "*#" };
@Override
public void setResourceLoader(ResourceLoader resourceLoader) {
this.resourceLoader = resourceLoader;
}
@Override
public int getPhase() {
return this.phase;
}
/**
* see {@link #getPhase()}
* @param phase the phase.
*/
public void setPhase(int phase) {
this.phase = phase;
}
@Override
public boolean isRunning() {
return this.running;
}
/**
* @see #isRunning()
* @param running true if running.
*/
public void setRunning(boolean running) {
this.running = running;
}
@Override
public boolean isAutoStartup() {
return this.autoStartup;
}
/**
* @see #isAutoStartup()
* @param autoStartup true to auto start.
*/
public void setAutoStartup(boolean autoStartup) {
this.autoStartup = autoStartup;
}
@Override
public synchronized void start() {
if (!this.running) {
this.directory = getFileRepo();
if (this.directory != null && !this.directory.isEmpty()) {
try {
this.watcher = FileSystems.getDefault().newWatchService();
for (Path path : this.directory) {
walkDirectory(path);
}
}
catch (IOException e) {
}
}
this.running = true;
}
}
@Override
public synchronized void stop() {
if (this.running) {
try {
this.watcher.close();
}
catch (IOException e) {
log.error("Failed to close watcher for " + this.directory.toString(), e);
}
this.running = false;
}
}
@Override
public void stop(Runnable callback) {
stop();
callback.run();
}
@Scheduled(fixedRateString = "${spring.cloud.config.server.monitor.fixedDelay:5000}")
public void poll() {
for (File file : filesFromEvents()) {
this.endpoint.notifyByPath(new LinkedMultiValueMap<String, String>(),
Collections.<String, Object> singletonMap("path",
file.getAbsolutePath()));
}
}
private Set<Path> getFileRepo() {
if (this.scmRepository != null
&& this.scmRepository.getUri().startsWith("file:")) {
try {
return Collections.singleton(Paths.get(this.resourceLoader
.getResource(this.scmRepository.getUri()).getURI()));
}
catch (IOException e) {
log.error("Cannot resolve URI for path: " + this.scmRepository.getUri());
}
}
if (this.nativeEnvironmentRepository != null) {
Set<Path> paths = new LinkedHashSet<>();
for (String path : this.nativeEnvironmentRepository.getSearchLocations()) {
Resource resource = this.resourceLoader.getResource(path);
if (resource.exists()) {
try {
paths.add(Paths.get(resource.getURI()));
}
catch (IOException e) {
log.error("Cannot resolve URI for path: " + path);
}
}
}
return paths;
}
return null;
}
private Set<File> filesFromEvents() {
WatchKey key = this.watcher.poll();
Set<File> files = new LinkedHashSet<File>();
while (key != null) {
for (WatchEvent<?> event : key.pollEvents()) {
if (event.kind() == StandardWatchEventKinds.ENTRY_CREATE
|| event.kind() == StandardWatchEventKinds.ENTRY_MODIFY) {
Path item = (Path) event.context();
File file = new File(((Path) key.watchable()).toAbsolutePath()
+ File.separator + item.getFileName());
if (file.isDirectory()) {
files.addAll(walkDirectory(file.toPath()));
}
else {
if (!file.getPath().contains(".git") && !PatternMatchUtils
.simpleMatch(this.excludes, file.getName())) {
if (log.isDebugEnabled()) {
log.debug("Watch Event: " + event.kind() + ": " + file);
}
files.add(file);
}
}
}
else if (event.kind() == StandardWatchEventKinds.OVERFLOW) {
if (log.isDebugEnabled()) {
log.debug("Watch Event: " + event.kind() + ": context: "
+ event.context());
}
if (event.context() != null && event.context() instanceof Path) {
files.addAll(walkDirectory((Path) event.context()));
}
else {
for (Path path : this.directory) {
files.addAll(walkDirectory(path));
}
}
}
else {
if (log.isDebugEnabled()) {
log.debug("Watch Event: " + event.kind() + ": context: "
+ event.context());
}
}
}
key.reset();
key = this.watcher.poll();
}
return files;
}
private Set<File> walkDirectory(Path directory) {
final Set<File> walkedFiles = new LinkedHashSet<File>();
try {
registerWatch(directory);
Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult preVisitDirectory(Path dir,
BasicFileAttributes attrs) throws IOException {
FileVisitResult fileVisitResult = super.preVisitDirectory(dir, attrs);
registerWatch(dir);
return fileVisitResult;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
throws IOException {
FileVisitResult fileVisitResult = super.visitFile(file, attrs);
walkedFiles.add(file.toFile());
return fileVisitResult;
}
});
}
catch (IOException e) {
log.error("Failed to walk directory: " + directory.toString(), e);
}
return walkedFiles;
}
private void registerWatch(Path dir) throws IOException {
if (log.isDebugEnabled()) {
log.debug("registering: " + dir + " for file creation events");
}
dir.register(this.watcher, StandardWatchEventKinds.ENTRY_CREATE,
StandardWatchEventKinds.ENTRY_MODIFY);
}
}
|
Ensure no errors if not monitoring local files
Fixes gh-239
|
spring-cloud-config-monitor/src/main/java/org/springframework/cloud/config/monitor/FileMonitorConfiguration.java
|
Ensure no errors if not monitoring local files
|
|
Java
|
apache-2.0
|
e25051995011e5b690e154378deb39e3b42566ce
| 0
|
holmes/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,da1z/intellij-community,holmes/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,samthor/intellij-community,samthor/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,supersven/intellij-community,retomerz/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,clumsy/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,orekyuu/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,asedunov/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,akosyakov/intellij-community,slisson/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,amith01994/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,mglukhikh/inte
llij-community,Distrotech/intellij-community,ibinti/intellij-community,xfournet/intellij-community,samthor/intellij-community,dslomov/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,ibinti/intellij-community,apixandru/intellij-community,samthor/intellij-community,apixandru/intellij-community,signed/intellij-community,da1z/intellij-community,robovm/robovm-studio,blademainer/intellij-community,jagguli/intellij-community,salguarnieri/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,jexp/idea2,izonder/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,slisson/intellij-community,retomerz/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,da1z/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,fnouama/intellij-community,youd
onghai/intellij-community,jexp/idea2,TangHao1987/intellij-community,orekyuu/intellij-community,signed/intellij-community,fitermay/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,da1z/intellij-community,adedayo/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,consulo/consulo,clumsy/intellij-community,kool79/intellij-community,retomerz/intellij-community,kool79/intellij-community,ahb0327/intellij-community,hurricup/intellij-community,fnouama/intellij-community,semonte/intellij-community,ernestp/consulo,xfournet/intellij-community,lucafavatella/intellij-community,petteyg/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,robovm/robovm-studio,fnouama/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,asedunov/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,joewalnes/idea-community,ibinti/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,dslomov/intellij-community,xfournet/intellij-community,ryano144/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,blademainer/intellij-community,izonder/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,allotria/intellij-community,hurricup/intellij-community,samthor/intellij-community,signed/intellij-community,signed/intellij-community,ivan-fedorov/intellij-c
ommunity,joewalnes/idea-community,amith01994/intellij-community,caot/intellij-community,dslomov/intellij-community,kool79/intellij-community,holmes/intellij-community,kool79/intellij-community,adedayo/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,xfournet/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,joewalnes/idea-community,ryano144/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,apixandru/intellij-community,ernestp/consulo,allotria/intellij-community,signed/intellij-community,dslomov/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,TangHao1987/intellij-community,izonder/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,kdwink/intellij-community,suncycheng/intellij-community,izonder/intellij-community,apixandru/intellij-community,fitermay/intellij-community,izonder/intellij-community,ryano144/intellij-community,supersven/intellij-community,ibinti/intellij-community,supersven/intellij-community,ryano144/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,joewalnes/idea-community,suncycheng/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,clumsy/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,allotria/
intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,jagguli/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,ahb0327/intellij-community,supersven/intellij-community,vvv1559/intellij-community,semonte/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,fitermay/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,semonte/intellij-community,samthor/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,holmes/intellij-community,izonder/intellij-community,clumsy/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,holmes/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,blademainer/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,supersven/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,salguarnieri/intel
lij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,FHannes/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,vvv1559/intellij-community,ernestp/consulo,xfournet/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,asedunov/intellij-community,muntasirsyed/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,jexp/idea2,adedayo/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,fnouama/intellij-community,holmes/intellij-community,joewalnes/idea-community,nicolargo/intellij-community,consulo/consulo,robovm/robovm-studio,dslomov/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,clumsy/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,joewalnes/idea-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,da1z/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,da1z/intellij-c
ommunity,ftomassetti/intellij-community,vladmm/intellij-community,allotria/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,slisson/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,blademainer/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,allotria/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,consulo/consulo,ca
ot/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,hurricup/intellij-community,allotria/intellij-community,caot/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,ryano144/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,kool79/intellij-community,fitermay/intellij-community,hurricup/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,xfournet/intellij-community,ernestp/consulo,signed/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,diorcety/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,dslomov/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,izonder/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,signed/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,da1z/intellij-community,kool79/intellij-community,ernestp/consulo,ftomassetti/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,semonte/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,mglukhikh/intellij-community,jexp/idea2,Lekanich/intellij-community,mglukhikh/intellij-community,jexp/idea2,nicolargo/intellij-community,slisson/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,vladmm/intellij-c
ommunity,SerCeMan/intellij-community,fitermay/intellij-community,apixandru/intellij-community,slisson/intellij-community,kdwink/intellij-community,fitermay/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,samthor/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,consulo/consulo,akosyakov/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,FHannes/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,vladmm/intellij-community,asedunov/intellij-community,caot/intellij-community,asedunov/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,jexp/idea2,tmpgit/intellij-community,semonte/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,wreckJ/intellij-community,holmes/intellij-community,holmes/intellij-community,izonder/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,caot/intellij-community,semonte/intellij-community,samthor/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,da1z/intellij-community,jagguli/intellij-community,kool79/intellij-community,fitermay/intellij-community,hurricup/intellij-community,hurricup/intellij-community,clumsy/intellij-community,akosyakov/intellij-commun
ity,izonder/intellij-community,jagguli/intellij-community,kdwink/intellij-community,hurricup/intellij-community,joewalnes/idea-community,jexp/idea2,vladmm/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,ernestp/consulo,ahb0327/intellij-community,youdonghai/intellij-community,caot/intellij-community,consulo/consulo,allotria/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,jexp/idea2,consulo/consulo,vvv1559/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,signed/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,gnuhub/intellij-community,samthor/intellij-community,alphafoobar/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,caot/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,retomerz/intellij-community,fnouama/intellij-community,joewalnes/idea-community,ol-loginov/intellij-community,fitermay/intellij-community,amith01994/intellij-community,allotria/intellij-community,jagguli/intellij-co
mmunity,salguarnieri/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,caot/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,allotria/intellij-community,vladmm/intellij-community,apixandru/intellij-community,joewalnes/idea-community,xfournet/intellij-community,pwoodworth/intellij-community,signed/intellij-community,amith01994/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,slisson/intellij-community,FHannes/intellij-community,gnuhub/intellij-community,izonder/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,signed/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,slisson/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,allotria/intellij-community,adedayo/intellij-community,kool79/intellij-community,signed/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,retomerz/intellij-community
|
/*
* Created by IntelliJ IDEA.
* User: max
* Date: May 20, 2002
* Time: 6:21:42 PM
* To change template for new class use
* Code Style | Class Templates options (Tools | IDE Options).
*/
package com.intellij.codeInsight.editorActions;
import com.intellij.ide.DataManager;
import com.intellij.lang.properties.PropertiesUtil;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.DataKeys;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.editor.actionSystem.EditorWriteActionHandler;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
public class JoinLinesHandler extends EditorWriteActionHandler {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.editorActions.JoinLinesHandler");

  /** Handler to delegate to when the document or file cannot be processed here. */
  private final EditorActionHandler myOriginalHandler;

  public JoinLinesHandler(EditorActionHandler originalHandler) {
    myOriginalHandler = originalHandler;
  }

  /**
   * Joins the caret line (or each line of the selection) with the line below it.
   * Several "smart" joins are attempted first — concatenated string literals,
   * a declaration followed by its first assignment, a single-statement block
   * that can be unwrapped, and properties-file continuation lines — before
   * falling back to collapsing the intervening whitespace to a single space.
   */
  public void executeWriteAction(final Editor editor, final DataContext dataContext) {
    if (!(editor.getDocument() instanceof DocumentEx)) {
      myOriginalHandler.execute(editor, dataContext);
      return;
    }
    final DocumentEx doc = (DocumentEx)editor.getDocument();
    final Project project = DataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(editor.getContentComponent()));

    LogicalPosition caretPosition = editor.getCaretModel().getLogicalPosition();
    final PsiDocumentManager docManager = PsiDocumentManager.getInstance(project);
    PsiFile psiFile = docManager.getPsiFile(doc);

    if (psiFile == null) {
      myOriginalHandler.execute(editor, dataContext);
      return;
    }

    // Range of lines to join: just the caret line, or every selected line.
    int startLine = caretPosition.line;
    int endLine = startLine + 1;
    if (editor.getSelectionModel().hasSelection()) {
      startLine = doc.getLineNumber(editor.getSelectionModel().getSelectionStart());
      endLine = doc.getLineNumber(editor.getSelectionModel().getSelectionEnd());
      // A selection ending exactly at a line start does not include that line.
      if (doc.getLineStartOffset(endLine) == editor.getSelectionModel().getSelectionEnd()) endLine--;
    }

    int caretRestoreOffset = -1;
    // Each iteration joins startLine with the line below it, so the join always
    // happens at the same physical line while i counts how many joins remain.
    for (int i = startLine; i < endLine; i++) {
      if (i >= doc.getLineCount() - 1) break;

      int lineEndOffset = doc.getLineEndOffset(startLine);

      docManager.commitDocument(doc);
      CharSequence text = doc.getCharsSequence();
      int firstNonSpaceOffsetInNextLine = doc.getLineStartOffset(startLine + 1);
      while (text.charAt(firstNonSpaceOffsetInNextLine) == ' ' || text.charAt(firstNonSpaceOffsetInNextLine) == '\t') {
        firstNonSpaceOffsetInNextLine++;
      }
      PsiElement elementAtNextLineStart = psiFile.findElementAt(firstNonSpaceOffsetInNextLine);

      boolean isNextLineStartsWithComment = elementAtNextLineStart instanceof PsiComment ||
                                            elementAtNextLineStart != null &&
                                            PsiTreeUtil.getParentOfType(elementAtNextLineStart, PsiDocComment.class) != null;

      int lastNonSpaceOffsetInStartLine = lineEndOffset;
      while (lastNonSpaceOffsetInStartLine > 0 &&
             (text.charAt(lastNonSpaceOffsetInStartLine - 1) == ' ' || text.charAt(lastNonSpaceOffsetInStartLine - 1) == '\t')) {
        lastNonSpaceOffsetInStartLine--;
      }
      int elemOffset = lastNonSpaceOffsetInStartLine > doc.getLineStartOffset(startLine) ? lastNonSpaceOffsetInStartLine - 1 : -1;
      PsiElement elementAtStartLineEnd = elemOffset == -1 ? null : psiFile.findElementAt(elemOffset);
      boolean isStartLineEndsWithComment = elementAtStartLineEnd instanceof PsiComment ||
                                           elementAtStartLineEnd != null &&
                                           PsiTreeUtil.getParentOfType(elementAtStartLineEnd, PsiDocComment.class) != null;

      // The start line is blank: delete it entirely and re-indent what moved up.
      if (lastNonSpaceOffsetInStartLine == doc.getLineStartOffset(startLine)) {
        doc.deleteString(doc.getLineStartOffset(startLine), firstNonSpaceOffsetInNextLine);

        int indent = -1;
        try {
          docManager.commitDocument(doc);
          indent = CodeStyleManager.getInstance(project).adjustLineIndent(psiFile, startLine == 0 ? 0 : doc.getLineStartOffset(startLine));
        }
        catch (IncorrectOperationException e) {
          LOG.error(e);
        }

        if (caretRestoreOffset == -1) {
          caretRestoreOffset = indent;
        }
        continue;
      }

      doc.deleteString(lineEndOffset, lineEndOffset + doc.getLineSeparatorLength(startLine));

      text = doc.getCharsSequence();
      // [start, end] brackets the whitespace run around the removed separator.
      int start = lineEndOffset - 1;
      int end = lineEndOffset;
      while (start > 0 && (text.charAt(start) == ' ' || text.charAt(start) == '\t')) start--;
      while (end < doc.getTextLength() && (text.charAt(end) == ' ' || text.charAt(end) == '\t')) end++;

      // Check if we're joining splitted string literal.
      docManager.commitDocument(doc);
      int rc = tryJoinStringLiteral(doc, psiFile, start);
      if (rc == -1) {
        PsiElement psiAtStartLineEnd = psiFile.findElementAt(start);
        PsiElement psiAtNextLineStart = psiFile.findElementAt(end);
        rc = tryJoinDeclaration(psiAtStartLineEnd, psiAtNextLineStart);
        if (rc == -1) {
          rc = tryUnwrapBlockStatement(psiAtStartLineEnd, psiAtNextLineStart);
          if (rc == -1) {
            rc = tryJoinProperties(doc, psiFile, start);
          }
        }
      }

      PsiDocumentManager.getInstance(project).doPostponedOperationsAndUnblockDocument(doc);

      if (rc != -1) {
        if (caretRestoreOffset == -1) caretRestoreOffset = rc;
        continue;
      }

      if (caretRestoreOffset == -1) caretRestoreOffset = start == lineEndOffset ? start : start + 1;

      if (isStartLineEndsWithComment && isNextLineStartsWithComment) {
        // Strip the joined line's leading comment marker ('*' of a doc-comment
        // continuation, or "//" of a line comment) and its trailing padding.
        // FIX: bounds-check end + 1 BEFORE reading charAt(end)/charAt(end + 1);
        // the original order ("end < text.length()" after charAt(end)) threw
        // StringIndexOutOfBoundsException when '*' was the last character.
        if (end + 1 < text.length() && text.charAt(end) == '*' && text.charAt(end + 1) != '/') {
          end++;
          while (end < doc.getTextLength() && (text.charAt(end) == ' ' || text.charAt(end) == '\t')) end++;
        }
        else if (end < text.length() && text.charAt(end) == '/') {
          end += 2;
          while (end < doc.getTextLength() && (text.charAt(end) == ' ' || text.charAt(end) == '\t')) end++;
        }

        doc.replaceString(start == lineEndOffset ? start : start + 1, end, " ");
        continue;
      }

      while (end < doc.getTextLength() && (text.charAt(end) == ' ' || text.charAt(end) == '\t')) end++;
      doc.replaceString(start == lineEndOffset ? start : start + 1, end, " ");

      if (start <= doc.getLineStartOffset(startLine)) {
        try {
          docManager.commitDocument(doc);
          CodeStyleManager.getInstance(project).adjustLineIndent(psiFile, doc.getLineStartOffset(startLine));
        }
        catch (IncorrectOperationException e) {
          LOG.error(e);
        }
      }

      int prevLineCount = doc.getLineCount();

      docManager.commitDocument(doc);
      try {
        CodeStyleManager.getInstance(project).reformatText(psiFile, start + 1, end);
      }
      catch (IncorrectOperationException e) {
        LOG.error(e);
      }

      // Reformatting may have re-split the joined line; collapse the split back.
      if (prevLineCount < doc.getLineCount()) {
        end = doc.getLineEndOffset(startLine) + doc.getLineSeparatorLength(startLine);
        start = end - doc.getLineSeparatorLength(startLine);
        int addedLinesCount = doc.getLineCount() - prevLineCount - 1;

        while (end < doc.getTextLength() &&
               (text.charAt(end) == ' ' || text.charAt(end) == '\t' || text.charAt(end) == '\n' && addedLinesCount > 0)) {
          if (text.charAt(end) == '\n') addedLinesCount--;
          end++;
        }
        doc.replaceString(start, end, " ");
      }

      docManager.commitDocument(doc);
    }

    if (editor.getSelectionModel().hasSelection()) {
      editor.getCaretModel().moveToOffset(editor.getSelectionModel().getSelectionEnd());
    }
    else if (caretRestoreOffset != -1) {
      editor.getCaretModel().moveToOffset(caretRestoreOffset);
      editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
      editor.getSelectionModel().removeSelection();
    }
  }

  /**
   * In a properties file, strips a trailing unescaped continuation backslash so
   * the two physical lines join into one logical value.
   *
   * @return the caret offset after the join, or -1 if the file is not a
   *         properties file
   */
  private static int tryJoinProperties(final DocumentEx doc, final PsiFile psiFile, int start) {
    if (!(psiFile instanceof PropertiesFile)) return -1;
    // strip continuation char
    if (PropertiesUtil.isUnescapedBackSlashAtTheEnd(doc.getText().substring(0, start + 1))) {
      doc.deleteString(start, start + 1);
      start--;
    }
    return start + 1;
  }

  /**
   * If the first line ends with '{' opening a block statement that contains
   * exactly one statement, and the code style does not force braces for the
   * parent construct (if/while/for/foreach/do-while), replaces the block with
   * that single statement.
   *
   * @return the start offset of the unwrapped statement, or -1 if not applicable
   */
  private static int tryUnwrapBlockStatement(PsiElement elementAtStartLineEnd, PsiElement elementAtNextLineStart) {
    if (elementAtStartLineEnd == null || elementAtNextLineStart == null) return -1;
    if (!(elementAtStartLineEnd instanceof PsiJavaToken) || ((PsiJavaToken)elementAtStartLineEnd).getTokenType() != JavaTokenType.LBRACE) {
      return -1;
    }
    final PsiElement codeBlock = elementAtStartLineEnd.getParent();
    if (!(codeBlock instanceof PsiCodeBlock)) return -1;
    if (!(codeBlock.getParent() instanceof PsiBlockStatement)) return -1;
    final PsiElement parentStatement = codeBlock.getParent().getParent();

    // Never unwrap when the code style forces braces for this construct.
    final CodeStyleSettings codeStyleSettings = CodeStyleSettingsManager.getSettings(elementAtStartLineEnd.getProject());
    if (!(parentStatement instanceof PsiIfStatement && codeStyleSettings.IF_BRACE_FORCE != CodeStyleSettings.FORCE_BRACES_ALWAYS ||
          parentStatement instanceof PsiWhileStatement && codeStyleSettings.WHILE_BRACE_FORCE != CodeStyleSettings.FORCE_BRACES_ALWAYS ||
          (parentStatement instanceof PsiForStatement || parentStatement instanceof PsiForeachStatement) &&
          codeStyleSettings.FOR_BRACE_FORCE != CodeStyleSettings.FORCE_BRACES_ALWAYS ||
          parentStatement instanceof PsiDoWhileStatement &&
          codeStyleSettings.DOWHILE_BRACE_FORCE != CodeStyleSettings.FORCE_BRACES_ALWAYS)) {
      return -1;
    }

    // The block must contain exactly one statement between '{' and '}'.
    PsiElement foundStatement = null;
    for (PsiElement element = elementAtStartLineEnd.getNextSibling(); element != null; element = element.getNextSibling()) {
      if (element instanceof PsiWhiteSpace) continue;
      if (element instanceof PsiJavaToken &&
          ((PsiJavaToken)element).getTokenType() == JavaTokenType.RBRACE &&
          element.getParent() == codeBlock) {
        if (foundStatement == null) return -1;
        break;
      }
      if (foundStatement != null) return -1;
      foundStatement = element;
    }
    try {
      final PsiElement newStatement = codeBlock.getParent().replace(foundStatement);
      return newStatement.getTextRange().getStartOffset();
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
    return -1;
  }

  /**
   * Joins {@code Type v; v = expr;} (or {@code v op= expr;}) into a single
   * initialized declaration {@code Type v = expr;}, provided the variable is
   * not referenced inside its own initializer expression.
   *
   * @return the offset just past the new declaration, or -1 if not applicable
   */
  private static int tryJoinDeclaration(PsiElement elementAtStartLineEnd, PsiElement elementAtNextLineStart) {
    if (elementAtStartLineEnd == null || elementAtNextLineStart == null) return -1;

    // First line must end a single-variable local declaration with ';'.
    if (!(elementAtStartLineEnd instanceof PsiJavaToken)) return -1;
    PsiJavaToken lastFirstLineToken = (PsiJavaToken)elementAtStartLineEnd;
    if (lastFirstLineToken.getTokenType() != JavaTokenType.SEMICOLON) return -1;
    if (!(lastFirstLineToken.getParent() instanceof PsiLocalVariable)) return -1;
    PsiLocalVariable var = (PsiLocalVariable)lastFirstLineToken.getParent();
    if (!(var.getParent() instanceof PsiDeclarationStatement)) return -1;
    PsiDeclarationStatement decl = (PsiDeclarationStatement)var.getParent();
    if (decl.getDeclaredElements().length > 1) return -1;

    // Second line must start with an assignment to that same variable.
    if (!(elementAtNextLineStart instanceof PsiJavaToken)) return -1;
    PsiJavaToken firstNextLineToken = (PsiJavaToken)elementAtNextLineStart;
    if (firstNextLineToken.getTokenType() != JavaTokenType.IDENTIFIER) return -1;
    if (!(firstNextLineToken.getParent() instanceof PsiReferenceExpression)) return -1;
    PsiReferenceExpression ref = (PsiReferenceExpression)firstNextLineToken.getParent();
    PsiElement refResolved = ref.resolve();

    PsiManager psiManager = ref.getManager();
    if (!psiManager.areElementsEquivalent(refResolved, var)) return -1;
    if (!(ref.getParent() instanceof PsiAssignmentExpression)) return -1;
    PsiAssignmentExpression assignment = (PsiAssignmentExpression)ref.getParent();
    if (!(assignment.getParent() instanceof PsiExpressionStatement)) return -1;

    // Bail out if the RHS refers to the variable itself (e.g. "v = v + 1;").
    if (psiManager.getSearchHelper().findReferences(var, new LocalSearchScope(assignment.getRExpression()), false).length > 0) {
      return -1;
    }

    final PsiElementFactory factory = psiManager.getElementFactory();
    PsiExpression initializerExpression;
    final IElementType originalOpSign = assignment.getOperationSign().getTokenType();
    if (originalOpSign == JavaTokenType.EQ) {
      initializerExpression = assignment.getRExpression();
    }
    else {
      // Compound assignment: fold the declaration's initializer and the RHS
      // into "init <op> rhs" using the binary operator behind "<op>=".
      if (var.getInitializer() == null) return -1;
      String opSign = null;
      if (originalOpSign == JavaTokenType.ANDEQ) {
        opSign = "&";
      }
      else if (originalOpSign == JavaTokenType.ASTERISKEQ) {
        opSign = "*";
      }
      else if (originalOpSign == JavaTokenType.DIVEQ) {
        opSign = "/";
      }
      else if (originalOpSign == JavaTokenType.GTGTEQ) {
        opSign = ">>";
      }
      else if (originalOpSign == JavaTokenType.GTGTGTEQ) {
        opSign = ">>>";
      }
      else if (originalOpSign == JavaTokenType.LTLTEQ) {
        opSign = "<<";
      }
      else if (originalOpSign == JavaTokenType.MINUSEQ) {
        opSign = "-";
      }
      else if (originalOpSign == JavaTokenType.OREQ) {
        opSign = "|";
      }
      else if (originalOpSign == JavaTokenType.PERCEQ) {
        opSign = "%";
      }
      else if (originalOpSign == JavaTokenType.PLUSEQ) {
        opSign = "+";
      }
      else if (originalOpSign == JavaTokenType.XOREQ) {
        opSign = "^";
      }

      try {
        initializerExpression =
          factory.createExpressionFromText(var.getInitializer().getText() + opSign + assignment.getRExpression().getText(), var);
        initializerExpression = (PsiExpression)CodeStyleManager.getInstance(psiManager).reformat(initializerExpression);
      }
      catch (IncorrectOperationException e) {
        LOG.error(e);
        return -1;
      }
    }

    PsiExpressionStatement statement = (PsiExpressionStatement)assignment.getParent();
    int startOffset = decl.getTextRange().getStartOffset();
    try {
      PsiDeclarationStatement newDecl = factory.createVariableDeclarationStatement(var.getName(), var.getType(), initializerExpression);
      PsiVariable newVar = (PsiVariable)newDecl.getDeclaredElements()[0];
      if (var.getModifierList().getText().length() > 0) {
        newVar.getModifierList().setModifierProperty(PsiModifier.FINAL, true);
      }
      // Carry the original modifiers over verbatim.
      newVar.getModifierList().replace(var.getModifierList());

      PsiVariable variable = (PsiVariable)newDecl.getDeclaredElements()[0];
      // Reformat only the region around '=' so the rest keeps its formatting.
      final int offsetBeforeEQ = variable.getNameIdentifier().getTextRange().getEndOffset();
      final int offsetAfterEQ = variable.getInitializer().getTextRange().getStartOffset() + 1;
      newDecl = (PsiDeclarationStatement)CodeStyleManager.getInstance(psiManager).reformatRange(newDecl, offsetBeforeEQ, offsetAfterEQ);

      decl.replace(newDecl);
      statement.delete();
      return startOffset + newDecl.getTextRange().getEndOffset() - newDecl.getTextRange().getStartOffset();
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
      return -1;
    }
  }

  /**
   * Detects the pattern {@code "..." + "..."} split across the join point and
   * merges the two string literals by deleting the closing quote, the '+' and
   * the reopening quote.
   *
   * @return the offset of the deleted closing quote, or -1 if not applicable
   */
  private static int tryJoinStringLiteral(Document doc, PsiFile psiFile, int offsetNear) {
    CharSequence text = doc.getCharsSequence();
    int start = offsetNear;
    // Scan backwards over whitespace and '+' to find the preceding literal.
    // FIX: guard start > 0 — the original scan could underflow past offset 0
    // and throw StringIndexOutOfBoundsException on charAt(-1).
    while (start > 0 && (text.charAt(start) == ' ' || text.charAt(start) == '\t' || text.charAt(start) == '+')) start--;
    if (text.charAt(start) == '\"') start--;
    if (start < offsetNear) start++;

    // Tiny state machine: 0 = looking for closing quote, 1 = saw closing
    // quote, 2 = saw '+' — a quote in state 2 completes the pattern.
    int state = 0;
    int startQuoteOffset = -1;
    state_loop:
    for (int j = start; j < doc.getTextLength(); j++) {
      switch (text.charAt(j)) {
        case ' ':
        case '\t':
          break;
        case '\"':
          if (state == 0) {
            state = 1;
            startQuoteOffset = j;
            PsiElement psiAtOffset = psiFile.findElementAt(j);
            if (!(psiAtOffset instanceof PsiJavaToken)) return -1;
            if (((PsiJavaToken)psiAtOffset).getTokenType() != JavaTokenType.STRING_LITERAL) return -1;
            break;
          }
          if (state == 2) {
            // Remove: closing quote .. opening quote of the next literal.
            doc.deleteString(startQuoteOffset, j + 1);
            return startQuoteOffset;
          }
          break state_loop;
        case '+':
          if (state != 1) break state_loop;
          state = 2;
          break;
        default:
          break state_loop;
      }
    }
    return -1;
  }
}
|
codeInsight/impl/com/intellij/codeInsight/editorActions/JoinLinesHandler.java
|
/*
* Created by IntelliJ IDEA.
* User: max
* Date: May 20, 2002
* Time: 6:21:42 PM
* To change template for new class use
* Code Style | Class Templates options (Tools | IDE Options).
*/
package com.intellij.codeInsight.editorActions;
import com.intellij.ide.DataManager;
import com.intellij.lang.properties.PropertiesUtil;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.DataKeys;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.editor.actionSystem.EditorWriteActionHandler;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
public class JoinLinesHandler extends EditorWriteActionHandler {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.editorActions.JoinLinesHandler");
private EditorActionHandler myOriginalHandler;
public JoinLinesHandler(EditorActionHandler originalHandler) {
myOriginalHandler = originalHandler;
}
public void executeWriteAction(final Editor editor, final DataContext dataContext) {
if (!(editor.getDocument() instanceof DocumentEx)) {
myOriginalHandler.execute(editor, dataContext);
return;
}
final DocumentEx doc = (DocumentEx) editor.getDocument();
final Project project = DataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(editor.getContentComponent()));
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
LogicalPosition caretPosition = editor.getCaretModel().getLogicalPosition();
final PsiDocumentManager docManager = PsiDocumentManager.getInstance(project);
PsiFile psiFile = docManager.getPsiFile(doc);
if (psiFile == null) {
myOriginalHandler.execute(editor, dataContext);
return;
}
int startLine = caretPosition.line;
int endLine = startLine + 1;
if (editor.getSelectionModel().hasSelection()) {
startLine = doc.getLineNumber(editor.getSelectionModel().getSelectionStart());
endLine = doc.getLineNumber(editor.getSelectionModel().getSelectionEnd());
if (doc.getLineStartOffset(endLine) == editor.getSelectionModel().getSelectionEnd()) endLine--;
}
int caretRestoreOffset = -1;
for (int i = startLine; i < endLine; i++) {
if (i >= doc.getLineCount() - 1) break;
int lineEndOffset = doc.getLineEndOffset(startLine);
docManager.commitDocument(doc);
CharSequence text = doc.getCharsSequence();
int firstNonSpaceOffsetInNextLine = doc.getLineStartOffset(startLine + 1);
while (text.charAt(firstNonSpaceOffsetInNextLine) == ' ' || text.charAt(firstNonSpaceOffsetInNextLine) == '\t') firstNonSpaceOffsetInNextLine++;
PsiElement elementAtNextLineStart = psiFile.findElementAt(firstNonSpaceOffsetInNextLine);
boolean isNextLineStartsWithComment = elementAtNextLineStart instanceof PsiComment ||
elementAtNextLineStart != null &&
PsiTreeUtil.getParentOfType(
elementAtNextLineStart,
PsiDocComment.class
) != null;
int lastNonSpaceOffsetInStartLine = lineEndOffset;
while (lastNonSpaceOffsetInStartLine > 0 &&
(text.charAt(lastNonSpaceOffsetInStartLine - 1) == ' ' || text.charAt(lastNonSpaceOffsetInStartLine - 1) == '\t')) {
lastNonSpaceOffsetInStartLine--;
}
int elemOffset = lastNonSpaceOffsetInStartLine > doc.getLineStartOffset(startLine)
? lastNonSpaceOffsetInStartLine - 1
: -1;
PsiElement elementAtStartLineEnd = elemOffset == -1 ? null : psiFile.findElementAt(elemOffset);
boolean isStartLineEndsWithComment = elementAtStartLineEnd instanceof PsiComment ||
elementAtStartLineEnd != null &&
PsiTreeUtil.getParentOfType(elementAtStartLineEnd, PsiDocComment.class) != null;
if (lastNonSpaceOffsetInStartLine == doc.getLineStartOffset(startLine)) {
doc.deleteString(doc.getLineStartOffset(startLine), firstNonSpaceOffsetInNextLine);
int indent = -1;
try {
docManager.commitDocument(doc);
indent = CodeStyleManager.getInstance(project).adjustLineIndent(
psiFile,
startLine == 0 ? 0 : doc.getLineStartOffset(startLine)
);
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
if (caretRestoreOffset == -1) {
caretRestoreOffset = indent;
}
continue;
}
doc.deleteString(lineEndOffset, lineEndOffset + doc.getLineSeparatorLength(startLine));
text = doc.getCharsSequence();
int start = lineEndOffset-1;
int end = lineEndOffset;
while (start > 0 && (text.charAt(start) == ' ' || text.charAt(start) == '\t')) start--;
while (end < doc.getTextLength() && (text.charAt(end) == ' ' || text.charAt(end) == '\t')) end++;
// Check if we're joining splitted string literal.
docManager.commitDocument(doc);
int rc = tryJoinStringLiteral(doc, psiFile, start);
if (rc == -1) {
PsiElement psiAtStartLineEnd = psiFile.findElementAt(start);
PsiElement psiAtNextLineStart = psiFile.findElementAt(end);
rc = tryJoinDeclaration(psiAtStartLineEnd, psiAtNextLineStart);
if (rc == -1) {
rc = tryUnwrapBlockStatement(psiAtStartLineEnd, psiAtNextLineStart);
if (rc == -1) {
rc = tryJoinProperties(doc, psiFile, start);
}
}
}
if (rc != -1) {
if (caretRestoreOffset == -1) caretRestoreOffset = rc;
continue;
}
if (caretRestoreOffset == -1) caretRestoreOffset = start == lineEndOffset ? start : start + 1;
if (isStartLineEndsWithComment && isNextLineStartsWithComment) {
if (text.charAt(end) == '*' && end < text.length() && text.charAt(end + 1) != '/') {
end++;
while (end < doc.getTextLength() && (text.charAt(end) == ' ' || text.charAt(end) == '\t')) end++;
} else if (text.charAt(end) == '/') {
end += 2;
while (end < doc.getTextLength() && (text.charAt(end) == ' ' || text.charAt(end) == '\t')) end++;
}
doc.replaceString(start == lineEndOffset ? start : start + 1, end, " ");
continue;
}
while (end < doc.getTextLength() && (text.charAt(end) == ' ' || text.charAt(end) == '\t')) end++;
doc.replaceString(start == lineEndOffset ? start : start + 1, end, " ");
if (start <= doc.getLineStartOffset(startLine)) {
try {
docManager.commitDocument(doc);
CodeStyleManager.getInstance(project).adjustLineIndent(psiFile, doc.getLineStartOffset(startLine));
} catch (IncorrectOperationException e) {
LOG.error(e);
}
}
int prevLineCount = doc.getLineCount();
docManager.commitDocument(doc);
try {
CodeStyleManager.getInstance(project).reformatText(psiFile, start+1, end);
} catch (IncorrectOperationException e) {
LOG.error(e);
}
if (prevLineCount < doc.getLineCount()) {
end = doc.getLineEndOffset(startLine) + doc.getLineSeparatorLength(startLine);
start = end - doc.getLineSeparatorLength(startLine);
int addedLinesCount = doc.getLineCount() - prevLineCount - 1;
while (end < doc.getTextLength() &&
(text.charAt(end) == ' ' || text.charAt(end) == '\t' || text.charAt(end) == '\n' && addedLinesCount > 0)) {
if (text.charAt(end) == '\n') addedLinesCount--;
end++;
}
doc.replaceString(start, end, " ");
}
docManager.commitDocument(doc);
}
if (editor.getSelectionModel().hasSelection()) {
editor.getCaretModel().moveToOffset(editor.getSelectionModel().getSelectionEnd());
} else if (caretRestoreOffset != -1) {
editor.getCaretModel().moveToOffset(caretRestoreOffset);
editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
editor.getSelectionModel().removeSelection();
}
}
});
}
/**
 * Joins a continuation line in a .properties file by stripping the trailing
 * unescaped backslash (the line-continuation marker) before the line break.
 *
 * @param start offset of the last character of the first line
 * @return caret offset after the join, or -1 if the file is not a properties file
 */
private static int tryJoinProperties(final DocumentEx doc, final PsiFile psiFile, int start) {
    // Backslash continuations only exist in properties files.
    if (!(psiFile instanceof PropertiesFile)) return -1;
    final String textUpToLineEnd = doc.getText().substring(0, start + 1);
    if (PropertiesUtil.isUnescapedBackSlashAtTheEnd(textUpToLineEnd)) {
        // Remove the continuation character; caret lands where it used to be.
        doc.deleteString(start, start + 1);
        return start;
    }
    return start + 1;
}
/**
 * If the first line ends with "{" opening a single-statement block that belongs to an
 * if/while/for/foreach/do-while statement, replaces the whole block with that single
 * statement, removing the now-redundant braces as part of the line join.
 *
 * @return start offset of the unwrapped statement, or -1 if the transformation does not apply
 */
private static int tryUnwrapBlockStatement(PsiElement elementAtStartLineEnd, PsiElement elementAtNextLineStart) {
if (elementAtStartLineEnd == null || elementAtNextLineStart == null) return -1;
// The first line must end exactly on the opening brace of a code block.
if (!(elementAtStartLineEnd instanceof PsiJavaToken) || ((PsiJavaToken)elementAtStartLineEnd).getTokenType() != JavaTokenType.LBRACE) {
return -1;
}
final PsiElement codeBlock = elementAtStartLineEnd.getParent();
if (!(codeBlock instanceof PsiCodeBlock)) return -1;
if (!(codeBlock.getParent() instanceof PsiBlockStatement)) return -1;
final PsiElement parentStatement = codeBlock.getParent().getParent();
final CodeStyleSettings codeStyleSettings = CodeStyleSettingsManager.getSettings(elementAtStartLineEnd.getProject());
// Only unwrap when the active code style does not force braces for this construct.
if (!(parentStatement instanceof PsiIfStatement && codeStyleSettings.IF_BRACE_FORCE != CodeStyleSettings.FORCE_BRACES_ALWAYS ||
parentStatement instanceof PsiWhileStatement && codeStyleSettings.WHILE_BRACE_FORCE != CodeStyleSettings.FORCE_BRACES_ALWAYS ||
(parentStatement instanceof PsiForStatement || parentStatement instanceof PsiForeachStatement) &&
codeStyleSettings.FOR_BRACE_FORCE != CodeStyleSettings.FORCE_BRACES_ALWAYS ||
parentStatement instanceof PsiDoWhileStatement &&
codeStyleSettings.DOWHILE_BRACE_FORCE != CodeStyleSettings.FORCE_BRACES_ALWAYS)) {
return -1;
}
// Scan the block's children: accept exactly one statement followed by the closing brace.
PsiElement foundStatement = null;
for (PsiElement element = elementAtStartLineEnd.getNextSibling(); element != null; element = element.getNextSibling()) {
if (element instanceof PsiWhiteSpace) continue;
if (element instanceof PsiJavaToken && ((PsiJavaToken)element).getTokenType() == JavaTokenType.RBRACE && element.getParent() == codeBlock) {
// Reached the block's own "}"; bail out if the block was empty.
if (foundStatement == null) return -1;
break;
}
// More than one statement inside the block: cannot unwrap.
if (foundStatement != null) return -1;
foundStatement = element;
}
try {
// Replace the braced block statement with its sole inner statement.
final PsiElement newStatement = codeBlock.getParent().replace(foundStatement);
return newStatement.getTextRange().getStartOffset();
}
catch (IncorrectOperationException e) {
LOG.error(e);
}
return -1;
}
/**
 * Joins a local variable declaration with a separate assignment on the next line into one
 * initialized declaration, e.g. {@code int x; x = 5;} becomes {@code int x = 5;}.
 * Compound assignments ({@code +=}, {@code <<=}, ...) are folded into the existing
 * initializer, e.g. {@code int x = 1; x += 2;} becomes {@code int x = 1 + 2;}.
 *
 * @return caret offset after the merged declaration, or -1 if the pattern does not match
 */
private static int tryJoinDeclaration(PsiElement elementAtStartLineEnd, PsiElement elementAtNextLineStart) {
if (elementAtStartLineEnd == null || elementAtNextLineStart == null) return -1;
// first line.
// The first line must end with the ";" of a single-variable local declaration.
if (!(elementAtStartLineEnd instanceof PsiJavaToken)) return -1;
PsiJavaToken lastFirstLineToken = (PsiJavaToken) elementAtStartLineEnd;
if (lastFirstLineToken.getTokenType() != JavaTokenType.SEMICOLON) return -1;
if (!(lastFirstLineToken.getParent() instanceof PsiLocalVariable)) return -1;
PsiLocalVariable var = (PsiLocalVariable) lastFirstLineToken.getParent();
if (!(var.getParent() instanceof PsiDeclarationStatement)) return -1;
PsiDeclarationStatement decl = (PsiDeclarationStatement) var.getParent();
if (decl.getDeclaredElements().length > 1) return -1;
//second line.
// The next line must start with a reference to that same variable, used as the
// left-hand side of an assignment expression statement.
if (!(elementAtNextLineStart instanceof PsiJavaToken)) return -1;
PsiJavaToken firstNextLineToken = (PsiJavaToken) elementAtNextLineStart;
if (firstNextLineToken.getTokenType() != JavaTokenType.IDENTIFIER) return -1;
if (!(firstNextLineToken.getParent() instanceof PsiReferenceExpression)) return -1;
PsiReferenceExpression ref = (PsiReferenceExpression) firstNextLineToken.getParent();
PsiElement refResolved = ref.resolve();
PsiManager psiManager = ref.getManager();
if (!psiManager.areElementsEquivalent(refResolved, var)) return -1;
if (!(ref.getParent() instanceof PsiAssignmentExpression)) return -1;
PsiAssignmentExpression assignment = (PsiAssignmentExpression) ref.getParent();
if (!(assignment.getParent() instanceof PsiExpressionStatement)) return -1;
// Bail out if the right-hand side references the variable itself: the merged
// initializer would then read the variable before its own initialization.
if (psiManager.getSearchHelper().findReferences(var, new LocalSearchScope(assignment.getRExpression()), false).length > 0) {
return -1;
}
final PsiElementFactory factory = psiManager.getElementFactory();
PsiExpression initializerExpression;
final IElementType originalOpSign = assignment.getOperationSign().getTokenType();
if (originalOpSign == JavaTokenType.EQ) {
// Plain assignment: the RHS becomes the initializer as-is.
initializerExpression = assignment.getRExpression();
}
else {
// Compound assignment: fold "init OP= rhs" into "init OP rhs"; requires an existing initializer.
if (var.getInitializer() == null) return -1;
String opSign = null;
if (originalOpSign == JavaTokenType.ANDEQ) {
opSign = "&";
}
else if (originalOpSign == JavaTokenType.ASTERISKEQ) {
opSign = "*";
}
else if (originalOpSign == JavaTokenType.DIVEQ) {
opSign = "/";
}
else if (originalOpSign == JavaTokenType.GTGTEQ) {
opSign = ">>";
}
else if (originalOpSign == JavaTokenType.GTGTGTEQ) {
opSign = ">>>";
}
else if (originalOpSign == JavaTokenType.LTLTEQ) {
opSign = "<<";
}
else if (originalOpSign == JavaTokenType.MINUSEQ) {
opSign = "-";
}
else if (originalOpSign == JavaTokenType.OREQ) {
opSign = "|";
}
else if (originalOpSign == JavaTokenType.PERCEQ) {
opSign = "%";
}
else if (originalOpSign == JavaTokenType.PLUSEQ) {
opSign = "+";
}
else if (originalOpSign == JavaTokenType.XOREQ) {
opSign = "^";
}
try {
initializerExpression = factory.createExpressionFromText(var.getInitializer().getText() + opSign + assignment.getRExpression().getText(), var);
initializerExpression = (PsiExpression)CodeStyleManager.getInstance(psiManager).reformat(initializerExpression);
}
catch (IncorrectOperationException e) {
LOG.error(e);
return -1;
}
}
PsiExpressionStatement statement = (PsiExpressionStatement) assignment.getParent();
int startOffset = decl.getTextRange().getStartOffset();
try {
PsiDeclarationStatement newDecl = factory.createVariableDeclarationStatement(
var.getName(), var.getType(),
initializerExpression
);
PsiVariable newVar = (PsiVariable)newDecl.getDeclaredElements()[0];
// NOTE(review): the FINAL property set here appears to be immediately overwritten by the
// modifier-list replace on the next statement — confirm setModifierProperty is needed for
// side effects before simplifying.
if (var.getModifierList().getText().length() > 0) {
newVar.getModifierList().setModifierProperty(PsiModifier.FINAL, true);
}
newVar.getModifierList().replace(var.getModifierList());
PsiVariable variable = (PsiVariable)newDecl.getDeclaredElements()[0];
// Reformat just the "name = initializer" region around the "=" sign.
final int offsetBeforeEQ = variable.getNameIdentifier().getTextRange().getEndOffset();
final int offsetAfterEQ = variable.getInitializer().getTextRange().getStartOffset() + 1;
newDecl = (PsiDeclarationStatement)CodeStyleManager.getInstance(psiManager).reformatRange(newDecl,
offsetBeforeEQ,
offsetAfterEQ);
decl.replace(newDecl);
statement.delete();
// Caret goes to the end of the merged declaration (original start + new declaration length).
return startOffset + newDecl.getTextRange().getEndOffset() - newDecl.getTextRange().getStartOffset();
} catch (IncorrectOperationException e) {
LOG.error(e);
return -1;
}
}
/**
 * Joins two string literals concatenated with "+" across the line break, e.g.
 * {@code "a" +} / {@code "b"} becomes {@code "ab"}, by deleting the first literal's
 * closing quote, the "+" and the second literal's opening quote.
 *
 * @param offsetNear document offset near the end of the first line
 * @return offset where the deletion started if the join happened, -1 otherwise
 */
private static int tryJoinStringLiteral(Document doc, PsiFile psiFile, int offsetNear) {
CharSequence text = doc.getCharsSequence();
// Walk left over whitespace, "+" and an optional closing quote to find a scan start.
int start = offsetNear;
while (text.charAt(start) == ' ' || text.charAt(start) == '\t' || text.charAt(start) == '+') start--;
if (text.charAt(start) == '\"') start--;
if (start < offsetNear) start++;
// Simple state machine scanning forward:
// state 0 - looking for the closing quote of the first literal
// state 1 - closing quote seen, looking for "+"
// state 2 - "+" seen, looking for the opening quote of the second literal
int state = 0;
int startQuoteOffset = -1;
state_loop:
for (int j = start; j < doc.getTextLength(); j++) {
switch (text.charAt(j)) {
case ' ':
case '\t':
// Whitespace between the tokens is skipped in every state.
break;
case '\"':
if (state == 0) {
state = 1;
startQuoteOffset = j;
// Verify the quote really belongs to a string literal token.
PsiElement psiAtOffset = psiFile.findElementAt(j);
if (!(psiAtOffset instanceof PsiJavaToken)) return -1;
if (((PsiJavaToken)psiAtOffset).getTokenType() != JavaTokenType.STRING_LITERAL) return -1;
break;
}
if (state == 2) {
// Delete closing quote, "+", whitespace and opening quote in one shot.
doc.deleteString(startQuoteOffset, j + 1);
return startQuoteOffset;
}
break state_loop;
case '+':
if (state != 1) break state_loop;
state = 2;
break;
default: break state_loop;
}
}
return -1;
}
}
|
IDEADEV-21419
|
codeInsight/impl/com/intellij/codeInsight/editorActions/JoinLinesHandler.java
|
IDEADEV-21419
|
|
Java
|
apache-2.0
|
0c7ca02070e43ba401f002e15e0d8c8fad754b61
| 0
|
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon;
import com.intellij.psi.PsiElement;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.List;
/**
 * @author yole
 * @see LineMarkerProviders#EP_NAME
 * @see LineMarkerProviderDescriptor
 * @see RelatedItemLineMarkerProvider
 */
public interface LineMarkerProvider {
/**
 * Get line markers for this PsiElement.
 * <p/>
 * NOTE for implementers:
 * Please create line marker info for leaf elements only - i.e. the smallest possible elements.
 * For example, instead of returning method marker for {@code PsiMethod},
 * create the marker for the {@code PsiIdentifier} which is a name of this method.
 * <p/>
 * More technical details:<p>
 * Highlighting (specifically, LineMarkersPass) queries all LineMarkerProviders in two passes (for performance reasons):
 * <ul>
 * <li>first pass for all elements in visible area</li>
 * <li>second pass for all the rest elements</li>
 * </ul>
 * If provider returned nothing for both areas, its line markers are cleared.
 * <p/>
 * So imagine a {@code LineMarkerProvider} which is (incorrectly) written like this:
 * <pre>
 * {@code
 * class MyBadLineMarkerProvider implements LineMarkerProvider {
 * public LineMarkerInfo getLineMarkerInfo(PsiElement element) {
 * if (element instanceof PsiMethod) { // ACTUALLY DONT!
 * return new LineMarkerInfo(element, element.getTextRange(), icon, null,null, alignment);
 * }
 * else {
 * return null;
 * }
 * }
 * ...
 * }
 * }
 * </pre>
 * Note that it creates LineMarkerInfo for the whole method body.
 * The following will happen when this method is half-visible (e.g. its name is visible but a part of its body isn't):
 * <ul>
 * <li>the first pass would remove line marker info because the whole PsiMethod isn't visible</li>
 * <li>the second pass would try to add line marker info back because LineMarkerProvider was called for the PsiMethod at last</li>
 * </ul>
 * As a result, line marker icon will blink annoyingly.
 * Instead, write this:
 * <pre>
 * {@code
 * class MyGoodLineMarkerProvider implements LineMarkerProvider {
 * public LineMarkerInfo getLineMarkerInfo(PsiElement element) {
 * if (element instanceof PsiIdentifier &&
 * (parent = element.getParent()) instanceof PsiMethod &&
 * ((PsiMethod)parent).getMethodIdentifier() == element)) { // aha, we are at method name
 * return new LineMarkerInfo(element, element.getTextRange(), icon, null,null, alignment);
 * }
 * else {
 * return null;
 * }
 * }
 * ...
 * }
 * }
 * </pre>
 */
@Nullable
LineMarkerInfo getLineMarkerInfo(@NotNull PsiElement element);
default void collectSlowLineMarkers(@NotNull List<PsiElement> elements, @NotNull Collection<LineMarkerInfo> result) {
}
}
|
platform/lang-api/src/com/intellij/codeInsight/daemon/LineMarkerProvider.java
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon;
import com.intellij.psi.PsiElement;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.List;
/**
 * @author yole
 * @see LineMarkerProviders#EP_NAME
 * @see LineMarkerProviderDescriptor
 */
public interface LineMarkerProvider {
/**
 * Get line markers for this PsiElement.
 * <p/>
 * NOTE for implementers:
 * Please create line marker info for leaf elements only - i.e. the smallest possible elements.
 * For example, instead of returning method marker for {@code PsiMethod},
 * create the marker for the {@code PsiIdentifier} which is a name of this method.
 * <p/>
 * More technical details:<p>
 * Highlighting (specifically, LineMarkersPass) queries all LineMarkerProviders in two passes (for performance reasons):
 * <ul>
 * <li>first pass for all elements in visible area</li>
 * <li>second pass for all the rest elements</li>
 * </ul>
 * If provider returned nothing for both areas, its line markers are cleared.
 * <p/>
 * So imagine a {@code LineMarkerProvider} which is (incorrectly) written like this:
 * <pre>
 * {@code
 * class MyBadLineMarkerProvider implements LineMarkerProvider {
 * public LineMarkerInfo getLineMarkerInfo(PsiElement element) {
 * if (element instanceof PsiMethod) { // ACTUALLY DONT!
 * return new LineMarkerInfo(element, element.getTextRange(), icon, null,null, alignment);
 * }
 * else {
 * return null;
 * }
 * }
 * ...
 * }
 * }
 * </pre>
 * Note that it creates LineMarkerInfo for the whole method body.
 * The following will happen when this method is half-visible (e.g. its name is visible but a part of its body isn't):
 * <ul>
 * <li>the first pass would remove line marker info because the whole PsiMethod isn't visible</li>
 * <li>the second pass would try to add line marker info back because LineMarkerProvider was called for the PsiMethod at last</li>
 * </ul>
 * As a result, line marker icon will blink annoyingly.
 * Instead, write this:
 * <pre>
 * {@code
 * class MyGoodLineMarkerProvider implements LineMarkerProvider {
 * public LineMarkerInfo getLineMarkerInfo(PsiElement element) {
 * if (element instanceof PsiIdentifier &&
 * (parent = element.getParent()) instanceof PsiMethod &&
 * ((PsiMethod)parent).getMethodIdentifier() == element)) { // aha, we are at method name
 * return new LineMarkerInfo(element, element.getTextRange(), icon, null,null, alignment);
 * }
 * else {
 * return null;
 * }
 * }
 * ...
 * }
 * }
 * </pre>
 */
@Nullable
LineMarkerInfo getLineMarkerInfo(@NotNull PsiElement element);
default void collectSlowLineMarkers(@NotNull List<PsiElement> elements, @NotNull Collection<LineMarkerInfo> result) {
}
}
|
LineMarkerProvider: javadoc cleanup
GitOrigin-RevId: cfa8bc6acc8a84f6f0870273c288c62ed07f80a4
|
platform/lang-api/src/com/intellij/codeInsight/daemon/LineMarkerProvider.java
|
LineMarkerProvider: javadoc cleanup
|
|
Java
|
apache-2.0
|
e1ec1d1a0043b82d547347e72848307c399a13e0
| 0
|
Aulust/async-http-client,Aulust/async-http-client
|
/*
* Copyright (c) 2014 AsyncHttpClient Project. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package org.asynchttpclient.netty.request;
import static org.asynchttpclient.util.Assertions.assertNotNull;
import static org.asynchttpclient.util.AuthenticatorUtils.*;
import static org.asynchttpclient.util.HttpConstants.Methods.*;
import static org.asynchttpclient.util.HttpUtils.requestTimeout;
import static org.asynchttpclient.util.MiscUtils.getCause;
import static org.asynchttpclient.util.ProxyUtils.getProxyServer;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelProgressivePromise;
import io.netty.handler.codec.http.DefaultHttpHeaders;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.util.Timeout;
import io.netty.util.Timer;
import io.netty.util.TimerTask;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.asynchttpclient.AsyncHandler;
import org.asynchttpclient.AsyncHttpClientConfig;
import org.asynchttpclient.ListenableFuture;
import org.asynchttpclient.Realm;
import org.asynchttpclient.Realm.AuthScheme;
import org.asynchttpclient.Request;
import org.asynchttpclient.exception.RemotelyClosedException;
import org.asynchttpclient.filter.FilterContext;
import org.asynchttpclient.filter.FilterException;
import org.asynchttpclient.filter.IOExceptionFilter;
import org.asynchttpclient.handler.AsyncHandlerExtensions;
import org.asynchttpclient.handler.TransferCompletionHandler;
import org.asynchttpclient.netty.Callback;
import org.asynchttpclient.netty.NettyResponseFuture;
import org.asynchttpclient.netty.SimpleFutureListener;
import org.asynchttpclient.netty.channel.ChannelManager;
import org.asynchttpclient.netty.channel.ChannelState;
import org.asynchttpclient.netty.channel.Channels;
import org.asynchttpclient.netty.channel.NettyConnectListener;
import org.asynchttpclient.netty.timeout.ReadTimeoutTimerTask;
import org.asynchttpclient.netty.timeout.RequestTimeoutTimerTask;
import org.asynchttpclient.netty.timeout.TimeoutsHolder;
import org.asynchttpclient.proxy.ProxyServer;
import org.asynchttpclient.resolver.RequestHostnameResolver;
import org.asynchttpclient.uri.Uri;
import org.asynchttpclient.ws.WebSocketUpgradeHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class NettyRequestSender {
private static final Logger LOGGER = LoggerFactory.getLogger(NettyRequestSender.class);
// Client-wide configuration (timeouts, realms, filters, retries ...).
private final AsyncHttpClientConfig config;
// Owns channel pooling, bootstraps and channel lifecycle.
private final ChannelManager channelManager;
// Shared timer used to schedule request/read timeouts.
private final Timer nettyTimer;
// Shared flag set externally when the client closes; checked before each send.
private final AtomicBoolean closed;
// Translates AHC Requests into Netty HttpRequests.
private final NettyRequestFactory requestFactory;
/**
 * Builds the sender and its request factory.
 *
 * @param config         client-wide configuration
 * @param channelManager channel pool / bootstrap owner
 * @param nettyTimer     timer used for request and read timeouts
 * @param closed         shared closed-state flag of the owning client
 */
public NettyRequestSender(AsyncHttpClientConfig config,//
ChannelManager channelManager,//
Timer nettyTimer,//
AtomicBoolean closed) {
this.config = config;
this.channelManager = channelManager;
this.nettyTimer = nettyTimer;
this.closed = closed;
requestFactory = new NettyRequestFactory(config);
}
/**
 * Entry point for sending a request. Decides whether a proxy CONNECT tunnel is required
 * (secured or websocket target behind a proxy) and dispatches to the matching send path.
 *
 * @param future       existing future when replaying/redirecting, null for a fresh request
 * @param reclaimCache whether a pooled-channel slot was already reserved for this request
 * @throws IllegalStateException if the client has been closed
 */
public <T> ListenableFuture<T> sendRequest(final Request request,//
final AsyncHandler<T> asyncHandler,//
NettyResponseFuture<T> future,//
boolean reclaimCache) {
if (isClosed())
throw new IllegalStateException("Closed");
validateWebSocketRequest(request, asyncHandler);
ProxyServer proxyServer = getProxyServer(config, request);
// websockets use connect tunnelling to work with proxies
if (proxyServer != null && (request.getUri().isSecured() || request.getUri().isWebSocket()) && !isConnectDone(request, future))
if (future != null && future.isConnectAllowed())
// SSL proxy or websocket: CONNECT for sure
return sendRequestWithCertainForceConnect(request, asyncHandler, future, reclaimCache, proxyServer, true);
else
// CONNECT will depend if we can pool or connection or if we have to open a new one
return sendRequestThroughSslProxy(request, asyncHandler, future, reclaimCache, proxyServer);
else
// no CONNECT for sure
return sendRequestWithCertainForceConnect(request, asyncHandler, future, reclaimCache, proxyServer, false);
}
/**
 * Tells whether the CONNECT handshake for this future has already been performed,
 * i.e. the previously-built Netty request was a CONNECT while the caller's request
 * is not itself a CONNECT.
 */
private boolean isConnectDone(Request request, NettyResponseFuture<?> future) {
    if (future == null) {
        return false;
    }
    NettyRequest previousRequest = future.getNettyRequest();
    if (previousRequest == null) {
        return false;
    }
    boolean previousWasConnect = previousRequest.getHttpRequest().getMethod() == HttpMethod.CONNECT;
    return previousWasConnect && !request.getMethod().equals(CONNECT);
}
/**
 * We know for sure if we have to force to connect or not, so we can build the HttpRequest right away This reduces the probability of having a pooled channel closed by the
 * server by the time we build the request
 *
 * @param forceConnect whether the Netty request must be built as a CONNECT
 */
private <T> ListenableFuture<T> sendRequestWithCertainForceConnect(//
Request request,//
AsyncHandler<T> asyncHandler,//
NettyResponseFuture<T> future,//
boolean reclaimCache,//
ProxyServer proxyServer,//
boolean forceConnect) {
NettyResponseFuture<T> newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, proxyServer, forceConnect);
// Prefer an already-open channel (reused or pooled); otherwise open a new one.
Channel channel = getOpenChannel(future, request, proxyServer, asyncHandler);
if (Channels.isChannelValid(channel))
return sendRequestWithOpenChannel(request, proxyServer, newFuture, asyncHandler, channel);
else
return sendRequestWithNewChannel(request, proxyServer, newFuture, asyncHandler, reclaimCache);
}
/**
 * Using CONNECT depends on whether we can fetch a valid channel or not. Loop (up to 3 times)
 * until we get a valid channel from the pool and it's still valid once the request is built;
 * otherwise fall back to opening a new channel with a forced CONNECT.
 */
@SuppressWarnings("unused")
private <T> ListenableFuture<T> sendRequestThroughSslProxy(//
Request request,//
AsyncHandler<T> asyncHandler,//
NettyResponseFuture<T> future,//
boolean reclaimCache,//
ProxyServer proxyServer) {
NettyResponseFuture<T> newFuture = null;
for (int i = 0; i < 3; i++) {
Channel channel = getOpenChannel(future, request, proxyServer, asyncHandler);
// Build the request lazily, only once a candidate channel exists (forceConnect = false).
if (Channels.isChannelValid(channel))
if (newFuture == null)
newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, proxyServer, false);
if (Channels.isChannelValid(channel))
// if the channel is still active, we can use it, otherwise try
// gain
return sendRequestWithOpenChannel(request, proxyServer, newFuture, asyncHandler, channel);
else
// pool is empty
break;
}
// No pooled channel available: open a new channel with forced CONNECT.
newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, proxyServer, true);
return sendRequestWithNewChannel(request, proxyServer, newFuture, asyncHandler, reclaimCache);
}
/**
 * Builds the Netty request and either creates a fresh response future or updates the
 * original one (replay/redirect case) in place.
 * Realm precedence: original future's realm, then config realm, then request realm.
 */
private <T> NettyResponseFuture<T> newNettyRequestAndResponseFuture(final Request request, final AsyncHandler<T> asyncHandler, NettyResponseFuture<T> originalFuture,
ProxyServer proxy, boolean forceConnect) {
Realm realm = null;
if (originalFuture != null) {
realm = originalFuture.getRealm();
} else if (config.getRealm() != null) {
realm = config.getRealm();
} else {
realm = request.getRealm();
}
// Proxy realm precedence: original future's proxy realm, then the proxy's own realm.
Realm proxyRealm = null;
if (originalFuture != null) {
proxyRealm = originalFuture.getProxyRealm();
} else if (proxy != null) {
proxyRealm = proxy.getRealm();
}
NettyRequest nettyRequest = requestFactory.newNettyRequest(request, forceConnect, proxy, realm, proxyRealm);
if (originalFuture == null) {
NettyResponseFuture<T> future = newNettyResponseFuture(request, asyncHandler, nettyRequest, proxy);
future.setRealm(realm);
future.setProxyRealm(proxyRealm);
return future;
} else {
// Replay/redirect: reuse the existing future, swapping in the rebuilt request.
originalFuture.setNettyRequest(nettyRequest);
originalFuture.setCurrentRequest(request);
return originalFuture;
}
}
/**
 * Returns an already-open channel for this request: the channel attached to the future
 * when reuse is allowed and the channel is still usable, otherwise one polled from the pool.
 */
private Channel getOpenChannel(NettyResponseFuture<?> future, Request request, ProxyServer proxyServer, AsyncHandler<?> asyncHandler) {
    boolean reusableAttachedChannel = future != null && future.reuseChannel() && Channels.isChannelValid(future.channel());
    return reusableAttachedChannel ? future.channel() : pollPooledChannel(request, proxyServer, asyncHandler);
}
/**
 * Writes the request on an already-open (reused or pooled) channel. If the channel died
 * in between, hands off to the unexpected-close path (retry or abort).
 */
private <T> ListenableFuture<T> sendRequestWithOpenChannel(Request request, ProxyServer proxy, NettyResponseFuture<T> future, AsyncHandler<T> asyncHandler, Channel channel) {
if (asyncHandler instanceof AsyncHandlerExtensions)
AsyncHandlerExtensions.class.cast(asyncHandler).onConnectionPooled(channel);
future.setChannelState(ChannelState.POOLED);
future.attachChannel(channel, false);
LOGGER.debug("Using open Channel {} for {} '{}'", channel, future.getNettyRequest().getHttpRequest().getMethod(), future.getNettyRequest().getHttpRequest().getUri());
if (Channels.isChannelValid(channel)) {
// Register the future on the channel so inbound handlers can find it, then write.
Channels.setAttribute(channel, future);
writeRequest(future, channel);
} else {
// bad luck, the channel was closed in-between
// there's a very good chance onClose was already notified but the
// future wasn't already registered
handleUnexpectedClosedChannel(channel, future);
}
return future;
}
/**
 * Opens a brand-new channel for the request: sets first-connection-only headers,
 * optionally preempts a pool slot, resolves the host asynchronously and connects.
 * On resolution failure the preempted slot is released and the future aborted.
 */
private <T> ListenableFuture<T> sendRequestWithNewChannel(//
Request request,//
ProxyServer proxy,//
NettyResponseFuture<T> future,//
AsyncHandler<T> asyncHandler,//
boolean reclaimCache) {
// some headers are only set when performing the first request
HttpHeaders headers = future.getNettyRequest().getHttpRequest().headers();
Realm realm = future.getRealm();
Realm proxyRealm = future.getProxyRealm();
requestFactory.addAuthorizationHeader(headers, perConnectionAuthorizationHeader(request, proxy, realm));
requestFactory.setProxyAuthorizationHeader(headers, perConnectionProxyAuthorizationHeader(request, proxyRealm));
// Preemptive (non-NTLM) auth counts as already "in auth" for the response handlers.
future.getInAuth().set(realm != null && realm.isUsePreemptiveAuth() && realm.getScheme() != AuthScheme.NTLM);
future.getInProxyAuth().set(proxyRealm != null && proxyRealm.isUsePreemptiveAuth() && proxyRealm.getScheme() != AuthScheme.NTLM);
// Do not throw an exception when we need an extra connection for a redirect
// FIXME why? This violate the max connection per host handling, right?
Bootstrap bootstrap = channelManager.getBootstrap(request.getUri(), proxy);
Object partitionKey = future.getPartitionKey();
// A slot is reserved only when one was not already reclaimed for this request.
final boolean channelPreempted = !reclaimCache;
try {
// Do not throw an exception when we need an extra connection for a
// redirect.
if (channelPreempted) {
// if there's an exception here, channel wasn't preempted and resolve won't happen
channelManager.preemptChannel(partitionKey);
}
} catch (Throwable t) {
abort(null, future, getCause(t));
// exit and don't try to resolve address
return future;
}
RequestHostnameResolver.INSTANCE.resolve(request, proxy, asyncHandler)//
.addListener(new SimpleFutureListener<List<InetSocketAddress>>() {
@Override
protected void onSuccess(List<InetSocketAddress> addresses) {
NettyConnectListener<T> connectListener = new NettyConnectListener<>(future, NettyRequestSender.this, channelManager, channelPreempted, partitionKey);
new NettyChannelConnector(request.getLocalAddress(), addresses, asyncHandler).connect(bootstrap, connectListener);
}
@Override
protected void onFailure(Throwable cause) {
// Resolution failed: give back the reserved pool slot before aborting.
if (channelPreempted) {
channelManager.abortChannelPreemption(partitionKey);
}
abort(null, future, getCause(cause));
}
});
return future;
}
/**
 * Creates a fresh response future for the request; honors the Expect: 100-continue
 * header by deferring the body write until the interim response arrives.
 */
private <T> NettyResponseFuture<T> newNettyResponseFuture(Request request, AsyncHandler<T> asyncHandler, NettyRequest nettyRequest, ProxyServer proxyServer) {
NettyResponseFuture<T> future = new NettyResponseFuture<>(//
request,//
asyncHandler,//
nettyRequest,//
config.getMaxRequestRetry(),//
request.getChannelPoolPartitioning(),//
proxyServer);
String expectHeader = request.getHeaders().get(HttpHeaders.Names.EXPECT);
if (expectHeader != null && expectHeader.equalsIgnoreCase(HttpHeaders.Values.CONTINUE))
future.setDontWriteBodyBecauseExpectContinue(true);
return future;
}
/**
 * Writes the HTTP request (headers and, when applicable, body) on the given channel and
 * schedules the timeouts. Aborts the future if the write throws.
 */
public <T> void writeRequest(NettyResponseFuture<T> future, Channel channel) {
NettyRequest nettyRequest = future.getNettyRequest();
HttpRequest httpRequest = nettyRequest.getHttpRequest();
AsyncHandler<T> handler = future.getAsyncHandler();
// if the channel is dead because it was pooled and the remote
// server decided to close it,
// we just let it go and the channelInactive do its work
if (!Channels.isChannelValid(channel))
return;
try {
if (handler instanceof TransferCompletionHandler)
configureTransferAdapter(handler, httpRequest);
// Body is written only when not deferred by Expect: 100-continue, not a CONNECT, and present.
boolean writeBody = !future.isDontWriteBodyBecauseExpectContinue() && httpRequest.getMethod() != HttpMethod.CONNECT && nettyRequest.getBody() != null;
if (!future.isHeadersAlreadyWrittenOnContinue()) {
if (future.getAsyncHandler() instanceof AsyncHandlerExtensions)
AsyncHandlerExtensions.class.cast(future.getAsyncHandler()).onRequestSend(nettyRequest);
ChannelProgressivePromise promise = channel.newProgressivePromise();
// Flush immediately only when no body follows; otherwise the body write flushes.
ChannelFuture f = writeBody ? channel.write(httpRequest, promise) : channel.writeAndFlush(httpRequest, promise);
f.addListener(new ProgressListener(future.getAsyncHandler(), future, true, 0L));
}
if (writeBody)
nettyRequest.getBody().write(channel, future);
// don't bother scheduling timeouts if channel became invalid
if (Channels.isChannelValid(channel))
scheduleTimeouts(future);
} catch (Exception e) {
LOGGER.error("Can't write request", e);
abort(channel, future, e);
}
}
/**
 * Hands the transfer-completion handler a detached copy of the outgoing request headers.
 */
private void configureTransferAdapter(AsyncHandler<?> handler, HttpRequest httpRequest) {
    HttpHeaders headersCopy = new DefaultHttpHeaders(false);
    headersCopy.set(httpRequest.headers());
    TransferCompletionHandler.class.cast(handler).headers(headersCopy);
}
/**
 * Schedules the request timeout and, when shorter than the request timeout,
 * the read timeout; stores both in a TimeoutsHolder on the future.
 */
private void scheduleTimeouts(NettyResponseFuture<?> nettyResponseFuture) {
nettyResponseFuture.touch();
int requestTimeoutInMs = requestTimeout(config, nettyResponseFuture.getTargetRequest());
TimeoutsHolder timeoutsHolder = new TimeoutsHolder();
if (requestTimeoutInMs != -1) {
Timeout requestTimeout = newTimeout(new RequestTimeoutTimerTask(nettyResponseFuture, this, timeoutsHolder, requestTimeoutInMs), requestTimeoutInMs);
timeoutsHolder.requestTimeout = requestTimeout;
}
int readTimeoutValue = config.getReadTimeout();
// NOTE(review): when requestTimeoutInMs == -1 (no request timeout), the condition below is
// never true, so no read timeout is scheduled either — confirm this is intended.
if (readTimeoutValue != -1 && readTimeoutValue < requestTimeoutInMs) {
// no need to schedule a readTimeout if the requestTimeout happens first
Timeout readTimeout = newTimeout(new ReadTimeoutTimerTask(nettyResponseFuture, this, timeoutsHolder, requestTimeoutInMs, readTimeoutValue), readTimeoutValue);
timeoutsHolder.readTimeout = readTimeout;
}
nettyResponseFuture.setTimeoutsHolder(timeoutsHolder);
}
/**
 * Schedules {@code task} on the shared Netty timer after {@code delay} milliseconds.
 */
public Timeout newTimeout(TimerTask task, long delay) {
return nettyTimer.newTimeout(task, delay, TimeUnit.MILLISECONDS);
}
/**
 * Fails the future with {@code t} (if not already done) and closes the channel, if any.
 *
 * @param channel channel to close, may be null when none was ever attached
 */
public void abort(Channel channel, NettyResponseFuture<?> future, Throwable t) {
if (channel != null)
channelManager.closeChannel(channel);
if (!future.isDone()) {
future.setChannelState(ChannelState.CLOSED);
LOGGER.debug("Aborting Future {}\n", future);
LOGGER.debug(t.getMessage(), t);
future.abort(t);
}
}
/**
 * Reacts to a channel that closed underneath an in-flight future: a completed future only
 * needs the channel cleaned up, otherwise a replay is attempted, and failing that the
 * future is aborted with a remote-close error.
 */
public void handleUnexpectedClosedChannel(Channel channel, NettyResponseFuture<?> future) {
    if (future.isDone()) {
        channelManager.closeChannel(channel);
        return;
    }
    if (!retry(future)) {
        abort(channel, future, RemotelyClosedException.INSTANCE);
    }
}
/**
 * Attempts to replay the future's current request on a new attempt.
 *
 * @return true if the replay was initiated, false if the client is closed,
 *         the future cannot be replayed, or resending threw
 */
public boolean retry(NettyResponseFuture<?> future) {
if (isClosed())
return false;
if (future.canBeReplayed()) {
// FIXME should we set future.setReuseChannel(false); ?
future.setChannelState(ChannelState.RECONNECTED);
future.getAndSetStatusReceived(false);
LOGGER.debug("Trying to recover request {}\n", future.getNettyRequest().getHttpRequest());
if (future.getAsyncHandler() instanceof AsyncHandlerExtensions) {
AsyncHandlerExtensions.class.cast(future.getAsyncHandler()).onRetry();
}
try {
sendNextRequest(future.getCurrentRequest(), future);
return true;
} catch (Exception e) {
// Resend failed: abort the future instead of leaving it dangling.
abort(future.channel(), future, e);
return false;
}
} else {
LOGGER.debug("Unable to recover future {}\n", future);
return false;
}
}
/**
 * Runs the configured IOExceptionFilters over the failed request and replays it when
 * a filter asks for it and the future still allows replay.
 *
 * @return true if the request was replayed
 */
public boolean applyIoExceptionFiltersAndReplayRequest(NettyResponseFuture<?> future, IOException e, Channel channel) {
boolean replayed = false;
@SuppressWarnings({ "unchecked", "rawtypes" })
FilterContext<?> fc = new FilterContext.FilterContextBuilder().asyncHandler(future.getAsyncHandler()).request(future.getCurrentRequest()).ioException(e).build();
// Each filter may rewrite the context; a throwing filter aborts the future but the
// loop continues with the remaining filters.
for (IOExceptionFilter asyncFilter : config.getIoExceptionFilters()) {
try {
fc = asyncFilter.filter(fc);
assertNotNull(fc, "filterContext");
} catch (FilterException efe) {
abort(channel, future, efe);
}
}
if (fc.replayRequest() && future.canBeReplayed()) {
replayRequest(future, fc, channel);
replayed = true;
}
return replayed;
}
/**
 * Re-sends {@code request} on the existing future (redirect/retry path), reclaiming the
 * already-reserved pool slot.
 */
public <T> void sendNextRequest(final Request request, final NettyResponseFuture<T> future) {
// remove attribute in case the channel gets closed so it doesn't try to recover the previous future
Channels.setAttribute(future.channel(), null);
sendRequest(request, future.getAsyncHandler(), future, true);
}
/**
 * Rejects mismatches between the request URI scheme and the handler type:
 * a WebSocketUpgradeHandler requires a ws/wss GET request, and a ws/wss URI
 * requires a WebSocketUpgradeHandler.
 *
 * @throws IllegalArgumentException on any mismatch
 */
private void validateWebSocketRequest(Request request, AsyncHandler<?> asyncHandler) {
    Uri requestUri = request.getUri();
    boolean webSocketScheme = requestUri.isWebSocket();
    boolean webSocketHandler = asyncHandler instanceof WebSocketUpgradeHandler;
    if (!webSocketHandler) {
        if (webSocketScheme)
            throw new IllegalArgumentException("No WebSocketUpgradeHandler but scheme is " + requestUri.getScheme());
        return;
    }
    if (!webSocketScheme)
        throw new IllegalArgumentException("WebSocketUpgradeHandler but scheme isn't ws or wss: " + requestUri.getScheme());
    if (!request.getMethod().equals(GET))
        throw new IllegalArgumentException("WebSocketUpgradeHandler but method isn't GET: " + request.getMethod());
}
/**
 * Polls the channel pool for a keep-alive channel matching this request's
 * partitioning (uri, virtual host, proxy), notifying extension handlers of
 * the attempt.
 *
 * @return a pooled channel, or null when the pool has none
 */
private Channel pollPooledChannel(Request request, ProxyServer proxy, AsyncHandler<?> asyncHandler) {
    if (asyncHandler instanceof AsyncHandlerExtensions)
        AsyncHandlerExtensions.class.cast(asyncHandler).onConnectionPoolAttempt();
    Uri uri = request.getUri();
    String virtualHost = request.getVirtualHost();
    final Channel channel = channelManager.poll(uri, virtualHost, proxy, request.getChannelPoolPartitioning());
    if (channel != null) {
        LOGGER.debug("Using polled Channel {}\n for uri {}\n", channel, uri);
    }
    return channel;
}
/**
 * Replays the request produced by a filter context: resets the future to a
 * fresh state, drains the old channel back to the pool and resends.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public void replayRequest(final NettyResponseFuture<?> future, FilterContext fc, Channel channel) {
    Request newRequest = fc.getRequest();
    future.setAsyncHandler(fc.getAsyncHandler());
    future.setChannelState(ChannelState.NEW);
    future.touch();
    LOGGER.debug("\n\nReplaying Request {}\n for Future {}\n", newRequest, future);
    if (future.getAsyncHandler() instanceof AsyncHandlerExtensions)
        AsyncHandlerExtensions.class.cast(future.getAsyncHandler()).onRetry();
    // return the old channel to the pool once drained; the replay gets its own
    channelManager.drainChannelAndOffer(channel, future);
    sendNextRequest(newRequest, future);
}
/** @return true once the owning client has been closed */
public boolean isClosed() {
    return closed.get();
}
/**
 * Builds a callback that, when invoked, sends {@code nextRequest} on behalf of
 * the given future.
 */
public final Callback newExecuteNextRequestCallback(final NettyResponseFuture<?> future, final Request nextRequest) {
    return new Callback(future) {
        @Override
        public void call() {
            sendNextRequest(nextRequest, future);
        }
    };
}
/**
 * Attaches a callback to the channel so that, once the channel is drained,
 * {@code nextRequest} is sent for the future.
 */
public void drainChannelAndExecuteNextRequest(final Channel channel, final NettyResponseFuture<?> future, Request nextRequest) {
    Channels.setAttribute(channel, newExecuteNextRequestCallback(future, nextRequest));
}
}
|
client/src/main/java/org/asynchttpclient/netty/request/NettyRequestSender.java
|
/*
* Copyright (c) 2014 AsyncHttpClient Project. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package org.asynchttpclient.netty.request;
import static org.asynchttpclient.util.Assertions.assertNotNull;
import static org.asynchttpclient.util.AuthenticatorUtils.*;
import static org.asynchttpclient.util.HttpConstants.Methods.*;
import static org.asynchttpclient.util.HttpUtils.requestTimeout;
import static org.asynchttpclient.util.MiscUtils.getCause;
import static org.asynchttpclient.util.ProxyUtils.getProxyServer;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelProgressivePromise;
import io.netty.handler.codec.http.DefaultHttpHeaders;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.util.Timeout;
import io.netty.util.Timer;
import io.netty.util.TimerTask;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.asynchttpclient.AsyncHandler;
import org.asynchttpclient.AsyncHttpClientConfig;
import org.asynchttpclient.ListenableFuture;
import org.asynchttpclient.Realm;
import org.asynchttpclient.Realm.AuthScheme;
import org.asynchttpclient.Request;
import org.asynchttpclient.exception.RemotelyClosedException;
import org.asynchttpclient.filter.FilterContext;
import org.asynchttpclient.filter.FilterException;
import org.asynchttpclient.filter.IOExceptionFilter;
import org.asynchttpclient.handler.AsyncHandlerExtensions;
import org.asynchttpclient.handler.TransferCompletionHandler;
import org.asynchttpclient.netty.Callback;
import org.asynchttpclient.netty.NettyResponseFuture;
import org.asynchttpclient.netty.SimpleFutureListener;
import org.asynchttpclient.netty.channel.ChannelManager;
import org.asynchttpclient.netty.channel.ChannelState;
import org.asynchttpclient.netty.channel.Channels;
import org.asynchttpclient.netty.channel.NettyConnectListener;
import org.asynchttpclient.netty.timeout.ReadTimeoutTimerTask;
import org.asynchttpclient.netty.timeout.RequestTimeoutTimerTask;
import org.asynchttpclient.netty.timeout.TimeoutsHolder;
import org.asynchttpclient.proxy.ProxyServer;
import org.asynchttpclient.resolver.RequestHostnameResolver;
import org.asynchttpclient.uri.Uri;
import org.asynchttpclient.ws.WebSocketUpgradeHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class NettyRequestSender {
private static final Logger LOGGER = LoggerFactory.getLogger(NettyRequestSender.class);
private final AsyncHttpClientConfig config;
private final ChannelManager channelManager;
private final Timer nettyTimer;
private final AtomicBoolean closed;
private final NettyRequestFactory requestFactory;
/**
 * @param config         client-wide configuration
 * @param channelManager pool/bootstrap manager used to obtain channels
 * @param nettyTimer     shared timer used for request/read timeouts
 * @param closed         client-level closed flag, shared with the owning client
 */
public NettyRequestSender(AsyncHttpClientConfig config,//
        ChannelManager channelManager,//
        Timer nettyTimer,//
        AtomicBoolean closed) {
    this.config = config;
    this.channelManager = channelManager;
    this.nettyTimer = nettyTimer;
    this.closed = closed;
    requestFactory = new NettyRequestFactory(config);
}
/**
 * Entry point for sending a request. Decides whether a proxy CONNECT tunnel is
 * needed (secured or websocket target behind a proxy, tunnel not yet
 * established) and dispatches to the matching strategy.
 *
 * @throws IllegalStateException if the client is already closed
 */
public <T> ListenableFuture<T> sendRequest(final Request request,//
        final AsyncHandler<T> asyncHandler,//
        NettyResponseFuture<T> future,//
        boolean reclaimCache) {
    if (isClosed())
        throw new IllegalStateException("Closed");
    validateWebSocketRequest(request, asyncHandler);
    ProxyServer proxyServer = getProxyServer(config, request);
    // websockets use connect tunnelling to work with proxies
    if (proxyServer != null && (request.getUri().isSecured() || request.getUri().isWebSocket()) && !isConnectDone(request, future))
        if (future != null && future.isConnectAllowed())
            // SSL proxy or websocket: CONNECT for sure
            return sendRequestWithCertainForceConnect(request, asyncHandler, future, reclaimCache, proxyServer, true);
        else
            // CONNECT will depend if we can pool or connection or if we have to open a new one
            return sendRequestThroughSslProxy(request, asyncHandler, future, reclaimCache, proxyServer);
    else
        // no CONNECT for sure
        return sendRequestWithCertainForceConnect(request, asyncHandler, future, reclaimCache, proxyServer, false);
}
/**
 * True when the CONNECT handshake for this future has already been performed:
 * the netty-level request was a CONNECT while the caller's request is not.
 */
private boolean isConnectDone(Request request, NettyResponseFuture<?> future) {
    if (future == null || future.getNettyRequest() == null)
        return false;
    boolean tunnelWasRequested = future.getNettyRequest().getHttpRequest().getMethod() == HttpMethod.CONNECT;
    return tunnelWasRequested && !request.getMethod().equals(CONNECT);
}
/**
 * We know for sure if we have to force to connect or not, so we can build the HttpRequest right away This reduces the probability of having a pooled channel closed by the
 * server by the time we build the request
 */
private <T> ListenableFuture<T> sendRequestWithCertainForceConnect(//
        Request request,//
        AsyncHandler<T> asyncHandler,//
        NettyResponseFuture<T> future,//
        boolean reclaimCache,//
        ProxyServer proxyServer,//
        boolean forceConnect) {
    NettyResponseFuture<T> newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, proxyServer, forceConnect);
    // reuse an attached/pooled channel when possible, else open a new one
    Channel channel = getOpenChannel(future, request, proxyServer, asyncHandler);
    if (Channels.isChannelValid(channel))
        return sendRequestWithOpenChannel(request, proxyServer, newFuture, asyncHandler, channel);
    else
        return sendRequestWithNewChannel(request, proxyServer, newFuture, asyncHandler, reclaimCache);
}
/**
 * Using CONNECT depends on wither we can fetch a valid channel or not Loop until we get a valid channel from the pool and it's still valid once the request is built @
 */
@SuppressWarnings("unused")
private <T> ListenableFuture<T> sendRequestThroughSslProxy(//
        Request request,//
        AsyncHandler<T> asyncHandler,//
        NettyResponseFuture<T> future,//
        boolean reclaimCache,//
        ProxyServer proxyServer) {
    NettyResponseFuture<T> newFuture = null;
    for (int i = 0; i < 3; i++) {
        Channel channel = getOpenChannel(future, request, proxyServer, asyncHandler);
        // build the request lazily, only once a live channel is at hand
        // (brace-less nested if: the inner null-check is guarded by validity)
        if (Channels.isChannelValid(channel))
            if (newFuture == null)
                newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, proxyServer, false);
        // re-check: the channel may have died while the request was being built
        if (Channels.isChannelValid(channel))
            // if the channel is still active, we can use it, otherwise try
            // gain
            return sendRequestWithOpenChannel(request, proxyServer, newFuture, asyncHandler, channel);
        else
            // pool is empty
            break;
    }
    // no pooled channel available: open a fresh connection, tunnelling via CONNECT
    newFuture = newNettyRequestAndResponseFuture(request, asyncHandler, future, proxyServer, true);
    return sendRequestWithNewChannel(request, proxyServer, newFuture, asyncHandler, reclaimCache);
}
/**
 * Builds the netty-level request and either creates a new response future or
 * updates the provided one for a replay. Realm precedence: the original
 * future's realm, then the config realm, then the request realm; the proxy
 * realm comes from the future or the proxy server.
 */
private <T> NettyResponseFuture<T> newNettyRequestAndResponseFuture(final Request request, final AsyncHandler<T> asyncHandler, NettyResponseFuture<T> originalFuture,
        ProxyServer proxy, boolean forceConnect) {
    Realm realm = null;
    if (originalFuture != null) {
        realm = originalFuture.getRealm();
    } else if (config.getRealm() != null) {
        realm = config.getRealm();
    } else {
        realm = request.getRealm();
    }
    Realm proxyRealm = null;
    if (originalFuture != null) {
        proxyRealm = originalFuture.getProxyRealm();
    } else if (proxy != null) {
        proxyRealm = proxy.getRealm();
    }
    NettyRequest nettyRequest = requestFactory.newNettyRequest(request, forceConnect, proxy, realm, proxyRealm);
    if (originalFuture == null) {
        NettyResponseFuture<T> future = newNettyResponseFuture(request, asyncHandler, nettyRequest, proxy);
        future.setRealm(realm);
        future.setProxyRealm(proxyRealm);
        return future;
    } else {
        // replay case: keep the original future, just swap in the new request
        originalFuture.setNettyRequest(nettyRequest);
        originalFuture.setCurrentRequest(request);
        return originalFuture;
    }
}
/**
 * Picks the channel to send on: the future's own channel when reuse is allowed
 * and it is still valid, otherwise a channel polled from the pool (may be null).
 */
private Channel getOpenChannel(NettyResponseFuture<?> future, Request request, ProxyServer proxyServer, AsyncHandler<?> asyncHandler) {
    boolean reusable = future != null && future.reuseChannel() && Channels.isChannelValid(future.channel());
    return reusable ? future.channel() : pollPooledChannel(request, proxyServer, asyncHandler);
}
/**
 * Sends on an already-open (pooled or reused) channel, falling back to the
 * closed-channel recovery path if the channel died in the meantime.
 */
private <T> ListenableFuture<T> sendRequestWithOpenChannel(Request request, ProxyServer proxy, NettyResponseFuture<T> future, AsyncHandler<T> asyncHandler, Channel channel) {
    if (asyncHandler instanceof AsyncHandlerExtensions)
        AsyncHandlerExtensions.class.cast(asyncHandler).onConnectionPooled(channel);
    future.setChannelState(ChannelState.POOLED);
    future.attachChannel(channel, false);
    LOGGER.debug("Using open Channel {} for {} '{}'", channel, future.getNettyRequest().getHttpRequest().getMethod(), future.getNettyRequest().getHttpRequest().getUri());
    if (Channels.isChannelValid(channel)) {
        Channels.setAttribute(channel, future);
        writeRequest(future, channel);
    } else {
        // bad luck, the channel was closed in-between
        // there's a very good chance onClose was already notified but the
        // future wasn't already registered
        handleUnexpectedClosedChannel(channel, future);
    }
    return future;
}
/**
 * Opens a brand new connection: applies per-connection auth headers, preempts
 * a pool slot (unless reclaiming an existing one), resolves the host
 * asynchronously and then connects; any failure aborts the future.
 */
private <T> ListenableFuture<T> sendRequestWithNewChannel(//
        Request request,//
        ProxyServer proxy,//
        NettyResponseFuture<T> future,//
        AsyncHandler<T> asyncHandler,//
        boolean reclaimCache) {
    // some headers are only set when performing the first request
    HttpHeaders headers = future.getNettyRequest().getHttpRequest().headers();
    Realm realm = future.getRealm();
    Realm proxyRealm = future.getProxyRealm();
    requestFactory.addAuthorizationHeader(headers, perConnectionAuthorizationHeader(request, proxy, realm));
    requestFactory.setProxyAuthorizationHeader(headers, perConnectionProxyAuthorizationHeader(request, proxyRealm));
    // NTLM is excluded from preemptive auth here
    future.getInAuth().set(realm != null && realm.isUsePreemptiveAuth() && realm.getScheme() != AuthScheme.NTLM);
    future.getInProxyAuth().set(proxyRealm != null && proxyRealm.isUsePreemptiveAuth() && proxyRealm.getScheme() != AuthScheme.NTLM);
    // Do not throw an exception when we need an extra connection for a redirect
    // FIXME why? This violate the max connection per host handling, right?
    Bootstrap bootstrap = channelManager.getBootstrap(request.getUri(), proxy);
    Object partitionKey = future.getPartitionKey();
    final boolean channelPreempted = !reclaimCache;
    try {
        // Do not throw an exception when we need an extra connection for a
        // redirect.
        if (channelPreempted) {
            // if there's an exception here, channel wasn't preempted and resolve won't happen
            channelManager.preemptChannel(partitionKey);
        }
    } catch (Throwable t) {
        abort(null, future, getCause(t));
        // exit and don't try to resolve address
        return future;
    }
    RequestHostnameResolver.INSTANCE.resolve(request, proxy, asyncHandler)//
            .addListener(new SimpleFutureListener<List<InetSocketAddress>>() {
                @Override
                protected void onSuccess(List<InetSocketAddress> addresses) {
                    NettyConnectListener<T> connectListener = new NettyConnectListener<>(future, NettyRequestSender.this, channelManager, channelPreempted, partitionKey);
                    new NettyChannelConnector(request.getLocalAddress(), addresses, asyncHandler).connect(bootstrap, connectListener);
                }
                @Override
                protected void onFailure(Throwable cause) {
                    // release the pool slot grabbed above before failing the future
                    if (channelPreempted) {
                        channelManager.abortChannelPreemption(partitionKey);
                    }
                    abort(null, future, getCause(cause));
                }
            });
    return future;
}
/**
 * Creates a fresh response future, honouring "Expect: 100-continue" by
 * flagging the future so the body write is deferred.
 */
private <T> NettyResponseFuture<T> newNettyResponseFuture(Request request, AsyncHandler<T> asyncHandler, NettyRequest nettyRequest, ProxyServer proxyServer) {
    NettyResponseFuture<T> future = new NettyResponseFuture<>(//
            request,//
            asyncHandler,//
            nettyRequest,//
            config.getMaxRequestRetry(),//
            request.getChannelPoolPartitioning(),//
            proxyServer);
    String expectHeader = request.getHeaders().get(HttpHeaders.Names.EXPECT);
    if (expectHeader != null && expectHeader.equalsIgnoreCase(HttpHeaders.Values.CONTINUE))
        future.setDontWriteBodyBecauseExpectContinue(true);
    return future;
}
/**
 * Writes the headers (and the body, unless deferred by Expect: 100-continue or
 * this being a CONNECT) on the channel, then schedules timeouts. A channel
 * that is already dead is silently ignored: channelInactive drives recovery.
 */
public <T> void writeRequest(NettyResponseFuture<T> future, Channel channel) {
    NettyRequest nettyRequest = future.getNettyRequest();
    HttpRequest httpRequest = nettyRequest.getHttpRequest();
    AsyncHandler<T> handler = future.getAsyncHandler();
    // if the channel is dead because it was pooled and the remote
    // server decided to close it,
    // we just let it go and the channelInactive do its work
    if (!Channels.isChannelValid(channel))
        return;
    try {
        if (handler instanceof TransferCompletionHandler)
            configureTransferAdapter(handler, httpRequest);
        boolean writeBody = !future.isDontWriteBodyBecauseExpectContinue() && httpRequest.getMethod() != HttpMethod.CONNECT && nettyRequest.getBody() != null;
        if (!future.isHeadersAlreadyWrittenOnContinue()) {
            if (future.getAsyncHandler() instanceof AsyncHandlerExtensions)
                AsyncHandlerExtensions.class.cast(future.getAsyncHandler()).onRequestSend(nettyRequest);
            // flush immediately only when no body follows; the body write flushes otherwise
            ChannelProgressivePromise promise = channel.newProgressivePromise();
            ChannelFuture f = writeBody ? channel.write(httpRequest, promise) : channel.writeAndFlush(httpRequest, promise);
            f.addListener(new ProgressListener(future.getAsyncHandler(), future, true, 0L));
        }
        if (writeBody)
            nettyRequest.getBody().write(channel, future);
        // don't bother scheduling timeouts if channel became invalid
        if (Channels.isChannelValid(channel))
            scheduleTimeouts(future);
    } catch (Exception e) {
        LOGGER.error("Can't write request", e);
        abort(channel, future, e);
    }
}
/**
 * Hands a mutable snapshot of the outgoing headers to a
 * TransferCompletionHandler before the request is written.
 */
private void configureTransferAdapter(AsyncHandler<?> handler, HttpRequest httpRequest) {
    TransferCompletionHandler transferHandler = TransferCompletionHandler.class.cast(handler);
    HttpHeaders snapshot = new DefaultHttpHeaders(false);
    snapshot.set(httpRequest.headers());
    transferHandler.headers(snapshot);
}
/**
 * Schedules the request timeout and, when useful, the read timeout for this
 * future. The read timeout is only skipped when a configured request timeout
 * would always fire first anyway.
 */
private void scheduleTimeouts(NettyResponseFuture<?> nettyResponseFuture) {
    nettyResponseFuture.touch();
    int requestTimeoutInMs = requestTimeout(config, nettyResponseFuture.getTargetRequest());
    TimeoutsHolder timeoutsHolder = new TimeoutsHolder();
    if (requestTimeoutInMs != -1) {
        Timeout requestTimeout = newTimeout(new RequestTimeoutTimerTask(nettyResponseFuture, this, timeoutsHolder, requestTimeoutInMs), requestTimeoutInMs);
        timeoutsHolder.requestTimeout = requestTimeout;
    }
    int readTimeoutValue = config.getReadTimeout();
    // BUG FIX: `readTimeoutValue < requestTimeoutInMs` alone silently disabled
    // the read timeout whenever no request timeout was configured
    // (requestTimeoutInMs == -1). Schedule it in that case as well; it is still
    // skipped when a shorter-or-equal request timeout would fire first.
    if (readTimeoutValue != -1 && (requestTimeoutInMs == -1 || readTimeoutValue < requestTimeoutInMs)) {
        // no need to schedule a readTimeout if the requestTimeout happens first
        Timeout readTimeout = newTimeout(new ReadTimeoutTimerTask(nettyResponseFuture, this, timeoutsHolder, requestTimeoutInMs, readTimeoutValue), readTimeoutValue);
        timeoutsHolder.readTimeout = readTimeout;
    }
    nettyResponseFuture.setTimeoutsHolder(timeoutsHolder);
}
/** Schedules {@code task} on the shared timer to run after {@code delay} ms. */
public Timeout newTimeout(TimerTask task, long delay) {
    return nettyTimer.newTimeout(task, delay, TimeUnit.MILLISECONDS);
}
/**
 * Closes the channel (if any) and fails the future with {@code t}, unless the
 * future has already completed.
 */
public void abort(Channel channel, NettyResponseFuture<?> future, Throwable t) {
    if (channel != null)
        channelManager.closeChannel(channel);
    if (!future.isDone()) {
        future.setChannelState(ChannelState.CLOSED);
        LOGGER.debug("Aborting Future {}\n", future);
        LOGGER.debug(t.getMessage(), t);
        future.abort(t);
    }
}
/**
 * Invoked when a channel dies without the server having completed the exchange.
 * A future that is already done just has its channel closed; otherwise a replay
 * is attempted and, failing that, the future is aborted as remotely closed.
 */
public void handleUnexpectedClosedChannel(Channel channel, NettyResponseFuture<?> future) {
    if (!future.isDone()) {
        if (!retry(future)) {
            abort(channel, future, RemotelyClosedException.INSTANCE);
        }
        return;
    }
    channelManager.closeChannel(channel);
}
/**
 * Tries to replay the future's current request (typically after the channel
 * closed unexpectedly).
 *
 * @return true if a new send was initiated; false if the client is closed,
 *         the future cannot be replayed, or the resend itself failed
 */
public boolean retry(NettyResponseFuture<?> future) {
    if (isClosed())
        return false;
    if (future.canBeReplayed()) {
        future.setChannelState(ChannelState.RECONNECTED);
        // reset so the handler's status callback can fire again on the replay
        future.getAndSetStatusReceived(false);
        LOGGER.debug("Trying to recover request {}\n", future.getNettyRequest().getHttpRequest());
        if (future.getAsyncHandler() instanceof AsyncHandlerExtensions) {
            AsyncHandlerExtensions.class.cast(future.getAsyncHandler()).onRetry();
        }
        try {
            sendNextRequest(future.getCurrentRequest(), future);
            return true;
        } catch (Exception e) {
            // the resend itself failed: give up and fail the future
            abort(future.channel(), future, e);
            return false;
        }
    } else {
        LOGGER.debug("Unable to recover future {}\n", future);
        return false;
    }
}
/**
 * Runs the configured {@link IOExceptionFilter}s over the failed request and,
 * if the resulting filter context asks for it and the future is replayable,
 * replays the request.
 *
 * @return true if the request was replayed
 */
public boolean applyIoExceptionFiltersAndReplayRequest(NettyResponseFuture<?> future, IOException e, Channel channel) {
    boolean replayed = false;
    @SuppressWarnings({ "unchecked", "rawtypes" })
    FilterContext<?> fc = new FilterContext.FilterContextBuilder().asyncHandler(future.getAsyncHandler()).request(future.getCurrentRequest()).ioException(e).build();
    for (IOExceptionFilter asyncFilter : config.getIoExceptionFilters()) {
        try {
            fc = asyncFilter.filter(fc);
            assertNotNull(fc, "filterContext");
        } catch (FilterException efe) {
            // NOTE(review): the loop keeps running after abort — presumably so the
            // remaining filters still see the (previous) context; confirm intent
            abort(channel, future, efe);
        }
    }
    if (fc.replayRequest() && future.canBeReplayed()) {
        replayRequest(future, fc, channel);
        replayed = true;
    }
    return replayed;
}
/**
 * Resends a request on behalf of an existing future (redirect, retry, auth
 * round-trip), always allowing channel reclaim.
 */
public <T> void sendNextRequest(final Request request, final NettyResponseFuture<T> future) {
    // BUG FIX (race condition on connection reuse): clear the channel attribute
    // first so that, if the current channel gets closed concurrently, its close
    // handler doesn't try to recover the previous future while it is resent.
    Channels.setAttribute(future.channel(), null);
    sendRequest(request, future.getAsyncHandler(), future, true);
}
/**
 * Ensures the handler type and URI scheme agree: a WebSocketUpgradeHandler
 * requires a ws/wss GET request, and a ws/wss URI requires such a handler.
 *
 * @throws IllegalArgumentException on any mismatch
 */
private void validateWebSocketRequest(Request request, AsyncHandler<?> asyncHandler) {
    Uri uri = request.getUri();
    boolean isWs = uri.isWebSocket();
    if (asyncHandler instanceof WebSocketUpgradeHandler) {
        if (!isWs)
            throw new IllegalArgumentException("WebSocketUpgradeHandler but scheme isn't ws or wss: " + uri.getScheme());
        else if (!request.getMethod().equals(GET))
            throw new IllegalArgumentException("WebSocketUpgradeHandler but method isn't GET: " + request.getMethod());
    } else if (isWs) {
        throw new IllegalArgumentException("No WebSocketUpgradeHandler but scheme is " + uri.getScheme());
    }
}
/**
 * Polls the channel pool for a keep-alive channel matching this request's
 * partitioning (uri, virtual host, proxy), notifying extension handlers of
 * the attempt.
 *
 * @return a pooled channel, or null when the pool has none
 */
private Channel pollPooledChannel(Request request, ProxyServer proxy, AsyncHandler<?> asyncHandler) {
    if (asyncHandler instanceof AsyncHandlerExtensions)
        AsyncHandlerExtensions.class.cast(asyncHandler).onConnectionPoolAttempt();
    Uri uri = request.getUri();
    String virtualHost = request.getVirtualHost();
    final Channel channel = channelManager.poll(uri, virtualHost, proxy, request.getChannelPoolPartitioning());
    if (channel != null) {
        LOGGER.debug("Using polled Channel {}\n for uri {}\n", channel, uri);
    }
    return channel;
}
/**
 * Replays the request produced by a filter context: resets the future to a
 * fresh state, drains the old channel back to the pool and resends.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public void replayRequest(final NettyResponseFuture<?> future, FilterContext fc, Channel channel) {
    Request newRequest = fc.getRequest();
    future.setAsyncHandler(fc.getAsyncHandler());
    future.setChannelState(ChannelState.NEW);
    future.touch();
    LOGGER.debug("\n\nReplaying Request {}\n for Future {}\n", newRequest, future);
    if (future.getAsyncHandler() instanceof AsyncHandlerExtensions)
        AsyncHandlerExtensions.class.cast(future.getAsyncHandler()).onRetry();
    // return the old channel to the pool once drained; the replay gets its own
    channelManager.drainChannelAndOffer(channel, future);
    sendNextRequest(newRequest, future);
}
/** @return true once the owning client has been closed */
public boolean isClosed() {
    return closed.get();
}
/**
 * Builds a callback that, when invoked, sends {@code nextRequest} on behalf of
 * the given future.
 */
public final Callback newExecuteNextRequestCallback(final NettyResponseFuture<?> future, final Request nextRequest) {
    return new Callback(future) {
        @Override
        public void call() {
            sendNextRequest(nextRequest, future);
        }
    };
}
/**
 * Attaches a callback to the channel so that, once the channel is drained,
 * {@code nextRequest} is sent for the future.
 */
public void drainChannelAndExecuteNextRequest(final Channel channel, final NettyResponseFuture<?> future, Request nextRequest) {
    Channels.setAttribute(channel, newExecuteNextRequestCallback(future, nextRequest));
}
}
|
Fix race condition on connection reuse, close #1059
|
client/src/main/java/org/asynchttpclient/netty/request/NettyRequestSender.java
|
Fix race condition on connection reuse, close #1059
|
|
Java
|
apache-2.0
|
218b1b33ffe83cf2e330a2aa90685d0c14547a3d
| 0
|
ChetnaChaudhari/hadoop,lukmajercak/hadoop,ctrezzo/hadoop,plusplusjiajia/hadoop,soumabrata-chakraborty/hadoop,plusplusjiajia/hadoop,ronny-macmaster/hadoop,huafengw/hadoop,Ethanlm/hadoop,wwjiang007/hadoop,apurtell/hadoop,apache/hadoop,lukmajercak/hadoop,xiao-chen/hadoop,JingchengDu/hadoop,steveloughran/hadoop,JingchengDu/hadoop,ucare-uchicago/hadoop,szegedim/hadoop,ronny-macmaster/hadoop,lukmajercak/hadoop,ChetnaChaudhari/hadoop,plusplusjiajia/hadoop,soumabrata-chakraborty/hadoop,xiao-chen/hadoop,wenxinhe/hadoop,dennishuo/hadoop,Ethanlm/hadoop,GeLiXin/hadoop,plusplusjiajia/hadoop,ronny-macmaster/hadoop,dennishuo/hadoop,apache/hadoop,szegedim/hadoop,Ethanlm/hadoop,nandakumar131/hadoop,szegedim/hadoop,ucare-uchicago/hadoop,mapr/hadoop-common,nandakumar131/hadoop,dierobotsdie/hadoop,mapr/hadoop-common,wenxinhe/hadoop,JingchengDu/hadoop,plusplusjiajia/hadoop,dierobotsdie/hadoop,mapr/hadoop-common,wwjiang007/hadoop,steveloughran/hadoop,GeLiXin/hadoop,apache/hadoop,Ethanlm/hadoop,apache/hadoop,mapr/hadoop-common,dierobotsdie/hadoop,szegedim/hadoop,apurtell/hadoop,ronny-macmaster/hadoop,wwjiang007/hadoop,soumabrata-chakraborty/hadoop,mapr/hadoop-common,huafengw/hadoop,littlezhou/hadoop,littlezhou/hadoop,JingchengDu/hadoop,apurtell/hadoop,dennishuo/hadoop,apurtell/hadoop,lukmajercak/hadoop,apurtell/hadoop,wenxinhe/hadoop,Ethanlm/hadoop,GeLiXin/hadoop,ctrezzo/hadoop,979969786/hadoop,Ethanlm/hadoop,steveloughran/hadoop,littlezhou/hadoop,plusplusjiajia/hadoop,ChetnaChaudhari/hadoop,wwjiang007/hadoop,steveloughran/hadoop,apache/hadoop,ChetnaChaudhari/hadoop,szegedim/hadoop,huafengw/hadoop,GeLiXin/hadoop,apache/hadoop,ucare-uchicago/hadoop,ChetnaChaudhari/hadoop,plusplusjiajia/hadoop,ronny-macmaster/hadoop,ronny-macmaster/hadoop,xiao-chen/hadoop,mapr/hadoop-common,lukmajercak/hadoop,nandakumar131/hadoop,wenxinhe/hadoop,wwjiang007/hadoop,ucare-uchicago/hadoop,lukmajercak/hadoop,apurtell/hadoop,ChetnaChaudhari/hadoop,ctrezzo/hadoop,979969786/hadoop,szegedim/hadoop,xiao-chen/hadoop,d
ennishuo/hadoop,wwjiang007/hadoop,littlezhou/hadoop,xiao-chen/hadoop,GeLiXin/hadoop,soumabrata-chakraborty/hadoop,dierobotsdie/hadoop,huafengw/hadoop,soumabrata-chakraborty/hadoop,JingchengDu/hadoop,ChetnaChaudhari/hadoop,steveloughran/hadoop,979969786/hadoop,littlezhou/hadoop,979969786/hadoop,nandakumar131/hadoop,szegedim/hadoop,979969786/hadoop,979969786/hadoop,xiao-chen/hadoop,wenxinhe/hadoop,wenxinhe/hadoop,soumabrata-chakraborty/hadoop,xiao-chen/hadoop,Ethanlm/hadoop,soumabrata-chakraborty/hadoop,GeLiXin/hadoop,979969786/hadoop,ctrezzo/hadoop,ronny-macmaster/hadoop,dierobotsdie/hadoop,JingchengDu/hadoop,dierobotsdie/hadoop,huafengw/hadoop,huafengw/hadoop,lukmajercak/hadoop,dennishuo/hadoop,ctrezzo/hadoop,huafengw/hadoop,ctrezzo/hadoop,dennishuo/hadoop,littlezhou/hadoop,ucare-uchicago/hadoop,mapr/hadoop-common,apache/hadoop,apurtell/hadoop,JingchengDu/hadoop,wwjiang007/hadoop,dennishuo/hadoop,wenxinhe/hadoop,dierobotsdie/hadoop,ucare-uchicago/hadoop,GeLiXin/hadoop,ucare-uchicago/hadoop,steveloughran/hadoop,nandakumar131/hadoop,ctrezzo/hadoop,steveloughran/hadoop,littlezhou/hadoop,nandakumar131/hadoop,nandakumar131/hadoop
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.yarn.api.ContainerManagementProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.SerializedException;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.client.NMProxy;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.security.NMTokenIdentifier;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.security.NMTokenSecretManagerInNM;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.MockRMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.security.BaseNMTokenSecretManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class TestContainerManagerSecurity extends KerberosSecurityTestcase {
static Log LOG = LogFactory.getLog(TestContainerManagerSecurity.class);
static final RecordFactory recordFactory = RecordFactoryProvider
.getRecordFactory(null);
private static MiniYARNCluster yarnCluster;
private static final File testRootDir = new File("target",
TestContainerManagerSecurity.class.getName() + "-root");
private static File httpSpnegoKeytabFile = new File(testRootDir,
"httpSpnegoKeytabFile.keytab");
private static String httpSpnegoPrincipal = "HTTP/localhost@EXAMPLE.COM";
private Configuration conf;
@Before
public void setUp() throws Exception {
    // fresh working dir plus a SPNEGO keytab backed by the in-process KDC
    testRootDir.mkdirs();
    httpSpnegoKeytabFile.deleteOnExit();
    getKdc().createPrincipal(httpSpnegoKeytabFile, httpSpnegoPrincipal);
    UserGroupInformation.setConfiguration(conf);
    // mini cluster with 1 NM, 1 local dir, 1 log dir
    yarnCluster =
        new MiniYARNCluster(TestContainerManagerSecurity.class.getName(), 1, 1,
            1);
    yarnCluster.init(conf);
    yarnCluster.start();
}
@After
public void tearDown() {
    if (yarnCluster != null) {
        yarnCluster.stop();
        yarnCluster = null;
    }
    // BUG FIX: File.delete() fails silently on a non-empty directory, and
    // testRootDir contains at least the SPNEGO keytab, so the test root leaked
    // between runs. Delete the tree recursively instead.
    deleteRecursively(testRootDir);
}

/** Best-effort recursive delete of {@code f} and everything under it. */
private static void deleteRecursively(File f) {
    if (f == null) {
        return;
    }
    File[] children = f.listFiles();
    if (children != null) {
        for (File child : children) {
            deleteRecursively(child);
        }
    }
    f.delete();
}
/*
 * Run two tests: one with no security ("simple") and one with "Secure".
 * The first parameter is just the test name to make it easier to debug
 * and to give details in say an IDE. The second is the configuration
 * object to use.
 */
@Parameters(name = "{0}")
public static Collection<Object[]> configs() {
    Configuration configurationWithoutSecurity = new Configuration();
    configurationWithoutSecurity.set(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
    // kerberos variant points both RM and NM web apps at the SPNEGO
    // principal/keytab created in setUp()
    Configuration configurationWithSecurity = new Configuration();
    configurationWithSecurity.set(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    configurationWithSecurity.set(
        YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal);
    configurationWithSecurity.set(
        YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
        httpSpnegoKeytabFile.getAbsolutePath());
    configurationWithSecurity.set(
        YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal);
    configurationWithSecurity.set(
        YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
        httpSpnegoKeytabFile.getAbsolutePath());
    return Arrays.asList(new Object[][] {
        {"Simple", configurationWithoutSecurity},
        {"Secure", configurationWithSecurity}});
}
/**
 * @param name test label ("Simple" or "Secure"), used only for logging
 * @param conf configuration chosen by the {@code configs()} parameter factory
 */
public TestContainerManagerSecurity(String name, Configuration conf) {
    LOG.info("RUNNING TEST " + name);
    // generous AM expiry so slow test runs don't expire attempts mid-test
    conf.setLong(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 100000L);
    this.conf = conf;
}
@Test
public void testContainerManager() throws Exception {
    // TestNMTokens.
    testNMTokens(conf);
    // Testing for container token tampering
    testContainerToken(conf);
    // Testing for container token tampering with epoch
    testContainerTokenWithEpoch(conf);
}
  /**
   * Run a series of tests using different NMTokens. A configuration is
   * provided for managing creating of the tokens and rpc.
   * <p>
   * Scenarios exercised in order: missing token, token minted by a foreign
   * secret manager, token for the wrong node, successful start with valid
   * tokens, new-version token compatibility, relaunch rejection after
   * completion, NMToken master-key rollover, and cross-attempt token reuse.
   */
  private void testNMTokens(Configuration testConf) throws Exception {
    NMTokenSecretManagerInRM nmTokenSecretManagerRM =
        yarnCluster.getResourceManager().getRMContext()
          .getNMTokenSecretManager();
    NMTokenSecretManagerInNM nmTokenSecretManagerNM =
        yarnCluster.getNodeManager(0).getNMContext().getNMTokenSecretManager();
    RMContainerTokenSecretManager containerTokenSecretManager =
        yarnCluster.getResourceManager().getRMContext().
            getContainerTokenSecretManager();
    NodeManager nm = yarnCluster.getNodeManager(0);
    waitForNMToReceiveNMTokenKey(nmTokenSecretManagerNM);
    // Both id should be equal.
    Assert.assertEquals(nmTokenSecretManagerNM.getCurrentKey().getKeyId(),
        nmTokenSecretManagerRM.getCurrentKey().getKeyId());
    /*
     * Below cases should be tested.
     * 1) If Invalid NMToken is used then it should be rejected.
     * 2) If valid NMToken but belonging to another Node is used then that
     * too should be rejected.
     * 3) NMToken for say appAttempt-1 is used for starting/stopping/retrieving
     * status for container with containerId for say appAttempt-2 should
     * be rejected.
     * 4) After start container call is successful nmtoken should have been
     * saved in NMTokenSecretManagerInNM.
     * 5) If start container call was successful (no matter if container is
     * still running or not), appAttempt->NMToken should be present in
     * NMTokenSecretManagerInNM's cache. Any future getContainerStatus call
     * for containerId belonging to that application attempt using
     * applicationAttempt's older nmToken should not get any invalid
     * nmToken error. (This can be best tested if we roll over NMToken
     * master key twice).
     */
    YarnRPC rpc = YarnRPC.create(testConf);
    String user = "test";
    Resource r = Resource.newInstance(1024, 1);
    // Register a mock app with the RM so token checks can resolve it.
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    MockRMApp m = new MockRMApp(appId.getId(), appId.getClusterTimestamp(),
        RMAppState.NEW);
    yarnCluster.getResourceManager().getRMContext().getRMApps().put(appId, m);
    ApplicationAttemptId validAppAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    ContainerId validContainerId =
        ContainerId.newContainerId(validAppAttemptId, 0);
    NodeId validNode = yarnCluster.getNodeManager(0).getNMContext().getNodeId();
    NodeId invalidNode = NodeId.newInstance("InvalidHost", 1234);
    org.apache.hadoop.yarn.api.records.Token validNMToken =
        nmTokenSecretManagerRM.createNMToken(validAppAttemptId, validNode, user);
    org.apache.hadoop.yarn.api.records.Token validContainerToken =
        containerTokenSecretManager.createContainerToken(validContainerId,
            0, validNode, user, r, Priority.newInstance(10), 1234);
    ContainerTokenIdentifier identifier =
        BuilderUtils.newContainerTokenIdentifier(validContainerToken);
    // Priority and creation time must round-trip through the token.
    Assert.assertEquals(Priority.newInstance(10), identifier.getPriority());
    Assert.assertEquals(1234, identifier.getCreationTime());
    StringBuilder sb;
    // testInvalidNMToken ... creating NMToken using different secret manager.
    NMTokenSecretManagerInRM tempManager = new NMTokenSecretManagerInRM(testConf);
    tempManager.rollMasterKey();
    do {
      tempManager.rollMasterKey();
      tempManager.activateNextMasterKey();
      // Making sure key id is different.
    } while (tempManager.getCurrentKey().getKeyId() == nmTokenSecretManagerRM
        .getCurrentKey().getKeyId());
    // Testing that NM rejects the requests when we don't send any token.
    if (UserGroupInformation.isSecurityEnabled()) {
      sb = new StringBuilder("Client cannot authenticate via:[TOKEN]");
    } else {
      sb =
          new StringBuilder(
              "SIMPLE authentication is not enabled. Available:[TOKEN]");
    }
    String errorMsg = testStartContainer(rpc, validAppAttemptId, validNode,
        validContainerToken, null, true);
    Assert.assertTrue("In calling " + validNode + " exception was '"
        + errorMsg + "' but doesn't contain '"
        + sb.toString() + "'", errorMsg.contains(sb.toString()));
    // Token minted by a foreign secret manager must be rejected as illegal.
    org.apache.hadoop.yarn.api.records.Token invalidNMToken =
        tempManager.createNMToken(validAppAttemptId, validNode, user);
    sb = new StringBuilder("Given NMToken for application : ");
    sb.append(validAppAttemptId.toString())
      .append(" seems to have been generated illegally.");
    Assert.assertTrue(sb.toString().contains(
        testStartContainer(rpc, validAppAttemptId, validNode,
            validContainerToken, invalidNMToken, true)));
    // valid NMToken but belonging to other node
    invalidNMToken =
        nmTokenSecretManagerRM.createNMToken(validAppAttemptId, invalidNode,
            user);
    sb = new StringBuilder("Given NMToken for application : ");
    sb.append(validAppAttemptId)
      .append(" is not valid for current node manager.expected : ")
      .append(validNode.toString())
      .append(" found : ").append(invalidNode.toString());
    Assert.assertTrue(sb.toString().contains(
        testStartContainer(rpc, validAppAttemptId, validNode,
            validContainerToken, invalidNMToken, true)));
    // using correct tokens. nmtoken for app attempt should get saved.
    testConf.setInt(YarnConfiguration.RM_CONTAINER_ALLOC_EXPIRY_INTERVAL_MS,
        4 * 60 * 1000);
    validContainerToken =
        containerTokenSecretManager.createContainerToken(validContainerId,
            0, validNode, user, r, Priority.newInstance(0), 0);
    Assert.assertTrue(testStartContainer(rpc, validAppAttemptId, validNode,
        validContainerToken, validNMToken, false).isEmpty());
    Assert.assertTrue(nmTokenSecretManagerNM
        .isAppAttemptNMTokenKeyPresent(validAppAttemptId));
    // using a new compatible version nmtoken, expect container can be started
    // successfully.
    ApplicationAttemptId validAppAttemptId2 =
        ApplicationAttemptId.newInstance(appId, 2);
    ContainerId validContainerId2 =
        ContainerId.newContainerId(validAppAttemptId2, 0);
    org.apache.hadoop.yarn.api.records.Token validContainerToken2 =
        containerTokenSecretManager.createContainerToken(validContainerId2,
            0, validNode, user, r, Priority.newInstance(0), 0);
    org.apache.hadoop.yarn.api.records.Token validNMToken2 =
        nmTokenSecretManagerRM.createNMToken(validAppAttemptId2, validNode, user);
    // First, get a new NMTokenIdentifier.
    NMTokenIdentifier newIdentifier = new NMTokenIdentifier();
    byte[] tokenIdentifierContent = validNMToken2.getIdentifier().array();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(tokenIdentifierContent, tokenIdentifierContent.length);
    newIdentifier.readFields(dib);
    // Then, generate a new version NMTokenIdentifier (NMTokenIdentifierNewForTest)
    // with additional field of message.
    NMTokenIdentifierNewForTest newVersionIdentifier =
        new NMTokenIdentifierNewForTest(newIdentifier, "message");
    // check new version NMTokenIdentifier has correct info.
    Assert.assertEquals("The ApplicationAttemptId is changed after set to " +
        "newVersionIdentifier", validAppAttemptId2.getAttemptId(),
        newVersionIdentifier.getApplicationAttemptId().getAttemptId()
        );
    Assert.assertEquals("The message is changed after set to newVersionIdentifier",
        "message", newVersionIdentifier.getMessage());
    Assert.assertEquals("The NodeId is changed after set to newVersionIdentifier",
        validNode, newVersionIdentifier.getNodeId());
    // create new Token based on new version NMTokenIdentifier.
    org.apache.hadoop.yarn.api.records.Token newVersionedNMToken =
        BaseNMTokenSecretManager.newInstance(
            nmTokenSecretManagerRM.retrievePassword(newVersionIdentifier),
            newVersionIdentifier);
    // Verify startContainer is successful and no exception is thrown.
    Assert.assertTrue(testStartContainer(rpc, validAppAttemptId2, validNode,
        validContainerToken2, newVersionedNMToken, false).isEmpty());
    Assert.assertTrue(nmTokenSecretManagerNM
        .isAppAttemptNMTokenKeyPresent(validAppAttemptId2));
    //Now lets wait till container finishes and is removed from node manager.
    waitForContainerToFinishOnNM(validContainerId);
    sb = new StringBuilder("Attempt to relaunch the same container with id ");
    sb.append(validContainerId);
    Assert.assertTrue(testStartContainer(rpc, validAppAttemptId, validNode,
        validContainerToken, validNMToken, true).contains(sb.toString()));
    // Container is removed from node manager's memory by this time.
    // trying to stop the container. It should not throw any exception.
    testStopContainer(rpc, validAppAttemptId, validNode, validContainerId,
        validNMToken, false);
    // Rolling over master key twice so that we can check whether older keys
    // are used for authentication.
    rollNMTokenMasterKey(nmTokenSecretManagerRM, nmTokenSecretManagerNM);
    // Key rolled over once.. rolling over again
    rollNMTokenMasterKey(nmTokenSecretManagerRM, nmTokenSecretManagerNM);
    // trying get container status. Now saved nmToken should be used for
    // authentication... It should complain saying container was recently
    // stopped.
    sb = new StringBuilder("Container ");
    sb.append(validContainerId);
    sb.append(" was recently stopped on node manager");
    Assert.assertTrue(testGetContainer(rpc, validAppAttemptId, validNode,
        validContainerId, validNMToken, true).contains(sb.toString()));
    // Now lets remove the container from nm-memory
    nm.getNodeStatusUpdater().clearFinishedContainersFromCache();
    // This should fail as container is removed from recently tracked finished
    // containers.
    sb = new StringBuilder("Container ");
    sb.append(validContainerId.toString());
    sb.append(" is not handled by this NodeManager");
    Assert.assertTrue(testGetContainer(rpc, validAppAttemptId, validNode,
        validContainerId, validNMToken, false).contains(sb.toString()));
    // using appAttempt-1 NMtoken for launching container for appAttempt-2
    // should succeed.
    ApplicationAttemptId attempt2 = ApplicationAttemptId.newInstance(appId, 2);
    Token attempt1NMToken =
        nmTokenSecretManagerRM
          .createNMToken(validAppAttemptId, validNode, user);
    org.apache.hadoop.yarn.api.records.Token newContainerToken =
        containerTokenSecretManager.createContainerToken(
          ContainerId.newContainerId(attempt2, 1), 0, validNode, user, r,
            Priority.newInstance(0), 0);
    Assert.assertTrue(testStartContainer(rpc, attempt2, validNode,
        newContainerToken, attempt1NMToken, false).isEmpty());
  }
private void waitForContainerToFinishOnNM(ContainerId containerId) {
Context nmContext = yarnCluster.getNodeManager(0).getNMContext();
int interval = 4 * 60; // Max time for container token to expire.
Assert.assertNotNull(nmContext.getContainers().containsKey(containerId));
// Get the container first, as it may be removed from the Context
// by asynchronous calls.
// This was leading to a flakey test as otherwise the container could
// be removed and end up null.
Container waitContainer = nmContext.getContainers().get(containerId);
while ((interval-- > 0)
&& !waitContainer.cloneAndGetContainerStatus()
.getState().equals(ContainerState.COMPLETE)) {
try {
LOG.info("Waiting for " + containerId + " to complete.");
Thread.sleep(1000);
} catch (InterruptedException e) {
}
}
// Normally, Containers will be removed from NM context after they are
// explicitly acked by RM. Now, manually remove it for testing.
yarnCluster.getNodeManager(0).getNodeStatusUpdater()
.addCompletedContainer(containerId);
LOG.info("Removing container from NMContext, containerID = " + containerId);
nmContext.getContainers().remove(containerId);
}
protected void waitForNMToReceiveNMTokenKey(
NMTokenSecretManagerInNM nmTokenSecretManagerNM)
throws InterruptedException {
int attempt = 60;
while (nmTokenSecretManagerNM.getNodeId() == null && attempt-- > 0) {
Thread.sleep(2000);
}
}
  /**
   * Rolls the NMToken master key on the RM, waits (up to ~40s) for the NM to
   * pick up the new key, activates it on the RM, and asserts both sides then
   * agree on the current key id.
   */
  protected void rollNMTokenMasterKey(
      NMTokenSecretManagerInRM nmTokenSecretManagerRM,
      NMTokenSecretManagerInNM nmTokenSecretManagerNM) throws Exception {
    int oldKeyId = nmTokenSecretManagerRM.getCurrentKey().getKeyId();
    nmTokenSecretManagerRM.rollMasterKey();
    int interval = 40;
    // Poll until the NM reports a key id different from the pre-roll one.
    while (nmTokenSecretManagerNM.getCurrentKey().getKeyId() == oldKeyId
        && interval-- > 0) {
      Thread.sleep(1000);
    }
    nmTokenSecretManagerRM.activateNextMasterKey();
    Assert.assertTrue((nmTokenSecretManagerNM.getCurrentKey().getKeyId()
        == nmTokenSecretManagerRM.getCurrentKey().getKeyId()));
  }
private String testStopContainer(YarnRPC rpc,
ApplicationAttemptId appAttemptId, NodeId nodeId,
ContainerId containerId, Token nmToken, boolean isExceptionExpected) {
try {
stopContainer(rpc, nmToken,
Arrays.asList(new ContainerId[] {containerId}), appAttemptId,
nodeId);
if (isExceptionExpected) {
fail("Exception was expected!!");
}
return "";
} catch (Exception e) {
e.printStackTrace();
return e.getMessage();
}
}
private String testGetContainer(YarnRPC rpc,
ApplicationAttemptId appAttemptId, NodeId nodeId,
ContainerId containerId,
org.apache.hadoop.yarn.api.records.Token nmToken,
boolean isExceptionExpected) {
try {
getContainerStatus(rpc, nmToken, containerId, appAttemptId, nodeId,
isExceptionExpected);
if (isExceptionExpected) {
fail("Exception was expected!!");
}
return "";
} catch (Exception e) {
e.printStackTrace();
return e.getMessage();
}
}
private String testStartContainer(YarnRPC rpc,
ApplicationAttemptId appAttemptId, NodeId nodeId,
org.apache.hadoop.yarn.api.records.Token containerToken,
org.apache.hadoop.yarn.api.records.Token nmToken,
boolean isExceptionExpected) {
try {
startContainer(rpc, nmToken, containerToken, nodeId,
appAttemptId.toString());
if (isExceptionExpected){
fail("Exception was expected!!");
}
return "";
} catch (Exception e) {
e.printStackTrace();
return e.getMessage();
}
}
private void stopContainer(YarnRPC rpc, Token nmToken,
List<ContainerId> containerId, ApplicationAttemptId appAttemptId,
NodeId nodeId) throws Exception {
StopContainersRequest request =
StopContainersRequest.newInstance(containerId);
ContainerManagementProtocol proxy = null;
try {
proxy =
getContainerManagementProtocolProxy(rpc, nmToken, nodeId,
appAttemptId.toString());
StopContainersResponse response = proxy.stopContainers(request);
if (response.getFailedRequests() != null &&
response.getFailedRequests().containsKey(containerId)) {
parseAndThrowException(response.getFailedRequests().get(containerId)
.deSerialize());
}
} catch (Exception e) {
if (proxy != null) {
rpc.stopProxy(proxy, conf);
}
}
}
  /**
   * Fetches the status of a single container through the
   * ContainerManagementProtocol, re-throwing any per-container failure the NM
   * reports; the RPC proxy is always released in the finally block.
   */
  private void
      getContainerStatus(YarnRPC rpc,
          org.apache.hadoop.yarn.api.records.Token nmToken,
          ContainerId containerId,
          ApplicationAttemptId appAttemptId, NodeId nodeId,
          boolean isExceptionExpected) throws Exception {
    List<ContainerId> containerIds = new ArrayList<ContainerId>();
    containerIds.add(containerId);
    GetContainerStatusesRequest request =
        GetContainerStatusesRequest.newInstance(containerIds);
    ContainerManagementProtocol proxy = null;
    try {
      proxy =
          getContainerManagementProtocolProxy(rpc, nmToken, nodeId,
              appAttemptId.toString());
      GetContainerStatusesResponse statuses
          = proxy.getContainerStatuses(request);
      // Surface a per-container failure (if any) as the concrete exception.
      if (statuses.getFailedRequests() != null
          && statuses.getFailedRequests().containsKey(containerId)) {
        parseAndThrowException(statuses.getFailedRequests().get(containerId)
            .deSerialize());
      }
    } finally {
      if (proxy != null) {
        rpc.stopProxy(proxy, conf);
      }
    }
  }
private void startContainer(final YarnRPC rpc,
org.apache.hadoop.yarn.api.records.Token nmToken,
org.apache.hadoop.yarn.api.records.Token containerToken,
NodeId nodeId, String user) throws Exception {
ContainerLaunchContext context =
Records.newRecord(ContainerLaunchContext.class);
StartContainerRequest scRequest =
StartContainerRequest.newInstance(context, containerToken);
List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
list.add(scRequest);
StartContainersRequest allRequests =
StartContainersRequest.newInstance(list);
ContainerManagementProtocol proxy = null;
try {
proxy = getContainerManagementProtocolProxy(rpc, nmToken, nodeId, user);
StartContainersResponse response = proxy.startContainers(allRequests);
for(SerializedException ex : response.getFailedRequests().values()){
parseAndThrowException(ex.deSerialize());
}
} finally {
if (proxy != null) {
rpc.stopProxy(proxy, conf);
}
}
}
private void parseAndThrowException(Throwable t) throws YarnException,
IOException {
if (t instanceof YarnException) {
throw (YarnException) t;
} else if (t instanceof InvalidToken) {
throw (InvalidToken) t;
} else {
throw (IOException) t;
}
}
protected ContainerManagementProtocol getContainerManagementProtocolProxy(
final YarnRPC rpc, org.apache.hadoop.yarn.api.records.Token nmToken,
NodeId nodeId, String user) {
ContainerManagementProtocol proxy;
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
final InetSocketAddress addr =
new InetSocketAddress(nodeId.getHost(), nodeId.getPort());
if (nmToken != null) {
ugi.addToken(ConverterUtils.convertFromYarn(nmToken, addr));
}
proxy =
NMProxy.createNMProxy(conf, ContainerManagementProtocol.class, ugi,
rpc, addr);
return proxy;
}
  /**
   * This tests a malice user getting a proper token but then messing with it by
   * tampering with containerID/Resource etc.. His/her containers should be
   * rejected.
   *
   * @throws IOException
   * @throws InterruptedException
   * @throws YarnException
   */
  private void testContainerToken(Configuration conf) throws IOException,
      InterruptedException, YarnException {
    LOG.info("Running test for malice user");
    /*
     * We need to check for containerToken (authorization).
     * Here we will be assuming that we have valid NMToken
     * 1) ContainerToken used is expired.
     * 2) ContainerToken is tampered (resource is modified).
     */
    NMTokenSecretManagerInRM nmTokenSecretManagerInRM =
        yarnCluster.getResourceManager().getRMContext()
          .getNMTokenSecretManager();
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 0);
    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
    NodeManager nm = yarnCluster.getNodeManager(0);
    NMTokenSecretManagerInNM nmTokenSecretManagerInNM =
        nm.getNMContext().getNMTokenSecretManager();
    String user = "test";
    waitForNMToReceiveNMTokenKey(nmTokenSecretManagerInNM);
    NodeId nodeId = nm.getNMContext().getNodeId();
    // Both id should be equal.
    Assert.assertEquals(nmTokenSecretManagerInNM.getCurrentKey().getKeyId(),
        nmTokenSecretManagerInRM.getCurrentKey().getKeyId());
    RMContainerTokenSecretManager containerTokenSecretManager =
        yarnCluster.getResourceManager().getRMContext().
            getContainerTokenSecretManager();
    Resource r = Resource.newInstance(1230, 2);
    Token containerToken =
        containerTokenSecretManager.createContainerToken(
            cId, 0, nodeId, user, r, Priority.newInstance(0), 0);
    ContainerTokenIdentifier containerTokenIdentifier =
        getContainerTokenIdentifierFromToken(containerToken);
    // Verify new compatible version ContainerTokenIdentifier
    // can work successfully.
    ContainerTokenIdentifierForTest newVersionTokenIdentifier =
        new ContainerTokenIdentifierForTest(containerTokenIdentifier,
            "message");
    byte[] password =
        containerTokenSecretManager.createPassword(newVersionTokenIdentifier);
    Token newContainerToken = BuilderUtils.newContainerToken(
        nodeId, password, newVersionTokenIdentifier);
    Token nmToken =
        nmTokenSecretManagerInRM.createNMToken(appAttemptId, nodeId, user);
    YarnRPC rpc = YarnRPC.create(conf);
    // A new-version token signed with the real key must still be accepted.
    Assert.assertTrue(testStartContainer(rpc, appAttemptId, nodeId,
        newContainerToken, nmToken, false).isEmpty());
    // Creating a tampered Container Token
    RMContainerTokenSecretManager tamperedContainerTokenSecretManager =
        new RMContainerTokenSecretManager(conf);
    tamperedContainerTokenSecretManager.rollMasterKey();
    do {
      // Keep rolling until the tampered manager's key id differs from the
      // cluster's real key id.
      tamperedContainerTokenSecretManager.rollMasterKey();
      tamperedContainerTokenSecretManager.activateNextMasterKey();
    } while (containerTokenSecretManager.getCurrentKey().getKeyId()
        == tamperedContainerTokenSecretManager.getCurrentKey().getKeyId());
    ContainerId cId2 = ContainerId.newContainerId(appAttemptId, 1);
    // Creating modified containerToken
    Token containerToken2 =
        tamperedContainerTokenSecretManager.createContainerToken(cId2, 0,
            nodeId, user, r, Priority.newInstance(0), 0);
    StringBuilder sb = new StringBuilder("Given Container ");
    sb.append(cId2);
    sb.append(" seems to have an illegally generated token.");
    Assert.assertTrue(testStartContainer(rpc, appAttemptId, nodeId,
        containerToken2, nmToken, true).contains(sb.toString()));
  }
private ContainerTokenIdentifier getContainerTokenIdentifierFromToken(
Token containerToken) throws IOException {
ContainerTokenIdentifier containerTokenIdentifier;
containerTokenIdentifier = new ContainerTokenIdentifier();
byte[] tokenIdentifierContent = containerToken.getIdentifier().array();
DataInputBuffer dib = new DataInputBuffer();
dib.reset(tokenIdentifierContent, tokenIdentifierContent.length);
containerTokenIdentifier.readFields(dib);
return containerTokenIdentifier;
}
  /**
   * This tests whether a containerId is serialized/deserialized with epoch.
   *
   * @throws IOException
   * @throws InterruptedException
   * @throws YarnException
   */
  private void testContainerTokenWithEpoch(Configuration conf)
      throws IOException, InterruptedException, YarnException {
    LOG.info("Running test for serializing/deserializing containerIds");
    NMTokenSecretManagerInRM nmTokenSecretManagerInRM =
        yarnCluster.getResourceManager().getRMContext()
          .getNMTokenSecretManager();
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 0);
    // Container id with a non-zero epoch encoded in the high bits.
    ContainerId cId = ContainerId.newContainerId(appAttemptId, (5L << 40) | 3L);
    NodeManager nm = yarnCluster.getNodeManager(0);
    NMTokenSecretManagerInNM nmTokenSecretManagerInNM =
        nm.getNMContext().getNMTokenSecretManager();
    String user = "test";
    waitForNMToReceiveNMTokenKey(nmTokenSecretManagerInNM);
    NodeId nodeId = nm.getNMContext().getNodeId();
    // Both id should be equal.
    Assert.assertEquals(nmTokenSecretManagerInNM.getCurrentKey().getKeyId(),
        nmTokenSecretManagerInRM.getCurrentKey().getKeyId());
    // Creating a normal Container Token
    RMContainerTokenSecretManager containerTokenSecretManager =
        yarnCluster.getResourceManager().getRMContext().
            getContainerTokenSecretManager();
    Resource r = Resource.newInstance(1230, 2);
    Token containerToken =
        containerTokenSecretManager.createContainerToken(cId, 0, nodeId, user,
            r, Priority.newInstance(0), 0);
    ContainerTokenIdentifier containerTokenIdentifier =
        new ContainerTokenIdentifier();
    byte[] tokenIdentifierContent = containerToken.getIdentifier().array();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(tokenIdentifierContent, tokenIdentifierContent.length);
    containerTokenIdentifier.readFields(dib);
    // The deserialized id (including epoch) must round-trip exactly.
    Assert.assertEquals(cId, containerTokenIdentifier.getContainerID());
    Assert.assertEquals(
        cId.toString(), containerTokenIdentifier.getContainerID().toString());
    Token nmToken =
        nmTokenSecretManagerInRM.createNMToken(appAttemptId, nodeId, user);
    YarnRPC rpc = YarnRPC.create(conf);
    testStartContainer(rpc, appAttemptId, nodeId, containerToken, nmToken,
        false);
    List<ContainerId> containerIds = new LinkedList<ContainerId>();
    containerIds.add(cId);
    ContainerManagementProtocol proxy
        = getContainerManagementProtocolProxy(rpc, nmToken, nodeId, user);
    GetContainerStatusesResponse res = proxy.getContainerStatuses(
        GetContainerStatusesRequest.newInstance(containerIds));
    Assert.assertNotNull(res.getContainerStatuses().get(0));
    // The NM must report the same (epoch-bearing) container id back.
    Assert.assertEquals(
        cId, res.getContainerStatuses().get(0).getContainerId());
    Assert.assertEquals(cId.toString(),
        res.getContainerStatuses().get(0).getContainerId().toString());
  }
}
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.yarn.api.ContainerManagementProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.SerializedException;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.client.NMProxy;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.security.NMTokenIdentifier;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.security.NMTokenSecretManagerInNM;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.MockRMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.security.BaseNMTokenSecretManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class TestContainerManagerSecurity extends KerberosSecurityTestcase {
static Log LOG = LogFactory.getLog(TestContainerManagerSecurity.class);
static final RecordFactory recordFactory = RecordFactoryProvider
.getRecordFactory(null);
private static MiniYARNCluster yarnCluster;
private static final File testRootDir = new File("target",
TestContainerManagerSecurity.class.getName() + "-root");
private static File httpSpnegoKeytabFile = new File(testRootDir,
"httpSpnegoKeytabFile.keytab");
private static String httpSpnegoPrincipal = "HTTP/localhost@EXAMPLE.COM";
private Configuration conf;
  /**
   * Creates the SPNEGO keytab/principal via the embedded KDC and starts a
   * single-node mini YARN cluster with the parameterized configuration.
   */
  @Before
  public void setUp() throws Exception {
    testRootDir.mkdirs();
    httpSpnegoKeytabFile.deleteOnExit();
    getKdc().createPrincipal(httpSpnegoKeytabFile, httpSpnegoPrincipal);
    UserGroupInformation.setConfiguration(conf);
    yarnCluster =
        new MiniYARNCluster(TestContainerManagerSecurity.class.getName(), 1, 1,
            1);
    yarnCluster.init(conf);
    yarnCluster.start();
  }
  /** Shuts down the mini YARN cluster (if running) and removes the test root dir. */
  @After
  public void tearDown() {
    if (yarnCluster != null) {
      yarnCluster.stop();
      yarnCluster = null;
    }
    testRootDir.delete();
  }
  /**
   * Supplies the two parameterized runs: a no-security ("simple")
   * configuration and a Kerberos configuration with SPNEGO principal/keytab
   * settings for both the RM and NM web apps.
   */
  @Parameters
  public static Collection<Object[]> configs() {
    Configuration configurationWithoutSecurity = new Configuration();
    configurationWithoutSecurity.set(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
    Configuration configurationWithSecurity = new Configuration();
    configurationWithSecurity.set(
      CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    configurationWithSecurity.set(
      YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal);
    configurationWithSecurity.set(
      YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
      httpSpnegoKeytabFile.getAbsolutePath());
    configurationWithSecurity.set(
      YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal);
    configurationWithSecurity.set(
      YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
      httpSpnegoKeytabFile.getAbsolutePath());
    return Arrays.asList(new Object[][] { { configurationWithoutSecurity },
        { configurationWithSecurity } });
  }
  /**
   * Parameterized constructor: stores the supplied configuration, raising the
   * AM expiry interval so slow test containers are not expired prematurely.
   */
  public TestContainerManagerSecurity(Configuration conf) {
    conf.setLong(YarnConfiguration.RM_AM_EXPIRY_INTERVAL_MS, 100000L);
    this.conf = conf;
  }
  /**
   * Entry point for all container-manager security checks (2-minute timeout):
   * runs the NMToken, container-token tampering, and epoch-serialization
   * scenarios in sequence against the shared mini cluster.
   */
  @Test (timeout = 120000)
  public void testContainerManager() throws Exception {
    // TestNMTokens.
    testNMTokens(conf);
    // Testing for container token tampering
    testContainerToken(conf);
    // Testing for container token tampering with epoch
    testContainerTokenWithEpoch(conf);
  }
private void testNMTokens(Configuration conf) throws Exception {
NMTokenSecretManagerInRM nmTokenSecretManagerRM =
yarnCluster.getResourceManager().getRMContext()
.getNMTokenSecretManager();
NMTokenSecretManagerInNM nmTokenSecretManagerNM =
yarnCluster.getNodeManager(0).getNMContext().getNMTokenSecretManager();
RMContainerTokenSecretManager containerTokenSecretManager =
yarnCluster.getResourceManager().getRMContext().
getContainerTokenSecretManager();
NodeManager nm = yarnCluster.getNodeManager(0);
waitForNMToReceiveNMTokenKey(nmTokenSecretManagerNM);
// Both id should be equal.
Assert.assertEquals(nmTokenSecretManagerNM.getCurrentKey().getKeyId(),
nmTokenSecretManagerRM.getCurrentKey().getKeyId());
/*
* Below cases should be tested.
* 1) If Invalid NMToken is used then it should be rejected.
* 2) If valid NMToken but belonging to another Node is used then that
* too should be rejected.
* 3) NMToken for say appAttempt-1 is used for starting/stopping/retrieving
* status for container with containerId for say appAttempt-2 should
* be rejected.
* 4) After start container call is successful nmtoken should have been
* saved in NMTokenSecretManagerInNM.
* 5) If start container call was successful (no matter if container is
* still running or not), appAttempt->NMToken should be present in
* NMTokenSecretManagerInNM's cache. Any future getContainerStatus call
* for containerId belonging to that application attempt using
* applicationAttempt's older nmToken should not get any invalid
* nmToken error. (This can be best tested if we roll over NMToken
* master key twice).
*/
YarnRPC rpc = YarnRPC.create(conf);
String user = "test";
Resource r = Resource.newInstance(1024, 1);
ApplicationId appId = ApplicationId.newInstance(1, 1);
MockRMApp m = new MockRMApp(appId.getId(), appId.getClusterTimestamp(),
RMAppState.NEW);
yarnCluster.getResourceManager().getRMContext().getRMApps().put(appId, m);
ApplicationAttemptId validAppAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId validContainerId =
ContainerId.newContainerId(validAppAttemptId, 0);
NodeId validNode = yarnCluster.getNodeManager(0).getNMContext().getNodeId();
NodeId invalidNode = NodeId.newInstance("InvalidHost", 1234);
org.apache.hadoop.yarn.api.records.Token validNMToken =
nmTokenSecretManagerRM.createNMToken(validAppAttemptId, validNode, user);
org.apache.hadoop.yarn.api.records.Token validContainerToken =
containerTokenSecretManager.createContainerToken(validContainerId,
0, validNode, user, r, Priority.newInstance(10), 1234);
ContainerTokenIdentifier identifier =
BuilderUtils.newContainerTokenIdentifier(validContainerToken);
Assert.assertEquals(Priority.newInstance(10), identifier.getPriority());
Assert.assertEquals(1234, identifier.getCreationTime());
StringBuilder sb;
// testInvalidNMToken ... creating NMToken using different secret manager.
NMTokenSecretManagerInRM tempManager = new NMTokenSecretManagerInRM(conf);
tempManager.rollMasterKey();
do {
tempManager.rollMasterKey();
tempManager.activateNextMasterKey();
// Making sure key id is different.
} while (tempManager.getCurrentKey().getKeyId() == nmTokenSecretManagerRM
.getCurrentKey().getKeyId());
// Testing that NM rejects the requests when we don't send any token.
if (UserGroupInformation.isSecurityEnabled()) {
sb = new StringBuilder("Client cannot authenticate via:[TOKEN]");
} else {
sb =
new StringBuilder(
"SIMPLE authentication is not enabled. Available:[TOKEN]");
}
String errorMsg = testStartContainer(rpc, validAppAttemptId, validNode,
validContainerToken, null, true);
Assert.assertTrue(errorMsg.contains(sb.toString()));
org.apache.hadoop.yarn.api.records.Token invalidNMToken =
tempManager.createNMToken(validAppAttemptId, validNode, user);
sb = new StringBuilder("Given NMToken for application : ");
sb.append(validAppAttemptId.toString())
.append(" seems to have been generated illegally.");
Assert.assertTrue(sb.toString().contains(
testStartContainer(rpc, validAppAttemptId, validNode,
validContainerToken, invalidNMToken, true)));
// valid NMToken but belonging to other node
invalidNMToken =
nmTokenSecretManagerRM.createNMToken(validAppAttemptId, invalidNode,
user);
sb = new StringBuilder("Given NMToken for application : ");
sb.append(validAppAttemptId)
.append(" is not valid for current node manager.expected : ")
.append(validNode.toString())
.append(" found : ").append(invalidNode.toString());
Assert.assertTrue(sb.toString().contains(
testStartContainer(rpc, validAppAttemptId, validNode,
validContainerToken, invalidNMToken, true)));
// using correct tokens. nmtoken for app attempt should get saved.
conf.setInt(YarnConfiguration.RM_CONTAINER_ALLOC_EXPIRY_INTERVAL_MS,
4 * 60 * 1000);
validContainerToken =
containerTokenSecretManager.createContainerToken(validContainerId,
0, validNode, user, r, Priority.newInstance(0), 0);
Assert.assertTrue(testStartContainer(rpc, validAppAttemptId, validNode,
validContainerToken, validNMToken, false).isEmpty());
Assert.assertTrue(nmTokenSecretManagerNM
.isAppAttemptNMTokenKeyPresent(validAppAttemptId));
// using a new compatible version nmtoken, expect container can be started
// successfully.
ApplicationAttemptId validAppAttemptId2 =
ApplicationAttemptId.newInstance(appId, 2);
ContainerId validContainerId2 =
ContainerId.newContainerId(validAppAttemptId2, 0);
org.apache.hadoop.yarn.api.records.Token validContainerToken2 =
containerTokenSecretManager.createContainerToken(validContainerId2,
0, validNode, user, r, Priority.newInstance(0), 0);
org.apache.hadoop.yarn.api.records.Token validNMToken2 =
nmTokenSecretManagerRM.createNMToken(validAppAttemptId2, validNode, user);
// First, get a new NMTokenIdentifier.
NMTokenIdentifier newIdentifier = new NMTokenIdentifier();
byte[] tokenIdentifierContent = validNMToken2.getIdentifier().array();
DataInputBuffer dib = new DataInputBuffer();
dib.reset(tokenIdentifierContent, tokenIdentifierContent.length);
newIdentifier.readFields(dib);
// Then, generate a new version NMTokenIdentifier (NMTokenIdentifierNewForTest)
// with additional field of message.
NMTokenIdentifierNewForTest newVersionIdentifier =
new NMTokenIdentifierNewForTest(newIdentifier, "message");
// check new version NMTokenIdentifier has correct info.
Assert.assertEquals("The ApplicationAttemptId is changed after set to " +
"newVersionIdentifier", validAppAttemptId2.getAttemptId(),
newVersionIdentifier.getApplicationAttemptId().getAttemptId()
);
Assert.assertEquals("The message is changed after set to newVersionIdentifier",
"message", newVersionIdentifier.getMessage());
Assert.assertEquals("The NodeId is changed after set to newVersionIdentifier",
validNode, newVersionIdentifier.getNodeId());
// create new Token based on new version NMTokenIdentifier.
org.apache.hadoop.yarn.api.records.Token newVersionedNMToken =
BaseNMTokenSecretManager.newInstance(
nmTokenSecretManagerRM.retrievePassword(newVersionIdentifier),
newVersionIdentifier);
// Verify startContainer is successful and no exception is thrown.
Assert.assertTrue(testStartContainer(rpc, validAppAttemptId2, validNode,
validContainerToken2, newVersionedNMToken, false).isEmpty());
Assert.assertTrue(nmTokenSecretManagerNM
.isAppAttemptNMTokenKeyPresent(validAppAttemptId2));
//Now lets wait till container finishes and is removed from node manager.
waitForContainerToFinishOnNM(validContainerId);
sb = new StringBuilder("Attempt to relaunch the same container with id ");
sb.append(validContainerId);
Assert.assertTrue(testStartContainer(rpc, validAppAttemptId, validNode,
validContainerToken, validNMToken, true).contains(sb.toString()));
// Container is removed from node manager's memory by this time.
// trying to stop the container. It should not throw any exception.
testStopContainer(rpc, validAppAttemptId, validNode, validContainerId,
validNMToken, false);
// Rolling over master key twice so that we can check whether older keys
// are used for authentication.
rollNMTokenMasterKey(nmTokenSecretManagerRM, nmTokenSecretManagerNM);
// Key rolled over once.. rolling over again
rollNMTokenMasterKey(nmTokenSecretManagerRM, nmTokenSecretManagerNM);
// trying get container status. Now saved nmToken should be used for
// authentication... It should complain saying container was recently
// stopped.
sb = new StringBuilder("Container ");
sb.append(validContainerId);
sb.append(" was recently stopped on node manager");
Assert.assertTrue(testGetContainer(rpc, validAppAttemptId, validNode,
validContainerId, validNMToken, true).contains(sb.toString()));
// Now lets remove the container from nm-memory
nm.getNodeStatusUpdater().clearFinishedContainersFromCache();
// This should fail as container is removed from recently tracked finished
// containers.
sb = new StringBuilder("Container ");
sb.append(validContainerId.toString());
sb.append(" is not handled by this NodeManager");
Assert.assertTrue(testGetContainer(rpc, validAppAttemptId, validNode,
validContainerId, validNMToken, false).contains(sb.toString()));
// using appAttempt-1 NMtoken for launching container for appAttempt-2 should
// succeed.
ApplicationAttemptId attempt2 = ApplicationAttemptId.newInstance(appId, 2);
Token attempt1NMToken =
nmTokenSecretManagerRM
.createNMToken(validAppAttemptId, validNode, user);
org.apache.hadoop.yarn.api.records.Token newContainerToken =
containerTokenSecretManager.createContainerToken(
ContainerId.newContainerId(attempt2, 1), 0, validNode, user, r,
Priority.newInstance(0), 0);
Assert.assertTrue(testStartContainer(rpc, attempt2, validNode,
newContainerToken, attempt1NMToken, false).isEmpty());
}
private void waitForContainerToFinishOnNM(ContainerId containerId) {
Context nmContet = yarnCluster.getNodeManager(0).getNMContext();
int interval = 4 * 60; // Max time for container token to expire.
Assert.assertNotNull(nmContet.getContainers().containsKey(containerId));
while ((interval-- > 0)
&& !nmContet.getContainers().get(containerId)
.cloneAndGetContainerStatus().getState()
.equals(ContainerState.COMPLETE)) {
try {
LOG.info("Waiting for " + containerId + " to complete.");
Thread.sleep(1000);
} catch (InterruptedException e) {
}
}
// Normally, Containers will be removed from NM context after they are
// explicitly acked by RM. Now, manually remove it for testing.
yarnCluster.getNodeManager(0).getNodeStatusUpdater()
.addCompletedContainer(containerId);
nmContet.getContainers().remove(containerId);
}
protected void waitForNMToReceiveNMTokenKey(
NMTokenSecretManagerInNM nmTokenSecretManagerNM)
throws InterruptedException {
int attempt = 60;
while (nmTokenSecretManagerNM.getNodeId() == null && attempt-- > 0) {
Thread.sleep(2000);
}
}
protected void rollNMTokenMasterKey(
NMTokenSecretManagerInRM nmTokenSecretManagerRM,
NMTokenSecretManagerInNM nmTokenSecretManagerNM) throws Exception {
int oldKeyId = nmTokenSecretManagerRM.getCurrentKey().getKeyId();
nmTokenSecretManagerRM.rollMasterKey();
int interval = 40;
while (nmTokenSecretManagerNM.getCurrentKey().getKeyId() == oldKeyId
&& interval-- > 0) {
Thread.sleep(1000);
}
nmTokenSecretManagerRM.activateNextMasterKey();
Assert.assertTrue((nmTokenSecretManagerNM.getCurrentKey().getKeyId()
== nmTokenSecretManagerRM.getCurrentKey().getKeyId()));
}
private String testStopContainer(YarnRPC rpc,
ApplicationAttemptId appAttemptId, NodeId nodeId,
ContainerId containerId, Token nmToken, boolean isExceptionExpected) {
try {
stopContainer(rpc, nmToken,
Arrays.asList(new ContainerId[] { containerId }), appAttemptId,
nodeId);
if (isExceptionExpected) {
fail("Exception was expected!!");
}
return "";
} catch (Exception e) {
e.printStackTrace();
return e.getMessage();
}
}
private String testGetContainer(YarnRPC rpc,
ApplicationAttemptId appAttemptId, NodeId nodeId,
ContainerId containerId,
org.apache.hadoop.yarn.api.records.Token nmToken,
boolean isExceptionExpected) {
try {
getContainerStatus(rpc, nmToken, containerId, appAttemptId, nodeId,
isExceptionExpected);
if (isExceptionExpected) {
fail("Exception was expected!!");
}
return "";
} catch (Exception e) {
e.printStackTrace();
return e.getMessage();
}
}
private String testStartContainer(YarnRPC rpc,
ApplicationAttemptId appAttemptId, NodeId nodeId,
org.apache.hadoop.yarn.api.records.Token containerToken,
org.apache.hadoop.yarn.api.records.Token nmToken,
boolean isExceptionExpected) {
try {
startContainer(rpc, nmToken, containerToken, nodeId,
appAttemptId.toString());
if (isExceptionExpected){
fail("Exception was expected!!");
}
return "";
} catch (Exception e) {
e.printStackTrace();
return e.getMessage();
}
}
private void stopContainer(YarnRPC rpc, Token nmToken,
List<ContainerId> containerId, ApplicationAttemptId appAttemptId,
NodeId nodeId) throws Exception {
StopContainersRequest request =
StopContainersRequest.newInstance(containerId);
ContainerManagementProtocol proxy = null;
try {
proxy =
getContainerManagementProtocolProxy(rpc, nmToken, nodeId,
appAttemptId.toString());
StopContainersResponse response = proxy.stopContainers(request);
if (response.getFailedRequests() != null &&
response.getFailedRequests().containsKey(containerId)) {
parseAndThrowException(response.getFailedRequests().get(containerId)
.deSerialize());
}
} catch (Exception e) {
if (proxy != null) {
rpc.stopProxy(proxy, conf);
}
}
}
private void
getContainerStatus(YarnRPC rpc,
org.apache.hadoop.yarn.api.records.Token nmToken,
ContainerId containerId,
ApplicationAttemptId appAttemptId, NodeId nodeId,
boolean isExceptionExpected) throws Exception {
List<ContainerId> containerIds = new ArrayList<ContainerId>();
containerIds.add(containerId);
GetContainerStatusesRequest request =
GetContainerStatusesRequest.newInstance(containerIds);
ContainerManagementProtocol proxy = null;
try {
proxy =
getContainerManagementProtocolProxy(rpc, nmToken, nodeId,
appAttemptId.toString());
GetContainerStatusesResponse statuses = proxy.getContainerStatuses(request);
if (statuses.getFailedRequests() != null
&& statuses.getFailedRequests().containsKey(containerId)) {
parseAndThrowException(statuses.getFailedRequests().get(containerId)
.deSerialize());
}
} finally {
if (proxy != null) {
rpc.stopProxy(proxy, conf);
}
}
}
private void startContainer(final YarnRPC rpc,
org.apache.hadoop.yarn.api.records.Token nmToken,
org.apache.hadoop.yarn.api.records.Token containerToken,
NodeId nodeId, String user) throws Exception {
ContainerLaunchContext context =
Records.newRecord(ContainerLaunchContext.class);
StartContainerRequest scRequest =
StartContainerRequest.newInstance(context,containerToken);
List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
list.add(scRequest);
StartContainersRequest allRequests =
StartContainersRequest.newInstance(list);
ContainerManagementProtocol proxy = null;
try {
proxy = getContainerManagementProtocolProxy(rpc, nmToken, nodeId, user);
StartContainersResponse response = proxy.startContainers(allRequests);
for(SerializedException ex : response.getFailedRequests().values()){
parseAndThrowException(ex.deSerialize());
}
} finally {
if (proxy != null) {
rpc.stopProxy(proxy, conf);
}
}
}
private void parseAndThrowException(Throwable t) throws YarnException,
IOException {
if (t instanceof YarnException) {
throw (YarnException) t;
} else if (t instanceof InvalidToken) {
throw (InvalidToken) t;
} else {
throw (IOException) t;
}
}
protected ContainerManagementProtocol getContainerManagementProtocolProxy(
final YarnRPC rpc, org.apache.hadoop.yarn.api.records.Token nmToken,
NodeId nodeId, String user) {
ContainerManagementProtocol proxy;
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
final InetSocketAddress addr =
NetUtils.createSocketAddr(nodeId.getHost(), nodeId.getPort());
if (nmToken != null) {
ugi.addToken(ConverterUtils.convertFromYarn(nmToken, addr));
}
proxy =
NMProxy.createNMProxy(conf, ContainerManagementProtocol.class, ugi,
rpc, addr);
return proxy;
}
  /**
   * This tests a malice user getting a proper token but then messing with it by
   * tampering with containerID/Resource etc.. His/her containers should be
   * rejected.
   *
   * @throws IOException
   * @throws InterruptedException
   * @throws YarnException
   */
  private void testContainerToken(Configuration conf) throws IOException,
      InterruptedException, YarnException {
    LOG.info("Running test for malice user");
    /*
     * We need to check for containerToken (authorization).
     * Here we will be assuming that we have valid NMToken
     * 1) ContainerToken used is expired.
     * 2) ContainerToken is tampered (resource is modified).
     */
    NMTokenSecretManagerInRM nmTokenSecretManagerInRM =
        yarnCluster.getResourceManager().getRMContext()
          .getNMTokenSecretManager();
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 0);
    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
    NodeManager nm = yarnCluster.getNodeManager(0);
    NMTokenSecretManagerInNM nmTokenSecretManagerInNM =
        nm.getNMContext().getNMTokenSecretManager();
    String user = "test";
    // Block until the NM has registered and received its NMToken key.
    waitForNMToReceiveNMTokenKey(nmTokenSecretManagerInNM);
    NodeId nodeId = nm.getNMContext().getNodeId();
    // Both id should be equal.
    Assert.assertEquals(nmTokenSecretManagerInNM.getCurrentKey().getKeyId(),
        nmTokenSecretManagerInRM.getCurrentKey().getKeyId());
    RMContainerTokenSecretManager containerTokenSecretManager =
        yarnCluster.getResourceManager().getRMContext().
            getContainerTokenSecretManager();
    Resource r = Resource.newInstance(1230, 2);
    Token containerToken =
        containerTokenSecretManager.createContainerToken(
            cId, 0, nodeId, user, r, Priority.newInstance(0), 0);
    ContainerTokenIdentifier containerTokenIdentifier =
        getContainerTokenIdentifierFromToken(containerToken);
    // Verify new compatible version ContainerTokenIdentifier can work successfully.
    ContainerTokenIdentifierForTest newVersionTokenIdentifier =
        new ContainerTokenIdentifierForTest(containerTokenIdentifier, "message");
    byte[] password =
        containerTokenSecretManager.createPassword(newVersionTokenIdentifier);
    Token newContainerToken = BuilderUtils.newContainerToken(
        nodeId, password, newVersionTokenIdentifier);
    Token nmToken =
        nmTokenSecretManagerInRM.createNMToken(appAttemptId, nodeId, user);
    YarnRPC rpc = YarnRPC.create(conf);
    // A start-container call signed with the new-version identifier must
    // succeed (empty error message).
    Assert.assertTrue(testStartContainer(rpc, appAttemptId, nodeId,
        newContainerToken, nmToken, false).isEmpty());
    // Creating a tampered Container Token
    RMContainerTokenSecretManager tamperedContainerTokenSecretManager =
        new RMContainerTokenSecretManager(conf);
    tamperedContainerTokenSecretManager.rollMasterKey();
    do {
      tamperedContainerTokenSecretManager.rollMasterKey();
      tamperedContainerTokenSecretManager.activateNextMasterKey();
      // Loop until the rogue manager's key id differs from the real RM's.
    } while (containerTokenSecretManager.getCurrentKey().getKeyId()
        == tamperedContainerTokenSecretManager.getCurrentKey().getKeyId());
    ContainerId cId2 = ContainerId.newContainerId(appAttemptId, 1);
    // Creating modified containerToken
    Token containerToken2 =
        tamperedContainerTokenSecretManager.createContainerToken(cId2, 0,
            nodeId, user, r, Priority.newInstance(0), 0);
    StringBuilder sb = new StringBuilder("Given Container ");
    sb.append(cId2);
    sb.append(" seems to have an illegally generated token.");
    // The NM must reject the token minted with the wrong master key.
    Assert.assertTrue(testStartContainer(rpc, appAttemptId, nodeId,
        containerToken2, nmToken, true).contains(sb.toString()));
  }
private ContainerTokenIdentifier getContainerTokenIdentifierFromToken(
Token containerToken) throws IOException {
ContainerTokenIdentifier containerTokenIdentifier;
containerTokenIdentifier = new ContainerTokenIdentifier();
byte[] tokenIdentifierContent = containerToken.getIdentifier().array();
DataInputBuffer dib = new DataInputBuffer();
dib.reset(tokenIdentifierContent, tokenIdentifierContent.length);
containerTokenIdentifier.readFields(dib);
return containerTokenIdentifier;
}
  /**
   * This tests whether a containerId is serialized/deserialized with epoch.
   *
   * @throws IOException
   * @throws InterruptedException
   * @throws YarnException
   */
  private void testContainerTokenWithEpoch(Configuration conf)
      throws IOException, InterruptedException, YarnException {
    LOG.info("Running test for serializing/deserializing containerIds");
    NMTokenSecretManagerInRM nmTokenSecretManagerInRM =
        yarnCluster.getResourceManager().getRMContext()
          .getNMTokenSecretManager();
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 0);
    // Container id with non-zero high bits: (5 << 40) | 3 exercises the
    // epoch portion of the 64-bit container id.
    ContainerId cId = ContainerId.newContainerId(appAttemptId, (5L << 40) | 3L);
    NodeManager nm = yarnCluster.getNodeManager(0);
    NMTokenSecretManagerInNM nmTokenSecretManagerInNM =
        nm.getNMContext().getNMTokenSecretManager();
    String user = "test";
    // Block until the NM has registered and received its NMToken key.
    waitForNMToReceiveNMTokenKey(nmTokenSecretManagerInNM);
    NodeId nodeId = nm.getNMContext().getNodeId();
    // Both id should be equal.
    Assert.assertEquals(nmTokenSecretManagerInNM.getCurrentKey().getKeyId(),
        nmTokenSecretManagerInRM.getCurrentKey().getKeyId());
    // Creating a normal Container Token
    RMContainerTokenSecretManager containerTokenSecretManager =
        yarnCluster.getResourceManager().getRMContext().
            getContainerTokenSecretManager();
    Resource r = Resource.newInstance(1230, 2);
    Token containerToken =
        containerTokenSecretManager.createContainerToken(cId, 0, nodeId, user,
            r, Priority.newInstance(0), 0);
    // Round-trip the identifier and verify the container id (including
    // epoch bits) is preserved.
    ContainerTokenIdentifier containerTokenIdentifier =
        new ContainerTokenIdentifier();
    byte[] tokenIdentifierContent = containerToken.getIdentifier().array();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(tokenIdentifierContent, tokenIdentifierContent.length);
    containerTokenIdentifier.readFields(dib);
    Assert.assertEquals(cId, containerTokenIdentifier.getContainerID());
    Assert.assertEquals(
        cId.toString(), containerTokenIdentifier.getContainerID().toString());
    Token nmToken =
        nmTokenSecretManagerInRM.createNMToken(appAttemptId, nodeId, user);
    YarnRPC rpc = YarnRPC.create(conf);
    testStartContainer(rpc, appAttemptId, nodeId, containerToken, nmToken,
        false);
    List<ContainerId> containerIds = new LinkedList<ContainerId>();
    containerIds.add(cId);
    ContainerManagementProtocol proxy
        = getContainerManagementProtocolProxy(rpc, nmToken, nodeId, user);
    GetContainerStatusesResponse res = proxy.getContainerStatuses(
        GetContainerStatusesRequest.newInstance(containerIds));
    // The status reported back must carry the same epoch-bearing id.
    Assert.assertNotNull(res.getContainerStatuses().get(0));
    Assert.assertEquals(
        cId, res.getContainerStatuses().get(0).getContainerId());
    Assert.assertEquals(cId.toString(),
        res.getContainerStatuses().get(0).getContainerId().toString());
  }
}
|
YARN-6150. TestContainerManagerSecurity tests for Yarn Server are flakey. Contributed by Daniel Sturman and Ray Chiang.
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
|
YARN-6150. TestContainerManagerSecurity tests for Yarn Server are flakey. Contributed by Daniel Sturman and Ray Chiang.
|
|
Java
|
apache-2.0
|
b96e7bb0585eba74a8507f11dee659b3a8f0ec08
| 0
|
XiveZ/k-9,rollbrettler/k-9,gaionim/k-9,dpereira411/k-9,jberkel/k-9,leixinstar/k-9,ndew623/k-9,sonork/k-9,vt0r/k-9,jberkel/k-9,KitAway/k-9,tonytamsf/k-9,tsunli/k-9,suzp1984/k-9,dhootha/k-9,thuanpq/k-9,cketti/k-9,cketti/k-9,nilsbraden/k-9,sanderbaas/k-9,thuanpq/k-9,vatsalsura/k-9,indus1/k-9,rishabhbitsg/k-9,gnebsy/k-9,github201407/k-9,vasyl-khomko/k-9,moparisthebest/k-9,vasyl-khomko/k-9,XiveZ/k-9,sonork/k-9,gilbertw1/k-9,k9mail/k-9,ndew623/k-9,suzp1984/k-9,moparisthebest/k-9,sanderbaas/k-9,WenduanMou1/k-9,jca02266/k-9,imaeses/k-9,Eagles2F/k-9,GuillaumeSmaha/k-9,mawiegand/k-9,sedrubal/k-9,mawiegand/k-9,imaeses/k-9,philipwhiuk/k-9,cliniome/pki,nilsbraden/k-9,denim2x/k-9,vt0r/k-9,Eagles2F/k-9,rollbrettler/k-9,vatsalsura/k-9,torte71/k-9,sanderbaas/k-9,philipwhiuk/k-9,gilbertw1/k-9,herpiko/k-9,rtreffer/openpgp-k-9,rtreffer/openpgp-k-9,github201407/k-9,crr0004/k-9,suzp1984/k-9,farmboy0/k-9,gilbertw1/k-9,huhu/k-9,philipwhiuk/q-mail,deepworks/k-9,torte71/k-9,farmboy0/k-9,konfer/k-9,denim2x/k-9,torte71/k-9,G00fY2/k-9_material_design,mawiegand/k-9,KitAway/k-9,GuillaumeSmaha/k-9,farmboy0/k-9,leixinstar/k-9,Valodim/k-9,rishabhbitsg/k-9,jca02266/k-9,GuillaumeSmaha/k-9,huhu/k-9,sebkur/k-9,philipwhiuk/q-mail,moparisthebest/k-9,cooperpellaton/k-9,deepworks/k-9,msdgwzhy6/k-9,439teamwork/k-9,dpereira411/k-9,dgger/k-9,konfer/k-9,cketti/k-9,ndew623/k-9,nilsbraden/k-9,XiveZ/k-9,sedrubal/k-9,herpiko/k-9,dgger/k-9,github201407/k-9,sonork/k-9,k9mail/k-9,tsunli/k-9,dpereira411/k-9,gnebsy/k-9,gaionim/k-9,cooperpellaton/k-9,icedman21/k-9,icedman21/k-9,Eagles2F/k-9,tonytamsf/k-9,sebkur/k-9,icedman21/k-9,msdgwzhy6/k-9,denim2x/k-9,roscrazy/k-9,indus1/k-9,konfer/k-9,deepworks/k-9,cooperpellaton/k-9,bashrc/k-9,439teamwork/k-9,dhootha/k-9,cliniome/pki,jca02266/k-9,KitAway/k-9,tonytamsf/k-9,gaionim/k-9,WenduanMou1/k-9,roscrazy/k-9,dgger/k-9,leixinstar/k-9,rollbrettler/k-9,CodingRmy/k-9,huhu/k-9,bashrc/k-9,tsunli/k-9,sebkur/k-9,bashrc/k-9,cketti/k-9,philipwhiuk/q-mail,crr0004/k-9,k9mail/k-9,herpiko/k-9
,G00fY2/k-9_material_design,crr0004/k-9,dhootha/k-9,msdgwzhy6/k-9,CodingRmy/k-9,439teamwork/k-9,imaeses/k-9,WenduanMou1/k-9,gnebsy/k-9,vasyl-khomko/k-9,cliniome/pki,thuanpq/k-9
|
package com.fsck.k9.fragment;
import java.io.File;
import java.util.Collections;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences.Editor;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Toast;
import com.actionbarsherlock.app.SherlockFragment;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuInflater;
import com.actionbarsherlock.view.MenuItem;
import com.fsck.k9.Account;
import com.fsck.k9.K9;
import com.fsck.k9.Preferences;
import com.fsck.k9.R;
import com.fsck.k9.activity.ChooseFolder;
import com.fsck.k9.activity.MessageReference;
import com.fsck.k9.controller.MessagingController;
import com.fsck.k9.controller.MessagingListener;
import com.fsck.k9.crypto.CryptoProvider.CryptoDecryptCallback;
import com.fsck.k9.crypto.PgpData;
import com.fsck.k9.fragment.ConfirmationDialogFragment.ConfirmationDialogFragmentListener;
import com.fsck.k9.helper.FileBrowserHelper;
import com.fsck.k9.helper.FileBrowserHelper.FileBrowserFailOverCallback;
import com.fsck.k9.mail.Flag;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.Part;
import com.fsck.k9.mail.store.LocalStore.LocalMessage;
import com.fsck.k9.view.AttachmentView;
import com.fsck.k9.view.AttachmentView.AttachmentFileDownloadCallback;
import com.fsck.k9.view.MessageHeader;
import com.fsck.k9.view.SingleMessageView;
public class MessageViewFragment extends SherlockFragment implements OnClickListener,
CryptoDecryptCallback, ConfirmationDialogFragmentListener {
private static final String ARG_REFERENCE = "reference";
private static final String STATE_MESSAGE_REFERENCE = "reference";
private static final String STATE_PGP_DATA = "pgpData";
private static final int ACTIVITY_CHOOSE_FOLDER_MOVE = 1;
private static final int ACTIVITY_CHOOSE_FOLDER_COPY = 2;
private static final int ACTIVITY_CHOOSE_DIRECTORY = 3;
public static MessageViewFragment newInstance(MessageReference reference) {
MessageViewFragment fragment = new MessageViewFragment();
Bundle args = new Bundle();
args.putParcelable(ARG_REFERENCE, reference);
fragment.setArguments(args);
return fragment;
}
    // View that renders the message body, headers and attachments.
    private SingleMessageView mMessageView;
    // PGP state (decryption/signature results) for the displayed message.
    private PgpData mPgpData;
    // Options menu; populated when the menu is created — may be null earlier.
    private Menu mMenu;
    // Account owning the displayed message.
    private Account mAccount;
    // Reference (account/folder/uid) of the message currently shown.
    private MessageReference mMessageReference;
    // Loaded message, or null until the controller delivers it.
    private Message mMessage;
    private MessagingController mController;
    // Controller callback — presumably defined elsewhere in this file; TODO confirm.
    private Listener mListener = new Listener();
    private MessageViewHandler mHandler = new MessageViewHandler();
    private MenuItem mToggleMessageViewMenu;
    /** this variable is used to save the calling AttachmentView
     * until the onActivityResult is called.
     * => with this reference we can identity the caller
     */
    private AttachmentView attachmentTmpStore;
    /**
     * Used to temporarily store the destination folder for refile operations if a confirmation
     * dialog is shown.
     */
    private String mDstFolder;
    // Host activity callback; assigned in onAttach().
    private MessageViewFragmentListener mFragmentListener;
    /**
     * Marshals UI updates (progress, attachments, toasts) from background
     * threads onto this fragment's main thread via Handler.post().
     */
    class MessageViewHandler extends Handler {
        // Shows or hides the progress indicator on the UI thread.
        public void progress(final boolean progress) {
            post(new Runnable() {
                public void run() {
                    setProgress(progress);
                }
            });
        }
        // Adds an attachment row to the message view on the UI thread.
        public void addAttachment(final View attachmentView) {
            post(new Runnable() {
                public void run() {
                    mMessageView.addAttachment(attachmentView);
                }
            });
        }
        /* A helper for a set of "show a toast" methods */
        private void showToast(final String message, final int toastLength) {
            post(new Runnable() {
                public void run() {
                    Toast.makeText(getActivity(), message, toastLength).show();
                }
            });
        }
        public void networkError() {
            showToast(getString(R.string.status_network_error), Toast.LENGTH_LONG);
        }
        public void invalidIdError() {
            showToast(getString(R.string.status_invalid_id_error), Toast.LENGTH_LONG);
        }
        public void fetchingAttachment() {
            showToast(getString(R.string.message_view_fetching_attachment_toast), Toast.LENGTH_SHORT);
        }
    }
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
try {
mFragmentListener = (MessageViewFragmentListener) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity.getClass() +
" must implement MessageViewFragmentListener");
}
}
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // This fragments adds options to the action bar
        setHasOptionsMenu(true);
        // Grab the singleton controller up front; used by all message actions.
        mController = MessagingController.getInstance(getActivity().getApplication());
    }
    /**
     * Inflates the message layout, wires the attachment-download callback
     * (which opens a file browser to pick a save directory), and hands the
     * header view to the host activity.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.message, container, false);
        mMessageView = (SingleMessageView) view.findViewById(R.id.message_view);
        //set a callback for the attachment view. With this callback the attachmentview
        //request the start of a filebrowser activity.
        mMessageView.setAttachmentCallback(new AttachmentFileDownloadCallback() {
            @Override
            public void showFileBrowser(final AttachmentView caller) {
                FileBrowserHelper.getInstance()
                .showFileBrowserActivity(MessageViewFragment.this,
                                         null,
                                         ACTIVITY_CHOOSE_DIRECTORY,
                                         callback);
                // Remember which AttachmentView asked, so onActivityResult
                // can route the chosen path back to it.
                attachmentTmpStore = caller;
            }
            // Fallback used when no file-browser app is installed and the
            // user types a path manually.
            FileBrowserFailOverCallback callback = new FileBrowserFailOverCallback() {
                @Override
                public void onPathEntered(String path) {
                    attachmentTmpStore.writeFile(new File(path));
                }
                @Override
                public void onCancel() {
                    // canceled, do nothing
                }
            };
        });
        mMessageView.initialize(this);
        mMessageView.downloadRemainderButton().setOnClickListener(this);
        mFragmentListener.messageHeaderViewAvailable(mMessageView.getMessageHeaderView());
        return view;
    }
    /**
     * Restores the message reference and PGP state after a configuration
     * change, or reads the reference from the fragment arguments on first
     * creation, then kicks off loading the message.
     */
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        MessageReference messageReference;
        if (savedInstanceState != null) {
            mPgpData = (PgpData) savedInstanceState.get(STATE_PGP_DATA);
            messageReference = (MessageReference) savedInstanceState.get(STATE_MESSAGE_REFERENCE);
        } else {
            Bundle args = getArguments();
            messageReference = (MessageReference) args.getParcelable(ARG_REFERENCE);
        }
        // Only reset PGP data when none was restored from saved state.
        displayMessage(messageReference, (mPgpData == null));
    }
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putParcelable(STATE_MESSAGE_REFERENCE, mMessageReference);
outState.putSerializable(STATE_PGP_DATA, mPgpData);
}
    /**
     * Displays the referenced message, discarding any previous PGP state.
     */
    public void displayMessage(MessageReference ref) {
        displayMessage(ref, true);
    }
    /**
     * Resolves the account from the reference, resets the view, and asks the
     * controller to (asynchronously) load the message for display.
     *
     * @param resetPgpData true to discard existing PGP state; false when
     *        restoring after a configuration change.
     */
    private void displayMessage(MessageReference ref, boolean resetPgpData) {
        mMessageReference = ref;
        if (K9.DEBUG) {
            Log.d(K9.LOG_TAG, "MessageView displaying message " + mMessageReference);
        }
        Context appContext = getActivity().getApplicationContext();
        mAccount = Preferences.getPreferences(appContext).getAccount(mMessageReference.accountUuid);
        if (resetPgpData) {
            // start with fresh, empty PGP data
            mPgpData = new PgpData();
        }
        // Clear previous message
        mMessageView.resetView();
        mMessageView.resetHeaderView();
        mController.loadMessageForView(mAccount, mMessageReference.folderName, mMessageReference.uid, mListener);
        // Refresh menu state for the new message.
        configureMenu(mMenu);
    }
/**
* Called from UI thread when user select Delete
*/
public void onDelete() {
if (K9.confirmDelete() || (K9.confirmDeleteStarred() && mMessage.isSet(Flag.FLAGGED))) {
showDialog(R.id.dialog_confirm_delete);
} else {
delete();
}
}
private void delete() {
if (mMessage != null) {
// Disable the delete button after it's tapped (to try to prevent
// accidental clicks)
mMenu.findItem(R.id.delete).setEnabled(false);
Message messageToDelete = mMessage;
mFragmentListener.showNextMessageOrReturn();
mController.deleteMessages(Collections.singletonList(messageToDelete), null);
}
}
    /**
     * Moves the current message to {@code dstFolder}, showing a confirmation
     * dialog first when the destination is the spam folder and the user has
     * spam confirmation enabled.
     */
    public void onRefile(String dstFolder) {
        if (!mController.isMoveCapable(mAccount)) {
            return;
        }
        if (!mController.isMoveCapable(mMessage)) {
            // Unsynced messages cannot be moved; tell the user and bail.
            Toast toast = Toast.makeText(getActivity(), R.string.move_copy_cannot_copy_unsynced_message, Toast.LENGTH_LONG);
            toast.show();
            return;
        }
        if (K9.FOLDER_NONE.equalsIgnoreCase(dstFolder)) {
            return;
        }
        if (mAccount.getSpamFolderName().equals(dstFolder) && K9.confirmSpam()) {
            // Stash destination; the confirm-spam dialog callback will refile.
            mDstFolder = dstFolder;
            showDialog(R.id.dialog_confirm_spam);
        } else {
            refileMessage(dstFolder);
        }
    }
private void refileMessage(String dstFolder) {
String srcFolder = mMessageReference.folderName;
Message messageToMove = mMessage;
mFragmentListener.showNextMessageOrReturn();
mController.moveMessage(mAccount, srcFolder, messageToMove, dstFolder, null);
}
public void onReply() {
if (mMessage != null) {
mFragmentListener.onReply(mMessage, mPgpData);
}
}
public void onReplyAll() {
if (mMessage != null) {
mFragmentListener.onReplyAll(mMessage, mPgpData);
}
}
public void onForward() {
if (mMessage != null) {
mFragmentListener.onForward(mMessage, mPgpData);
}
}
public void onFlag() {
if (mMessage != null) {
boolean newState = !mMessage.isSet(Flag.FLAGGED);
mController.setFlag(mAccount, mMessage.getFolder().getName(),
new Message[] { mMessage }, Flag.FLAGGED, newState);
mMessageView.setHeaders(mMessage, mAccount);
}
}
public void onMove() {
if ((!mController.isMoveCapable(mAccount))
|| (mMessage == null)) {
return;
}
if (!mController.isMoveCapable(mMessage)) {
Toast toast = Toast.makeText(getActivity(), R.string.move_copy_cannot_copy_unsynced_message, Toast.LENGTH_LONG);
toast.show();
return;
}
startRefileActivity(ACTIVITY_CHOOSE_FOLDER_MOVE);
}
public void onCopy() {
if ((!mController.isCopyCapable(mAccount))
|| (mMessage == null)) {
return;
}
if (!mController.isCopyCapable(mMessage)) {
Toast toast = Toast.makeText(getActivity(), R.string.move_copy_cannot_copy_unsynced_message, Toast.LENGTH_LONG);
toast.show();
return;
}
startRefileActivity(ACTIVITY_CHOOSE_FOLDER_COPY);
}
private void onToggleColors() {
if (K9.getK9MessageViewTheme() == K9.THEME_DARK) {
K9.setK9MessageViewTheme(K9.THEME_LIGHT);
} else {
K9.setK9MessageViewTheme(K9.THEME_DARK);
}
new AsyncTask<Object, Object, Object>() {
@Override
protected Object doInBackground(Object... params) {
Context appContext = getActivity().getApplicationContext();
Preferences prefs = Preferences.getPreferences(appContext);
Editor editor = prefs.getPreferences().edit();
K9.save(editor);
editor.commit();
return null;
}
}.execute();
mFragmentListener.restartActivity();
}
    /**
     * Launch the {@link ChooseFolder} activity so the user can pick a
     * destination folder for a move or copy.
     *
     * @param activity request code identifying the operation
     *        (ACTIVITY_CHOOSE_FOLDER_MOVE or ACTIVITY_CHOOSE_FOLDER_COPY);
     *        the result is handled in onActivityResult().
     */
    private void startRefileActivity(int activity) {
        Intent intent = new Intent(getActivity(), ChooseFolder.class);
        intent.putExtra(ChooseFolder.EXTRA_ACCOUNT, mAccount.getUuid());
        intent.putExtra(ChooseFolder.EXTRA_CUR_FOLDER, mMessageReference.folderName);
        // Pre-select the folder the user chose last time.
        intent.putExtra(ChooseFolder.EXTRA_SEL_FOLDER, mAccount.getLastSelectedFolderName());
        // Pass the message reference along so the result can be validated
        // against the message that is still on display.
        intent.putExtra(ChooseFolder.EXTRA_MESSAGE, mMessageReference);
        startActivityForResult(intent, activity);
    }
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (mAccount.getCryptoProvider().onDecryptActivityResult(this, requestCode, resultCode, data, mPgpData)) {
return;
}
if (resultCode != Activity.RESULT_OK) {
return;
}
switch (requestCode) {
case ACTIVITY_CHOOSE_DIRECTORY: {
if (resultCode == Activity.RESULT_OK && data != null) {
// obtain the filename
Uri fileUri = data.getData();
if (fileUri != null) {
String filePath = fileUri.getPath();
if (filePath != null) {
attachmentTmpStore.writeFile(new File(filePath));
}
}
}
break;
}
case ACTIVITY_CHOOSE_FOLDER_MOVE:
case ACTIVITY_CHOOSE_FOLDER_COPY: {
if (data == null) {
return;
}
String destFolderName = data.getStringExtra(ChooseFolder.EXTRA_NEW_FOLDER);
MessageReference ref = data.getParcelableExtra(ChooseFolder.EXTRA_MESSAGE);
if (mMessageReference.equals(ref)) {
mAccount.setLastSelectedFolderName(destFolderName);
switch (requestCode) {
case ACTIVITY_CHOOSE_FOLDER_MOVE: {
mFragmentListener.showNextMessageOrReturn();
moveMessage(ref, destFolderName);
break;
}
case ACTIVITY_CHOOSE_FOLDER_COPY: {
copyMessage(ref, destFolderName);
break;
}
}
}
break;
}
}
}
private void onSendAlternate() {
if (mMessage != null) {
mController.sendAlternate(getActivity(), mAccount, mMessage);
}
}
private void onToggleRead() {
if (mMessage != null) {
mController.setFlag(mAccount, mMessage.getFolder().getName(),
new Message[] { mMessage }, Flag.SEEN, !mMessage.isSet(Flag.SEEN));
mMessageView.setHeaders(mMessage, mAccount);
String subject = mMessage.getSubject();
displayMessageSubject(subject);
updateUnreadToggleTitle();
}
}
private void onDownloadRemainder() {
if (mMessage.isSet(Flag.X_DOWNLOADED_FULL)) {
return;
}
mMessageView.downloadRemainderButton().setEnabled(false);
mController.loadMessageForViewRemote(mAccount, mMessageReference.folderName, mMessageReference.uid, mListener);
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.download: {
((AttachmentView)view).saveFile();
break;
}
case R.id.download_remainder: {
onDownloadRemainder();
break;
}
}
}
    /**
     * Dispatch action-bar / options-menu selections to the matching handler.
     * Returns true when the item was handled here, otherwise defers to super.
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.delete:
            onDelete();
            break;
        case R.id.reply:
            onReply();
            break;
        case R.id.reply_all:
            onReplyAll();
            break;
        case R.id.forward:
            onForward();
            break;
        case R.id.share:
            onSendAlternate();
            break;
        case R.id.toggle_unread:
            onToggleRead();
            break;
        case R.id.archive:
            // Archive/spam are refiles to the account's special folders.
            onRefile(mAccount.getArchiveFolderName());
            break;
        case R.id.spam:
            onRefile(mAccount.getSpamFolderName());
            break;
        case R.id.move:
            onMove();
            break;
        case R.id.copy:
            onCopy();
            break;
        case R.id.select_text:
            mMessageView.beginSelectingText();
            break;
        case R.id.toggle_message_view_theme:
            onToggleColors();
            break;
        default:
            return super.onOptionsItemSelected(item);
        }
        return true;
    }
    /**
     * Inflate this fragment's menu contribution and configure item
     * visibility/state. The Menu is kept in mMenu so later state changes
     * (e.g. read/unread toggling) can update item titles.
     */
    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.message_view_fragment, menu);
        mMenu = menu;
        configureMenu(menu);
    }
    /**
     * Set visibility and titles of menu items according to account and
     * folder capabilities. Safe to call before the menu or account exist;
     * it will simply return and be invoked again later.
     */
    private void configureMenu(Menu menu) {
        // In Android versions prior to 4.2 onCreateOptionsMenu() (which calls us) is called before
        // onActivityCreated() when mAccount isn't initialized yet. In that case we do nothing and
        // wait for displayMessage() to call us again.
        if (menu == null || mAccount == null) {
            return;
        }
        // enable them all
        menu.findItem(R.id.copy).setVisible(true);
        menu.findItem(R.id.move).setVisible(true);
        menu.findItem(R.id.archive).setVisible(true);
        menu.findItem(R.id.spam).setVisible(true);
        // The theme-toggle item's title advertises the theme we would switch TO.
        mToggleMessageViewMenu = menu.findItem(R.id.toggle_message_view_theme);
        if (K9.getK9MessageViewTheme() == K9.THEME_DARK) {
            mToggleMessageViewMenu.setTitle(R.string.message_view_theme_action_light);
        } else {
            mToggleMessageViewMenu.setTitle(R.string.message_view_theme_action_dark);
        }
        toggleActionsState(menu, true);
        updateUnreadToggleTitle();
        // comply with the setting
        if (!mAccount.getEnableMoveButtons()) {
            menu.findItem(R.id.move).setVisible(false);
            menu.findItem(R.id.archive).setVisible(false);
            menu.findItem(R.id.spam).setVisible(false);
        } else {
            // check message, folder capability
            if (!mController.isCopyCapable(mAccount)) {
                menu.findItem(R.id.copy).setVisible(false);
            }
            if (mController.isMoveCapable(mAccount)) {
                menu.findItem(R.id.move).setVisible(true);
                // Hide archive/spam when already in that folder or the folder is unset.
                menu.findItem(R.id.archive).setVisible(
                    !mMessageReference.folderName.equals(mAccount.getArchiveFolderName())
                    && mAccount.hasArchiveFolder());
                menu.findItem(R.id.spam).setVisible(
                    !mMessageReference.folderName.equals(mAccount.getSpamFolderName())
                    && mAccount.hasSpamFolder());
            } else {
                menu.findItem(R.id.copy).setVisible(false);
                menu.findItem(R.id.move).setVisible(false);
                menu.findItem(R.id.archive).setVisible(false);
                menu.findItem(R.id.spam).setVisible(false);
            }
        }
    }
/**
* Set the title of the "Toggle Unread" menu item based upon the current read state of the message.
*/
public void updateUnreadToggleTitle() {
if (mMessage != null && mMenu != null) {
if (mMessage.isSet(Flag.SEEN)) {
mMenu.findItem(R.id.toggle_unread).setTitle(R.string.mark_as_unread_action);
} else {
mMenu.findItem(R.id.toggle_unread).setTitle(R.string.mark_as_read_action);
}
}
}
private void toggleActionsState(Menu menu, boolean state) {
for (int i = 0; i < menu.size(); ++i) {
menu.getItem(i).setEnabled(state);
}
}
    /**
     * Forward the busy/progress indicator state to the hosting activity.
     * No-op when the fragment is not attached (mFragmentListener is null).
     */
    private void setProgress(boolean enable) {
        if (mFragmentListener != null) {
            mFragmentListener.setProgress(enable);
        }
    }
    /**
     * Forward the subject line to the hosting activity for display (e.g. as
     * the action-bar title). No-op when the fragment is not attached.
     */
    private void displayMessageSubject(String subject) {
        if (mFragmentListener != null) {
            mFragmentListener.displayMessageSubject(subject);
        }
    }
    /**
     * Move the currently displayed message to {@code destFolderName}.
     *
     * NOTE(review): the {@code reference} parameter is not used; the source
     * folder and message come from the fragment's own state. Callers pass a
     * reference they already verified equals mMessageReference — confirm
     * whether the parameter should be used or dropped.
     */
    public void moveMessage(MessageReference reference, String destFolderName) {
        mController.moveMessage(mAccount, mMessageReference.folderName, mMessage,
                                destFolderName, null);
    }
    /**
     * Copy the currently displayed message to {@code destFolderName}.
     *
     * NOTE(review): the {@code reference} parameter is not used; the source
     * folder and message come from the fragment's own state. Callers pass a
     * reference they already verified equals mMessageReference — confirm
     * whether the parameter should be used or dropped.
     */
    public void copyMessage(MessageReference reference, String destFolderName) {
        mController.copyMessage(mAccount, mMessageReference.folderName, mMessage,
                                destFolderName, null);
    }
    /**
     * MessagingController callbacks for loading the displayed message and its
     * attachments.
     *
     * Callbacks can arrive on a background thread, so all UI work is posted to
     * mHandler. Each message-level callback first checks that the event refers
     * to the message currently on display (same uid, folder and account) and
     * ignores stale events for previously shown messages.
     */
    class Listener extends MessagingListener {
        @Override
        public void loadMessageForViewHeadersAvailable(final Account account, String folder, String uid,
                final Message message) {
            // Ignore events for a message other than the one on display.
            if (!mMessageReference.uid.equals(uid) || !mMessageReference.folderName.equals(folder)
                    || !mMessageReference.accountUuid.equals(account.getUuid())) {
                return;
            }
            /*
             * Clone the message object because the original could be modified by
             * MessagingController later. This could lead to a ConcurrentModificationException
             * when that same object is accessed by the UI thread (below).
             *
             * See issue 3953
             *
             * This is just an ugly hack to get rid of the most pressing problem. A proper way to
             * fix this is to make Message thread-safe. Or, even better, rewriting the UI code to
             * access messages via a ContentProvider.
             *
             */
            final Message clonedMessage = message.clone();
            mHandler.post(new Runnable() {
                public void run() {
                    // Show a "downloading" notice until at least a partial body exists.
                    if (!clonedMessage.isSet(Flag.X_DOWNLOADED_FULL) &&
                            !clonedMessage.isSet(Flag.X_DOWNLOADED_PARTIAL)) {
                        String text = getString(R.string.message_view_downloading);
                        mMessageView.showStatusMessage(text);
                    }
                    mMessageView.setHeaders(clonedMessage, account);
                    final String subject = clonedMessage.getSubject();
                    // Fall back to a placeholder when the subject is missing/empty.
                    if (subject == null || subject.equals("")) {
                        displayMessageSubject(getString(R.string.general_no_subject));
                    } else {
                        displayMessageSubject(clonedMessage.getSubject());
                    }
                    mMessageView.setOnFlagListener(new OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            onFlag();
                        }
                    });
                }
            });
        }
        @Override
        public void loadMessageForViewBodyAvailable(final Account account, String folder,
                String uid, final Message message) {
            // Ignore events for a message other than the one on display.
            if (!mMessageReference.uid.equals(uid) ||
                    !mMessageReference.folderName.equals(folder) ||
                    !mMessageReference.accountUuid.equals(account.getUuid())) {
                return;
            }
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    try {
                        // mMessage is only assigned once the body is available.
                        mMessage = message;
                        mMessageView.setMessage(account, (LocalMessage) message, mPgpData,
                                mController, mListener);
                        updateUnreadToggleTitle();
                    } catch (MessagingException e) {
                        Log.v(K9.LOG_TAG, "loadMessageForViewBodyAvailable", e);
                    }
                }
            });
        }
        @Override
        public void loadMessageForViewFailed(Account account, String folder, String uid, final Throwable t) {
            // Ignore events for a message other than the one on display.
            if (!mMessageReference.uid.equals(uid) || !mMessageReference.folderName.equals(folder)
                    || !mMessageReference.accountUuid.equals(account.getUuid())) {
                return;
            }
            mHandler.post(new Runnable() {
                public void run() {
                    setProgress(false);
                    // IllegalArgumentException indicates a bad message id; anything
                    // else is treated as a network problem.
                    if (t instanceof IllegalArgumentException) {
                        mHandler.invalidIdError();
                    } else {
                        mHandler.networkError();
                    }
                    if (mMessage == null || mMessage.isSet(Flag.X_DOWNLOADED_PARTIAL)) {
                        mMessageView.showStatusMessage(getString(R.string.webview_empty_message));
                    }
                }
            });
        }
        @Override
        public void loadMessageForViewFinished(Account account, String folder, String uid, final Message message) {
            // Ignore events for a message other than the one on display.
            if (!mMessageReference.uid.equals(uid) || !mMessageReference.folderName.equals(folder)
                    || !mMessageReference.accountUuid.equals(account.getUuid())) {
                return;
            }
            mHandler.post(new Runnable() {
                public void run() {
                    setProgress(false);
                    mMessageView.setShowDownloadButton(message);
                }
            });
        }
        @Override
        public void loadMessageForViewStarted(Account account, String folder, String uid) {
            // Ignore events for a message other than the one on display.
            if (!mMessageReference.uid.equals(uid) || !mMessageReference.folderName.equals(folder)
                    || !mMessageReference.accountUuid.equals(account.getUuid())) {
                return;
            }
            mHandler.post(new Runnable() {
                public void run() {
                    setProgress(true);
                }
            });
        }
        @Override
        public void loadAttachmentStarted(Account account, Message message, Part part, Object tag, final boolean requiresDownload) {
            if (mMessage != message) {
                return;
            }
            mHandler.post(new Runnable() {
                public void run() {
                    // Block further attachment interaction while one is loading.
                    mMessageView.setAttachmentsEnabled(false);
                    showDialog(R.id.dialog_attachment_progress);
                    if (requiresDownload) {
                        mHandler.fetchingAttachment();
                    }
                }
            });
        }
        @Override
        public void loadAttachmentFinished(Account account, Message message, Part part, final Object tag) {
            if (mMessage != message) {
                return;
            }
            mHandler.post(new Runnable() {
                public void run() {
                    mMessageView.setAttachmentsEnabled(true);
                    removeDialog(R.id.dialog_attachment_progress);
                    // tag carries { Boolean saveToFile, AttachmentView view }.
                    Object[] params = (Object[]) tag;
                    boolean download = (Boolean) params[0];
                    AttachmentView attachment = (AttachmentView) params[1];
                    if (download) {
                        attachment.writeFile();
                    } else {
                        attachment.showFile();
                    }
                }
            });
        }
        @Override
        public void loadAttachmentFailed(Account account, Message message, Part part, Object tag, String reason) {
            if (mMessage != message) {
                return;
            }
            mHandler.post(new Runnable() {
                public void run() {
                    mMessageView.setAttachmentsEnabled(true);
                    removeDialog(R.id.dialog_attachment_progress);
                    mHandler.networkError();
                }
            });
        }
    }
// This REALLY should be in MessageCryptoView
@Override
public void onDecryptDone(PgpData pgpData) {
Account account = mAccount;
LocalMessage message = (LocalMessage) mMessage;
MessagingController controller = mController;
Listener listener = mListener;
try {
mMessageView.setMessage(account, message, pgpData, controller, listener);
} catch (MessagingException e) {
Log.e(K9.LOG_TAG, "displayMessageBody failed", e);
}
}
private void showDialog(int dialogId) {
DialogFragment fragment;
switch (dialogId) {
case R.id.dialog_confirm_delete: {
String title = getString(R.string.dialog_confirm_delete_title);
String message = getString(R.string.dialog_confirm_delete_message);
String confirmText = getString(R.string.dialog_confirm_delete_confirm_button);
String cancelText = getString(R.string.dialog_confirm_delete_cancel_button);
fragment = ConfirmationDialogFragment.newInstance(dialogId, title, message,
confirmText, cancelText);
break;
}
case R.id.dialog_confirm_spam: {
String title = getString(R.string.dialog_confirm_spam_title);
String message = getResources().getQuantityString(R.plurals.dialog_confirm_spam_message, 1);
String confirmText = getString(R.string.dialog_confirm_spam_confirm_button);
String cancelText = getString(R.string.dialog_confirm_spam_cancel_button);
fragment = ConfirmationDialogFragment.newInstance(dialogId, title, message,
confirmText, cancelText);
break;
}
case R.id.dialog_attachment_progress: {
String title = getString(R.string.dialog_attachment_progress_title);
fragment = ProgressDialogFragment.newInstance(title);
break;
}
default: {
throw new RuntimeException("Called showDialog(int) with unknown dialog id.");
}
}
fragment.setTargetFragment(this, dialogId);
fragment.show(getFragmentManager(), getDialogTag(dialogId));
}
private void removeDialog(int dialogId) {
FragmentManager fm = getFragmentManager();
// Make sure the "show dialog" transaction has been processed when we call
// findFragmentByTag() below. Otherwise the fragment won't be found and the dialog will
// never be dismissed.
fm.executePendingTransactions();
DialogFragment fragment = (DialogFragment) fm.findFragmentByTag(getDialogTag(dialogId));
if (fragment != null) {
fragment.dismiss();
}
}
private String getDialogTag(int dialogId) {
return String.format("dialog-%d", dialogId);
}
    /** Forward a zoom key event to the message view. */
    public void zoom(KeyEvent event) {
        mMessageView.zoom(event);
    }
@Override
public void doPositiveClick(int dialogId) {
switch (dialogId) {
case R.id.dialog_confirm_delete: {
delete();
break;
}
case R.id.dialog_confirm_spam: {
refileMessage(mDstFolder);
mDstFolder = null;
break;
}
}
}
    /** Confirmation-dialog callback: the user declined; nothing to undo. */
    @Override
    public void doNegativeClick(int dialogId) {
        /* do nothing */
    }
    /** Confirmation-dialog callback: the dialog was cancelled; nothing to undo. */
    @Override
    public void dialogCancelled(int dialogId) {
        /* do nothing */
    }
    /**
     * Contract the hosting activity must implement (enforced in onAttach()).
     * Covers compose actions, title/progress display, and navigation.
     */
    public interface MessageViewFragmentListener {
        // Compose actions for the current message.
        public void onForward(Message mMessage, PgpData mPgpData);
        public void onReplyAll(Message mMessage, PgpData mPgpData);
        public void onReply(Message mMessage, PgpData mPgpData);
        // Show the subject line (e.g. as the action-bar title).
        public void displayMessageSubject(String title);
        // Toggle the busy/progress indicator.
        public void setProgress(boolean b);
        // Restart the activity (used after a theme change).
        public void restartActivity();
        // Advance to the next message or leave the message view.
        public void showNextMessageOrReturn();
        // Hand the header view to the activity once it has been created.
        public void messageHeaderViewAvailable(MessageHeader messageHeaderView);
    }
}
|
src/com/fsck/k9/fragment/MessageViewFragment.java
|
package com.fsck.k9.fragment;
import java.io.File;
import java.util.Collections;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences.Editor;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Toast;
import com.actionbarsherlock.app.SherlockFragment;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuInflater;
import com.actionbarsherlock.view.MenuItem;
import com.fsck.k9.Account;
import com.fsck.k9.K9;
import com.fsck.k9.Preferences;
import com.fsck.k9.R;
import com.fsck.k9.activity.ChooseFolder;
import com.fsck.k9.activity.MessageReference;
import com.fsck.k9.controller.MessagingController;
import com.fsck.k9.controller.MessagingListener;
import com.fsck.k9.crypto.CryptoProvider.CryptoDecryptCallback;
import com.fsck.k9.crypto.PgpData;
import com.fsck.k9.fragment.ConfirmationDialogFragment.ConfirmationDialogFragmentListener;
import com.fsck.k9.helper.FileBrowserHelper;
import com.fsck.k9.helper.FileBrowserHelper.FileBrowserFailOverCallback;
import com.fsck.k9.mail.Flag;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.Part;
import com.fsck.k9.mail.store.LocalStore.LocalMessage;
import com.fsck.k9.view.AttachmentView;
import com.fsck.k9.view.AttachmentView.AttachmentFileDownloadCallback;
import com.fsck.k9.view.MessageHeader;
import com.fsck.k9.view.SingleMessageView;
public class MessageViewFragment extends SherlockFragment implements OnClickListener,
CryptoDecryptCallback, ConfirmationDialogFragmentListener {
private static final String ARG_REFERENCE = "reference";
private static final String STATE_MESSAGE_REFERENCE = "reference";
private static final String STATE_PGP_DATA = "pgpData";
private static final int ACTIVITY_CHOOSE_FOLDER_MOVE = 1;
private static final int ACTIVITY_CHOOSE_FOLDER_COPY = 2;
private static final int ACTIVITY_CHOOSE_DIRECTORY = 3;
public static MessageViewFragment newInstance(MessageReference reference) {
MessageViewFragment fragment = new MessageViewFragment();
Bundle args = new Bundle();
args.putParcelable(ARG_REFERENCE, reference);
fragment.setArguments(args);
return fragment;
}
private SingleMessageView mMessageView;
private PgpData mPgpData;
private Menu mMenu;
private Account mAccount;
private MessageReference mMessageReference;
private Message mMessage;
private MessagingController mController;
private Listener mListener = new Listener();
private MessageViewHandler mHandler = new MessageViewHandler();
private MenuItem mToggleMessageViewMenu;
/** this variable is used to save the calling AttachmentView
* until the onActivityResult is called.
* => with this reference we can identity the caller
*/
private AttachmentView attachmentTmpStore;
/**
* Used to temporarily store the destination folder for refile operations if a confirmation
* dialog is shown.
*/
private String mDstFolder;
private MessageViewFragmentListener mFragmentListener;
class MessageViewHandler extends Handler {
public void progress(final boolean progress) {
post(new Runnable() {
public void run() {
setProgress(progress);
}
});
}
public void addAttachment(final View attachmentView) {
post(new Runnable() {
public void run() {
mMessageView.addAttachment(attachmentView);
}
});
}
/* A helper for a set of "show a toast" methods */
private void showToast(final String message, final int toastLength) {
post(new Runnable() {
public void run() {
Toast.makeText(getActivity(), message, toastLength).show();
}
});
}
public void networkError() {
showToast(getString(R.string.status_network_error), Toast.LENGTH_LONG);
}
public void invalidIdError() {
showToast(getString(R.string.status_invalid_id_error), Toast.LENGTH_LONG);
}
public void fetchingAttachment() {
showToast(getString(R.string.message_view_fetching_attachment_toast), Toast.LENGTH_SHORT);
}
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
try {
mFragmentListener = (MessageViewFragmentListener) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity.getClass() +
" must implement MessageViewFragmentListener");
}
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// This fragments adds options to the action bar
setHasOptionsMenu(true);
mController = MessagingController.getInstance(getActivity().getApplication());
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.message, container, false);
mMessageView = (SingleMessageView) view.findViewById(R.id.message_view);
//set a callback for the attachment view. With this callback the attachmentview
//request the start of a filebrowser activity.
mMessageView.setAttachmentCallback(new AttachmentFileDownloadCallback() {
@Override
public void showFileBrowser(final AttachmentView caller) {
FileBrowserHelper.getInstance()
.showFileBrowserActivity(MessageViewFragment.this,
null,
ACTIVITY_CHOOSE_DIRECTORY,
callback);
attachmentTmpStore = caller;
}
FileBrowserFailOverCallback callback = new FileBrowserFailOverCallback() {
@Override
public void onPathEntered(String path) {
attachmentTmpStore.writeFile(new File(path));
}
@Override
public void onCancel() {
// canceled, do nothing
}
};
});
mMessageView.initialize(this);
mMessageView.downloadRemainderButton().setOnClickListener(this);
mFragmentListener.messageHeaderViewAvailable(mMessageView.getMessageHeaderView());
return view;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
MessageReference messageReference;
if (savedInstanceState != null) {
mPgpData = (PgpData) savedInstanceState.get(STATE_PGP_DATA);
messageReference = (MessageReference) savedInstanceState.get(STATE_MESSAGE_REFERENCE);
} else {
Bundle args = getArguments();
messageReference = (MessageReference) args.getParcelable(ARG_REFERENCE);
}
displayMessage(messageReference, (mPgpData == null));
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putParcelable(STATE_MESSAGE_REFERENCE, mMessageReference);
outState.putSerializable(STATE_PGP_DATA, mPgpData);
}
public void displayMessage(MessageReference ref) {
displayMessage(ref, true);
}
private void displayMessage(MessageReference ref, boolean resetPgpData) {
mMessageReference = ref;
if (K9.DEBUG) {
Log.d(K9.LOG_TAG, "MessageView displaying message " + mMessageReference);
}
Context appContext = getActivity().getApplicationContext();
mAccount = Preferences.getPreferences(appContext).getAccount(mMessageReference.accountUuid);
if (resetPgpData) {
// start with fresh, empty PGP data
mPgpData = new PgpData();
}
// Clear previous message
mMessageView.resetView();
mMessageView.resetHeaderView();
mController.loadMessageForView(mAccount, mMessageReference.folderName, mMessageReference.uid, mListener);
configureMenu(mMenu);
}
/**
* Called from UI thread when user select Delete
*/
public void onDelete() {
if (K9.confirmDelete() || (K9.confirmDeleteStarred() && mMessage.isSet(Flag.FLAGGED))) {
showDialog(R.id.dialog_confirm_delete);
} else {
delete();
}
}
private void delete() {
if (mMessage != null) {
// Disable the delete button after it's tapped (to try to prevent
// accidental clicks)
mMenu.findItem(R.id.delete).setEnabled(false);
Message messageToDelete = mMessage;
mFragmentListener.showNextMessageOrReturn();
mController.deleteMessages(Collections.singletonList(messageToDelete), null);
}
}
public void onRefile(String dstFolder) {
if (!mController.isMoveCapable(mAccount)) {
return;
}
if (!mController.isMoveCapable(mMessage)) {
Toast toast = Toast.makeText(getActivity(), R.string.move_copy_cannot_copy_unsynced_message, Toast.LENGTH_LONG);
toast.show();
return;
}
if (K9.FOLDER_NONE.equalsIgnoreCase(dstFolder)) {
return;
}
if (mAccount.getSpamFolderName().equals(dstFolder) && K9.confirmSpam()) {
mDstFolder = dstFolder;
showDialog(R.id.dialog_confirm_spam);
} else {
refileMessage(dstFolder);
}
}
private void refileMessage(String dstFolder) {
String srcFolder = mMessageReference.folderName;
Message messageToMove = mMessage;
mFragmentListener.showNextMessageOrReturn();
mController.moveMessage(mAccount, srcFolder, messageToMove, dstFolder, null);
}
public void onReply() {
if (mMessage != null) {
mFragmentListener.onReply(mMessage, mPgpData);
}
}
public void onReplyAll() {
if (mMessage != null) {
mFragmentListener.onReplyAll(mMessage, mPgpData);
}
}
public void onForward() {
if (mMessage != null) {
mFragmentListener.onForward(mMessage, mPgpData);
}
}
public void onFlag() {
if (mMessage != null) {
boolean newState = !mMessage.isSet(Flag.FLAGGED);
mController.setFlag(mAccount, mMessage.getFolder().getName(),
new Message[] { mMessage }, Flag.FLAGGED, newState);
mMessageView.setHeaders(mMessage, mAccount);
}
}
public void onMove() {
if ((!mController.isMoveCapable(mAccount))
|| (mMessage == null)) {
return;
}
if (!mController.isMoveCapable(mMessage)) {
Toast toast = Toast.makeText(getActivity(), R.string.move_copy_cannot_copy_unsynced_message, Toast.LENGTH_LONG);
toast.show();
return;
}
startRefileActivity(ACTIVITY_CHOOSE_FOLDER_MOVE);
}
public void onCopy() {
if ((!mController.isCopyCapable(mAccount))
|| (mMessage == null)) {
return;
}
if (!mController.isCopyCapable(mMessage)) {
Toast toast = Toast.makeText(getActivity(), R.string.move_copy_cannot_copy_unsynced_message, Toast.LENGTH_LONG);
toast.show();
return;
}
startRefileActivity(ACTIVITY_CHOOSE_FOLDER_COPY);
}
private void onToggleColors() {
if (K9.getK9MessageViewTheme() == K9.THEME_DARK) {
K9.setK9MessageViewTheme(K9.THEME_LIGHT);
} else {
K9.setK9MessageViewTheme(K9.THEME_DARK);
}
new AsyncTask<Object, Object, Object>() {
@Override
protected Object doInBackground(Object... params) {
Context appContext = getActivity().getApplicationContext();
Preferences prefs = Preferences.getPreferences(appContext);
Editor editor = prefs.getPreferences().edit();
K9.save(editor);
editor.commit();
return null;
}
}.execute();
mFragmentListener.restartActivity();
}
private void startRefileActivity(int activity) {
Intent intent = new Intent(getActivity(), ChooseFolder.class);
intent.putExtra(ChooseFolder.EXTRA_ACCOUNT, mAccount.getUuid());
intent.putExtra(ChooseFolder.EXTRA_CUR_FOLDER, mMessageReference.folderName);
intent.putExtra(ChooseFolder.EXTRA_SEL_FOLDER, mAccount.getLastSelectedFolderName());
intent.putExtra(ChooseFolder.EXTRA_MESSAGE, mMessageReference);
startActivityForResult(intent, activity);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (mAccount.getCryptoProvider().onDecryptActivityResult(this, requestCode, resultCode, data, mPgpData)) {
return;
}
if (resultCode != Activity.RESULT_OK) {
return;
}
switch (requestCode) {
case ACTIVITY_CHOOSE_DIRECTORY: {
if (resultCode == Activity.RESULT_OK && data != null) {
// obtain the filename
Uri fileUri = data.getData();
if (fileUri != null) {
String filePath = fileUri.getPath();
if (filePath != null) {
attachmentTmpStore.writeFile(new File(filePath));
}
}
}
break;
}
case ACTIVITY_CHOOSE_FOLDER_MOVE:
case ACTIVITY_CHOOSE_FOLDER_COPY: {
if (data == null) {
return;
}
String destFolderName = data.getStringExtra(ChooseFolder.EXTRA_NEW_FOLDER);
MessageReference ref = data.getParcelableExtra(ChooseFolder.EXTRA_MESSAGE);
if (mMessageReference.equals(ref)) {
mAccount.setLastSelectedFolderName(destFolderName);
switch (requestCode) {
case ACTIVITY_CHOOSE_FOLDER_MOVE: {
mFragmentListener.showNextMessageOrReturn();
moveMessage(ref, destFolderName);
break;
}
case ACTIVITY_CHOOSE_FOLDER_COPY: {
copyMessage(ref, destFolderName);
break;
}
}
}
break;
}
}
}
private void onSendAlternate() {
if (mMessage != null) {
mController.sendAlternate(getActivity(), mAccount, mMessage);
}
}
private void onToggleRead() {
if (mMessage != null) {
mController.setFlag(mAccount, mMessage.getFolder().getName(),
new Message[] { mMessage }, Flag.SEEN, !mMessage.isSet(Flag.SEEN));
mMessageView.setHeaders(mMessage, mAccount);
String subject = mMessage.getSubject();
displayMessageSubject(subject);
updateUnreadToggleTitle();
}
}
private void onDownloadRemainder() {
if (mMessage.isSet(Flag.X_DOWNLOADED_FULL)) {
return;
}
mMessageView.downloadRemainderButton().setEnabled(false);
mController.loadMessageForViewRemote(mAccount, mMessageReference.folderName, mMessageReference.uid, mListener);
}
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.download: {
((AttachmentView)view).saveFile();
break;
}
case R.id.download_remainder: {
onDownloadRemainder();
break;
}
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.delete:
onDelete();
break;
case R.id.reply:
onReply();
break;
case R.id.reply_all:
onReplyAll();
break;
case R.id.forward:
onForward();
break;
case R.id.share:
onSendAlternate();
break;
case R.id.toggle_unread:
onToggleRead();
break;
case R.id.archive:
onRefile(mAccount.getArchiveFolderName());
break;
case R.id.spam:
onRefile(mAccount.getSpamFolderName());
break;
case R.id.move:
onMove();
break;
case R.id.copy:
onCopy();
break;
case R.id.select_text:
mMessageView.beginSelectingText();
break;
case R.id.toggle_message_view_theme:
onToggleColors();
break;
default:
return super.onOptionsItemSelected(item);
}
return true;
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.message_view_fragment, menu);
mMenu = menu;
configureMenu(menu);
}
private void configureMenu(Menu menu) {
// first run displayMessage() gets called before onCreateOptionMenu()
if (menu == null) {
return;
}
// enable them all
menu.findItem(R.id.copy).setVisible(true);
menu.findItem(R.id.move).setVisible(true);
menu.findItem(R.id.archive).setVisible(true);
menu.findItem(R.id.spam).setVisible(true);
mToggleMessageViewMenu = menu.findItem(R.id.toggle_message_view_theme);
if (K9.getK9MessageViewTheme() == K9.THEME_DARK) {
mToggleMessageViewMenu.setTitle(R.string.message_view_theme_action_light);
} else {
mToggleMessageViewMenu.setTitle(R.string.message_view_theme_action_dark);
}
toggleActionsState(menu, true);
updateUnreadToggleTitle();
// comply with the setting
if (!mAccount.getEnableMoveButtons()) {
menu.findItem(R.id.move).setVisible(false);
menu.findItem(R.id.archive).setVisible(false);
menu.findItem(R.id.spam).setVisible(false);
} else {
// check message, folder capability
if (!mController.isCopyCapable(mAccount)) {
menu.findItem(R.id.copy).setVisible(false);
}
if (mController.isMoveCapable(mAccount)) {
menu.findItem(R.id.move).setVisible(true);
menu.findItem(R.id.archive).setVisible(
!mMessageReference.folderName.equals(mAccount.getArchiveFolderName())
&& mAccount.hasArchiveFolder());
menu.findItem(R.id.spam).setVisible(
!mMessageReference.folderName.equals(mAccount.getSpamFolderName())
&& mAccount.hasSpamFolder());
} else {
menu.findItem(R.id.copy).setVisible(false);
menu.findItem(R.id.move).setVisible(false);
menu.findItem(R.id.archive).setVisible(false);
menu.findItem(R.id.spam).setVisible(false);
}
}
}
/**
* Set the title of the "Toggle Unread" menu item based upon the current read state of the message.
*/
public void updateUnreadToggleTitle() {
if (mMessage != null && mMenu != null) {
if (mMessage.isSet(Flag.SEEN)) {
mMenu.findItem(R.id.toggle_unread).setTitle(R.string.mark_as_unread_action);
} else {
mMenu.findItem(R.id.toggle_unread).setTitle(R.string.mark_as_read_action);
}
}
}
private void toggleActionsState(Menu menu, boolean state) {
for (int i = 0; i < menu.size(); ++i) {
menu.getItem(i).setEnabled(state);
}
}
private void setProgress(boolean enable) {
if (mFragmentListener != null) {
mFragmentListener.setProgress(enable);
}
}
private void displayMessageSubject(String subject) {
if (mFragmentListener != null) {
mFragmentListener.displayMessageSubject(subject);
}
}
public void moveMessage(MessageReference reference, String destFolderName) {
mController.moveMessage(mAccount, mMessageReference.folderName, mMessage,
destFolderName, null);
}
public void copyMessage(MessageReference reference, String destFolderName) {
mController.copyMessage(mAccount, mMessageReference.folderName, mMessage,
destFolderName, null);
}
/**
 * MessagingController callback listener. All callbacks arrive on a background thread;
 * each one first verifies that the event belongs to the message this fragment displays
 * (uid + folder + account uuid) and then posts UI work to {@code mHandler}.
 */
class Listener extends MessagingListener {

    /**
     * Headers of the message have been loaded; show them and wire up the flag toggle.
     * Runs on a background thread, UI work is posted to the handler.
     */
    @Override
    public void loadMessageForViewHeadersAvailable(final Account account, String folder, String uid,
            final Message message) {
        // Ignore events that are not for the message currently shown by this fragment.
        if (!mMessageReference.uid.equals(uid) || !mMessageReference.folderName.equals(folder)
                || !mMessageReference.accountUuid.equals(account.getUuid())) {
            return;
        }
        /*
         * Clone the message object because the original could be modified by
         * MessagingController later. This could lead to a ConcurrentModificationException
         * when that same object is accessed by the UI thread (below).
         *
         * See issue 3953
         *
         * This is just an ugly hack to get rid of the most pressing problem. A proper way to
         * fix this is to make Message thread-safe. Or, even better, rewriting the UI code to
         * access messages via a ContentProvider.
         *
         */
        final Message clonedMessage = message.clone();
        mHandler.post(new Runnable() {
            public void run() {
                // Body not downloaded yet — show a "downloading…" placeholder.
                if (!clonedMessage.isSet(Flag.X_DOWNLOADED_FULL) &&
                        !clonedMessage.isSet(Flag.X_DOWNLOADED_PARTIAL)) {
                    String text = getString(R.string.message_view_downloading);
                    mMessageView.showStatusMessage(text);
                }
                mMessageView.setHeaders(clonedMessage, account);
                final String subject = clonedMessage.getSubject();
                if (subject == null || subject.equals("")) {
                    // Empty subject: show a localized "(no subject)" placeholder instead.
                    displayMessageSubject(getString(R.string.general_no_subject));
                } else {
                    displayMessageSubject(clonedMessage.getSubject());
                }
                mMessageView.setOnFlagListener(new OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        onFlag();
                    }
                });
            }
        });
    }

    /**
     * Full message body is available; render it and refresh the read/unread menu title.
     */
    @Override
    public void loadMessageForViewBodyAvailable(final Account account, String folder,
            String uid, final Message message) {
        if (!mMessageReference.uid.equals(uid) ||
                !mMessageReference.folderName.equals(folder) ||
                !mMessageReference.accountUuid.equals(account.getUuid())) {
            return;
        }
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                try {
                    mMessage = message;
                    mMessageView.setMessage(account, (LocalMessage) message, mPgpData,
                            mController, mListener);
                    updateUnreadToggleTitle();
                } catch (MessagingException e) {
                    Log.v(K9.LOG_TAG, "loadMessageForViewBodyAvailable", e);
                }
            }
        });
    }

    /**
     * Loading failed: hide the progress indicator, surface an error to the user, and
     * show the empty-message placeholder if we have nothing (complete) to display.
     */
    @Override
    public void loadMessageForViewFailed(Account account, String folder, String uid, final Throwable t) {
        if (!mMessageReference.uid.equals(uid) || !mMessageReference.folderName.equals(folder)
                || !mMessageReference.accountUuid.equals(account.getUuid())) {
            return;
        }
        mHandler.post(new Runnable() {
            public void run() {
                setProgress(false);
                // IllegalArgumentException signals a bad message id; anything else is
                // treated as a network problem.
                if (t instanceof IllegalArgumentException) {
                    mHandler.invalidIdError();
                } else {
                    mHandler.networkError();
                }
                if (mMessage == null || mMessage.isSet(Flag.X_DOWNLOADED_PARTIAL)) {
                    mMessageView.showStatusMessage(getString(R.string.webview_empty_message));
                }
            }
        });
    }

    /**
     * Loading finished successfully: hide progress, maybe show the "download rest" button.
     */
    @Override
    public void loadMessageForViewFinished(Account account, String folder, String uid, final Message message) {
        if (!mMessageReference.uid.equals(uid) || !mMessageReference.folderName.equals(folder)
                || !mMessageReference.accountUuid.equals(account.getUuid())) {
            return;
        }
        mHandler.post(new Runnable() {
            public void run() {
                setProgress(false);
                mMessageView.setShowDownloadButton(message);
            }
        });
    }

    /** Loading started: show the progress indicator. */
    @Override
    public void loadMessageForViewStarted(Account account, String folder, String uid) {
        if (!mMessageReference.uid.equals(uid) || !mMessageReference.folderName.equals(folder)
                || !mMessageReference.accountUuid.equals(account.getUuid())) {
            return;
        }
        mHandler.post(new Runnable() {
            public void run() {
                setProgress(true);
            }
        });
    }

    /**
     * Attachment load started: disable attachment widgets and show a progress dialog.
     * Identity comparison ({@code !=}) is intentional — the callback must refer to the
     * exact message object this fragment holds.
     */
    @Override
    public void loadAttachmentStarted(Account account, Message message, Part part, Object tag, final boolean requiresDownload) {
        if (mMessage != message) {
            return;
        }
        mHandler.post(new Runnable() {
            public void run() {
                mMessageView.setAttachmentsEnabled(false);
                showDialog(R.id.dialog_attachment_progress);
                if (requiresDownload) {
                    mHandler.fetchingAttachment();
                }
            }
        });
    }

    /**
     * Attachment load finished: re-enable widgets, dismiss the dialog, and either save
     * the attachment to disk or open it, depending on the request packed into {@code tag}.
     * {@code tag} is expected to be {@code Object[]{Boolean download, AttachmentView view}}.
     */
    @Override
    public void loadAttachmentFinished(Account account, Message message, Part part, final Object tag) {
        if (mMessage != message) {
            return;
        }
        mHandler.post(new Runnable() {
            public void run() {
                mMessageView.setAttachmentsEnabled(true);
                removeDialog(R.id.dialog_attachment_progress);
                Object[] params = (Object[]) tag;
                boolean download = (Boolean) params[0];
                AttachmentView attachment = (AttachmentView) params[1];
                if (download) {
                    attachment.writeFile();
                } else {
                    attachment.showFile();
                }
            }
        });
    }

    /** Attachment load failed: restore the UI and report a network error. */
    @Override
    public void loadAttachmentFailed(Account account, Message message, Part part, Object tag, String reason) {
        if (mMessage != message) {
            return;
        }
        mHandler.post(new Runnable() {
            public void run() {
                mMessageView.setAttachmentsEnabled(true);
                removeDialog(R.id.dialog_attachment_progress);
                mHandler.networkError();
            }
        });
    }
}
// NOTE: this logic really belongs in MessageCryptoView.
/**
 * Re-renders the current message once PGP decryption has completed.
 *
 * @param pgpData decryption result to render the message with
 */
@Override
public void onDecryptDone(PgpData pgpData) {
    try {
        mMessageView.setMessage(mAccount, (LocalMessage) mMessage, pgpData,
                mController, mListener);
    } catch (MessagingException e) {
        Log.e(K9.LOG_TAG, "displayMessageBody failed", e);
    }
}
/**
 * Creates and shows the dialog fragment corresponding to {@code dialogId}.
 * The fragment is tagged via {@link #getDialogTag(int)} so {@code removeDialog}
 * can find and dismiss it later, and this fragment is set as the target so
 * confirmation callbacks come back here.
 *
 * @param dialogId one of the {@code R.id.dialog_*} identifiers
 * @throws RuntimeException if {@code dialogId} is not a known dialog id
 */
private void showDialog(int dialogId) {
    DialogFragment fragment;
    switch (dialogId) {
        case R.id.dialog_confirm_delete: {
            String title = getString(R.string.dialog_confirm_delete_title);
            String message = getString(R.string.dialog_confirm_delete_message);
            String confirmText = getString(R.string.dialog_confirm_delete_confirm_button);
            String cancelText = getString(R.string.dialog_confirm_delete_cancel_button);
            fragment = ConfirmationDialogFragment.newInstance(dialogId, title, message,
                    confirmText, cancelText);
            break;
        }
        case R.id.dialog_confirm_spam: {
            String title = getString(R.string.dialog_confirm_spam_title);
            // Quantity string with count 1: this view always acts on a single message.
            String message = getResources().getQuantityString(R.plurals.dialog_confirm_spam_message, 1);
            String confirmText = getString(R.string.dialog_confirm_spam_confirm_button);
            String cancelText = getString(R.string.dialog_confirm_spam_cancel_button);
            fragment = ConfirmationDialogFragment.newInstance(dialogId, title, message,
                    confirmText, cancelText);
            break;
        }
        case R.id.dialog_attachment_progress: {
            String title = getString(R.string.dialog_attachment_progress_title);
            fragment = ProgressDialogFragment.newInstance(title);
            break;
        }
        default: {
            throw new RuntimeException("Called showDialog(int) with unknown dialog id.");
        }
    }
    fragment.setTargetFragment(this, dialogId);
    fragment.show(getFragmentManager(), getDialogTag(dialogId));
}
/**
 * Dismisses the dialog fragment previously shown for {@code dialogId}, if it is
 * currently attached.
 *
 * @param dialogId the id passed to {@link #showDialog(int)}
 */
private void removeDialog(int dialogId) {
    FragmentManager fm = getFragmentManager();
    // Make sure the "show dialog" transaction has been processed when we call
    // findFragmentByTag() below. Otherwise the fragment won't be found and the dialog
    // will never be dismissed.
    fm.executePendingTransactions();
    DialogFragment fragment = (DialogFragment) fm.findFragmentByTag(getDialogTag(dialogId));
    if (fragment != null) {
        fragment.dismiss();
    }
}
/**
 * Builds the fragment-manager tag used to identify the dialog for {@code dialogId}.
 *
 * Uses plain string concatenation instead of {@code String.format("dialog-%d", ...)}:
 * {@code String.format} applies the default locale, which in some locales renders
 * digits with non-ASCII characters, so the tag produced at show time could fail to
 * match the tag looked up at dismiss time. Concatenation is locale-independent.
 *
 * @param dialogId the dialog's id
 * @return a stable, locale-independent tag such as {@code "dialog-2131230721"}
 */
private String getDialogTag(int dialogId) {
    return "dialog-" + dialogId;
}
/**
 * Forwards a zoom key event to the message view.
 *
 * @param event the key event that triggered the zoom
 */
public void zoom(KeyEvent event) {
    mMessageView.zoom(event);
}
/**
 * Confirmation-dialog callback: the user accepted the dialog identified by
 * {@code dialogId}. Deletes the message or completes the pending spam refile.
 */
@Override
public void doPositiveClick(int dialogId) {
    if (dialogId == R.id.dialog_confirm_delete) {
        delete();
    } else if (dialogId == R.id.dialog_confirm_spam) {
        // The destination folder was stashed before the dialog was shown.
        refileMessage(mDstFolder);
        mDstFolder = null;
    }
}
/** Confirmation-dialog callback: user declined. Intentionally a no-op. */
@Override
public void doNegativeClick(int dialogId) {
    /* do nothing */
}
/** Confirmation-dialog callback: dialog was cancelled. Intentionally a no-op. */
@Override
public void dialogCancelled(int dialogId) {
    /* do nothing */
}
/**
 * Contract the hosting activity must implement so this fragment can delegate
 * navigation, compose actions, and chrome updates (title, progress) back to it.
 */
public interface MessageViewFragmentListener {
    // Open the compose screen to forward the given message.
    public void onForward(Message mMessage, PgpData mPgpData);
    // Open the compose screen replying to all recipients.
    public void onReplyAll(Message mMessage, PgpData mPgpData);
    // Open the compose screen replying to the sender only.
    public void onReply(Message mMessage, PgpData mPgpData);
    // Show the message subject in the activity chrome (e.g. title bar).
    public void displayMessageSubject(String title);
    // Toggle the activity-level progress indicator.
    public void setProgress(boolean b);
    // Recreate the hosting activity (e.g. after a configuration-affecting change).
    public void restartActivity();
    // Advance to the next message, or leave the view if there is none.
    public void showNextMessageOrReturn();
    // Hand the header view to the activity once it exists.
    public void messageHeaderViewAvailable(MessageHeader messageHeaderView);
}
}
|
Fix NPE related to changed initialization order in Android 4.2
|
src/com/fsck/k9/fragment/MessageViewFragment.java
|
Fix NPE related to changed initialization order in Android 4.2
|
|
Java
|
apache-2.0
|
91118d28ff9704a0f6ab542fd0c5a007a9fd866a
| 0
|
mpakhomov/algorithms-data-structures
|
package com.mpakhomov.bst;
/**
 * A red-black tree mapping comparable keys to values.
 *
 * Based on "Introduction to Algorithms", third edition,
 * by Thomas H. Cormen, Charles E. Leiserson, Ronald L. Rivest and Clifford Stein.
 *
 * @author mpakhomov
 * @since: 7/6/2015
 */
public class RedBlackTree<K extends Comparable<K>, V> {

    private final static boolean BLACK = true;
    private final static boolean RED = false;

    // number of entries currently stored in the tree
    private int size;
    // tree root; null when the tree is empty
    private Entry<K, V> root;

    /** A tree node: key/value pair plus color and the structural links. */
    private static class Entry<K, V> {
        private K key;
        private V value;
        private boolean color = BLACK;
        private Entry<K, V> left;
        private Entry<K, V> right;
        private Entry<K, V> parent;

        public Entry() {
        }

        public Entry(K key, V value) {
            this.key = key;
            this.value = value;
        }

        public Entry(K key, V value, Entry<K, V> parent) {
            this(key, value);
            this.parent = parent;
        }

        public Entry(K key, V value, boolean color) {
            this(key, value);
            this.color = color;
        }

        public Entry(K key, V value, Entry<K, V> parent, boolean color) {
            this(key, value, parent);
            this.color = color;
        }
    }

    /** @return the number of entries stored in the tree */
    public int getSize() {
        return size;
    }

    /** @return the root entry, or {@code null} if the tree is empty */
    public Entry<K, V> getRoot() {
        return root;
    }

    /**
     * Insert an entry to Binary Search Tree (BST). It's the first step of {@link #put(Comparable, Object)}
     * At this step we insert a node to RBT as if it were an ordinary Binary Search Tree; we ignore
     * coloring, balancing etc. All RBT invariants are to be fixed later on.
     *
     * @param entry an entry to be inserted
     */
    void insertIntoBst(Entry<K, V> entry) {
        if (root == null) {
            root = entry;
            size++;
            return;
        }
        Entry<K, V> curr = root;
        while (true) {
            if (curr.key.compareTo(entry.key) < 0) {
                // search in the right subtree
                if (curr.right != null) {
                    curr = curr.right;
                } else {
                    // found the insertion point
                    curr.right = entry;
                    entry.parent = curr;
                    size++;
                    break;
                }
            } else {
                // search in the left subtree
                if (curr.left != null) {
                    curr = curr.left;
                } else {
                    // found the insertion point
                    curr.left = entry;
                    entry.parent = curr;
                    size++;
                    break;
                }
            }
        }
    }

    /**
     * Insert an entry into the Red Black Tree: plain BST insertion followed by the
     * CLRS fix-up pass that restores the red-black invariants.
     *
     * @param key   key with which the specified value is to be associated
     * @param value value to be associated with the specified key
     */
    public void put(K key, V value) {
        Entry<K, V> entry = new Entry<K, V>(key, value, RED);
        insertIntoBst(entry);
        rbInsertFixUp(entry);
    }

    // Null-safe accessors, used by rbInsertFixUp to avoid NPEs near the root.
    private static <K, V> Entry<K, V> parentOf(Entry<K, V> p) {
        return (p == null ? null : p.parent);
    }

    private static <K, V> Entry<K, V> leftOf(Entry<K, V> p) {
        return (p == null) ? null : p.left;
    }

    private static <K, V> Entry<K, V> rightOf(Entry<K, V> p) {
        return (p == null) ? null : p.right;
    }

    // A null node is treated as BLACK, per the CLRS sentinel convention.
    private static <K, V> boolean colorOf(Entry<K, V> p) {
        return (p == null) ? BLACK : p.color;
    }

    private static <K, V> void setColor(Entry<K, V> p, boolean c) {
        if (p != null) {
            p.color = c;
        }
    }

    /**
     * Fix rbt properties after insertion of a new {@code entry} (RB-INSERT-FIXUP in CLRS).
     * Walks up from the freshly inserted red node, recoloring and rotating until no red
     * node has a red parent; finally forces the root black.
     *
     * @param z red-black tree entry that was just inserted
     */
    public void rbInsertFixUp(Entry<K, V> z) {
        z.color = RED;
        // TODO: is it needed to check that z != null && z != root
        while (/*z != null &&*/ z != root && z.parent.color == RED) {
            if (parentOf(z) == leftOf(parentOf(parentOf(z)))) { // TODO: is NPE possible without leftOf/parentOf?
                // z and its parent (p) are in the left subtree of its grandparent (g)
                // y is an uncle of z (the right child of its grandparent)
                Entry<K, V> y = rightOf(parentOf(parentOf(z)));
                if (colorOf(y) == RED) {
                    // Case 1: recoloring
                    setColor(parentOf(z), BLACK);
                    setColor(y, BLACK);
                    setColor(parentOf(parentOf(z)), RED);
                    // move z two levels up in the tree
                    z = parentOf(parentOf(z));
                } else {
                    if (z == rightOf(parentOf(z))) {
                        // Case 2: z is a right child of its parent
                        z = parentOf(z);
                        rotateLeft(z);
                    }
                    // Case 3: z is a left child of its parent
                    setColor(parentOf(z), BLACK);
                    setColor(parentOf(parentOf(z)), RED);
                    rotateRight(parentOf(parentOf(z)));
                }
            } else {
                // z and its parent (p) are in the right subtree of its grandparent (g)
                // y is an uncle of z (the left child of its grandparent)
                Entry<K, V> y = leftOf(parentOf(parentOf(z)));
                if (colorOf(y) == RED) {
                    // Case 1: recoloring
                    setColor(parentOf(z), BLACK);
                    setColor(y, BLACK);
                    setColor(parentOf(parentOf(z)), RED);
                    // move z two levels up in the tree
                    z = parentOf(parentOf(z));
                } else {
                    if (z == leftOf(parentOf(z))) {
                        // Case 2: z is a left child of its parent
                        z = parentOf(z);
                        rotateRight(z);
                    }
                    // Case 3 (mirror): z is a right child of its parent
                    setColor(parentOf(z), BLACK);
                    setColor(parentOf(parentOf(z)), RED);
                    rotateLeft(parentOf(parentOf(z)));
                }
            } // if (parentOf(z) == leftOf(parentOf(parentOf(z)))) {
        } // while
        root.color = BLACK;
    }

    /**
     * Left rotation is used to swap a parent node x with its <em>right</em> child y, so that
     * y becomes a new parent of x and x becomes y's left child. It's an operation symmetric to
     * {@link #rotateRight(Entry)}
     * Left rotation preserves BST properties, i.e. the modified tree is still a Binary Search Tree
     *
     *     x                y
     *    / \              / \
     *   a   y     =>     x   g
     *      / \          / \
     *     b   g        a   b
     *
     * Keys comparison: a &lt; x &lt; b &lt; y &lt; g
     *
     * @param x a tree node (parent) to run the rotation on; its right child must be non-null
     */
    void rotateLeft(Entry<K, V> x) {
        Entry<K, V> y = x.right; // set y, we want to exchange x and its right child y
        x.right = y.left;        // turn y's left subtree into x's right subtree
        if (y.left != null) {
            y.left.parent = x;
        }
        y.parent = x.parent;     // link x's parent to y
        if (x.parent == null) {
            this.root = y;
        } else if (x == x.parent.left) {
            x.parent.left = y;
        } else if (x == x.parent.right) {
            x.parent.right = y;
        }
        y.left = x;              // put x on y's left
        x.parent = y;
    }

    /**
     * Right rotation is used to swap a parent node x with its <em>left</em> child y, so that
     * y becomes a new parent of x and x becomes y's right child. It's an operation symmetric to
     * {@link #rotateLeft(Entry)}
     * Right rotation preserves BST properties, i.e. the modified tree is still a Binary Search Tree
     *
     *       x            y
     *      / \          / \
     *     y   g   =>   a   x
     *    / \              / \
     *   a   b            b   g
     *
     * Keys comparison: a &lt; y &lt; b &lt; x &lt; g
     *
     * @param x a tree node (parent) to run the rotation on; its left child must be non-null
     */
    void rotateRight(Entry<K, V> x) {
        Entry<K, V> y = x.left; // set y
        x.left = y.right;       // turn y's right subtree into x's left subtree
        if (y.right != null) {
            y.right.parent = x;
        }
        y.parent = x.parent;    // link x's parent to y
        if (x.parent == null) {
            this.root = y;
        } else if (x == x.parent.left) {
            x.parent.left = y;
        } else if (x == x.parent.right) {
            x.parent.right = y;
        }
        y.right = x;            // put x on y's right
        x.parent = y;
    }

    /** Ad-hoc smoke test: manual rotations plus the two insertion tests below. */
    public static void main(String[] args) {
        // rbt 1, 2, 3, 4, 5, 6, 7
        RedBlackTree.Entry<Integer, Integer> root = new RedBlackTree.Entry<>(4, 4, BLACK);
        RedBlackTree.Entry<Integer, Integer> two = new RedBlackTree.Entry<>(2, 2, BLACK);
        RedBlackTree.Entry<Integer, Integer> six = new RedBlackTree.Entry<>(6, 6, BLACK);
        RedBlackTree.Entry<Integer, Integer> one = new RedBlackTree.Entry<>(1, 1, RED);
        RedBlackTree.Entry<Integer, Integer> three = new RedBlackTree.Entry<>(3, 3, RED);
        RedBlackTree.Entry<Integer, Integer> five = new RedBlackTree.Entry<>(5, 5, RED);
        RedBlackTree.Entry<Integer, Integer> seven = new RedBlackTree.Entry<>(7, 7, RED);
        RedBlackTree<Integer, Integer> tree = new RedBlackTree<>();
        tree.insertIntoBst(root);
        tree.insertIntoBst(two);
        tree.insertIntoBst(six);
        tree.insertIntoBst(one);
        tree.insertIntoBst(three);
        tree.insertIntoBst(five);
        tree.insertIntoBst(seven);
        printInOrder(tree.getRoot());
        tree.rotateLeft(six);
        printInOrder(tree.getRoot());
        tree.rotateRight(seven);
        printInOrder(tree.getRoot());
        testTreeFromTheBook();
        testInsertion();
    }

    /** Builds the example tree from CLRS by raw BST insertion (no fix-up). */
    static void testTreeFromTheBook() {
        RedBlackTree.Entry<Integer, Integer> root = new RedBlackTree.Entry<>(11, 11, BLACK);
        RedBlackTree.Entry<Integer, Integer> n14 = new RedBlackTree.Entry<>(14, 14, BLACK);
        RedBlackTree.Entry<Integer, Integer> n2 = new RedBlackTree.Entry<>(2, 2, RED);
        RedBlackTree.Entry<Integer, Integer> n1 = new RedBlackTree.Entry<>(1, 1, BLACK);
        RedBlackTree.Entry<Integer, Integer> n7 = new RedBlackTree.Entry<>(7, 7, BLACK);
        RedBlackTree.Entry<Integer, Integer> n5 = new RedBlackTree.Entry<>(5, 5, RED);
        RedBlackTree.Entry<Integer, Integer> n8 = new RedBlackTree.Entry<>(8, 8, RED);
        RedBlackTree.Entry<Integer, Integer> n15 = new RedBlackTree.Entry<>(15, 15, RED);
        RedBlackTree<Integer, Integer> tree = new RedBlackTree<>();
        tree.insertIntoBst(root);
        tree.insertIntoBst(n14);
        tree.insertIntoBst(n2);
        tree.insertIntoBst(n1);
        tree.insertIntoBst(n7);
        tree.insertIntoBst(n5);
        tree.insertIntoBst(n8);
        tree.insertIntoBst(n15);
        System.out.println("");
        printInOrder(root);
    }

    /** Builds the same tree through the public put() path (with fix-up). */
    static void testInsertion() {
        RedBlackTree<Integer, Integer> tree = new RedBlackTree<>();
        tree.put(11, 11);
        tree.put(14, 14);
        tree.put(2, 2);
        tree.put(1, 1);
        tree.put(7, 7);
        tree.put(5, 5);
        tree.put(8, 8);
        tree.put(15, 15);
        tree.put(4, 4);
        System.out.println("");
        printInOrder(tree.getRoot());
    }

    /** Prints the tree's values in key order, comma-separated, on System.out. */
    static private void printInOrder(RedBlackTree.Entry node) {
        // Fixed: the original contained the same null check twice in a row.
        if (node == null) {
            return;
        }
        printInOrder(node.left);
        System.out.print(node.value + ", ");
        printInOrder(node.right);
    }
}
|
src/main/java/com/mpakhomov/bst/RedBlackTree.java
|
package com.mpakhomov.bst;
/**
 * A red-black tree mapping comparable keys to values (insertion fix-up not yet implemented
 * in this revision — put() currently performs a plain BST insertion).
 *
 * Based on "Introduction to Algorithms", third edition,
 * by Thomas H. Cormen, Charles E. Leiserson, Ronald L. Rivest and Clifford Stein.
 *
 * @author mpakhomov
 * @since: 7/6/2015
 */
public class RedBlackTree<K extends Comparable<K>, V> {

    private final static boolean BLACK = true;
    private final static boolean RED = false;

    // number of entries currently stored in the tree
    private int size;
    // tree root; null when the tree is empty
    private Entry<K, V> root;

    /** A tree node: key/value pair plus color and the structural links. */
    private static class Entry<K, V> {
        private K key;
        private V value;
        private boolean color = BLACK;
        private Entry<K, V> left;
        private Entry<K, V> right;
        private Entry<K, V> parent;

        public Entry() {
        }

        public Entry(K key, V value) {
            this.key = key;
            this.value = value;
        }

        public Entry(K key, V value, Entry<K, V> parent) {
            this(key, value);
            this.parent = parent;
        }

        public Entry(K key, V value, boolean color) {
            this(key, value);
            this.color = color;
        }

        public Entry(K key, V value, Entry<K, V> parent, boolean color) {
            this(key, value, parent);
            this.color = color;
        }
    }

    /** @return the number of entries stored in the tree */
    public int getSize() {
        return size;
    }

    /** @return the root entry, or {@code null} if the tree is empty */
    public Entry<K, V> getRoot() {
        return root;
    }

    /**
     * Insert an entry to Binary Search Tree (BST). It's the first step of {@link #put(Comparable, Object)}
     * At this step we insert a node to RBT as if it were an ordinary Binary Search Tree; we ignore
     * coloring, balancing etc. All RBT invariants are to be fixed later on.
     *
     * @param entry an entry to be inserted
     */
    void insertIntoBst(Entry<K, V> entry) {
        if (root == null) {
            root = entry;
            size++;
            return;
        }
        Entry<K, V> curr = root;
        while (true) {
            if (curr.key.compareTo(entry.key) < 0) {
                // search in the right subtree
                if (curr.right != null) {
                    curr = curr.right;
                } else {
                    // found the insertion point
                    curr.right = entry;
                    entry.parent = curr;
                    size++;
                    break;
                }
            } else {
                // search in the left subtree
                if (curr.left != null) {
                    curr = curr.left;
                } else {
                    // found the insertion point
                    curr.left = entry;
                    entry.parent = curr;
                    size++;
                    break;
                }
            }
        }
    }

    /**
     * Insert an entry into the Red Black Tree.
     *
     * @param key   key with which the specified value is to be associated
     * @param value value to be associated with the specified key
     */
    public void put(K key, V value) {
        Entry<K, V> entry = new Entry<K, V>(key, value, RED);
        insertIntoBst(entry);
        rbInsertFixUp(entry);
    }

    // Null-safe accessors to avoid NPEs near the root.
    private static <K, V> Entry<K, V> parentOf(Entry<K, V> p) {
        return (p == null ? null : p.parent);
    }

    private static <K, V> Entry<K, V> leftOf(Entry<K, V> p) {
        return (p == null) ? null : p.left;
    }

    private static <K, V> Entry<K, V> rightOf(Entry<K, V> p) {
        return (p == null) ? null : p.right;
    }

    /**
     * Fix rbt properties after insertion of a new {@code entry}.
     * Not implemented yet in this revision — intentionally a no-op.
     *
     * @param entry red-black tree entry
     */
    public void rbInsertFixUp(Entry<K, V> entry) {
    }

    /**
     * Left rotation is used to swap a parent node x with its <em>right</em> child y, so that
     * y becomes a new parent of x and x becomes y's left child. It's an operation symmetric to
     * {@link #rotateRight(Entry)}
     * Left rotation preserves BST properties, i.e. the modified tree is still a Binary Search Tree
     *
     *     x                y
     *    / \              / \
     *   a   y     =>     x   g
     *      / \          / \
     *     b   g        a   b
     *
     * Keys comparison: a &lt; x &lt; b &lt; y &lt; g
     *
     * @param x a tree node (parent) to run the rotation on; its right child must be non-null
     */
    void rotateLeft(Entry<K, V> x) {
        Entry<K, V> y = x.right; // set y, we want to exchange x and its right child y
        x.right = y.left;        // turn y's left subtree into x's right subtree
        if (y.left != null) {
            y.left.parent = x;
        }
        y.parent = x.parent;     // link x's parent to y
        if (x.parent == null) {
            this.root = y;
        } else if (x == x.parent.left) {
            x.parent.left = y;
        } else if (x == x.parent.right) {
            x.parent.right = y;
        }
        y.left = x;              // put x on y's left
        x.parent = y;
    }

    /**
     * Right rotation is used to swap a parent node x with its <em>left</em> child y, so that
     * y becomes a new parent of x and x becomes y's right child. It's an operation symmetric to
     * {@link #rotateLeft(Entry)}
     * Right rotation preserves BST properties, i.e. the modified tree is still a Binary Search Tree
     *
     *       x            y
     *      / \          / \
     *     y   g   =>   a   x
     *    / \              / \
     *   a   b            b   g
     *
     * Keys comparison: a &lt; y &lt; b &lt; x &lt; g
     *
     * @param x a tree node (parent) to run the rotation on; its left child must be non-null
     */
    void rotateRight(Entry<K, V> x) {
        Entry<K, V> y = x.left; // set y
        x.left = y.right;       // turn y's right subtree into x's left subtree
        if (y.right != null) {
            y.right.parent = x;
        }
        y.parent = x.parent;    // link x's parent to y
        if (x.parent == null) {
            this.root = y;
        } else if (x == x.parent.left) {
            x.parent.left = y;
        } else if (x == x.parent.right) {
            x.parent.right = y;
        }
        y.right = x;            // put x on y's right
        x.parent = y;
    }

    /** Ad-hoc smoke test: manual rotations plus the CLRS example tree. */
    public static void main(String[] args) {
        // rbt 1, 2, 3, 4, 5, 6, 7
        RedBlackTree.Entry<Integer, Integer> root = new RedBlackTree.Entry<>(4, 4, BLACK);
        RedBlackTree.Entry<Integer, Integer> two = new RedBlackTree.Entry<>(2, 2, BLACK);
        RedBlackTree.Entry<Integer, Integer> six = new RedBlackTree.Entry<>(6, 6, BLACK);
        RedBlackTree.Entry<Integer, Integer> one = new RedBlackTree.Entry<>(1, 1, RED);
        RedBlackTree.Entry<Integer, Integer> three = new RedBlackTree.Entry<>(3, 3, RED);
        RedBlackTree.Entry<Integer, Integer> five = new RedBlackTree.Entry<>(5, 5, RED);
        RedBlackTree.Entry<Integer, Integer> seven = new RedBlackTree.Entry<>(7, 7, RED);
        RedBlackTree<Integer, Integer> tree = new RedBlackTree<>();
        tree.insertIntoBst(root);
        tree.insertIntoBst(two);
        tree.insertIntoBst(six);
        tree.insertIntoBst(one);
        tree.insertIntoBst(three);
        tree.insertIntoBst(five);
        tree.insertIntoBst(seven);
        printInOrder(tree.getRoot());
        tree.rotateLeft(six);
        printInOrder(tree.getRoot());
        tree.rotateRight(seven);
        printInOrder(tree.getRoot());
        testTreeFromTheBook();
    }

    /** Builds the example tree from CLRS by raw BST insertion (no fix-up). */
    static void testTreeFromTheBook() {
        RedBlackTree.Entry<Integer, Integer> root = new RedBlackTree.Entry<>(11, 11, BLACK);
        RedBlackTree.Entry<Integer, Integer> n14 = new RedBlackTree.Entry<>(14, 14, BLACK);
        RedBlackTree.Entry<Integer, Integer> n2 = new RedBlackTree.Entry<>(2, 2, RED);
        RedBlackTree.Entry<Integer, Integer> n1 = new RedBlackTree.Entry<>(1, 1, BLACK);
        RedBlackTree.Entry<Integer, Integer> n7 = new RedBlackTree.Entry<>(7, 7, BLACK);
        RedBlackTree.Entry<Integer, Integer> n5 = new RedBlackTree.Entry<>(5, 5, RED);
        RedBlackTree.Entry<Integer, Integer> n8 = new RedBlackTree.Entry<>(8, 8, RED);
        RedBlackTree.Entry<Integer, Integer> n15 = new RedBlackTree.Entry<>(15, 15, RED);
        RedBlackTree<Integer, Integer> tree = new RedBlackTree<>();
        tree.insertIntoBst(root);
        tree.insertIntoBst(n14);
        tree.insertIntoBst(n2);
        tree.insertIntoBst(n1);
        tree.insertIntoBst(n7);
        tree.insertIntoBst(n5);
        tree.insertIntoBst(n8);
        tree.insertIntoBst(n15);
        System.out.println("");
        printInOrder(root);
    }

    /** Prints the tree's values in key order, comma-separated, on System.out. */
    static private void printInOrder(RedBlackTree.Entry node) {
        // Fixed: the original contained the same null check twice in a row.
        if (node == null) {
            return;
        }
        printInOrder(node.left);
        System.out.print(node.value + ", ");
        printInOrder(node.right);
    }
}
|
RBT: implemented insertion
|
src/main/java/com/mpakhomov/bst/RedBlackTree.java
|
RBT: implemented insertion
|
|
Java
|
apache-2.0
|
9c4b7dc486c7bb2f7d9ea62502abb24ca1516605
| 0
|
cuba-platform/cuba,dimone-kun/cuba,dimone-kun/cuba,dimone-kun/cuba,cuba-platform/cuba,cuba-platform/cuba
|
/*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.gui.components.filter;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.haulmont.bali.datastruct.Node;
import com.haulmont.bali.util.Dom4j;
import com.haulmont.bali.util.ParamsMap;
import com.haulmont.chile.core.datatypes.Datatypes;
import com.haulmont.chile.core.model.utils.InstanceUtils;
import com.haulmont.cuba.client.ClientConfig;
import com.haulmont.cuba.client.sys.PersistenceManagerClient;
import com.haulmont.cuba.core.app.DataService;
import com.haulmont.cuba.core.entity.AbstractSearchFolder;
import com.haulmont.cuba.core.entity.AppFolder;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.global.*;
import com.haulmont.cuba.core.global.filter.DenyingClause;
import com.haulmont.cuba.core.global.filter.QueryFilter;
import com.haulmont.cuba.gui.ComponentsHelper;
import com.haulmont.cuba.gui.WindowManager;
import com.haulmont.cuba.gui.WindowManagerProvider;
import com.haulmont.cuba.gui.WindowParams;
import com.haulmont.cuba.gui.components.*;
import com.haulmont.cuba.gui.components.Action.Status;
import com.haulmont.cuba.gui.components.DialogAction.Type;
import com.haulmont.cuba.gui.components.KeyCombination.Key;
import com.haulmont.cuba.gui.components.actions.BaseAction;
import com.haulmont.cuba.gui.components.actions.ItemTrackingAction;
import com.haulmont.cuba.gui.components.filter.condition.AbstractCondition;
import com.haulmont.cuba.gui.components.filter.condition.CustomCondition;
import com.haulmont.cuba.gui.components.filter.edit.FilterEditor;
import com.haulmont.cuba.gui.components.filter.filterselect.FilterSelectWindow;
import com.haulmont.cuba.gui.config.WindowConfig;
import com.haulmont.cuba.gui.config.WindowInfo;
import com.haulmont.cuba.gui.data.CollectionDatasource;
import com.haulmont.cuba.gui.data.HierarchicalDatasource;
import com.haulmont.cuba.gui.presentations.Presentations;
import com.haulmont.cuba.gui.settings.SettingsImpl;
import com.haulmont.cuba.gui.theme.ThemeConstants;
import com.haulmont.cuba.gui.theme.ThemeConstantsManager;
import com.haulmont.cuba.gui.xml.layout.ComponentsFactory;
import com.haulmont.cuba.security.entity.FilterEntity;
import com.haulmont.cuba.security.entity.SearchFolder;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.dom4j.Attribute;
import org.dom4j.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*/
@org.springframework.stereotype.Component(FilterDelegate.NAME)
@Scope("prototype")
public class FilterDelegateImpl implements FilterDelegate {
protected static final String GLOBAL_FILTER_PERMISSION = "cuba.gui.filter.global";
protected static final String GLOBAL_APP_FOLDERS_PERMISSION = "cuba.gui.appFolder.global";
protected static final String FILTER_EDIT_PERMISSION = "cuba.gui.filter.edit";
protected static final String CONDITIONS_LOCATION_TOP = "top";
protected static final Logger log = LoggerFactory.getLogger(FilterDelegateImpl.class);
protected static final String MODIFIED_INDICATOR_SYMBOL = " *";
@Inject
protected ComponentsFactory componentsFactory;
@Inject
protected ThemeConstantsManager themeConstantsManager;
@Inject
protected Messages messages;
@Inject
protected WindowManagerProvider windowManagerProvider;
@Inject
protected Metadata metadata;
@Inject
protected WindowConfig windowConfig;
@Inject
protected UserSessionSource userSessionSource;
@Inject
protected Configuration configuration;
@Inject
protected Security security;
@Inject
protected FilterHelper filterHelper;
@Inject
protected FilterParser filterParser;
protected FtsFilterHelper ftsFilterHelper;
protected DataService dataService;
protected PersistenceManagerClient persistenceManager;
protected ClientConfig clientConfig;
protected GlobalConfig globalConfig;
protected FtsConfig ftsConfig;
protected AddConditionHelper addConditionHelper;
protected ThemeConstants theme;
protected WindowManager windowManager;
protected Filter filter;
protected FilterEntity adHocFilter;
protected ConditionsTree conditions = new ConditionsTree();
protected ConditionsTree prevConditions = new ConditionsTree();
protected List<AbstractCondition> initialConditions = new ArrayList<>();
protected FilterEntity filterEntity;
protected FilterEntity initialFilterEntity;
protected CollectionDatasource datasource;
protected QueryFilter dsQueryFilter;
protected List<FilterEntity> filterEntities = new ArrayList<>();
protected AppliedFilter lastAppliedFilter;
protected LinkedList<AppliedFilterHolder> appliedFilters = new LinkedList<>();
protected List<Filter.FilterEntityChangeListener> filterEntityChangeListeners = new ArrayList<>();
protected GroupBoxLayout groupBoxLayout;
protected BoxLayout layout;
protected PopupButton filtersPopupButton;
protected Component.Container conditionsLayout;
protected BoxLayout maxResultsLayout;
protected Field maxResultsField;
protected TextField maxResultsTextField;
protected LookupField maxResultsLookupField;
protected BoxLayout controlsLayout;
protected Component.Container appliedFiltersLayout;
protected PopupButton settingsBtn;
protected Component applyTo;
protected SaveAction saveAction;
protected TextField ftsSearchCriteriaField;
protected CheckBox ftsSwitch;
protected LinkButton addConditionBtn;
protected HBoxLayout filtersPopupBox;
protected Button searchBtn;
protected Component controlsLayoutGap;
protected Object paramEditComponentToFocus;
protected String caption;
protected int maxResults = -1;
protected boolean useMaxResults;
protected boolean textMaxResults;
protected Boolean manualApplyRequired;
protected boolean folderActionsEnabled = true;
protected boolean filtersLookupListenerEnabled = true;
protected boolean filtersPopupDisplayed = false;
protected boolean filtersLookupDisplayed = false;
protected boolean maxResultsAddedToLayout = false;
protected boolean editable = true;
protected FilterMode filterMode;
protected boolean filterSavingPossible = true;
protected Integer columnsCount;
protected String initialWindowCaption;
protected String conditionsLocation;
protected boolean filterActionsCreated = false;
protected boolean delayedFocus;
protected boolean modeSwitchVisible = true;
protected SaveAsAction saveAsAction;
protected EditAction editAction;
protected MakeDefaultAction makeDefaultAction;
protected RemoveAction removeAction;
protected PinAppliedAction pinAppliedAction;
protected SaveAsFolderAction saveAsAppFolderAction;
protected SaveAsFolderAction saveAsSearchFolderAction;
protected LookupField filtersLookup;
protected List<FDExpandedStateChangeListener> expandedStateChangeListeners;
/** Which condition's parameter editor should receive focus after the conditions layout is (re)built. */
protected enum ConditionsFocusType {
    NONE,
    FIRST,
    LAST
}
/**
 * Spring post-construct hook: resolves lazily-obtained beans and config objects,
 * picks up the optional full-text-search helper if its bean is registered, and
 * builds the initial (generic-mode) layout.
 */
@PostConstruct
public void init() {
    theme = themeConstantsManager.getConstants();
    windowManager = windowManagerProvider.get();
    dataService = AppBeans.get(DataService.class);
    persistenceManager = AppBeans.get(PersistenceManagerClient.class);
    globalConfig = configuration.getConfig(GlobalConfig.class);
    clientConfig = configuration.getConfig(ClientConfig.class);
    ftsConfig = configuration.getConfig(FtsConfig.class);
    // FTS support is optional — only wired in if the helper bean exists in this deployment.
    if (AppBeans.containsBean(FtsFilterHelper.NAME)) {
        ftsFilterHelper = AppBeans.get(FtsFilterHelper.class);
    }
    filterMode = FilterMode.GENERIC_MODE;
    conditionsLocation = clientConfig.getGenericFilterConditionsLocation();
    createLayout();
}
/**
 * Builds (or rebuilds) the filter component's layout: the outer group box is created
 * once on first call; subsequent calls clear the inner layout and repopulate it.
 * The controls row and the conditions area are stacked in the order dictated by the
 * configured conditions location (top or bottom).
 */
protected void createLayout() {
    if (layout == null) {
        // First-time construction: create the outer group box and the inner container.
        groupBoxLayout = componentsFactory.createComponent(GroupBoxLayout.class);
        groupBoxLayout.addExpandedStateChangeListener(e -> fireExpandStateChange());
        groupBoxLayout.setOrientation(GroupBoxLayout.Orientation.VERTICAL);
        groupBoxLayout.setStyleName("cuba-generic-filter");
        groupBoxLayout.setWidth("100%");
        layout = componentsFactory.createComponent(VBoxLayout.class);
        layout.setWidth("100%");
        groupBoxLayout.add(layout);
        if (caption == null)
            setCaption(getMainMessage("filter.groupBoxCaption"));
    } else {
        // Rebuild: strip all previous children so the layout can be repopulated.
        Collection<Component> components = layout.getComponents();
        for (Component component : components) {
            layout.remove(component);
        }
    }
    layout.setSpacing(false);
    appliedFiltersLayout = componentsFactory.createComponent(VBoxLayout.class);
    conditionsLayout = componentsFactory.createComponent(HBoxLayout.class);
    conditionsLayout.setWidth("100%");
    conditionsLayout.setStyleName("filter-conditions");
    // The controls row differs between generic filtering and full-text-search mode.
    if (filterMode == FilterMode.GENERIC_MODE) {
        createControlsLayoutForGeneric();
    } else {
        createControlsLayoutForFts();
    }
    // Conditions go above or below the controls, per configuration.
    if (CONDITIONS_LOCATION_TOP.equals(conditionsLocation)) {
        layout.add(conditionsLayout);
        layout.add(controlsLayout);
    } else {
        layout.add(controlsLayout);
        layout.add(conditionsLayout);
    }
}
/**
 * Creates the controls row for the generic (condition-based) filter mode:
 * search button + filters popup/lookup, "add condition" link, expanding gap,
 * settings popup, max-results controls and the FTS mode switch. The final
 * arrangement is produced by a {@link ControlsLayoutBuilder} driven by a
 * layout description string from the client config.
 */
protected void createControlsLayoutForGeneric() {
controlsLayout = componentsFactory.createComponent(HBoxLayout.class);
controlsLayout.setSpacing(true);
controlsLayout.setWidth("100%");
filterHelper.setInternalDebugId(controlsLayout, "controlsLayout");
filtersPopupBox = componentsFactory.createComponent(HBoxLayout.class);
filtersPopupBox.setStyleName("filter-search-button-layout");
filterHelper.setInternalDebugId(filtersPopupBox, "filtersPopupBox");
searchBtn = componentsFactory.createComponent(Button.class);
filtersPopupBox.add(searchBtn);
searchBtn.setStyleName("filter-search-button");
searchBtn.setCaption(getMainMessage("filter.search"));
searchBtn.setIcon("icons/search.png");
searchBtn.setAction(new AbstractAction("search") {
@Override
public void actionPerform(Component component) {
// false: do not treat as an initial/default apply
apply(false);
}
});
filterHelper.setInternalDebugId(searchBtn, "searchBtn");
filtersPopupButton = componentsFactory.createComponent(PopupButton.class);
filterHelper.setInternalDebugId(filtersPopupButton, "filtersPopupButton");
filtersPopupBox.add(filtersPopupButton);
filtersLookup = componentsFactory.createComponent(LookupField.class);
filtersLookup.setWidth(theme.get("cuba.gui.filter.select.width"));
filtersLookup.addValueChangeListener(new FiltersLookupChangeListener());
filterHelper.setLookupNullSelectionAllowed(filtersLookup, false);
filterHelper.setInternalDebugId(filtersLookup, "filtersLookup");
addConditionBtn = componentsFactory.createComponent(LinkButton.class);
addConditionBtn.setAlignment(Component.Alignment.MIDDLE_LEFT);
addConditionBtn.setCaption(getMainMessage("filter.addCondition"));
addConditionBtn.setAction(new AbstractAction("openAddConditionDlg") {
@Override
public void actionPerform(Component component) {
addConditionHelper.addCondition(conditions);
}
});
filterHelper.setInternalDebugId(addConditionBtn, "addConditionBtn");
// invisible expanding gap pushes the settings/max-results controls to the right
controlsLayoutGap = componentsFactory.createComponent(Label.class);
filterHelper.setInternalDebugId(controlsLayoutGap, "controlsLayoutGap");
controlsLayout.add(controlsLayoutGap);
controlsLayout.expand(controlsLayoutGap);
settingsBtn = componentsFactory.createComponent(PopupButton.class);
settingsBtn.setIcon("icons/gear.png");
filterHelper.setInternalDebugId(settingsBtn, "settingsBtn");
createFilterActions();
createMaxResultsLayout();
createFtsSwitch();
ftsSwitch.setAlignment(Component.Alignment.MIDDLE_RIGHT);
filterHelper.setInternalDebugId(ftsSwitch, "ftsSwitch");
String layoutDescription = clientConfig.getGenericFilterControlsLayout();
ControlsLayoutBuilder controlsLayoutBuilder = createControlsLayoutBuilder(layoutDescription);
controlsLayoutBuilder.build();
maxResultsLayout.setVisible(isMaxResultsLayoutVisible());
filterHelper.setInternalDebugId(maxResultsLayout, "maxResultsLayout");
}
/**
 * Creates the controls row for full-text-search mode: a search phrase field
 * (with ENTER shortcut), search button, expanding gap, optional max-results
 * controls and the mode switch check box.
 */
protected void createControlsLayoutForFts() {
controlsLayout = componentsFactory.createComponent(HBoxLayout.class);
controlsLayout.setSpacing(true);
controlsLayout.setWidth("100%");
ftsSearchCriteriaField = componentsFactory.createComponent(TextField.class);
ftsSearchCriteriaField.setAlignment(Component.Alignment.MIDDLE_LEFT);
ftsSearchCriteriaField.setWidth(theme.get("cuba.gui.filter.ftsSearchCriteriaField.width"));
ftsSearchCriteriaField.setInputPrompt(getMainMessage("filter.enterSearchPhrase"));
// criteria field gets focus when the layout is (re)displayed
paramEditComponentToFocus = ftsSearchCriteriaField;
filterHelper.addShortcutListener(ftsSearchCriteriaField, createFtsSearchShortcutListener());
controlsLayout.add(ftsSearchCriteriaField);
searchBtn = componentsFactory.createComponent(Button.class);
searchBtn.setCaption(getMainMessage("filter.search"));
searchBtn.setIcon("icons/search.png");
searchBtn.setAction(new AbstractAction("search") {
@Override
public void actionPerform(Component component) {
applyFts();
}
});
controlsLayout.add(searchBtn);
controlsLayoutGap = componentsFactory.createComponent(Label.class);
controlsLayout.add(controlsLayoutGap);
controlsLayout.expand(controlsLayoutGap);
createMaxResultsLayout();
if (isMaxResultsLayoutVisible()) {
controlsLayout.add(maxResultsLayout);
initMaxResults();
}
createFtsSwitch();
ftsSwitch.setAlignment(Component.Alignment.MIDDLE_RIGHT);
controlsLayout.add(ftsSwitch);
}
/**
 * @return a shortcut listener that runs the full-text search when ENTER is
 *         pressed inside the FTS criteria field (mirrors the search button)
 */
protected FilterHelper.ShortcutListener createFtsSearchShortcutListener() {
    KeyCombination enter = new KeyCombination(Key.ENTER);
    return new FilterHelper.ShortcutListener("ftsSearch", enter) {
        @Override
        public void handleShortcutPressed() {
            applyFts();
        }
    };
}
/**
 * Creates the check box that toggles between generic and full-text-search
 * filter modes. The value is set BEFORE the listener is registered so that
 * initialization does not trigger a mode switch.
 */
protected void createFtsSwitch() {
    ftsSwitch = componentsFactory.createComponent(CheckBox.class);
    ftsSwitch.setCaption(getMainMessage("filter.ftsSwitch"));
    ftsSwitch.setValue(filterMode == FilterMode.FTS_MODE);
    ftsSwitch.addValueChangeListener(e -> {
        boolean ftsSelected = Boolean.TRUE.equals(e.getValue());
        filterMode = ftsSelected ? FilterMode.FTS_MODE : FilterMode.GENERIC_MODE;
        switchFilterMode(filterMode);
    });
    ftsSwitch.setVisible(modeSwitchVisible);
}
/**
 * Switches between GENERIC and FTS filter modes, rebuilding the layout.
 * When entering FTS mode the current conditions tree is stashed in
 * {@code prevConditions}, pinned queries are released and applied-filter
 * history is cleared; returning to GENERIC mode restores the stashed tree.
 */
@Override
public void switchFilterMode(FilterMode filterMode) {
if (filterMode == FilterMode.FTS_MODE && !isFtsModeEnabled()) {
log.warn("Unable to switch to the FTS filter mode. FTS mode is not supported for the " + datasource.getMetaClass().getName() + " entity");
return;
}
this.filterMode = filterMode;
if (filterMode == FilterMode.FTS_MODE) {
prevConditions = conditions;
// datasource is assumed to support applyToSelected here — TODO confirm
((CollectionDatasource.SupportsApplyToSelected) datasource).unpinAllQuery();
appliedFilters.clear();
lastAppliedFilter = null;
}
conditions = (filterMode == FilterMode.GENERIC_MODE) ? prevConditions : new ConditionsTree();
createLayout();
// initMaxResults();
if (filterMode == FilterMode.GENERIC_MODE) {
fillConditionsLayout(ConditionsFocusType.FIRST);
addConditionBtn.setVisible(editable && userCanEditFilers());
setFilterActionsEnabled();
initFilterSelectComponents();
}
// focus priority: param editor, then popup button, then lookup field
if (paramEditComponentToFocus != null)
requestFocusToParamEditComponent();
else if (filtersPopupDisplayed)
filtersPopupButton.requestFocus();
else if (filtersLookupDisplayed) {
filtersLookup.requestFocus();
}
updateWindowCaption();
}
/**
 * Creates the "max results" controls: a label plus either a free-text field
 * or a lookup with preset options, depending on {@code textMaxResults}.
 * The option list is parsed from a comma-separated config string; the token
 * "NULL" enables a null (no-limit) selection, non-numeric tokens are
 * silently skipped (best-effort parsing of user-editable config).
 */
protected void createMaxResultsLayout() {
maxResultsLayout = componentsFactory.createComponent(HBoxLayout.class);
maxResultsLayout.setSpacing(true);
maxResultsLayout.setAlignment(Component.Alignment.MIDDLE_RIGHT);
Label maxResultsLabel1 = componentsFactory.createComponent(Label.class);
maxResultsLabel1.setValue(messages.getMainMessage("filter.maxResults.label1"));
maxResultsLabel1.setAlignment(Component.Alignment.MIDDLE_RIGHT);
maxResultsLayout.add(maxResultsLabel1);
maxResultsTextField = componentsFactory.createComponent(TextField.class);
maxResultsTextField.setAlignment(Component.Alignment.MIDDLE_RIGHT);
maxResultsTextField.setMaxLength(4);
maxResultsTextField.setWidth(theme.get("cuba.gui.Filter.maxResults.width"));
maxResultsTextField.setDatatype(Datatypes.get("int"));
maxResultsLookupField = componentsFactory.createComponent(LookupField.class);
maxResultsLookupField.setAlignment(Component.Alignment.MIDDLE_RIGHT);
maxResultsLookupField.setWidth(theme.get("cuba.gui.Filter.maxResults.lookup.width"));
filterHelper.setLookupTextInputAllowed(maxResultsLookupField, false);
filterHelper.setLookupNullSelectionAllowed(maxResultsLookupField, false);
List<Integer> maxResultOptions = new ArrayList<>();
String maxResultOptionsStr = clientConfig.getGenericFilterMaxResultsOptions();
Iterable<String> split = Splitter.on(",").trimResults().split(maxResultOptionsStr);
for (String option : split) {
if ("NULL".equals(option)) {
filterHelper.setLookupNullSelectionAllowed(maxResultsLookupField, true);
}
else {
try {
Integer value = Integer.valueOf(option);
maxResultOptions.add(value);
} catch (NumberFormatException ignored) {}
}
}
maxResultsLookupField.setOptionsList(maxResultOptions);
// only one of the two fields is actually shown
maxResultsField = textMaxResults ? maxResultsTextField : maxResultsLookupField;
maxResultsLayout.add(maxResultsField);
}
/**
 * Loads filter entities, finds the default filter and applies it if found.
 * Falls back to the ad-hoc filter when no default exists or when loading
 * the default fails. Auto-refresh may be suppressed via window params.
 */
@Override
public void loadFiltersAndApplyDefault() {
initShortcutActions();
initAdHocFilter();
loadFilterEntities();
FilterEntity defaultFilter = getDefaultFilter(filterEntities);
initFilterSelectComponents();
if (defaultFilter == null) {
defaultFilter = adHocFilter;
}
try {
setFilterEntity(defaultFilter);
} catch (Exception e) {
// a broken saved filter must not break the screen — fall back to ad-hoc
log.error("Exception on loading default filter '" + defaultFilter.getName() + "'", e);
windowManager.showNotification(messages.formatMainMessage("filter.errorLoadingDefaultFilter", defaultFilter.getName()), Frame.NotificationType.ERROR);
defaultFilter = adHocFilter;
setFilterEntity(adHocFilter);
}
if (defaultFilter != adHocFilter) {
Window window = ComponentsHelper.getWindow(filter);
if (!WindowParams.DISABLE_AUTO_REFRESH.getBool(window.getContext())) {
if (getResultingManualApplyRequired()) {
// manual-apply screens only auto-apply when the filter opts in
if (BooleanUtils.isTrue(defaultFilter.getApplyDefault())) {
apply(true);
}
} else
apply(true);
if (filterEntity != null) {
window.setDescription(getFilterCaption(filterEntity));
} else
window.setDescription(null);
}
}
}
/**
 * Sets filter entity, creates condition editor components and applies filter if necessary.
 * Parses the entity's XML into a conditions tree, wires modification
 * listeners, copies matching window parameters into condition params,
 * snapshots the initial state and rebuilds the conditions UI.
 */
@Override
public void setFilterEntity(FilterEntity filterEntity) {
this.filterEntity = filterEntity;
conditions = filterParser.getConditions(filter, filterEntity.getXml());
initialConditions = conditions.toConditionsList();
for (AbstractCondition condition : conditions.toConditionsList()) {
condition.addListener(new AbstractCondition.Listener() {
@Override
public void captionChanged() {}
@Override
public void paramChanged(Param oldParam, Param newParam) {
// any parameter change may flip the "modified" indicator
updateFilterModifiedIndicator();
}
});
}
// If there are window parameters named as filter parameters, assign values to the corresponding
// filter params. Together with passing a filter code in 'filter' window parameter it allows to open an
// arbitrary filter with parameters regardless of a user defined default filter.
Window window = ComponentsHelper.getWindow(filter);
for (AbstractCondition condition : conditions.toConditionsList()) {
if (condition.getParam() != null) {
for (Map.Entry<String, Object> entry : window.getContext().getParams().entrySet()) {
if (entry.getKey().equals(condition.getParam().getName()))
condition.getParam().parseValue((String) entry.getValue());
}
}
}
saveInitialFilterState();
if (filtersLookupDisplayed) {
// suppress the lookup's change listener while syncing its value
filtersLookupListenerEnabled = false;
filtersLookup.setValue(filterEntity);
filtersLookupListenerEnabled = true;
}
setFilterActionsEnabled();
setFilterActionsVisible();
fillConditionsLayout(ConditionsFocusType.FIRST);
if (delayedFocus) {
delayedFocus = false;
requestFocus();
} else {
requestFocusToParamEditComponent();
}
setConditionsLayoutVisible(true);
// sets and default-applying folders are applied immediately
if (BooleanUtils.isTrue(filterEntity.getIsSet())
|| (filterEntity.getFolder() != null && BooleanUtils.isNotFalse(filterEntity.getApplyDefault()))) {
apply(true);
}
for (Filter.FilterEntityChangeListener listener : filterEntityChangeListeners) {
listener.filterEntityChanged(filterEntity);
}
updateWindowCaption();
}
/**
 * Snapshots the current filter entity's identifying fields and XML into
 * {@code initialFilterEntity}. The snapshot is later compared against the
 * live entity to detect modifications (see {@code isFilterModified()}).
 */
protected void saveInitialFilterState() {
    FilterEntity snapshot = metadata.create(FilterEntity.class);
    snapshot.setXml(filterEntity.getXml());
    snapshot.setUser(filterEntity.getUser());
    snapshot.setCode(filterEntity.getCode());
    snapshot.setName(filterEntity.getName());
    initialFilterEntity = snapshot;
}
/**
 * Sets conditionsLayout visibility and refreshes the controls layout style,
 * which draws or removes the border adjacent to the conditions block.
 */
protected void setConditionsLayoutVisible(boolean visible) {
    conditionsLayout.setVisible(visible);
    String controlsStyle = getControlsLayoutStyleName();
    controlsLayout.setStyleName(controlsStyle);
}
/**
 * Recomputes the enabled state of every filter action from the current
 * filter entity's properties (global/user ownership, folder/set status,
 * coded vs ad-hoc) and the user's permissions. Also refreshes table
 * actions when the helper supports them.
 */
protected void setFilterActionsEnabled() {
boolean isGlobal = filterEntity.getUser() == null;
boolean userCanEditGlobalFilter = uerCanEditGlobalFilter();
boolean userCanEditFilters = userCanEditFilers();
boolean filterEditable = isEditable();
boolean userCanEditGlobalAppFolder = userSessionSource.getUserSession().isSpecificPermitted(GLOBAL_APP_FOLDERS_PERMISSION);
boolean createdByCurrentUser = userSessionSource.getUserSession().getCurrentOrSubstitutedUser().equals(filterEntity.getUser());
// coded filters are defined in the project sources and cannot be removed
boolean hasCode = !Strings.isNullOrEmpty(filterEntity.getCode());
boolean isFolder = filterEntity.getFolder() != null;
boolean isSearchFolder = isFolder && (filterEntity.getFolder() instanceof SearchFolder);
boolean isAppFolder = isFolder && (filterEntity.getFolder() instanceof AppFolder);
boolean isSet = BooleanUtils.isTrue(filterEntity.getIsSet());
boolean isDefault = BooleanUtils.isTrue(filterEntity.getIsDefault());
boolean isAdHocFilter = filterEntity == adHocFilter;
boolean editActionEnabled = !isSet && filterEditable && userCanEditFilters && (!isGlobal || userCanEditGlobalFilter);
// saving requires edit rights plus ownership (or global-edit permission)
filterSavingPossible = editActionEnabled &&
((isGlobal && userCanEditGlobalFilter) || (!isGlobal && createdByCurrentUser)) &&
((!isFolder && !hasCode) || isSearchFolder || (isAppFolder && userCanEditGlobalAppFolder));
boolean saveActionEnabled = filterSavingPossible && (isFolder || isFilterModified());
boolean saveAsActionEnabled = !isSet && filterEditable && userCanEditFilters;
boolean removeActionEnabled = !isSet &&
(!hasCode && !isFolder) &&
((isGlobal && userCanEditGlobalFilter) || (!isGlobal && createdByCurrentUser)) &&
!isAdHocFilter && filterEditable && userCanEditFilters;
boolean makeDefaultActionEnabled = !isDefault && !isFolder && !isSet && !isAdHocFilter && (!isGlobal || userCanEditGlobalFilter);
// pin only makes sense when something non-trivial was actually applied
boolean pinAppliedActionEnabled = lastAppliedFilter != null
&& !(lastAppliedFilter.getFilterEntity() == adHocFilter && lastAppliedFilter.getConditions().getRoots().size() == 0);
boolean saveAsSearchFolderActionEnabled = folderActionsEnabled && !isFolder && !hasCode;
boolean saveAsAppFolderActionEnabled = folderActionsEnabled && !isFolder && !hasCode && userCanEditGlobalAppFolder;
saveAction.setEnabled(saveActionEnabled);
saveAsAction.setEnabled(saveAsActionEnabled);
editAction.setEnabled(editActionEnabled);
removeAction.setEnabled(removeActionEnabled);
makeDefaultAction.setEnabled(makeDefaultActionEnabled);
pinAppliedAction.setEnabled(pinAppliedActionEnabled);
saveAsSearchFolderAction.setEnabled(saveAsSearchFolderActionEnabled);
saveAsAppFolderAction.setEnabled(saveAsAppFolderActionEnabled);
if (filterHelper.isTableActionsEnabled()) {
fillTableActions();
}
}
/**
 * Toggles action visibility: folder actions follow {@code folderActionsEnabled},
 * edit-family actions follow the {@code editable} flag.
 */
protected void setFilterActionsVisible() {
    boolean foldersShown = folderActionsEnabled;
    saveAsSearchFolderAction.setVisible(foldersShown);
    saveAsAppFolderAction.setVisible(foldersShown);
    boolean editingShown = editable;
    saveAction.setVisible(editingShown);
    saveAsAction.setVisible(editingShown);
    editAction.setVisible(editingShown);
    removeAction.setVisible(editingShown);
}
/** Instantiates all filter-related actions and marks them as created. */
protected void createFilterActions() {
    saveAction = new SaveAction();
    saveAsAction = new SaveAsAction();
    editAction = new EditAction();
    removeAction = new RemoveAction();
    makeDefaultAction = new MakeDefaultAction();
    pinAppliedAction = new PinAppliedAction();
    // constructor flag: true = app folder, false = search folder
    saveAsAppFolderAction = new SaveAsFolderAction(true);
    saveAsSearchFolderAction = new SaveAsFolderAction(false);
    filterActionsCreated = true;
}
/**
 * @return true if the current session grants the specific permission to
 *         edit global (shared) filters.
 *         NOTE(review): the method name contains a typo ("uer") but is
 *         kept unchanged for compatibility with subclasses/callers.
 */
protected boolean uerCanEditGlobalFilter() {
    boolean permitted = userSessionSource.getUserSession()
            .isSpecificPermitted(GLOBAL_FILTER_PERMISSION);
    return permitted;
}
/**
 * @return true if the current user holds the filter-edit specific permission.
 *         NOTE(review): the method name contains a typo ("Filers") but is
 *         kept unchanged for compatibility with subclasses/callers.
 */
protected boolean userCanEditFilers() {
    boolean permitted = security.isSpecificPermitted(FILTER_EDIT_PERMISSION);
    return permitted;
}
/**
 * Persists the current filter entity. Plain filters are committed through
 * the data service (replacing the local instance with the saved one while
 * preserving the non-persistent default/apply-default flags); folder-backed
 * filters are saved via their folder instead. Afterwards the initial state
 * snapshot and action/indicator states are refreshed.
 */
protected void saveFilterEntity() {
// these flags are not persisted with the entity — carry them over manually
Boolean isDefault = filterEntity.getIsDefault();
Boolean applyDefault = filterEntity.getApplyDefault();
if (filterEntity.getFolder() == null) {
CommitContext ctx = new CommitContext(Collections.singletonList(filterEntity));
Set<Entity> result = dataService.commit(ctx);
FilterEntity savedFilterEntity = (FilterEntity) result.iterator().next();
filterEntities.remove(filterEntity);
filterEntity = savedFilterEntity;
filterEntities.add(filterEntity);
filterEntity.setApplyDefault(applyDefault);
filterEntity.setIsDefault(isDefault);
} else {
// folder-backed filter: name/XML live on the folder
filterEntity.getFolder().setName(filterEntity.getName());
filterEntity.getFolder().setFilterXml(filterEntity.getXml());
AbstractSearchFolder folder = saveFolder(filterEntity.getFolder());
filterEntity.setFolder(folder);
}
saveInitialFilterState();
setFilterActionsEnabled();
updateFilterModifiedIndicator();
}
/**
 * Persists the given folder, delegating to the platform-specific helper.
 *
 * @return the saved folder instance, or null if the helper declined to save
 */
@Nullable
protected AbstractSearchFolder saveFolder(AbstractSearchFolder folder) {
return filterHelper.saveFolder(folder);
}
/**
 * Opens the folder edit window to save the current filter as a search
 * folder or an application folder. The folder receives the filter's
 * (localized) name, component id and the conditions serialized with their
 * current values; for search folders an owner is assigned as well.
 *
 * @param isAppFolder true to create an AppFolder, false for a SearchFolder
 */
protected void saveAsFolder(boolean isAppFolder) {
    final AbstractSearchFolder folder = isAppFolder
            ? metadata.create(AppFolder.class)
            : metadata.create(SearchFolder.class);
    // Coded (system) filters use their localized message as the name;
    // ad-hoc filters start with an empty name the user fills in.
    if (filterEntity.getCode() == null) {
        String folderName = filterEntity != adHocFilter ? filterEntity.getName() : "";
        folder.setName(folderName);
        folder.setTabName(folderName);
    } else {
        String name = messages.getMainMessage(filterEntity.getCode());
        folder.setName(name);
        folder.setTabName(name);
    }
    // Serialize conditions with their current VALUES (not defaults).
    String newXml = filterParser.getXml(conditions, Param.ValueProperty.VALUE);
    folder.setFilterComponentId(filterEntity.getComponentId());
    folder.setFilterXml(newXml);
    if (!isAppFolder) {
        // Global-filter editors keep the original owner (possibly null =
        // global); otherwise the current/substituted user owns the folder.
        if (uerCanEditGlobalFilter())
            ((SearchFolder) folder).setUser(filterEntity.getUser());
        else
            ((SearchFolder) folder).setUser(userSessionSource.getUserSession().getCurrentOrSubstitutedUser());
    }
    // Pass presentations of the target component (if it uses them) to the editor.
    // (instanceof already rejects null, so no separate null check is needed)
    Presentations presentations = null;
    if (applyTo instanceof Component.HasPresentations) {
        Component.HasPresentations presentationsOwner = (Component.HasPresentations) applyTo;
        if (presentationsOwner.isUsePresentations()) {
            presentations = presentationsOwner.getPresentations();
        }
    }
    // Both folder kinds are committed identically, so one handler suffices
    // (the original duplicated an identical lambda in both if/else branches).
    Runnable commitHandler = () -> {
        AbstractSearchFolder savedFolder = saveFolder(folder);
        filterEntity.setFolder(savedFolder);
    };
    filterHelper.openFolderEditWindow(isAppFolder, folder, presentations, commitHandler);
}
/**
 * Removes all components from conditionsLayout and fills it with components for editing filter conditions
 *
 * @param conditionsFocusType where to set focus (first condition, last condition, no focus)
 */
protected void fillConditionsLayout(ConditionsFocusType conditionsFocusType) {
layout.setSpacing(false);
// getComponents() presumably returns a copy, making removal during
// iteration safe — TODO confirm against the Container implementation
for (Component component : conditionsLayout.getComponents()) {
conditionsLayout.remove(component);
}
paramEditComponentToFocus = null;
recursivelyCreateConditionsLayout(conditionsFocusType, false, conditions.getRootNodes(), conditionsLayout, 0);
// spacing only when there is actually something to separate
if (!conditionsLayout.getComponents().isEmpty()) layout.setSpacing(true);
}
/**
 * Recursively renders the conditions tree into a grid. Each simple
 * condition occupies a label cell plus a value cell (two grid columns per
 * logical column); group conditions get a group box spanning the whole row
 * and recurse one level deeper. Also records which param editor should be
 * focused, per {@code conditionsFocusType}.
 *
 * @param conditionsFocusType which editor (first/last/none) to focus
 * @param initialFocusSet     true if a focus target was already chosen by a caller
 * @param nodes               condition nodes to render at this level
 * @param parentContainer     container receiving the generated grid (may be null)
 * @param level               nesting depth; 0 = top level (affects controls border style)
 */
protected void recursivelyCreateConditionsLayout(ConditionsFocusType conditionsFocusType,
boolean initialFocusSet,
List<Node<AbstractCondition>> nodes,
Component.Container parentContainer,
int level) {
List<Node<AbstractCondition>> visibleConditionNodes = fetchVisibleNodes(nodes);
if (visibleConditionNodes.isEmpty()) {
// nothing to render: at top level remove the controls border as well
if (level == 0)
controlsLayout.setStyleName("filter-control-no-border");
return;
}
//note that this is not grid columns count, but number of conditions (label cell + value cell) in one row
int conditionsCount = getColumnsCount();
int row = 0;
int nextColumnStart = 0;
GridLayout grid = componentsFactory.createComponent(GridLayout.class);
grid.setColumns(conditionsCount * 2);
//set expand ratio only for cells with param edit components
for (int i = 0; i < conditionsCount; i++) {
grid.setColumnExpandRatio(i * 2 + 1, 1);
}
grid.setRows(1);
grid.setSpacing(true);
grid.setWidth("100%");
ParamEditor firstParamEditor = null;
ParamEditor lastParamEditor = null;
boolean currentFocusSet = initialFocusSet;
for (int i = 0; i < visibleConditionNodes.size(); i++) {
Node<AbstractCondition> node = visibleConditionNodes.get(i);
final AbstractCondition condition = node.getData();
BoxLayout labelAndOperationCellContent = null;
Component paramEditComponentCellContent = null;
Component groupCellContent = null;
if (condition.isGroup()) {
groupCellContent = createGroupConditionBox(condition, node, conditionsFocusType, currentFocusSet, level);
level++;
} else {
if (condition.getParam().getJavaClass() != null) {
// parameterized condition: label/operation cell + edit-component cell
ParamEditor paramEditor = createParamEditor(condition);
if (firstParamEditor == null) firstParamEditor = paramEditor;
lastParamEditor = paramEditor;
currentFocusSet = true;
labelAndOperationCellContent = paramEditor.getLabelAndOperationLayout();
paramEditComponentCellContent = paramEditor.getParamEditComponentLayout();
} else {
// parameterless condition: an empty placeholder layout
BoxLayout paramLayout = componentsFactory.createComponent(HBoxLayout.class);
paramLayout.setSpacing(true);
paramLayout.setMargin(false);
labelAndOperationCellContent = paramLayout;
}
}
//groupBox for group conditions must occupy the whole line in conditions grid
Integer conditionWidth = condition.isGroup() ? (Integer)conditionsCount : condition.getWidth();
int nextColumnEnd = nextColumnStart + conditionWidth - 1;
if (nextColumnEnd >= conditionsCount) {
//complete current row in grid with gaps if next cell will be on next row
completeGridRowWithGaps(grid, row, nextColumnStart, false);
//place cell to next row in grid
nextColumnStart = 0;
nextColumnEnd = conditionWidth - 1;
row++;
grid.setRows(row + 1);
}
if (groupCellContent != null) {
grid.add(groupCellContent, nextColumnStart * 2, row, nextColumnEnd * 2 + 1, row);
}
if (labelAndOperationCellContent != null) {
grid.add(labelAndOperationCellContent, nextColumnStart * 2, row, nextColumnStart * 2, row);
if (nextColumnStart != 0)
labelAndOperationCellContent.setMargin(false, false, false, true);
}
if (paramEditComponentCellContent != null) {
paramEditComponentCellContent.setAlignment(Component.Alignment.MIDDLE_LEFT);
grid.add(paramEditComponentCellContent, nextColumnStart * 2 + 1, row, nextColumnEnd * 2 + 1, row);
}
nextColumnStart = nextColumnEnd + 1;
//add next row if necessary
if (i < visibleConditionNodes.size() - 1) {
if (nextColumnStart >= conditionsCount) {
nextColumnStart = 0;
row++;
grid.setRows(row + 1);
}
}
}
if (!initialFocusSet) {
switch (conditionsFocusType) {
case FIRST:
if (firstParamEditor != null)
paramEditComponentToFocus = firstParamEditor;
break;
case LAST:
if (lastParamEditor != null)
paramEditComponentToFocus = lastParamEditor;
}
}
//complete last row in grid with gaps
completeGridRowWithGaps(grid, row, nextColumnStart, true);
if (parentContainer != null) {
parentContainer.add(grid);
}
if (level == 0)
controlsLayout.setStyleName(getControlsLayoutStyleName());
}
/**
 * Filters out nodes whose condition is hidden.
 *
 * @param nodes condition nodes to inspect
 * @return the visible nodes, preserving the original order
 */
protected List<Node<AbstractCondition>> fetchVisibleNodes(List<Node<AbstractCondition>> nodes) {
    List<Node<AbstractCondition>> visible = new ArrayList<>();
    for (int i = 0; i < nodes.size(); i++) {
        Node<AbstractCondition> node = nodes.get(i);
        if (!node.getData().getHidden()) {
            visible.add(node);
        }
    }
    return visible;
}
/**
 * Builds a captioned group box for a group (AND/OR) condition and
 * recursively renders its child conditions inside it.
 */
protected Component createGroupConditionBox(AbstractCondition condition, Node<AbstractCondition> node, ConditionsFocusType conditionsFocusType, boolean focusSet, int level) {
    GroupBoxLayout groupBox = componentsFactory.createComponent(GroupBoxLayout.class);
    groupBox.setCaption(condition.getLocCaption());
    groupBox.setWidth("100%");
    List<Node<AbstractCondition>> children = node.getChildren();
    if (!children.isEmpty()) {
        recursivelyCreateConditionsLayout(conditionsFocusType, focusSet, children, groupBox, level);
    }
    return groupBox;
}
/**
 * Creates a parameter editor for a simple condition, with an inline remove
 * action for conditions that were added at runtime (conditions that belong
 * to the saved filter definition cannot be removed inline).
 */
protected ParamEditor createParamEditor(final AbstractCondition condition) {
    final boolean removalAllowed = !initialConditions.contains(condition);
    ParamEditor editor = new ParamEditor(condition, removalAllowed, isEditable() && userCanEditFilers());
    AbstractAction removeBtnAction = new AbstractAction("") {
        @Override
        public void actionPerform(Component component) {
            conditions.removeCondition(condition);
            fillConditionsLayout(ConditionsFocusType.NONE);
            updateFilterModifiedIndicator();
        }
    };
    removeBtnAction.setVisible(removalAllowed);
    editor.setRemoveButtonAction(removeBtnAction);
    return editor;
}
/**
 * Adds empty 100%-wide label components to the remaining cells of a grid
 * row. Without these gaps, a lone element (width = 1) would stretch to the
 * full grid width instead of occupying its single column.
 *
 * @param lastRow unused by this implementation; kept for subclass compatibility
 */
protected void completeGridRowWithGaps(GridLayout grid, int row, int startColumn, boolean lastRow) {
    int totalColumns = grid.getColumns();
    for (int col = startColumn * 2; col < totalColumns; col++) {
        Component gap = componentsFactory.createComponent(Label.class);
        gap.setWidth("100%");
        grid.add(gap, col, row);
    }
}
/**
 * @return the controls layout style: a bordered style facing the conditions
 *         block when conditions are shown, otherwise a borderless style
 */
protected String getControlsLayoutStyleName() {
    if (conditionsLayout.isVisible() && !conditionsLayout.getComponents().isEmpty()) {
        if (CONDITIONS_LOCATION_TOP.equals(conditionsLocation)) {
            return "filter-control-with-top-border";
        }
        return "filter-control-with-bottom-border";
    }
    return "filter-control-no-border";
}
/**
 * @return true if the filter's name, code, owner or serialized conditions
 *         differ from the snapshot taken in {@code saveInitialFilterState()}
 */
protected boolean isFilterModified() {
boolean filterPropertiesModified =
!Objects.equals(initialFilterEntity.getName(), filterEntity.getName()) ||
!Objects.equals(initialFilterEntity.getCode(), filterEntity.getCode()) ||
!Objects.equals(initialFilterEntity.getUser(), filterEntity.getUser());
if (filterPropertiesModified) return true;
// folder-backed filters serialize condition VALUES; plain filters serialize defaults
String filterXml = filterEntity.getFolder() == null ? filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE)
: filterParser.getXml(conditions, Param.ValueProperty.VALUE);
return !StringUtils.equals(filterXml, initialFilterEntity.getXml());
}
/**
 * Updates the save action's enabled state and appends or removes the
 * "modified" indicator symbol on the filter group box caption.
 */
protected void updateFilterModifiedIndicator() {
    boolean filterModified = isFilterModified();
    saveAction.setEnabled(filterSavingPossible && filterModified);
    String currentCaption = groupBoxLayout.getCaption();
    if (currentCaption == null) {
        // no caption to decorate (guards against NPE on endsWith)
        return;
    }
    boolean marked = currentCaption.endsWith(MODIFIED_INDICATOR_SYMBOL);
    if (filterModified && !marked) {
        groupBoxLayout.setCaption(currentCaption + MODIFIED_INDICATOR_SYMBOL);
    } else if (!filterModified && marked) {
        // Strip the whole indicator symbol; the original code removed only
        // one character, which is wrong if the symbol is longer than 1 char.
        groupBoxLayout.setCaption(currentCaption.substring(0,
                currentCaption.length() - MODIFIED_INDICATOR_SYMBOL.length()));
    }
}
/**
 * Load filter entities from database and saves them in {@code filterEntities} collection.
 * Loads filters registered for this filter component path that are either
 * global (no user) or owned by the current/substituted user.
 */
protected void loadFilterEntities() {
LoadContext<FilterEntity> ctx = LoadContext.create(FilterEntity.class);
ctx.setView("app");
ctx.setQueryString("select f from sec$Filter f left join f.user u " +
"where f.componentId = :component and (u.id = :userId or u is null) order by f.name")
.setParameter("component", ComponentsHelper.getFilterComponentPath(filter))
.setParameter("userId", userSessionSource.getUserSession().getCurrentOrSubstitutedUser().getId());
// copy into a mutable list — callers add/remove/sort entries
filterEntities = new ArrayList<>(dataService.loadList(ctx));
}
/**
 * Determines the default filter: first looks for a window parameter named
 * after this filter component containing a filter code; otherwise reads the
 * saved default-filter id from the window's settings.
 *
 * @return the default filter, or null if none is configured
 */
protected FilterEntity getDefaultFilter(List<FilterEntity> filters) {
Window window = ComponentsHelper.getWindow(filter);
// First check if there is parameter with name equal to this filter component id, containing a filter code to apply
Map<String, Object> params = filter.getFrame().getContext().getParams();
String code = (String) params.get(filter.getId());
if (!StringUtils.isBlank(code)) {
for (FilterEntity filter : filters) {
if (code.equals(filter.getCode()))
return filter;
}
}
// No 'filter' parameter found, load default filter
SettingsImpl settings = new SettingsImpl(window.getId());
String componentPath = ComponentsHelper.getFilterComponentPath(filter);
String[] strings = ValuePathHelper.parse(componentPath);
// drop the leading window-id segment of the path
String name = ValuePathHelper.format((String[]) ArrayUtils.subarray(strings, 1, strings.length));
Element e = settings.get(name).element("defaultFilter");
if (e != null) {
String defIdStr = e.attributeValue("id");
Boolean applyDefault = Boolean.valueOf(e.attributeValue("applyDefault"));
if (!StringUtils.isBlank(defIdStr)) {
UUID defaultId = null;
try {
defaultId = UUID.fromString(defIdStr);
} catch (IllegalArgumentException ex) {
// malformed id in settings — treat as "no default"
}
if (defaultId != null) {
for (FilterEntity filter : filters) {
if (defaultId.equals(filter.getId())) {
filter.setIsDefault(true);
filter.setApplyDefault(applyDefault);
return filter;
}
}
}
}
}
return null;
}
/** Rebuilds the filters popup button's action list from scratch. */
protected void initFiltersPopupButton() {
filtersPopupButton.removeAllActions();
addFiltersPopupActions();
}
/**
 * Populates the filters popup: a reset action, then up to the configured
 * number of saved filters sorted by caption, and a "show more" action when
 * the list was truncated.
 */
protected void addFiltersPopupActions() {
    addResetFilterAction(filtersPopupButton);
    Collections.sort(filterEntities,
            (f1, f2) -> getFilterCaption(f1).compareTo(getFilterCaption(f2)));
    int popupListSize = clientConfig.getGenericFilterPopupListSize();
    int added = 0;
    Iterator<FilterEntity> it = filterEntities.iterator();
    while (added < popupListSize && it.hasNext()) {
        addSetFilterEntityAction(filtersPopupButton, it.next());
        added++;
    }
    if (filterEntities.size() > popupListSize) {
        addShowMoreFilterEntitiesAction(filtersPopupButton);
    }
}
/**
 * Adds a popup action that activates the given filter entity (no-op when
 * it is already the current one). The caption is resolved lazily so it
 * reflects localization and folder prefixes.
 */
protected void addSetFilterEntityAction(PopupButton popupButton, final FilterEntity fe) {
popupButton.addAction(new AbstractAction("setEntity" + fe.getId()) {
@Override
public void actionPerform(Component component) {
if (fe != filterEntity) {
setFilterEntity(fe);
}
}
@Override
public String getCaption() {
return getFilterCaption(fe);
}
});
}
/**
 * Adds the "show more" popup action, which opens a dialog listing all
 * filter entities and activates the one the user selects.
 */
protected void addShowMoreFilterEntitiesAction(PopupButton popupButton) {
popupButton.addAction(new AbstractAction("showMoreFilterEntities") {
@Override
public void actionPerform(Component component) {
WindowInfo windowInfo = windowConfig.getWindowInfo("filterSelect");
FilterSelectWindow window = (FilterSelectWindow) windowManager.openWindow(windowInfo,
WindowManager.OpenType.DIALOG,
ParamsMap.of("filterEntities", filterEntities));
window.addCloseListener(actionId -> {
// apply the selection only if the dialog was committed
if (Window.COMMIT_ACTION_ID.equals(actionId)) {
FilterEntity selectedEntity = window.getFilterEntity();
setFilterEntity(selectedEntity);
}
});
}
@Override
public String getCaption() {
return formatMainMessage("filter.showMore", filterEntities.size());
}
});
}
/**
 * Adds the "reset" popup action, which discards all current conditions and
 * switches back to the empty ad-hoc filter.
 */
protected void addResetFilterAction(PopupButton popupButton) {
popupButton.addAction(new AbstractAction("resetFilter") {
@Override
public void actionPerform(Component component) {
conditions = new ConditionsTree();
setFilterEntity(adHocFilter);
}
@Override
public String getCaption() {
return getMainMessage("filter.resetFilter");
}
});
}
/**
 * Rebuilds the filters lookup field: the ad-hoc filter first, then all
 * saved filters, with captions (the default filter gets a suffix).
 * The value-change listener is suppressed while the value is synced.
 */
protected void initFiltersLookup() {
Map<Object, String> captionsMap = new LinkedHashMap<>();
for (FilterEntity entity : filterEntities) {
String caption = getFilterCaption(entity);
if (entity.getIsDefault()) {
caption += " " + getMainMessage("filter.default");
}
captionsMap.put(entity, caption);
}
captionsMap.put(adHocFilter, getFilterCaption(adHocFilter));
filtersLookupListenerEnabled = false;
//set null to remove previous value from lookup options list
filtersLookup.setValue(null);
List<Object> optionsList = new ArrayList<>();
optionsList.add(adHocFilter);
optionsList.addAll(filterEntities);
filtersLookup.setOptionsList(optionsList);
filterHelper.setLookupCaptions(filtersLookup, captionsMap);
filtersLookup.setValue(filterEntity);
filtersLookupListenerEnabled = true;
}
/**
 * Refreshes whichever filter-selection widget is displayed — the popup
 * button, the lookup field, or both.
 */
protected void initFilterSelectComponents() {
if (filtersPopupDisplayed) {
initFiltersPopupButton();
}
if (filtersLookupDisplayed) {
initFiltersLookup();
}
}
/**
 * Creates the transient ad-hoc ("empty") filter: empty conditions XML,
 * bound to this filter component and owned by the current/substituted user.
 */
protected void initAdHocFilter() {
    FilterEntity created = metadata.create(FilterEntity.class);
    created.setXml(filterParser.getXml(new ConditionsTree(), Param.ValueProperty.VALUE));
    created.setComponentId(ComponentsHelper.getFilterComponentPath(filter));
    created.setUser(userSessionSource.getUserSession().getCurrentOrSubstitutedUser());
    created.setName(getMainMessage("filter.adHocFilter"));
    adHocFilter = created;
}
/**
 * Appends a row for the last applied ("pinned") filter to the applied
 * filters area: a text label plus a remove button. The previous row's
 * remove button is removed so only the most recent pin can be undone.
 */
protected void addAppliedFilter() {
if (lastAppliedFilter == null)
return;
// don't add a duplicate row for the same applied filter
if (!appliedFilters.isEmpty() && appliedFilters.getLast().filter.equals(lastAppliedFilter))
return;
// insert the applied-filters area adjacent to the conditions block
this.layout.add(appliedFiltersLayout, CONDITIONS_LOCATION_TOP.equals(conditionsLocation) ? 0 : 1);
BoxLayout layout = componentsFactory.createComponent(HBoxLayout.class);
layout.setSpacing(true);
if (!appliedFilters.isEmpty()) {
// only the last pinned filter keeps its remove button
AppliedFilterHolder holder = appliedFilters.getLast();
holder.layout.remove(holder.button);
}
Label label = componentsFactory.createComponent(Label.class);
label.setValue(lastAppliedFilter.getText());
layout.add(label);
label.setAlignment(Component.Alignment.MIDDLE_LEFT);
LinkButton button = componentsFactory.createComponent(LinkButton.class);
button.setIcon("icons/item-remove.png");
button.setAction(new AbstractAction("") {
@Override
public void actionPerform(Component component) {
removeAppliedFilter();
}
});
layout.add(button);
addAppliedFilterLayoutHook(layout);
appliedFiltersLayout.add(layout);
appliedFilters.add(new AppliedFilterHolder(lastAppliedFilter, layout, button));
}
/**
 * Extension hook invoked after an applied-filter row is built; subclasses may add
 * extra components to the row. Default implementation is a no-op.
 */
protected void addAppliedFilterLayoutHook(Component.Container layout) {
    //nothing
}
/**
 * Removes applied (pinned) filters. With a single applied filter it is removed
 * immediately; with several, the user is asked for confirmation and all of them
 * are removed at once.
 */
protected void removeAppliedFilter() {
    if (!appliedFilters.isEmpty()) {
        if (appliedFilters.size() == 1) {
            AppliedFilterHolder holder = appliedFilters.removeLast();
            appliedFiltersLayout.remove(holder.layout);
            ((CollectionDatasource.SupportsApplyToSelected) datasource).unpinAllQuery();
            this.layout.remove(appliedFiltersLayout);
        } else {
            windowManager.showOptionDialog(messages.getMainMessage("removeApplied.title"),
                    messages.getMainMessage("removeApplied.message"), Frame.MessageType.WARNING,
                    new Action[]{
                            new DialogAction(Type.YES) {
                                @Override
                                public void actionPerform(Component component) {
                                    for (AppliedFilterHolder holder : appliedFilters) {
                                        appliedFiltersLayout.remove(holder.layout);
                                    }
                                    // Fix: remove the container once, not once per applied
                                    // filter — the original removed it inside the loop.
                                    FilterDelegateImpl.this.layout.remove(appliedFiltersLayout);
                                    appliedFilters.clear();
                                    ((CollectionDatasource.SupportsApplyToSelected) datasource).unpinAllQuery();
                                }
                            },
                            new DialogAction(Type.NO, Status.PRIMARY)
                    });
        }
    }
}
/**
 * Builds a display caption for a filter entity.
 * <p>
 * Precedence: instance name (or localized code if present); if the filter is linked
 * to a folder, the folder's tab name or name overrides it, prefixed with a
 * "set" or "folder" marker depending on the filter kind.
 *
 * @param filterEntity filter to describe; may be null, in which case "" is returned
 * @return human-readable caption, never null
 */
protected String getFilterCaption(FilterEntity filterEntity) {
    String name;
    if (filterEntity != null) {
        if (filterEntity.getCode() == null)
            name = InstanceUtils.getInstanceName(filterEntity);
        else {
            // coded (system) filters are localized through the main message pack
            name = messages.getMainMessage(filterEntity.getCode());
        }
        AbstractSearchFolder folder = filterEntity.getFolder();
        if (folder != null) {
            // folder naming wins over the filter's own name
            if (!StringUtils.isBlank(folder.getTabName()))
                name = messages.getMainMessage(folder.getTabName());
            else if (!StringUtils.isBlank(folder.getName())) {
                name = messages.getMainMessage(folder.getName());
            }
            if (BooleanUtils.isTrue(filterEntity.getIsSet()))
                name = getMainMessage("filter.setPrefix") + " " + name;
            else
                name = getMainMessage("filter.folderPrefix") + " " + name;
        }
    } else
        name = "";
    return name;
}
/** Formats a message from the main message pack with the given parameters. */
protected String formatMainMessage(String key, Object... params) {
    return messages.formatMainMessage(key, params);
}
/** Shortcut for retrieving a localized message from the main message pack. */
protected String getMainMessage(String key) {
    return messages.getMainMessage(key);
}
/** Returns the root container of the filter component (the surrounding group box). */
@Override
public Component.Container getLayout() {
    return groupBoxLayout;
}
/**
 * Binds the filter to a datasource and captures the datasource's original query filter
 * so later applied conditions can be merged with it.
 * <p>
 * If manual apply is required, a denying clause is installed so the datasource stays
 * empty until the user explicitly applies the filter. Max-results UI is disabled for
 * lazy and hierarchical datasources, and the FTS switch is removed when full-text
 * search is not available for the entity.
 */
@Override
public void setDatasource(CollectionDatasource datasource) {
    this.datasource = datasource;
    this.dsQueryFilter = datasource.getQueryFilter();
    if (getResultingManualApplyRequired()) {
        // set initial denying condition to get empty datasource before explicit filter applying
        QueryFilter queryFilter = new QueryFilter(new DenyingClause(), datasource.getMetaClass().getName());
        if (dsQueryFilter != null) {
            queryFilter = QueryFilter.merge(dsQueryFilter, queryFilter);
        }
        datasource.setQueryFilter(queryFilter);
    }
    if (datasource instanceof CollectionDatasource.Lazy || datasource instanceof HierarchicalDatasource) {
        setUseMaxResults(false);
    } else if (useMaxResults) {
        initMaxResults();
    }
    if (!isFtsModeEnabled()) {
        controlsLayout.remove(ftsSwitch);
    }
}
/** Returns the datasource this filter is bound to, or null if not yet set. */
@Override
public CollectionDatasource getDatasource() {
    return datasource;
}
/**
 * Determines the effective max-results value (explicit setting, datasource value, or
 * the persistence manager's fetch-UI default), reflects it in the max-results field
 * (adding it to the lookup options if missing), and pushes it to the datasource.
 */
protected void initMaxResults() {
    int maxResults;
    if (this.maxResults != -1) {
        // value set explicitly via setMaxResults() wins
        maxResults = this.maxResults;
    } else {
        maxResults = datasource.getMaxResults();
    }
    // fall back to the per-entity fetch-UI default when unset or equal to the hard cap
    if (maxResults == 0 || maxResults == persistenceManager.getMaxFetchUI(datasource.getMetaClass().getName())) {
        maxResults = persistenceManager.getFetchUI(datasource.getMetaClass().getName());
    }
    if (maxResultsAddedToLayout) {
        if (!textMaxResults) {
            // ensure the current value is selectable in the lookup, keeping options sorted
            List<Integer> optionsList = ((LookupField) maxResultsField).getOptionsList();
            if (!optionsList.contains(maxResults)) {
                ArrayList<Integer> newOptions = new ArrayList<>(optionsList);
                newOptions.add(maxResults);
                Collections.sort(newOptions);
                ((LookupField) maxResultsField).setOptionsList(newOptions);
            }
        }
        maxResultsField.setValue(maxResults);
    }
    datasource.setMaxResults(maxResults);
}
/**
 * Full-text search mode is available only when FTS is globally enabled, the FTS helper
 * bean is present, and the bound entity is indexed.
 */
protected boolean isFtsModeEnabled() {
    return ftsConfig.getEnabled()
            && ftsFilterHelper != null
            && datasource != null
            && ftsFilterHelper.isEntityIndexed(datasource.getMetaClass().getName());
}
/** Returns the explicitly configured max results value (-1 when not set). */
@Override
public int getMaxResults() {
    return maxResults;
}
/** Sets an explicit max results value and re-applies it to the field and datasource. */
@Override
public void setMaxResults(int maxResults) {
    this.maxResults = maxResults;
    initMaxResults();
}
/** Enables/disables the max-results UI and updates its visibility accordingly. */
@Override
public void setUseMaxResults(boolean useMaxResults) {
    this.useMaxResults = useMaxResults;
    if (maxResultsLayout != null)
        maxResultsLayout.setVisible(isMaxResultsLayoutVisible());
}
/** Max-results controls are shown only if enabled, permitted, and present in the layout. */
protected boolean isMaxResultsLayoutVisible() {
    return useMaxResults && security.isSpecificPermitted("cuba.gui.filter.maxResults") && maxResultsAddedToLayout;
}
/** Returns whether the max-results UI is enabled. */
@Override
public boolean getUseMaxResults() {
    return useMaxResults;
}
/**
 * Switches the max-results input between a free text field and a lookup field,
 * swapping the component in the layout only when the mode actually changes.
 */
@Override
public void setTextMaxResults(boolean textMaxResults) {
    boolean valueChanged = this.textMaxResults != textMaxResults;
    this.textMaxResults = textMaxResults;
    if (maxResultsAddedToLayout && valueChanged) {
        maxResultsLayout.remove(maxResultsField);
        maxResultsField = textMaxResults ? maxResultsTextField : maxResultsLookupField;
        maxResultsLayout.add(maxResultsField);
    }
}
/** Returns whether the max-results input is rendered as a text field. */
@Override
public boolean getTextMaxResults() {
    return textMaxResults;
}
/**
 * Applies the current filter conditions to the datasource and refreshes it.
 * <p>
 * Validates (when generic filter checking is on) that at least one condition is usable
 * and that all required conditions are filled; validation failures show a notification
 * unless the apply happens while opening a new window.
 *
 * @param isNewWindow true when invoked during window opening — suppresses user notifications
 * @return true if the filter was applied, false if validation failed
 */
@Override
public boolean apply(boolean isNewWindow) {
    if (clientConfig.getGenericFilterChecking()) {
        if (filterEntity != null && conditions.getRoots().size() > 0) {
            boolean haveCorrectCondition = hasCorrectCondition();
            if (!haveCorrectCondition) {
                if (!isNewWindow) {
                    windowManager.showNotification(messages.getMainMessage("filter.emptyConditions"),
                            Frame.NotificationType.HUMANIZED);
                }
                return false;
            }
        }
    }
    // remember what was applied so it can be pinned later
    if (filterEntity != null) {
        lastAppliedFilter = new AppliedFilter(filterEntity, conditions);
    } else {
        lastAppliedFilter = null;
    }
    if (filterEntity != null) {
        boolean haveRequiredConditions = haveFilledRequiredConditions();
        if (!haveRequiredConditions) {
            if (!isNewWindow) {
                windowManager.showNotification(messages.getMainMessage("filter.emptyRequiredConditions"),
                        Frame.NotificationType.HUMANIZED);
            }
            return false;
        }
        setFilterActionsEnabled();
    }
    applyDatasourceFilter();
    initDatasourceMaxResults();
    refreshDatasource();
    // a generic apply invalidates any FTS hit tooltips left on the target table
    if ((applyTo != null) && (Table.class.isAssignableFrom(applyTo.getClass()))) {
        filterHelper.removeTableFtsTooltips((Table) applyTo);
    }
    return true;
}
/**
 * Applies the filter in full-text-search mode: runs the FTS query for the entered
 * search term, replaces the conditions tree with a single FTS condition referencing
 * the search result key, decorates the target table with hit tooltips, and refreshes
 * the datasource with the session/query parameters.
 */
protected void applyFts() {
    if (ftsFilterHelper == null)
        return;
    String searchTerm = ftsSearchCriteriaField.getValue();
    if (Strings.isNullOrEmpty(searchTerm) && clientConfig.getGenericFilterChecking()) {
        windowManager.showNotification(getMainMessage("filter.fillSearchCondition"), Frame.NotificationType.TRAY);
        return;
    }
    Map<String, Object> params = new HashMap<>();
    if (!Strings.isNullOrEmpty(searchTerm)) {
        FtsFilterHelper.FtsSearchResult ftsSearchResult = ftsFilterHelper.search(searchTerm, datasource.getMetaClass().getName());
        int queryKey = ftsSearchResult.getQueryKey();
        // parameters consumed by the FTS condition's query
        params.put("sessionId", userSessionSource.getUserSession().getId());
        params.put("queryKey", queryKey);
        CustomCondition ftsCondition = ftsFilterHelper.createFtsCondition(queryKey);
        conditions = new ConditionsTree();
        conditions.getRootNodes().add(new Node<>(ftsCondition));
        if ((applyTo != null) && (Table.class.isAssignableFrom(applyTo.getClass()))) {
            filterHelper.initTableFtsTooltips((Table) applyTo, ftsSearchResult.getHitInfos());
        }
    } else if ((applyTo != null) && (Table.class.isAssignableFrom(applyTo.getClass()))) {
        // empty term: clear any previous hit tooltips
        filterHelper.initTableFtsTooltips((Table) applyTo, Collections.<UUID, String>emptyMap());
    }
    applyDatasourceFilter();
    initDatasourceMaxResults();
    datasource.refresh(params);
}
/**
 * The method is invoked before search. It sets datasource {@code maxResults} value based on the maxResults
 * field (if visible) or on maxFetchUI value.
 * Method also resets the datasource {@code firstResult} value
 *
 * @throws DevelopmentException if no datasource has been set on the filter
 */
protected void initDatasourceMaxResults() {
    if (datasource == null) {
        throw new DevelopmentException("Filter datasource is not set");
    }
    if (this.maxResults != -1) {
        // explicit programmatic setting wins over the UI field
        datasource.setMaxResults(maxResults);
    } else if (maxResultsAddedToLayout && useMaxResults) {
        Integer maxResults = maxResultsField.getValue();
        if (maxResults != null && maxResults > 0) {
            datasource.setMaxResults(maxResults);
        } else {
            // blank or non-positive field value falls back to the hard cap
            datasource.setMaxResults(persistenceManager.getMaxFetchUI(datasource.getMetaClass().getName()));
        }
    }
    // restart paging from the first page for the new search
    if (datasource instanceof CollectionDatasource.SupportsPaging) {
        ((CollectionDatasource.SupportsPaging) datasource).setFirstResult(0);
    }
}
/**
 * Serializes the current conditions tree to XML, converts it to a {@link QueryFilter},
 * merges it with the datasource's original query filter (captured in setDatasource),
 * and installs the result. With no conditions, the original query filter is restored.
 */
protected void applyDatasourceFilter() {
    if (datasource != null) {
        String currentFilterXml = filterParser.getXml(conditions, Param.ValueProperty.VALUE);
        if (!Strings.isNullOrEmpty(currentFilterXml)) {
            Element element = Dom4j.readDocument(currentFilterXml).getRootElement();
            QueryFilter queryFilter = new QueryFilter(element, datasource.getMetaClass().getName());
            if (dsQueryFilter != null) {
                queryFilter = QueryFilter.merge(dsQueryFilter, queryFilter);
            }
            datasource.setQueryFilter(queryFilter);
        } else {
            datasource.setQueryFilter(dsQueryFilter);
        }
    } else {
        log.warn("Unable to apply datasource filter with null datasource");
    }
}
/**
 * Verifies that every required condition has its parameter value filled in.
 *
 * @return false if some required condition has a parameter with a null value
 */
protected boolean haveFilledRequiredConditions() {
    for (AbstractCondition condition : conditions.toConditionsList()) {
        if (!condition.getRequired()) {
            continue;
        }
        Param param = condition.getParam();
        if (param != null && param.getValue() == null) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether at least one condition is usable for filtering: either its parameter
 * has a value, or it is a hidden custom condition (applied implicitly).
 */
protected boolean hasCorrectCondition() {
    for (AbstractCondition condition : conditions.toConditionsList()) {
        boolean paramFilled = condition.getParam() != null && condition.getParam().getValue() != null;
        boolean hiddenCustom = condition instanceof CustomCondition && condition.getHidden();
        if (paramFilled || hiddenCustom) {
            return true;
        }
    }
    return false;
}
/**
 * extenders should be able to modify the datasource
 * before it will be refreshed
 */
protected void refreshDatasource() {
    if (datasource instanceof CollectionDatasource.Suspendable) {
        // suspendable datasources skip the refresh while suspended
        ((CollectionDatasource.Suspendable) datasource).refreshIfNotSuspended();
        return;
    }
    datasource.refresh();
}
/** Returns the filter's caption (shown on the group box). */
@Override
public String getCaption() {
    return caption;
}
/** Sets the filter's caption and propagates it to the group box. */
@Override
public void setCaption(String caption) {
    this.caption = caption;
    groupBoxLayout.setCaption(caption);
}
/** Overrides the global manual-apply setting for this filter (null = use global default). */
@Override
public void setManualApplyRequired(Boolean manualApplyRequired) {
    this.manualApplyRequired = manualApplyRequired;
}
/** Returns the per-filter manual-apply override, or null when the global default applies. */
@Override
public Boolean getManualApplyRequired() {
    return manualApplyRequired;
}
/**
 * Resolves the effective manual-apply flag: the per-filter override when present,
 * otherwise the client configuration default.
 */
protected boolean getResultingManualApplyRequired() {
    if (manualApplyRequired != null) {
        return manualApplyRequired;
    }
    return clientConfig.getGenericFilterManualApplyRequired();
}
/**
 * Finds a condition parameter by component id and returns it wrapped as a component.
 * The id is matched against the last segment of the parameter name.
 *
 * @return a {@code ParamWrapper} for the matching condition, or null if none matches
 */
@Override
public Component getOwnComponent(String id) {
    for (AbstractCondition condition : conditions.toConditionsList()) {
        Param param = condition.getParam();
        if (param == null) {
            continue;
        }
        String paramName = param.getName();
        // match against the suffix after the last dot of the parameter name
        String componentName = paramName.substring(paramName.lastIndexOf('.') + 1);
        if (id.equals(componentName)) {
            return new ParamWrapper(condition, param);
        }
    }
    return null;
}
/**
 * Resolves a subcomponent by path. Only single-segment ids are supported because the
 * filter exposes exactly one level of subcomponents.
 *
 * @throws UnsupportedOperationException for multi-level paths
 */
@Override
@Nullable
public Component getComponent(String id) {
    String[] elements = ValuePathHelper.parse(id);
    if (elements.length != 1) {
        throw new UnsupportedOperationException("Filter contains only one level of subcomponents");
    }
    return getOwnComponent(id);
}
/**
 * Restores persisted UI settings: currently only the group box expanded state.
 *
 * @param element settings XML element for this filter
 */
@Override
public void applySettings(Element element) {
    Element groupBoxExpandedEl = element.element("groupBoxExpanded");
    if (groupBoxExpandedEl != null) {
        // parseBoolean avoids boxing a Boolean only to unbox it immediately
        boolean expanded = Boolean.parseBoolean(groupBoxExpandedEl.getText());
        groupBoxLayout.setExpanded(expanded);
    }
}
/**
 * Persists filter UI settings into the given XML element: the id and apply-on-open
 * flag of the default filter, and the group box expanded state.
 *
 * @param element settings XML element for this filter
 * @return true if any setting actually changed and needs to be saved
 */
@Override
public boolean saveSettings(Element element) {
    // primitive locals instead of boxed Booleans — no null states are needed here
    boolean changed = false;
    Element e = element.element("defaultFilter");
    if (e == null)
        e = element.addElement("defaultFilter");
    UUID defaultId = null;
    // nullable on purpose: FilterEntity.getApplyDefault() may return null
    Boolean applyDefault = false;
    for (FilterEntity filter : filterEntities) {
        if (BooleanUtils.isTrue(filter.getIsDefault())) {
            defaultId = filter.getId();
            applyDefault = filter.getApplyDefault();
            break;
        }
    }
    String newDef = defaultId != null ? defaultId.toString() : null;
    Attribute attr = e.attribute("id");
    String oldDef = attr != null ? attr.getValue() : null;
    if (!ObjectUtils.equals(oldDef, newDef)) {
        if (newDef == null && attr != null) {
            e.remove(attr);
        } else {
            if (attr == null)
                e.addAttribute("id", newDef);
            else
                attr.setValue(newDef);
        }
        changed = true;
    }
    boolean newApplyDef = BooleanUtils.isTrue(applyDefault);
    Attribute applyDefaultAttr = e.attribute("applyDefault");
    boolean oldApplyDef = applyDefaultAttr != null && Boolean.parseBoolean(applyDefaultAttr.getValue());
    if (oldApplyDef != newApplyDef) {
        if (applyDefaultAttr != null) {
            applyDefaultAttr.setValue(String.valueOf(newApplyDef));
        } else {
            e.addAttribute("applyDefault", String.valueOf(newApplyDef));
        }
        changed = true;
    }
    Element groupBoxExpandedEl = element.element("groupBoxExpanded");
    if (groupBoxExpandedEl == null)
        groupBoxExpandedEl = element.addElement("groupBoxExpanded");
    boolean oldGroupBoxExpandedValue = Boolean.parseBoolean(groupBoxExpandedEl.getText());
    boolean newGroupBoxExpandedValue = groupBoxLayout.isExpanded();
    if (oldGroupBoxExpandedValue != newGroupBoxExpandedValue) {
        groupBoxExpandedEl.setText(String.valueOf(newGroupBoxExpandedValue));
        changed = true;
    }
    return changed;
}
/** Returns the component (usually a Table) the filter results are applied to. */
@Override
public Component getApplyTo() {
    return applyTo;
}
/** Sets the component (usually a Table) the filter results are applied to. */
@Override
public void setApplyTo(Component applyTo) {
    this.applyTo = applyTo;
}
/** Enables/disables the save-as-folder actions for this filter. */
@Override
public void setFolderActionsEnabled(boolean folderActionsEnabled) {
    this.folderActionsEnabled = folderActionsEnabled;
}
/** Returns whether the save-as-folder actions are enabled. */
@Override
public boolean isFolderActionsEnabled() {
    return folderActionsEnabled;
}
/**
 * Adds actions of 'Entities Set' functionality to Table component
 * <p>
 * Removes any previously installed set-related actions/buttons, then, depending on
 * whether the current filter represents a set, installs either "add to / remove from
 * current set" or "add to set" actions with their toolbar buttons.
 */
protected void fillTableActions() {
    Table table;
    if ((applyTo != null) && (Table.class.isAssignableFrom(applyTo.getClass()))) {
        table = (Table) applyTo;
    } else {
        return;
    }
    ButtonsPanel buttons = table.getButtonsPanel();
    if (buttons == null) {
        return; // in lookup windows, there is no button panel
    }
    com.haulmont.cuba.gui.components.Button addToSetBtn = (Button) buttons.getComponent("addToSetBtn");
    com.haulmont.cuba.gui.components.Button addToCurSetBtn = (Button) buttons.getComponent("addToCurSetBtn");
    com.haulmont.cuba.gui.components.Button removeFromCurSetBtn = (Button) buttons.getComponent("removeFromCurSetBtn");
    Action addToSet = table.getAction("filter.addToSet");
    Action addToCurrSet = table.getAction("filter.addToCurSet");
    Action removeFromCurrSet = table.getAction("filter.removeFromCurSet");
    // reset: drop existing actions and hide their buttons before re-installing
    if (addToSet != null)
        table.removeAction(addToSet);
    if (addToSetBtn != null)
        addToSetBtn.setVisible(false);
    if (addToCurrSet != null) {
        table.removeAction(addToCurrSet);
    }
    if (addToCurSetBtn != null) {
        addToCurSetBtn.setVisible(false);
    }
    if (removeFromCurrSet != null) {
        table.removeAction(removeFromCurrSet);
    }
    if (removeFromCurSetBtn != null) {
        removeFromCurSetBtn.setVisible(false);
    }
    if ((filterEntity != null) && (BooleanUtils.isTrue(filterEntity.getIsSet()))) {
        // current filter is a set: offer add-to/remove-from current set
        addToCurrSet = new AddToCurrSetAction();
        if (addToCurSetBtn == null) {
            addToCurSetBtn = componentsFactory.createComponent(Button.class);
            addToCurSetBtn.setId("addToCurSetBtn");
            addToCurSetBtn.setCaption(getMainMessage("filter.addToCurSet"));
            buttons.add(addToCurSetBtn);
        } else {
            addToCurSetBtn.setVisible(true);
        }
        if (StringUtils.isEmpty(addToCurSetBtn.getIcon())) {
            addToCurSetBtn.setIcon("icons/join-to-set.png");
        }
        addToCurSetBtn.setAction(addToCurrSet);
        table.addAction(addToCurrSet);
        removeFromCurrSet = new RemoveFromSetAction(table);
        if (removeFromCurSetBtn == null) {
            removeFromCurSetBtn = componentsFactory.createComponent(Button.class);
            removeFromCurSetBtn.setId("removeFromCurSetBtn");
            removeFromCurSetBtn.setCaption(getMainMessage("filter.removeFromCurSet"));
            buttons.add(removeFromCurSetBtn);
        } else {
            removeFromCurSetBtn.setVisible(true);
        }
        if (StringUtils.isEmpty(removeFromCurSetBtn.getIcon())) {
            removeFromCurSetBtn.setIcon("icons/delete-from-set.png");
        }
        removeFromCurSetBtn.setAction(removeFromCurrSet);
        table.addAction(removeFromCurrSet);
    } else {
        // ordinary filter: offer creating a new set from the selection
        addToSet = new AddToSetAction(table);
        if (addToSetBtn == null) {
            addToSetBtn = componentsFactory.createComponent(Button.class);
            addToSetBtn.setId("addToSetBtn");
            addToSetBtn.setCaption(getMainMessage("filter.addToSet"));
            buttons.add(addToSetBtn);
        } else {
            addToSetBtn.setVisible(true);
        }
        if (StringUtils.isEmpty(addToSetBtn.getIcon())) {
            addToSetBtn.setIcon("icons/insert-to-set.png");
        }
        addToSetBtn.setAction(addToSet);
        table.addAction(addToSet);
    }
}
/**
 * Toggles whether the user may edit the filter; updates the add-condition button and,
 * when actions already exist, their enabled/visible state.
 */
@Override
public void setEditable(boolean editable) {
    this.editable = editable;
    addConditionBtn.setVisible(editable && userCanEditFilers());
    //do not process actions if method is invoked from filter loader
    if (filterActionsCreated && filterEntity != null) {
        setFilterActionsEnabled();
        setFilterActionsVisible();
    }
}
/** Returns whether the filter is editable by the user. */
@Override
public boolean isEditable() {
    return editable;
}
/**
 * Returns the value of the condition parameter component with the given name,
 * or null when no such component exists or it has no value support.
 */
@Override
public Object getParamValue(String paramName) {
    Component component = getOwnComponent(paramName);
    if (!(component instanceof Component.HasValue)) {
        return null;
    }
    return ((Component.HasValue) component).getValue();
}
/**
 * Sets the value of the condition parameter component with the given name.
 * Silently does nothing when no such component exists or it has no value support.
 */
@Override
public void setParamValue(String paramName, Object value) {
    Component component = getOwnComponent(paramName);
    if (!(component instanceof Component.HasValue)) {
        return;
    }
    ((Component.HasValue) component).setValue(value);
}
/** Registers a listener notified when the selected filter entity changes. */
@Override
public void addFilterEntityChangeListener(Filter.FilterEntityChangeListener listener) {
    filterEntityChangeListeners.add(listener);
}
/** Returns the live list of filter-entity change listeners. */
@Override
public List<Filter.FilterEntityChangeListener> getFilterEntityChangeListeners() {
    return filterEntityChangeListeners;
}
/**
 * Returns the number of columns used to lay out conditions: the explicit setting
 * when present, otherwise the client configuration default.
 */
@Override
public Integer getColumnsCount() {
    if (columnsCount != null) {
        return columnsCount;
    }
    return clientConfig.getGenericFilterColumnsCount();
}
/** Sets the number of columns used to lay out conditions. */
@Override
public void setColumnsCount(int columnsCount) {
    this.columnsCount = columnsCount;
}
/** Returns whether the filter's group box is expanded. */
@Override
public boolean isExpanded() {
    return groupBoxLayout.isExpanded();
}
/** Expands or collapses the filter's group box. */
@Override
public void setExpanded(boolean expanded) {
    groupBoxLayout.setExpanded(expanded);
}
/** Returns whether the filter's group box can be collapsed by the user. */
@Override
public boolean isCollapsable() {
    return groupBoxLayout.isCollapsable();
}
/** Sets whether the filter's group box can be collapsed by the user. */
@Override
public void setCollapsable(boolean collapsable) {
    groupBoxLayout.setCollapsable(collapsable);
}
/** Shows/hides the generic/FTS mode switch; it is only shown if FTS is actually available. */
@Override
public void setModeSwitchVisible(boolean modeSwitchVisible) {
    this.modeSwitchVisible = modeSwitchVisible;
    ftsSwitch.setVisible(modeSwitchVisible && isFtsModeEnabled());
}
/**
 * Moves keyboard focus into the filter: to the first parameter editor if one exists,
 * otherwise to the filters lookup or popup button. If no filter entity is loaded yet,
 * the request is deferred until one is set.
 */
@Override
public void requestFocus() {
    if (filterEntity == null) {
        delayedFocus = true;
        return;
    }
    if (paramEditComponentToFocus != null) {
        requestFocusToParamEditComponent();
    } else if (filtersLookupDisplayed) {
        filtersLookup.requestFocus();
    } else if (filtersPopupDisplayed) {
        filtersPopupButton.requestFocus();
    }
}
/**
 * Focuses the pending parameter edit component; only ParamEditor and TextField
 * components are focusable here — other component types are ignored.
 */
protected void requestFocusToParamEditComponent() {
    if (paramEditComponentToFocus instanceof ParamEditor) {
        ((ParamEditor) paramEditComponentToFocus).requestFocus();
    } else if (paramEditComponentToFocus instanceof TextField) {
        ((TextField) paramEditComponentToFocus).requestFocus();
    }
}
/**
 * Registers a listener for group-box expand/collapse events. The listener list is
 * created lazily and duplicate registrations are ignored.
 */
@Override
public void addExpandedStateChangeListener(FDExpandedStateChangeListener listener) {
    List<FDExpandedStateChangeListener> listeners = expandedStateChangeListeners;
    if (listeners == null) {
        listeners = new ArrayList<>();
        expandedStateChangeListeners = listeners;
    }
    if (!listeners.contains(listener)) {
        listeners.add(listener);
    }
}
/** Unregisters an expand/collapse listener; no-op when none were ever registered. */
@Override
public void removeExpandedStateChangeListener(FDExpandedStateChangeListener listener) {
    if (expandedStateChangeListeners != null) {
        expandedStateChangeListeners.remove(listener);
    }
}
/** Notifies registered listeners of the current expanded state of the group box. */
protected void fireExpandStateChange() {
    if (expandedStateChangeListeners != null) {
        FDExpandedStateChangeEvent event = new FDExpandedStateChangeEvent(this, isExpanded());
        for (FDExpandedStateChangeListener listener : expandedStateChangeListeners) {
            listener.expandedStateChanged(event);
        }
    }
}
/**
 * Binds the owning Filter component and wires up the add-condition helper. If adding
 * a condition to the UI fails, the condition is rolled back out of the tree before the
 * exception is rethrown.
 */
@Override
public void setFilter(Filter filter) {
    this.filter = filter;
    addConditionHelper = new AddConditionHelper(filter, new AddConditionHelper.Handler() {
        @Override
        public void handle(AbstractCondition condition) {
            try {
                addCondition(condition);
            } catch (Exception e) {
                // keep tree consistent with the UI when layout creation fails
                conditions.removeCondition(condition);
                throw e;
            }
        }
    });
}
/**
 * Appends a condition to the tree root, rebuilds the conditions layout with focus on
 * the new condition, and keeps the "filter modified" indicator in sync with later
 * parameter changes of that condition.
 */
protected void addCondition(AbstractCondition condition) {
    conditions.getRootNodes().add(new Node<>(condition));
    fillConditionsLayout(ConditionsFocusType.LAST);
    requestFocusToParamEditComponent();
    updateFilterModifiedIndicator();
    condition.addListener(new AbstractCondition.Listener() {
        @Override
        public void captionChanged() {
        }
        @Override
        public void paramChanged(Param oldParam, Param newParam) {
            updateFilterModifiedIndicator();
        }
    });
}
/**
 * Registers frame-level keyboard shortcuts (once per frame): one to apply the filter
 * (generic or FTS depending on the current mode) and one to open the filters popup.
 */
protected void initShortcutActions() {
    if (filter.getFrame().getAction(Filter.APPLY_ACTION_ID) == null) {
        filter.getFrame().addAction(new AbstractAction(Filter.APPLY_ACTION_ID, clientConfig.getFilterApplyShortcut()) {
            @Override
            public void actionPerform(Component component) {
                if (isVisible() && datasource != null) {
                    if (filterMode == FilterMode.GENERIC_MODE) {
                        apply(false);
                    } else {
                        applyFts();
                    }
                }
            }
        });
    }
    if (filter.getFrame().getAction(Filter.SELECT_ACTION_ID) == null) {
        filter.getFrame().addAction(new AbstractAction(Filter.SELECT_ACTION_ID, clientConfig.getFilterSelectShortcut()) {
            @Override
            public void actionPerform(Component component) {
                if (isVisible() && datasource != null && filtersPopupButton.isEnabled()) {
                    filtersPopupButton.setPopupVisible(true);
                }
            }
        });
    }
}
/**
 * Appends the current filter's caption to the window caption and the group box caption
 * (generic mode, named filters only; the ad-hoc filter adds nothing). The original
 * window caption is remembered the first time so it can be restored later.
 */
protected void updateWindowCaption() {
    Window window = ComponentsHelper.getWindow(filter);
    String filterTitle;
    if (filterMode == FilterMode.GENERIC_MODE && filterEntity != null && filterEntity != adHocFilter) {
        filterTitle = getFilterCaption(filterEntity);
    } else {
        filterTitle = null;
    }
    window.setDescription(filterTitle);
    if (initialWindowCaption == null) {
        initialWindowCaption = window.getCaption();
    }
    windowManager.setWindowCaption(window, initialWindowCaption, filterTitle);
    groupBoxLayout.setCaption(Strings.isNullOrEmpty(filterTitle) ? caption : caption + ": " + filterTitle);
}
/** Factory method for the controls layout builder; subclasses may substitute their own. */
protected ControlsLayoutBuilder createControlsLayoutBuilder(String layoutDescription) {
    return new ControlsLayoutBuilder(layoutDescription);
}
/**
 * Method sets default = false to all filters except the current one
 */
protected void resetDefaultFilters() {
    for (FilterEntity filter : filterEntities) {
        boolean isCurrent = ObjectUtils.equals(filter, filterEntity);
        if (!isCurrent && BooleanUtils.isTrue(filter.getIsDefault())) {
            filter.setIsDefault(false);
        }
    }
}
/**
 * Reacts to the filters lookup field selection: switches the active filter entity.
 * The {@code filtersLookupListenerEnabled} flag suppresses reaction to programmatic
 * lookup updates.
 */
protected class FiltersLookupChangeListener implements Component.ValueChangeListener {
    public FiltersLookupChangeListener() {
    }
    @Override
    public void valueChanged(Component.ValueChangeEvent e) {
        if (!filtersLookupListenerEnabled) return;
        if (e.getValue() instanceof FilterEntity) {
            setFilterEntity((FilterEntity) e.getValue());
        }
    }
}
/**
 * Saves the current filter. For a new, folder-less filter a name dialog is shown
 * first and the layout is rebuilt afterwards; otherwise the filter XML is simply
 * refreshed and committed.
 */
protected class SaveAction extends AbstractAction {
    public SaveAction() {
        super("save");
    }
    @Override
    public void actionPerform(Component component) {
        if (PersistenceHelper.isNew(filterEntity) && filterEntity.getFolder() == null) {
            WindowInfo windowInfo = windowConfig.getWindowInfo("saveFilter");
            Map<String, Object> params = new HashMap<>();
            // pre-fill the name unless it is still the ad-hoc placeholder
            if (!getMainMessage("filter.adHocFilter").equals(filterEntity.getName())) {
                params.put("filterName", filterEntity.getName());
            }
            final SaveFilterWindow window = (SaveFilterWindow) windowManager.openWindow(windowInfo, WindowManager.OpenType.DIALOG, params);
            window.addCloseListener(actionId -> {
                if (Window.COMMIT_ACTION_ID.equals(actionId)) {
                    String filterName = window.getFilterName();
                    filterEntity.setName(filterName);
                    filterEntity.setXml(filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE));
                    saveFilterEntity();
                    initAdHocFilter();
                    initFilterSelectComponents();
                    updateWindowCaption();
                    //recreate layout to remove delete conditions buttons
                    initialConditions = conditions.toConditionsList();
                    fillConditionsLayout(ConditionsFocusType.NONE);
                }
            });
        } else {
            // folder-backed filters persist current values, plain filters persist defaults
            String xml = filterEntity.getFolder() == null ? filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE)
                    : filterParser.getXml(conditions, Param.ValueProperty.VALUE);
            filterEntity.setXml(xml);
            saveFilterEntity();
        }
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.save");
    }
    @Override
    public String getIcon() {
        return "icons/save.png";
    }
}
/**
 * Saves a copy of the current filter under a new name. The copy gets a fresh id,
 * loses any system code, and is bound to the current user when a global filter is
 * copied by a user lacking global-filter rights.
 */
protected class SaveAsAction extends AbstractAction {
    protected SaveAsAction() {
        super("saveAs");
    }
    @Override
    public void actionPerform(Component component) {
        WindowInfo windowInfo = windowConfig.getWindowInfo("saveFilter");
        final SaveFilterWindow window = (SaveFilterWindow) windowManager.openWindow(windowInfo, WindowManager.OpenType.DIALOG);
        window.addCloseListener(actionId -> {
            if (Window.COMMIT_ACTION_ID.equals(actionId)) {
                String filterName = window.getFilterName();
                FilterEntity newFilterEntity = metadata.create(FilterEntity.class);
                metadata.getTools().copy(filterEntity, newFilterEntity);
                newFilterEntity.setCode(null);
                newFilterEntity.setId(UuidProvider.createUuid());
                //if filter was global but current user cannot create global filter then new filter
                //will be connected with current user
                if (newFilterEntity.getUser() == null && !uerCanEditGlobalFilter()) {
                    newFilterEntity.setUser(userSessionSource.getUserSession().getCurrentOrSubstitutedUser());
                }
                // folder-backed filters persist current values, plain filters persist defaults
                String xml = filterEntity.getFolder() == null ? filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE)
                        : filterParser.getXml(conditions, Param.ValueProperty.VALUE);
                filterEntity = newFilterEntity;
                filterEntity.setName(filterName);
                filterEntity.setXml(xml);
                saveFilterEntity();
                initFilterSelectComponents();
                updateWindowCaption();
                //recreate layout to remove delete conditions buttons
                initialConditions = conditions.toConditionsList();
                fillConditionsLayout(ConditionsFocusType.NONE);
            }
        });
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.saveAs");
    }
}
/**
 * Opens the filter editor dialog for the current filter. On commit, the edited
 * conditions are persisted, select components and captions are refreshed, and the
 * conditions layout is rebuilt; on cancel, focus returns to the parameter editor.
 */
protected class EditAction extends AbstractAction {
    protected EditAction() {
        super("edit");
    }
    @Override
    public void actionPerform(Component component) {
        WindowInfo windowInfo = windowConfig.getWindowInfo("filterEditor");
        Map<String, Object> params = new HashMap<>();
        params.put("filterEntity", filterEntity);
        params.put("filter", filter);
        params.put("conditions", conditions);
        FilterEditor window = (FilterEditor) windowManager.openWindow(windowInfo, WindowManager.OpenType.DIALOG, params);
        window.addCloseListener(actionId -> {
            if (Window.COMMIT_ACTION_ID.equals(actionId)) {
                conditions = window.getConditions();
                filterEntity.setXml(filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE));
                // making this filter the default clears the flag on all others
                if (filterEntity.getIsDefault()) {
                    resetDefaultFilters();
                }
                saveFilterEntity();
                initAdHocFilter();
                initFilterSelectComponents();
                updateWindowCaption();
                fillConditionsLayout(ConditionsFocusType.FIRST);
                requestFocusToParamEditComponent();
                updateFilterModifiedIndicator();
            } else {
                requestFocusToParamEditComponent();
            }
        });
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.edit");
    }
    @Override
    public String getIcon() {
        return "icons/edit.png";
    }
}
/**
 * Marks the current filter as the default one (clearing the flag from all others)
 * and refreshes the selection components and action states.
 */
protected class MakeDefaultAction extends AbstractAction {
    public MakeDefaultAction() {
        super("filter.makeDefault");
    }
    @Override
    public void actionPerform(Component component) {
        setDefaultFilter();
    }
    protected void setDefaultFilter() {
        if (filterEntity != null) {
            filterEntity.setIsDefault(true);
        }
        resetDefaultFilters();
        initFilterSelectComponents();
        setFilterActionsEnabled();
    }
}
/**
 * Deletes the current filter after user confirmation. The transient ad-hoc filter
 * cannot be removed.
 */
protected class RemoveAction extends AbstractAction {
    public RemoveAction() {
        super("remove");
    }
    @Override
    public void actionPerform(Component component) {
        if (filterEntity == adHocFilter) return;
        windowManager.showOptionDialog(
                getMainMessage("filter.removeDialogTitle"),
                getMainMessage("filter.removeDialogMessage"),
                Frame.MessageType.CONFIRMATION,
                new Action[]{
                        new DialogAction(Type.YES) {
                            @Override
                            public void actionPerform(Component component) {
                                removeFilterEntity();
                            }
                        },
                        new DialogAction(Type.NO, Status.PRIMARY)
                });
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.remove");
    }
    @Override
    public String getIcon() {
        return "icons/remove.png";
    }
}
/**
 * Deletes the current filter entity from the database and from the local list,
 * then switches back to the ad-hoc filter and refreshes the UI.
 */
protected void removeFilterEntity() {
    // commit with an empty save list and the filter in the removal list
    CommitContext ctx = new CommitContext(Collections.emptyList(), Collections.singletonList(filterEntity));
    dataService.commit(ctx);
    filterEntities.remove(filterEntity);
    setFilterEntity(adHocFilter);
    initFilterSelectComponents();
    updateWindowCaption();
}
/**
 * Pins the currently applied filter: the datasource keeps the query as a fixed
 * constraint and the filter is shown in the applied-filters panel.
 */
protected class PinAppliedAction extends AbstractAction {
    public PinAppliedAction() {
        super("pinApplied");
    }
    @Override
    public void actionPerform(Component component) {
        if (datasource instanceof CollectionDatasource.SupportsApplyToSelected) {
            ((CollectionDatasource.SupportsApplyToSelected) datasource).pinQuery();
            addAppliedFilter();
        }
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.pinApplied");
    }
    @Override
    public String getIcon() {
        return "icons/pin.png";
    }
}
/**
 * Saves the current filter as an application folder or a search folder, depending on
 * the constructor flag. The action id (and its localized caption) differ per mode.
 */
protected class SaveAsFolderAction extends AbstractAction {
    public static final String SAVE_AS_APP_FOLDER = "saveAsAppFolder";
    public static final String SAVE_AS_SEARCH_FOLDER = "saveAsSearchFolder";
    // true = application folder, false = search folder
    protected boolean isAppFolder;
    protected SaveAsFolderAction(boolean isAppFolder) {
        super(isAppFolder ? SAVE_AS_APP_FOLDER : SAVE_AS_SEARCH_FOLDER);
        this.isAppFolder = isAppFolder;
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter." + getId());
    }
    @Override
    public void actionPerform(Component component) {
        saveAsFolder(isAppFolder);
    }
}
/**
 * Creates a new entities set from the rows currently selected in the target table,
 * opening the "save set in folder" dialog with the selection and context parameters.
 */
protected class AddToSetAction extends ItemTrackingAction {
    protected AddToSetAction(Table table) {
        super(table, "filter.addToSet");
    }
    @Override
    public String getCaption() {
        return getMainMessage(getId());
    }
    @Override
    public void actionPerform(Component component) {
        Set<Entity> ownerSelection = target.getSelected();
        if (!ownerSelection.isEmpty()) {
            String entityType = target.getDatasource().getMetaClass().getName();
            Map<String, Object> params = new HashMap<>();
            params.put("entityType", entityType);
            params.put("items", ownerSelection);
            params.put("componentPath", ComponentsHelper.getFilterComponentPath(filter));
            // component id is the filter path without the leading window alias segment
            String[] strings = ValuePathHelper.parse(ComponentsHelper.getFilterComponentPath(filter));
            String componentId = ValuePathHelper.format(Arrays.copyOfRange(strings, 1, strings.length));
            params.put("componentId", componentId);
            params.put("foldersPane", filterHelper.getFoldersPane());
            params.put("entityClass", datasource.getMetaClass().getJavaClass().getName());
            params.put("query", datasource.getQuery());
            filter.getFrame().openWindow("saveSetInFolder",
                    WindowManager.OpenType.DIALOG,
                    params);
        }
    }
}
/**
 * Removes the selected rows from the current entities set. Removing the last
 * remaining entity deletes the set (and its folder) and closes the window; otherwise
 * the set's XML is updated and the folder is re-saved.
 */
protected class RemoveFromSetAction extends ItemTrackingAction {
    protected RemoveFromSetAction(Table table) {
        super(table, "filter.removeFromCurSet");
    }
    @Override
    public String getCaption() {
        return getMainMessage(getId());
    }
    @Override
    public void actionPerform(Component component) {
        if (filterEntity == null) {
            // todo add notification 'Filter not selected'
            return;
        }
        Set selected = target.getSelected();
        if (selected.isEmpty()) {
            return;
        }
        if (target.getDatasource().getItemIds().size() == 1) {
            // removing the last item: discard the whole set and close
            filterHelper.removeFolderFromFoldersPane(filterEntity.getFolder());
            removeFilterEntity();
            Window window = ComponentsHelper.getWindow(filter);
            windowManager.close(window);
        } else {
            String filterXml = filterEntity.getXml();
            filterEntity.setXml(UserSetHelper.removeEntities(filterXml, selected));
            filterEntity.getFolder().setFilterXml(filterEntity.getXml());
            filterEntity.setFolder(saveFolder((filterEntity.getFolder())));
            // re-apply to refresh the displayed set contents
            setFilterEntity(filterEntity);
        }
    }
}
/**
 * Adds entities to the current entities set by opening the corresponding lookup
 * screen (derived from this browse screen's alias) and merging the chosen entities
 * into the set's XML.
 */
protected class AddToCurrSetAction extends BaseAction {
    protected AddToCurrSetAction() {
        super("filter.addToCurSet");
    }
    @Override
    public String getCaption() {
        return getMainMessage(getId());
    }
    @Override
    public void actionPerform(Component component) {
        if (filterEntity == null) {
            // todo add notification 'Filter not selected'
            return;
        }
        Frame frame = filter.getFrame();
        // derive the lookup screen alias from the browse screen alias
        String[] strings = ValuePathHelper.parse(ComponentsHelper.getFilterComponentPath(filter));
        String windowAlias = strings[0];
        StringBuilder lookupAlias = new StringBuilder(windowAlias);
        if (windowAlias.endsWith(Window.BROWSE_WINDOW_SUFFIX)) {
            int index = lookupAlias.lastIndexOf(Window.BROWSE_WINDOW_SUFFIX);
            lookupAlias.delete(index, lookupAlias.length());
            lookupAlias.append(Window.LOOKUP_WINDOW_SUFFIX);
        }
        frame.openLookup(lookupAlias.toString(), new Window.Lookup.Handler() {
            @Override
            public void handleLookup(Collection items) {
                String filterXml = filterEntity.getXml();
                filterEntity.setXml(UserSetHelper.addEntities(filterXml, items));
                filterEntity.getFolder().setFilterXml(filterEntity.getXml());
                filterEntity.setFolder(saveFolder(filterEntity.getFolder()));
                // re-apply to refresh the displayed set contents
                setFilterEntity(filterEntity);
            }
        }, WindowManager.OpenType.THIS_TAB);
    }
}
/**
 * Immutable tuple binding an applied (pinned) filter to the UI elements
 * created for it: the layout row it occupies and its remove button.
 */
protected static class AppliedFilterHolder {
    public final AppliedFilter filter;
    public final Component.Container layout;
    public final Button button;
    protected AppliedFilterHolder(AppliedFilter filter, Component.Container layout, Button button) {
        this.filter = filter;
        this.layout = layout;
        this.button = button;
    }
}
/**
 * Class creates filter controls layout based on template.
 * See template format in documentation for {@link ClientConfig#getGenericFilterControlsLayout()}
 */
protected class ControlsLayoutBuilder {
    // template component name -> its options, preserving template order
    protected Map<String, List<String>> components = new LinkedHashMap<>();
    // action name usable in the template -> the corresponding filter action instance
    protected Map<String, AbstractAction> filterActions = new HashMap<>();

    public ControlsLayoutBuilder(String layoutDescription) {
        initFilterActions();
        parseLayoutDescription(layoutDescription);
    }

    // Registers every filter action addressable from the layout template.
    protected void initFilterActions() {
        filterActions.put("save", saveAction);
        filterActions.put("save_as", saveAsAction);
        filterActions.put("edit", editAction);
        filterActions.put("remove", removeAction);
        filterActions.put("pin", pinAppliedAction);
        filterActions.put("save_search_folder", saveAsSearchFolderAction);
        filterActions.put("save_app_folder", saveAsAppFolderAction);
        filterActions.put("make_default", makeDefaultAction);
    }

    // Parses a template like "[filters_popup] [settings | save, edit]" into the
    // components map: each bracketed group is "name | comma-separated-options".
    protected void parseLayoutDescription(String layoutDescription) {
        Pattern panelComponentPattern = Pattern.compile("\\[(.*?)\\]");
        Matcher matcher = panelComponentPattern.matcher(layoutDescription);
        Splitter componentDescriptionSplitter = Splitter.on("|").trimResults();
        Splitter optionsSplitter = Splitter.on(",").trimResults();
        while (matcher.find()) {
            String componentDescription = matcher.group(1);
            Iterable<String> parts = componentDescriptionSplitter.split(componentDescription);
            Iterator<String> iterator = parts.iterator();
            String componentName = iterator.next();
            // options part is optional in the template
            String componentOptions = iterator.hasNext() ? iterator.next() : "";
            Iterable<String> options = optionsSplitter.split(componentOptions);
            components.put(componentName, Lists.newArrayList(options));
        }
    }

    // Adds the parsed components to controlsLayout in template order,
    // expanding the spacer so trailing components are right-aligned.
    public void build() {
        for (Map.Entry<String, List<String>> entry : components.entrySet()) {
            Component component = getControlsLayoutComponent(entry.getKey(), entry.getValue());
            if (component == null) {
                log.warn("Filter controls layout component " + entry.getKey() + " not supported");
                continue;
            }
            controlsLayout.add(component);
            if (component == controlsLayoutGap) {
                controlsLayout.expand(component);
            }
        }
    }

    /**
     * Maps a template component name to the pre-created UI component, or creates
     * a standalone action button for action names. Returns null for unknown names.
     */
    @Nullable
    protected Component getControlsLayoutComponent(String name, List<String> options) {
        switch (name) {
            case "filters_popup":
                filtersPopupDisplayed = true;
                return filtersPopupBox;
            case "filters_lookup":
                filtersLookupDisplayed = true;
                return filtersLookup;
            case "search":
                // searchBtn is initially added into filtersPopupBox; detach it
                // so it can be placed as a standalone component
                searchBtn.setParent(null);
                return searchBtn;
            case "add_condition":
                return addConditionBtn;
            case "settings":
                fillSettingsBtn(options);
                return settingsBtn;
            case "max_results":
                maxResultsAddedToLayout = true;
                return maxResultsLayout;
            case "fts_switch":
                return ftsSwitch;
            case "spacer":
                return controlsLayoutGap;
            case "pin":
            case "save":
            case "save_as":
            case "edit":
            case "remove":
            case "make_default":
            case "save_search_folder":
            case "save_app_folder":
                return createActionBtn(name, options);
        }
        return null;
    }

    /**
     * Creates a standalone button for the given action name; returns null when the
     * action is not permitted. Options "no-caption" (caption moved to tooltip)
     * and "no-icon" strip the corresponding decoration.
     */
    protected Button createActionBtn(String actionName, List<String> options) {
        if (!isActionAllowed(actionName)) return null;
        Button button = componentsFactory.createComponent(Button.class);
        button.setAction(filterActions.get(actionName));
        if (options.contains("no-caption")) {
            button.setCaption(null);
            button.setDescription(filterActions.get(actionName).getCaption());
        }
        if (options.contains("no-icon"))
            button.setIcon(null);
        return button;
    }

    // Populates the settings popup button with the (permitted) actions listed as options.
    protected void fillSettingsBtn(List<String> actionNames) {
        for (String actionName : actionNames) {
            AbstractAction action = filterActions.get(actionName);
            if (action == null) {
                log.warn("Action " + actionName + " cannot be added to settingsBtn");
                continue;
            }
            if (isActionAllowed(actionName)) {
                settingsBtn.addAction(action);
            }
        }
    }

    // Permission / configuration gate for template actions; unknown names are allowed.
    protected boolean isActionAllowed(String actionName) {
        switch (actionName) {
            case "pin":
                return globalConfig.getAllowQueryFromSelected();
            case "save_search_folder":
            case "save_app_folder":
                return folderActionsEnabled && filterHelper.isFolderActionsEnabled();
            default:
                return true;
        }
    }
}
}
|
modules/gui/src/com/haulmont/cuba/gui/components/filter/FilterDelegateImpl.java
|
/*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.gui.components.filter;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.haulmont.bali.datastruct.Node;
import com.haulmont.bali.util.Dom4j;
import com.haulmont.bali.util.ParamsMap;
import com.haulmont.chile.core.datatypes.Datatypes;
import com.haulmont.chile.core.model.utils.InstanceUtils;
import com.haulmont.cuba.client.ClientConfig;
import com.haulmont.cuba.client.sys.PersistenceManagerClient;
import com.haulmont.cuba.core.app.DataService;
import com.haulmont.cuba.core.entity.AbstractSearchFolder;
import com.haulmont.cuba.core.entity.AppFolder;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.global.*;
import com.haulmont.cuba.core.global.filter.DenyingClause;
import com.haulmont.cuba.core.global.filter.QueryFilter;
import com.haulmont.cuba.gui.ComponentsHelper;
import com.haulmont.cuba.gui.WindowManager;
import com.haulmont.cuba.gui.WindowManagerProvider;
import com.haulmont.cuba.gui.WindowParams;
import com.haulmont.cuba.gui.components.*;
import com.haulmont.cuba.gui.components.Action.Status;
import com.haulmont.cuba.gui.components.DialogAction.Type;
import com.haulmont.cuba.gui.components.KeyCombination.Key;
import com.haulmont.cuba.gui.components.actions.BaseAction;
import com.haulmont.cuba.gui.components.actions.ItemTrackingAction;
import com.haulmont.cuba.gui.components.filter.condition.AbstractCondition;
import com.haulmont.cuba.gui.components.filter.condition.CustomCondition;
import com.haulmont.cuba.gui.components.filter.edit.FilterEditor;
import com.haulmont.cuba.gui.components.filter.filterselect.FilterSelectWindow;
import com.haulmont.cuba.gui.config.WindowConfig;
import com.haulmont.cuba.gui.config.WindowInfo;
import com.haulmont.cuba.gui.data.CollectionDatasource;
import com.haulmont.cuba.gui.data.HierarchicalDatasource;
import com.haulmont.cuba.gui.presentations.Presentations;
import com.haulmont.cuba.gui.settings.SettingsImpl;
import com.haulmont.cuba.gui.theme.ThemeConstants;
import com.haulmont.cuba.gui.theme.ThemeConstantsManager;
import com.haulmont.cuba.gui.xml.layout.ComponentsFactory;
import com.haulmont.cuba.security.entity.FilterEntity;
import com.haulmont.cuba.security.entity.SearchFolder;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.BooleanUtils;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.dom4j.Attribute;
import org.dom4j.Element;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*/
@org.springframework.stereotype.Component(FilterDelegate.NAME)
@Scope("prototype")
public class FilterDelegateImpl implements FilterDelegate {
protected static final String GLOBAL_FILTER_PERMISSION = "cuba.gui.filter.global";
protected static final String GLOBAL_APP_FOLDERS_PERMISSION = "cuba.gui.appFolder.global";
protected static final String FILTER_EDIT_PERMISSION = "cuba.gui.filter.edit";
protected static final String CONDITIONS_LOCATION_TOP = "top";
protected static final Logger log = LoggerFactory.getLogger(FilterDelegateImpl.class);
// suffix used to mark a modified (unsaved) filter in captions
protected static final String MODIFIED_INDICATOR_SYMBOL = " *";

// --- injected infrastructure beans ---
@Inject
protected ComponentsFactory componentsFactory;
@Inject
protected ThemeConstantsManager themeConstantsManager;
@Inject
protected Messages messages;
@Inject
protected WindowManagerProvider windowManagerProvider;
@Inject
protected Metadata metadata;
@Inject
protected WindowConfig windowConfig;
@Inject
protected UserSessionSource userSessionSource;
@Inject
protected Configuration configuration;
@Inject
protected Security security;
@Inject
protected FilterHelper filterHelper;
@Inject
protected FilterParser filterParser;

// --- beans and configs resolved programmatically in init() ---
protected FtsFilterHelper ftsFilterHelper; // remains null when the FTS bean is absent
protected DataService dataService;
protected PersistenceManagerClient persistenceManager;
protected ClientConfig clientConfig;
protected GlobalConfig globalConfig;
protected FtsConfig ftsConfig;
protected AddConditionHelper addConditionHelper;
protected ThemeConstants theme;
protected WindowManager windowManager;

// --- filter state ---
protected Filter filter;
protected FilterEntity adHocFilter;
protected ConditionsTree conditions = new ConditionsTree();
// generic-mode conditions remembered while in FTS mode (see switchFilterMode)
protected ConditionsTree prevConditions = new ConditionsTree();
protected List<AbstractCondition> initialConditions = new ArrayList<>();
protected FilterEntity filterEntity;
// snapshot used to detect modifications (see saveInitialFilterState)
protected FilterEntity initialFilterEntity;
protected CollectionDatasource datasource;
protected QueryFilter dsQueryFilter;
protected List<FilterEntity> filterEntities = new ArrayList<>();
protected AppliedFilter lastAppliedFilter;
protected LinkedList<AppliedFilterHolder> appliedFilters = new LinkedList<>();
protected List<Filter.FilterEntityChangeListener> filterEntityChangeListeners = new ArrayList<>();

// --- UI components ---
protected GroupBoxLayout groupBoxLayout;
protected BoxLayout layout;
protected PopupButton filtersPopupButton;
protected Component.Container conditionsLayout;
protected BoxLayout maxResultsLayout;
protected Field maxResultsField;
protected TextField maxResultsTextField;
protected LookupField maxResultsLookupField;
protected BoxLayout controlsLayout;
protected Component.Container appliedFiltersLayout;
protected PopupButton settingsBtn;
protected Component applyTo;
protected SaveAction saveAction;
protected TextField ftsSearchCriteriaField;
protected CheckBox ftsSwitch;
protected LinkButton addConditionBtn;
protected HBoxLayout filtersPopupBox;
protected Button searchBtn;
protected Component controlsLayoutGap;
protected Object paramEditComponentToFocus;

// --- behaviour flags and settings ---
protected String caption;
protected int maxResults = -1; // -1 until explicitly set
protected boolean useMaxResults;
protected boolean textMaxResults;
protected Boolean manualApplyRequired;
protected boolean folderActionsEnabled = true;
protected boolean filtersLookupListenerEnabled = true;
protected boolean filtersPopupDisplayed = false;
protected boolean filtersLookupDisplayed = false;
protected boolean maxResultsAddedToLayout = false;
protected boolean editable = true;
protected FilterMode filterMode;
protected boolean filterSavingPossible = true;
protected Integer columnsCount;
protected String initialWindowCaption;
protected String conditionsLocation;
protected boolean filterActionsCreated = false;
protected boolean delayedFocus;
protected boolean modeSwitchVisible = true;

// --- filter actions (created in createFilterActions) ---
protected SaveAsAction saveAsAction;
protected EditAction editAction;
protected MakeDefaultAction makeDefaultAction;
protected RemoveAction removeAction;
protected PinAppliedAction pinAppliedAction;
protected SaveAsFolderAction saveAsAppFolderAction;
protected SaveAsFolderAction saveAsSearchFolderAction;
protected LookupField filtersLookup;
protected List<FDExpandedStateChangeListener> expandedStateChangeListeners;

// Where to place keyboard focus after (re)building the conditions layout.
protected enum ConditionsFocusType {
    NONE,
    FIRST,
    LAST
}
@PostConstruct
public void init() {
    // Resolve collaborators and configuration programmatically, then build the layout.
    theme = themeConstantsManager.getConstants();
    windowManager = windowManagerProvider.get();
    dataService = AppBeans.get(DataService.class);
    persistenceManager = AppBeans.get(PersistenceManagerClient.class);
    globalConfig = configuration.getConfig(GlobalConfig.class);
    clientConfig = configuration.getConfig(ClientConfig.class);
    ftsConfig = configuration.getConfig(FtsConfig.class);
    // FTS helper is optional: it is looked up only when the bean is registered
    if (AppBeans.containsBean(FtsFilterHelper.NAME)) {
        ftsFilterHelper = AppBeans.get(FtsFilterHelper.class);
    }
    filterMode = FilterMode.GENERIC_MODE;
    conditionsLocation = clientConfig.getGenericFilterConditionsLocation();
    createLayout();
}
/**
 * Builds (or rebuilds) the top-level filter layout: a group box wrapping an inner
 * VBox that holds the conditions area and the controls row. On subsequent calls
 * (e.g. when switching filter modes) the inner layout is cleared and refilled.
 */
protected void createLayout() {
    if (layout == null) {
        // first call: create the surrounding group box and the inner VBox
        groupBoxLayout = componentsFactory.createComponent(GroupBoxLayout.class);
        groupBoxLayout.addExpandedStateChangeListener(e -> fireExpandStateChange());
        groupBoxLayout.setOrientation(GroupBoxLayout.Orientation.VERTICAL);
        groupBoxLayout.setStyleName("cuba-generic-filter");
        groupBoxLayout.setWidth("100%");
        layout = componentsFactory.createComponent(VBoxLayout.class);
        layout.setWidth("100%");
        groupBoxLayout.add(layout);
        if (caption == null)
            setCaption(getMainMessage("filter.groupBoxCaption"));
    } else {
        // rebuild: remove all previously created children
        Collection<Component> components = layout.getComponents();
        for (Component component : components) {
            layout.remove(component);
        }
    }
    layout.setSpacing(false);
    appliedFiltersLayout = componentsFactory.createComponent(VBoxLayout.class);
    conditionsLayout = componentsFactory.createComponent(HBoxLayout.class);
    conditionsLayout.setWidth("100%");
    conditionsLayout.setStyleName("filter-conditions");
    if (filterMode == FilterMode.GENERIC_MODE) {
        createControlsLayoutForGeneric();
    } else {
        createControlsLayoutForFts();
    }
    // conditions may sit above or below the controls row, per client configuration
    if (CONDITIONS_LOCATION_TOP.equals(conditionsLocation)) {
        layout.add(conditionsLayout);
        layout.add(controlsLayout);
    } else {
        layout.add(controlsLayout);
        layout.add(conditionsLayout);
    }
}
/**
 * Creates the controls row for the generic (condition-based) filter mode.
 * All candidate components are created up front; which of them actually end up
 * in the layout is decided by the {@link ControlsLayoutBuilder} template.
 */
protected void createControlsLayoutForGeneric() {
    controlsLayout = componentsFactory.createComponent(HBoxLayout.class);
    controlsLayout.setSpacing(true);
    controlsLayout.setWidth("100%");
    filterHelper.setInternalDebugId(controlsLayout, "filterControlsLayout");
    // box combining the Search button with the filters popup button
    filtersPopupBox = componentsFactory.createComponent(HBoxLayout.class);
    filtersPopupBox.setStyleName("filter-search-button-layout");
    filterHelper.setInternalDebugId(filtersPopupBox, "filtersPopupBox");
    searchBtn = componentsFactory.createComponent(Button.class);
    filtersPopupBox.add(searchBtn);
    searchBtn.setStyleName("filter-search-button");
    searchBtn.setCaption(getMainMessage("filter.search"));
    searchBtn.setIcon("icons/search.png");
    searchBtn.setAction(new AbstractAction("search") {
        @Override
        public void actionPerform(Component component) {
            apply(false);
        }
    });
    filterHelper.setInternalDebugId(searchBtn, "searchBtn");
    filtersPopupButton = componentsFactory.createComponent(PopupButton.class);
    filterHelper.setInternalDebugId(filtersPopupButton, "filtersPopupButton");
    filtersPopupBox.add(filtersPopupButton);
    // alternative filter selector: a lookup field instead of the popup
    filtersLookup = componentsFactory.createComponent(LookupField.class);
    filtersLookup.setWidth(theme.get("cuba.gui.filter.select.width"));
    filtersLookup.addValueChangeListener(new FiltersLookupChangeListener());
    filterHelper.setLookupNullSelectionAllowed(filtersLookup, false);
    filterHelper.setInternalDebugId(filtersLookup, "filtersLookup");
    addConditionBtn = componentsFactory.createComponent(LinkButton.class);
    addConditionBtn.setAlignment(Component.Alignment.MIDDLE_LEFT);
    addConditionBtn.setCaption(getMainMessage("filter.addCondition"));
    addConditionBtn.setAction(new AbstractAction("openAddConditionDlg") {
        @Override
        public void actionPerform(Component component) {
            addConditionHelper.addCondition(conditions);
        }
    });
    filterHelper.setInternalDebugId(addConditionBtn, "addConditionBtn");
    // expandable gap that right-aligns whatever follows it in the template
    controlsLayoutGap = componentsFactory.createComponent(Label.class);
    filterHelper.setInternalDebugId(controlsLayoutGap, "controlsLayoutGap");
    controlsLayout.add(controlsLayoutGap);
    controlsLayout.expand(controlsLayoutGap);
    settingsBtn = componentsFactory.createComponent(PopupButton.class);
    settingsBtn.setIcon("icons/gear.png");
    filterHelper.setInternalDebugId(settingsBtn, "settingsBtn");
    createFilterActions();
    createMaxResultsLayout();
    createFtsSwitch();
    ftsSwitch.setAlignment(Component.Alignment.MIDDLE_RIGHT);
    filterHelper.setInternalDebugId(ftsSwitch, "ftsSwitch");
    // assemble the row according to the configured template
    String layoutDescription = clientConfig.getGenericFilterControlsLayout();
    ControlsLayoutBuilder controlsLayoutBuilder = createControlsLayoutBuilder(layoutDescription);
    controlsLayoutBuilder.build();
    maxResultsLayout.setVisible(isMaxResultsLayoutVisible());
    filterHelper.setInternalDebugId(maxResultsLayout, "maxResultsLayout");
}
/**
 * Creates the controls row for the full-text-search mode: a search phrase
 * field (ENTER applies), a Search button, an optional max-results box and the
 * mode switch checkbox. Unlike generic mode, this row is assembled directly
 * rather than from a template.
 */
protected void createControlsLayoutForFts() {
    controlsLayout = componentsFactory.createComponent(HBoxLayout.class);
    controlsLayout.setSpacing(true);
    controlsLayout.setWidth("100%");
    ftsSearchCriteriaField = componentsFactory.createComponent(TextField.class);
    ftsSearchCriteriaField.setAlignment(Component.Alignment.MIDDLE_LEFT);
    ftsSearchCriteriaField.setWidth(theme.get("cuba.gui.filter.ftsSearchCriteriaField.width"));
    ftsSearchCriteriaField.setInputPrompt(getMainMessage("filter.enterSearchPhrase"));
    // focus goes to the phrase field when the layout is (re)shown
    paramEditComponentToFocus = ftsSearchCriteriaField;
    filterHelper.addShortcutListener(ftsSearchCriteriaField, createFtsSearchShortcutListener());
    controlsLayout.add(ftsSearchCriteriaField);
    searchBtn = componentsFactory.createComponent(Button.class);
    searchBtn.setCaption(getMainMessage("filter.search"));
    searchBtn.setIcon("icons/search.png");
    searchBtn.setAction(new AbstractAction("search") {
        @Override
        public void actionPerform(Component component) {
            applyFts();
        }
    });
    controlsLayout.add(searchBtn);
    // expandable gap pushing the remaining components to the right
    controlsLayoutGap = componentsFactory.createComponent(Label.class);
    controlsLayout.add(controlsLayoutGap);
    controlsLayout.expand(controlsLayoutGap);
    createMaxResultsLayout();
    if (isMaxResultsLayoutVisible()) {
        controlsLayout.add(maxResultsLayout);
        initMaxResults();
    }
    createFtsSwitch();
    ftsSwitch.setAlignment(Component.Alignment.MIDDLE_RIGHT);
    controlsLayout.add(ftsSwitch);
}
/**
 * Creates the ENTER shortcut for the FTS search field that triggers the search.
 */
protected FilterHelper.ShortcutListener createFtsSearchShortcutListener() {
    KeyCombination enterKey = new KeyCombination(Key.ENTER);
    return new FilterHelper.ShortcutListener("ftsSearch", enterKey) {
        @Override
        public void handleShortcutPressed() {
            applyFts();
        }
    };
}
/**
 * Creates the checkbox toggling between generic and full-text-search modes.
 * Its initial state mirrors the current mode; visibility honours
 * {@code modeSwitchVisible}.
 */
protected void createFtsSwitch() {
    ftsSwitch = componentsFactory.createComponent(CheckBox.class);
    ftsSwitch.setCaption(getMainMessage("filter.ftsSwitch"));
    ftsSwitch.setValue(filterMode == FilterMode.FTS_MODE);
    ftsSwitch.addValueChangeListener(e -> {
        boolean ftsChecked = Boolean.TRUE.equals(e.getValue());
        filterMode = ftsChecked ? FilterMode.FTS_MODE : FilterMode.GENERIC_MODE;
        switchFilterMode(filterMode);
    });
    ftsSwitch.setVisible(modeSwitchVisible);
}
/**
 * Switches between generic and FTS filter modes and rebuilds the layout.
 * Generic conditions are stashed in {@code prevConditions} while in FTS mode
 * and restored on the way back. No-ops (with a warning) when FTS is requested
 * but not available for the datasource's entity.
 */
@Override
public void switchFilterMode(FilterMode filterMode) {
    if (filterMode == FilterMode.FTS_MODE && !isFtsModeEnabled()) {
        log.warn("Unable to switch to the FTS filter mode. FTS mode is not supported for the " + datasource.getMetaClass().getName() + " entity");
        return;
    }
    this.filterMode = filterMode;
    if (filterMode == FilterMode.FTS_MODE) {
        // remember the generic conditions and drop any pinned/applied state
        prevConditions = conditions;
        ((CollectionDatasource.SupportsApplyToSelected) datasource).unpinAllQuery();
        appliedFilters.clear();
        lastAppliedFilter = null;
    }
    conditions = (filterMode == FilterMode.GENERIC_MODE) ? prevConditions : new ConditionsTree();
    createLayout();
    // initMaxResults();
    if (filterMode == FilterMode.GENERIC_MODE) {
        fillConditionsLayout(ConditionsFocusType.FIRST);
        addConditionBtn.setVisible(editable && userCanEditFilers());
        setFilterActionsEnabled();
        initFilterSelectComponents();
    }
    // restore focus: a param editor first, then whichever filter selector is shown
    if (paramEditComponentToFocus != null)
        requestFocusToParamEditComponent();
    else if (filtersPopupDisplayed)
        filtersPopupButton.requestFocus();
    else if (filtersLookupDisplayed) {
        filtersLookup.requestFocus();
    }
    updateWindowCaption();
}
/**
 * Creates the "max results" box: a label plus either a free-form text field or a
 * lookup with predefined options (selected by {@code textMaxResults}). The lookup
 * options are parsed from the comma-separated config value; the literal "NULL"
 * enables null selection instead of adding an option.
 */
protected void createMaxResultsLayout() {
    maxResultsLayout = componentsFactory.createComponent(HBoxLayout.class);
    maxResultsLayout.setSpacing(true);
    maxResultsLayout.setAlignment(Component.Alignment.MIDDLE_RIGHT);
    Label maxResultsLabel1 = componentsFactory.createComponent(Label.class);
    maxResultsLabel1.setValue(messages.getMainMessage("filter.maxResults.label1"));
    maxResultsLabel1.setAlignment(Component.Alignment.MIDDLE_RIGHT);
    maxResultsLayout.add(maxResultsLabel1);
    maxResultsTextField = componentsFactory.createComponent(TextField.class);
    maxResultsTextField.setAlignment(Component.Alignment.MIDDLE_RIGHT);
    maxResultsTextField.setMaxLength(4);
    maxResultsTextField.setWidth(theme.get("cuba.gui.Filter.maxResults.width"));
    maxResultsTextField.setDatatype(Datatypes.get("int"));
    maxResultsLookupField = componentsFactory.createComponent(LookupField.class);
    maxResultsLookupField.setAlignment(Component.Alignment.MIDDLE_RIGHT);
    maxResultsLookupField.setWidth(theme.get("cuba.gui.Filter.maxResults.lookup.width"));
    filterHelper.setLookupTextInputAllowed(maxResultsLookupField, false);
    filterHelper.setLookupNullSelectionAllowed(maxResultsLookupField, false);
    List<Integer> maxResultOptions = new ArrayList<>();
    String maxResultOptionsStr = clientConfig.getGenericFilterMaxResultsOptions();
    Iterable<String> split = Splitter.on(",").trimResults().split(maxResultOptionsStr);
    for (String option : split) {
        if ("NULL".equals(option)) {
            filterHelper.setLookupNullSelectionAllowed(maxResultsLookupField, true);
        } else {
            try {
                Integer value = Integer.valueOf(option);
                maxResultOptions.add(value);
            } catch (NumberFormatException ignored) {
                // non-numeric config entries are silently skipped
            }
        }
    }
    maxResultsLookupField.setOptionsList(maxResultOptions);
    maxResultsField = textMaxResults ? maxResultsTextField : maxResultsLookupField;
    maxResultsLayout.add(maxResultsField);
}
/**
 * Loads filter entities, finds default filter and applies it if found.
 * Falls back to the ad-hoc filter when there is no default or when loading
 * the default filter fails. The default filter is applied only when the
 * window allows auto refresh and either manual apply is not required or the
 * filter is flagged "apply by default".
 */
@Override
public void loadFiltersAndApplyDefault() {
    initShortcutActions();
    initAdHocFilter();
    loadFilterEntities();
    FilterEntity defaultFilter = getDefaultFilter(filterEntities);
    initFilterSelectComponents();
    if (defaultFilter == null) {
        defaultFilter = adHocFilter;
    }
    try {
        setFilterEntity(defaultFilter);
    } catch (Exception e) {
        // a broken default filter must not prevent the screen from opening
        log.error("Exception on loading default filter '" + defaultFilter.getName() + "'", e);
        windowManager.showNotification(messages.formatMainMessage("filter.errorLoadingDefaultFilter", defaultFilter.getName()), Frame.NotificationType.ERROR);
        defaultFilter = adHocFilter;
        setFilterEntity(adHocFilter);
    }
    if (defaultFilter != adHocFilter) {
        Window window = ComponentsHelper.getWindow(filter);
        if (!WindowParams.DISABLE_AUTO_REFRESH.getBool(window.getContext())) {
            if (getResultingManualApplyRequired()) {
                // manual-apply screens still auto-apply filters marked "apply by default"
                if (BooleanUtils.isTrue(defaultFilter.getApplyDefault())) {
                    apply(true);
                }
            } else
                apply(true);
            if (filterEntity != null) {
                window.setDescription(getFilterCaption(filterEntity));
            } else
                window.setDescription(null);
        }
    }
}
/**
 * Sets filter entity, creates condition editor components and applies filter if necessary.
 * Also wires modification tracking onto every condition, pushes matching window
 * parameters into condition params, snapshots the initial state, and notifies
 * registered {@link Filter.FilterEntityChangeListener}s.
 */
@Override
public void setFilterEntity(FilterEntity filterEntity) {
    this.filterEntity = filterEntity;
    conditions = filterParser.getConditions(filter, filterEntity.getXml());
    initialConditions = conditions.toConditionsList();
    for (AbstractCondition condition : conditions.toConditionsList()) {
        // any param change marks the filter as modified
        condition.addListener(new AbstractCondition.Listener() {
            @Override
            public void captionChanged() {}
            @Override
            public void paramChanged(Param oldParam, Param newParam) {
                updateFilterModifiedIndicator();
            }
        });
    }
    // If there are window parameters named as filter parameters, assign values to the corresponding
    // filter params. Together with passing a filter code in 'filter' window parameter it allows to open an
    // arbitrary filter with parameters regardless of a user defined default filter.
    Window window = ComponentsHelper.getWindow(filter);
    for (AbstractCondition condition : conditions.toConditionsList()) {
        if (condition.getParam() != null) {
            for (Map.Entry<String, Object> entry : window.getContext().getParams().entrySet()) {
                if (entry.getKey().equals(condition.getParam().getName()))
                    condition.getParam().parseValue((String) entry.getValue());
            }
        }
    }
    saveInitialFilterState();
    if (filtersLookupDisplayed) {
        // suppress the lookup's value-change listener while syncing its value
        filtersLookupListenerEnabled = false;
        filtersLookup.setValue(filterEntity);
        filtersLookupListenerEnabled = true;
    }
    setFilterActionsEnabled();
    setFilterActionsVisible();
    fillConditionsLayout(ConditionsFocusType.FIRST);
    if (delayedFocus) {
        delayedFocus = false;
        requestFocus();
    } else {
        requestFocusToParamEditComponent();
    }
    setConditionsLayoutVisible(true);
    // sets and default-applied folder filters are applied immediately
    if (BooleanUtils.isTrue(filterEntity.getIsSet())
            || (filterEntity.getFolder() != null && BooleanUtils.isNotFalse(filterEntity.getApplyDefault()))) {
        apply(true);
    }
    for (Filter.FilterEntityChangeListener listener : filterEntityChangeListeners) {
        listener.filterEntityChanged(filterEntity);
    }
    updateWindowCaption();
}
/**
 * Saves initial filter state. It is used for indicating of filter modifications:
 * only the fields relevant for modification detection (name, code, user, XML)
 * are copied into a detached snapshot entity.
 */
protected void saveInitialFilterState() {
    initialFilterEntity = metadata.create(FilterEntity.class);
    initialFilterEntity.setName(filterEntity.getName());
    initialFilterEntity.setCode(filterEntity.getCode());
    initialFilterEntity.setUser(filterEntity.getUser());
    initialFilterEntity.setXml(filterEntity.getXml());
}
/**
 * Sets conditionsLayout visibility and shows/hides top border of controlsLayout
 * (the border style depends on whether conditions are shown — see
 * getControlsLayoutStyleName()).
 */
protected void setConditionsLayoutVisible(boolean visible) {
    conditionsLayout.setVisible(visible);
    controlsLayout.setStyleName(getControlsLayoutStyleName());
}
/**
 * Recomputes the enabled state of every filter action from the current filter's
 * properties (global/user-owned, folder-backed, set, coded, default, ad-hoc)
 * and the current user's permissions. Also refreshes table actions when the
 * client supports them.
 */
protected void setFilterActionsEnabled() {
    // a filter with no owning user is a global filter
    boolean isGlobal = filterEntity.getUser() == null;
    boolean userCanEditGlobalFilter = uerCanEditGlobalFilter();
    boolean userCanEditFilters = userCanEditFilers();
    boolean filterEditable = isEditable();
    boolean userCanEditGlobalAppFolder = userSessionSource.getUserSession().isSpecificPermitted(GLOBAL_APP_FOLDERS_PERMISSION);
    boolean createdByCurrentUser = userSessionSource.getUserSession().getCurrentOrSubstitutedUser().equals(filterEntity.getUser());
    // coded filters are defined in screen XML and cannot be removed/overwritten
    boolean hasCode = !Strings.isNullOrEmpty(filterEntity.getCode());
    boolean isFolder = filterEntity.getFolder() != null;
    boolean isSearchFolder = isFolder && (filterEntity.getFolder() instanceof SearchFolder);
    boolean isAppFolder = isFolder && (filterEntity.getFolder() instanceof AppFolder);
    boolean isSet = BooleanUtils.isTrue(filterEntity.getIsSet());
    boolean isDefault = BooleanUtils.isTrue(filterEntity.getIsDefault());
    boolean isAdHocFilter = filterEntity == adHocFilter;
    boolean editActionEnabled = !isSet && filterEditable && userCanEditFilters && (!isGlobal || userCanEditGlobalFilter);
    filterSavingPossible = editActionEnabled &&
            ((isGlobal && userCanEditGlobalFilter) || (!isGlobal && createdByCurrentUser)) &&
            ((!isFolder && !hasCode) || isSearchFolder || (isAppFolder && userCanEditGlobalAppFolder));
    // folder-backed filters can always be re-saved; plain ones only when modified
    boolean saveActionEnabled = filterSavingPossible && (isFolder || isFilterModified());
    boolean saveAsActionEnabled = !isSet && filterEditable && userCanEditFilters;
    boolean removeActionEnabled = !isSet &&
            (!hasCode && !isFolder) &&
            ((isGlobal && userCanEditGlobalFilter) || (!isGlobal && createdByCurrentUser)) &&
            !isAdHocFilter && filterEditable && userCanEditFilters;
    boolean makeDefaultActionEnabled = !isDefault && !isFolder && !isSet && !isAdHocFilter && (!isGlobal || userCanEditGlobalFilter);
    // pin is pointless for an empty ad-hoc filter
    boolean pinAppliedActionEnabled = lastAppliedFilter != null
            && !(lastAppliedFilter.getFilterEntity() == adHocFilter && lastAppliedFilter.getConditions().getRoots().size() == 0);
    boolean saveAsSearchFolderActionEnabled = folderActionsEnabled && !isFolder && !hasCode;
    boolean saveAsAppFolderActionEnabled = folderActionsEnabled && !isFolder && !hasCode && userCanEditGlobalAppFolder;
    saveAction.setEnabled(saveActionEnabled);
    saveAsAction.setEnabled(saveAsActionEnabled);
    editAction.setEnabled(editActionEnabled);
    removeAction.setEnabled(removeActionEnabled);
    makeDefaultAction.setEnabled(makeDefaultActionEnabled);
    pinAppliedAction.setEnabled(pinAppliedActionEnabled);
    saveAsSearchFolderAction.setEnabled(saveAsSearchFolderActionEnabled);
    saveAsAppFolderAction.setEnabled(saveAsAppFolderActionEnabled);
    if (filterHelper.isTableActionsEnabled()) {
        fillTableActions();
    }
}
/**
 * Shows or hides filter actions: folder actions follow
 * {@code folderActionsEnabled}, CRUD actions follow {@code editable}.
 */
protected void setFilterActionsVisible() {
    saveAsSearchFolderAction.setVisible(folderActionsEnabled);
    saveAsAppFolderAction.setVisible(folderActionsEnabled);
    for (AbstractAction crudAction : Arrays.asList(saveAction, saveAsAction, editAction, removeAction)) {
        crudAction.setVisible(editable);
    }
}
/**
 * Instantiates all filter actions and flags them as created so dependent
 * code can rely on the action fields being non-null.
 */
protected void createFilterActions() {
    saveAction = new SaveAction();
    saveAsAction = new SaveAsAction();
    editAction = new EditAction();
    makeDefaultAction = new MakeDefaultAction();
    removeAction = new RemoveAction();
    pinAppliedAction = new PinAppliedAction();
    saveAsAppFolderAction = new SaveAsFolderAction(true);
    saveAsSearchFolderAction = new SaveAsFolderAction(false);
    filterActionsCreated = true;
}
/**
 * @return true if the current user session has the specific permission to edit
 *         global (not user-owned) filters.
 * NOTE(review): method name contains a typo ("uer"); kept as-is because
 * subclasses/callers may reference this protected method.
 */
protected boolean uerCanEditGlobalFilter() {
    return userSessionSource.getUserSession().isSpecificPermitted(GLOBAL_FILTER_PERMISSION);
}
/**
 * @return true if the current user has the specific permission to edit filters.
 * NOTE(review): method name contains a typo ("Filers"); kept as-is because
 * subclasses/callers may reference this protected method.
 */
protected boolean userCanEditFilers() {
    return security.isSpecificPermitted(FILTER_EDIT_PERMISSION);
}
/**
 * Persists the current filter. Plain filters are committed through the data
 * service (replacing the local instance with the committed one); folder-backed
 * filters are saved via their folder. Afterwards the "initial state" snapshot
 * and action states are refreshed.
 */
protected void saveFilterEntity() {
    // remembered before commit and restored on the committed instance afterwards;
    // presumably these attributes are not carried through commit — TODO confirm
    Boolean isDefault = filterEntity.getIsDefault();
    Boolean applyDefault = filterEntity.getApplyDefault();
    if (filterEntity.getFolder() == null) {
        CommitContext ctx = new CommitContext(Collections.singletonList(filterEntity));
        Set<Entity> result = dataService.commit(ctx);
        FilterEntity savedFilterEntity = (FilterEntity) result.iterator().next();
        // swap the stale instance for the committed one in the local list
        filterEntities.remove(filterEntity);
        filterEntity = savedFilterEntity;
        filterEntities.add(filterEntity);
        filterEntity.setApplyDefault(applyDefault);
        filterEntity.setIsDefault(isDefault);
    } else {
        // folder-backed filter: persist through the folder instead
        filterEntity.getFolder().setName(filterEntity.getName());
        filterEntity.getFolder().setFilterXml(filterEntity.getXml());
        AbstractSearchFolder folder = saveFolder(filterEntity.getFolder());
        filterEntity.setFolder(folder);
    }
    saveInitialFilterState();
    setFilterActionsEnabled();
    updateFilterModifiedIndicator();
}
/**
 * Delegates folder persistence to the client-specific {@code FilterHelper}.
 *
 * @return the saved folder instance, or null (per the helper's contract)
 */
@Nullable
protected AbstractSearchFolder saveFolder(AbstractSearchFolder folder) {
    return filterHelper.saveFolder(folder);
}
/**
 * Saves the current filter as a search folder or an application folder and
 * opens the folder edit window. The folder name comes from the filter name
 * (empty for the ad-hoc filter) or, for coded filters, from the localized
 * message for the code. Search folders are additionally bound to a user:
 * the filter's owner when the current user may edit global filters, otherwise
 * the current (or substituted) user.
 *
 * @param isAppFolder true to create an {@link AppFolder}, false for a {@link SearchFolder}
 */
protected void saveAsFolder(boolean isAppFolder) {
    final AbstractSearchFolder folder = isAppFolder
            ? metadata.create(AppFolder.class)
            : metadata.create(SearchFolder.class);
    if (filterEntity.getCode() == null) {
        // the ad-hoc filter has no meaningful name yet
        String folderName = filterEntity != adHocFilter ? filterEntity.getName() : "";
        folder.setName(folderName);
        folder.setTabName(folderName);
    } else {
        String name = messages.getMainMessage(filterEntity.getCode());
        folder.setName(name);
        folder.setTabName(name);
    }
    String newXml = filterParser.getXml(conditions, Param.ValueProperty.VALUE);
    folder.setFilterComponentId(filterEntity.getComponentId());
    folder.setFilterXml(newXml);
    if (!isAppFolder) {
        if (uerCanEditGlobalFilter())
            ((SearchFolder) folder).setUser(filterEntity.getUser());
        else
            ((SearchFolder) folder).setUser(userSessionSource.getUserSession().getCurrentOrSubstitutedUser());
    }
    Presentations presentations;
    // instanceof is null-safe, so the previous explicit null check was redundant
    if (applyTo instanceof Component.HasPresentations) {
        final Component.HasPresentations presentationsOwner = (Component.HasPresentations) applyTo;
        presentations = presentationsOwner.isUsePresentations()
                ? presentationsOwner.getPresentations() : null;
    } else {
        presentations = null;
    }
    // The original if/else produced two byte-identical lambdas for app and
    // search folders; a single handler is sufficient.
    Runnable commitHandler = () -> {
        AbstractSearchFolder savedFolder = saveFolder(folder);
        filterEntity.setFolder(savedFolder);
    };
    filterHelper.openFolderEditWindow(isAppFolder, folder, presentations, commitHandler);
}
/**
 * Removes all components from conditionsLayout and fills it with components for editing filter conditions
 *
 * @param conditionsFocusType where to set focus (first condition, last condition, no focus)
 */
protected void fillConditionsLayout(ConditionsFocusType conditionsFocusType) {
    // spacing is re-enabled below only if at least one condition component was created
    layout.setSpacing(false);
    for (Component component : conditionsLayout.getComponents()) {
        conditionsLayout.remove(component);
    }
    // reset; recursivelyCreateConditionsLayout assigns the component to focus
    paramEditComponentToFocus = null;
    recursivelyCreateConditionsLayout(conditionsFocusType, false, conditions.getRootNodes(), conditionsLayout, 0);
    if (!conditionsLayout.getComponents().isEmpty()) layout.setSpacing(true);
}
/**
 * Builds a grid of condition editors for the given nodes and adds it to {@code parentContainer}.
 * Group conditions are rendered as group boxes spanning a full grid row and are filled
 * recursively; leaf conditions get a label/operation cell plus a parameter edit cell.
 * Also tracks the first and last created ParamEditor so focus can be assigned afterwards.
 *
 * @param conditionsFocusType which editor to focus (first, last, none)
 * @param initialFocusSet     whether focus has already been assigned by an outer call
 * @param nodes               condition nodes to render
 * @param parentContainer     container receiving the created grid (may be null)
 * @param level               nesting depth; 0 means top level
 */
protected void recursivelyCreateConditionsLayout(ConditionsFocusType conditionsFocusType,
boolean initialFocusSet,
List<Node<AbstractCondition>> nodes,
Component.Container parentContainer,
int level) {
List<Node<AbstractCondition>> visibleConditionNodes = fetchVisibleNodes(nodes);
if (visibleConditionNodes.isEmpty()) {
// nothing visible at top level: drop the controls border as well
if (level == 0)
controlsLayout.setStyleName("filter-control-no-border");
return;
}
//note that this is not grid columns count, but number of conditions (label cell + value cell) in one row
int conditionsCount = getColumnsCount();
int row = 0;
int nextColumnStart = 0;
GridLayout grid = componentsFactory.createComponent(GridLayout.class);
// each condition occupies two grid columns: label cell + edit cell
grid.setColumns(conditionsCount * 2);
//set expand ratio only for cells with param edit components
for (int i = 0; i < conditionsCount; i++) {
grid.setColumnExpandRatio(i * 2 + 1, 1);
}
grid.setRows(1);
grid.setSpacing(true);
grid.setWidth("100%");
ParamEditor firstParamEditor = null;
ParamEditor lastParamEditor = null;
boolean currentFocusSet = initialFocusSet;
for (int i = 0; i < visibleConditionNodes.size(); i++) {
Node<AbstractCondition> node = visibleConditionNodes.get(i);
final AbstractCondition condition = node.getData();
BoxLayout labelAndOperationCellContent = null;
Component paramEditComponentCellContent = null;
Component groupCellContent = null;
if (condition.isGroup()) {
groupCellContent = createGroupConditionBox(condition, node, conditionsFocusType, currentFocusSet, level);
level++;
} else {
if (condition.getParam().getJavaClass() != null) {
// condition with an editable parameter: remember first/last editor for focusing
ParamEditor paramEditor = createParamEditor(condition);
if (firstParamEditor == null) firstParamEditor = paramEditor;
lastParamEditor = paramEditor;
currentFocusSet = true;
labelAndOperationCellContent = paramEditor.getLabelAndOperationLayout();
paramEditComponentCellContent = paramEditor.getParamEditComponentLayout();
} else {
// parameterless condition: only an empty label/operation layout is created
BoxLayout paramLayout = componentsFactory.createComponent(HBoxLayout.class);
paramLayout.setSpacing(true);
paramLayout.setMargin(false);
labelAndOperationCellContent = paramLayout;
}
}
//groupBox for group conditions must occupy the whole line in conditions grid
Integer conditionWidth = condition.isGroup() ? (Integer)conditionsCount : condition.getWidth();
int nextColumnEnd = nextColumnStart + conditionWidth - 1;
if (nextColumnEnd >= conditionsCount) {
//complete current row in grid with gaps if next cell will be on next row
completeGridRowWithGaps(grid, row, nextColumnStart, false);
//place cell to next row in grid
nextColumnStart = 0;
nextColumnEnd = conditionWidth - 1;
row++;
grid.setRows(row + 1);
}
if (groupCellContent != null) {
grid.add(groupCellContent, nextColumnStart * 2, row, nextColumnEnd * 2 + 1, row);
}
if (labelAndOperationCellContent != null) {
grid.add(labelAndOperationCellContent, nextColumnStart * 2, row, nextColumnStart * 2, row);
if (nextColumnStart != 0)
labelAndOperationCellContent.setMargin(false, false, false, true);
}
if (paramEditComponentCellContent != null) {
paramEditComponentCellContent.setAlignment(Component.Alignment.MIDDLE_LEFT);
grid.add(paramEditComponentCellContent, nextColumnStart * 2 + 1, row, nextColumnEnd * 2 + 1, row);
}
nextColumnStart = nextColumnEnd + 1;
//add next row if necessary
if (i < visibleConditionNodes.size() - 1) {
if (nextColumnStart >= conditionsCount) {
nextColumnStart = 0;
row++;
grid.setRows(row + 1);
}
}
}
// assign focus target only if no nested call has done so already
if (!initialFocusSet) {
switch (conditionsFocusType) {
case FIRST:
if (firstParamEditor != null)
paramEditComponentToFocus = firstParamEditor;
break;
case LAST:
if (lastParamEditor != null)
paramEditComponentToFocus = lastParamEditor;
}
}
//complete last row in grid with gaps
completeGridRowWithGaps(grid, row, nextColumnStart, true);
if (parentContainer != null) {
parentContainer.add(grid);
}
if (level == 0)
controlsLayout.setStyleName(getControlsLayoutStyleName());
}
/**
 * Filters the given condition nodes, returning only those that are not hidden.
 *
 * @param nodes condition nodes to filter
 * @return a new list containing the visible (non-hidden) nodes
 */
protected List<Node<AbstractCondition>> fetchVisibleNodes(List<Node<AbstractCondition>> nodes) {
    List<Node<AbstractCondition>> visible = new ArrayList<>();
    for (Node<AbstractCondition> conditionNode : nodes) {
        if (!conditionNode.getData().getHidden()) {
            visible.add(conditionNode);
        }
    }
    return visible;
}
/**
 * Creates a captioned group box for a group condition and recursively fills it
 * with layouts for the group's child conditions.
 *
 * @return the created group box component
 */
protected Component createGroupConditionBox(AbstractCondition condition, Node<AbstractCondition> node, ConditionsFocusType conditionsFocusType, boolean focusSet, int level) {
    GroupBoxLayout conditionGroupBox = componentsFactory.createComponent(GroupBoxLayout.class);
    conditionGroupBox.setWidth("100%");
    conditionGroupBox.setCaption(condition.getLocCaption());
    List<Node<AbstractCondition>> children = node.getChildren();
    if (!children.isEmpty()) {
        recursivelyCreateConditionsLayout(conditionsFocusType, focusSet, children, conditionGroupBox, level);
    }
    return conditionGroupBox;
}
/**
 * Creates a ParamEditor for the given condition and wires its remove action.
 * Conditions present in {@code initialConditions} cannot be removed, so both
 * the remove capability and the action's visibility are disabled for them.
 */
protected ParamEditor createParamEditor(final AbstractCondition condition) {
boolean conditionRemoveEnabled = !initialConditions.contains(condition);
ParamEditor paramEditor = new ParamEditor(condition, conditionRemoveEnabled, isEditable() && userCanEditFilers());
AbstractAction removeConditionAction = new AbstractAction("") {
@Override
public void actionPerform(Component component) {
// removing a condition requires a full rebuild of the conditions layout
conditions.removeCondition(condition);
fillConditionsLayout(ConditionsFocusType.NONE);
updateFilterModifiedIndicator();
}
};
removeConditionAction.setVisible(conditionRemoveEnabled);
paramEditor.setRemoveButtonAction(removeConditionAction);
return paramEditor;
}
/**
 * Adds empty containers to grid row. If not to complete the row with gaps then in case of grid with one element (element width = 1)
 * this element will occupy 100% of grid width, but expected behaviour is to occupy 1/3 of grid width
 */
protected void completeGridRowWithGaps(GridLayout grid, int row, int startColumn, boolean lastRow) {
// NOTE(review): the lastRow parameter is not used by this implementation
for (int i = startColumn * 2; i < grid.getColumns(); i++) {
Component gap = componentsFactory.createComponent(Label.class);
gap.setWidth("100%");
grid.add(gap, i, row);
}
}
/**
 * Selects the style name for the controls layout: a border on the side that
 * touches the conditions block when conditions are visible, otherwise no border.
 */
protected String getControlsLayoutStyleName() {
    if (conditionsLayout.isVisible() && !conditionsLayout.getComponents().isEmpty()) {
        if (CONDITIONS_LOCATION_TOP.equals(conditionsLocation)) {
            return "filter-control-with-top-border";
        }
        return "filter-control-with-bottom-border";
    }
    return "filter-control-no-border";
}
/**
 * Returns true if the current filter differs from its initial state: either one
 * of its properties (name, code, user) changed, or its conditions XML changed.
 * For folder-backed filters the XML is compared by parameter VALUE, otherwise
 * by DEFAULT_VALUE.
 */
protected boolean isFilterModified() {
boolean filterPropertiesModified =
!Objects.equals(initialFilterEntity.getName(), filterEntity.getName()) ||
!Objects.equals(initialFilterEntity.getCode(), filterEntity.getCode()) ||
!Objects.equals(initialFilterEntity.getUser(), filterEntity.getUser());
if (filterPropertiesModified) return true;
String filterXml = filterEntity.getFolder() == null ? filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE)
: filterParser.getXml(conditions, Param.ValueProperty.VALUE);
return !StringUtils.equals(filterXml, initialFilterEntity.getXml());
}
/**
 * Refreshes the "modified" state of the filter UI: enables the save action when
 * saving is possible and the filter was modified, and appends/removes the
 * modified-indicator symbol on the groupbox caption accordingly.
 */
protected void updateFilterModifiedIndicator() {
    boolean filterModified = isFilterModified();
    saveAction.setEnabled(filterSavingPossible && filterModified);
    // Guard against a null caption (the original code would NPE on endsWith)
    String currentCaption = groupBoxLayout.getCaption() == null ? "" : groupBoxLayout.getCaption();
    if (filterModified && !currentCaption.endsWith(MODIFIED_INDICATOR_SYMBOL)) {
        groupBoxLayout.setCaption(currentCaption + MODIFIED_INDICATOR_SYMBOL);
    }
    if (!filterModified && currentCaption.endsWith(MODIFIED_INDICATOR_SYMBOL)) {
        // strip the trailing indicator symbol
        groupBoxLayout.setCaption(currentCaption.substring(0, currentCaption.length() - 1));
    }
}
/**
 * Load filter entities from database and saves them in {@code filterEntities} collection.
 * Loads filters for this filter component that belong to the current (or substituted)
 * user or to no user at all (global filters), ordered by name.
 */
protected void loadFilterEntities() {
LoadContext<FilterEntity> ctx = LoadContext.create(FilterEntity.class);
ctx.setView("app");
ctx.setQueryString("select f from sec$Filter f left join f.user u " +
"where f.componentId = :component and (u.id = :userId or u is null) order by f.name")
.setParameter("component", ComponentsHelper.getFilterComponentPath(filter))
.setParameter("userId", userSessionSource.getUserSession().getCurrentOrSubstitutedUser().getId());
filterEntities = new ArrayList<>(dataService.loadList(ctx));
}
/**
 * Determines which filter should be applied by default. First looks for a window
 * parameter named after this filter component containing a filter code; if none
 * matches, reads the "defaultFilter" element from the window settings and looks
 * up the filter by its UUID, marking it as default.
 *
 * @param filters candidate filter entities
 * @return the default filter, or null if none is configured
 */
protected FilterEntity getDefaultFilter(List<FilterEntity> filters) {
Window window = ComponentsHelper.getWindow(filter);
// First check if there is parameter with name equal to this filter component id, containing a filter code to apply
Map<String, Object> params = filter.getFrame().getContext().getParams();
String code = (String) params.get(filter.getId());
if (!StringUtils.isBlank(code)) {
for (FilterEntity filter : filters) {
if (code.equals(filter.getCode()))
return filter;
}
}
// No 'filter' parameter found, load default filter
SettingsImpl settings = new SettingsImpl(window.getId());
String componentPath = ComponentsHelper.getFilterComponentPath(filter);
String[] strings = ValuePathHelper.parse(componentPath);
// drop the first path element (the window id) to get the component-local name
String name = ValuePathHelper.format((String[]) ArrayUtils.subarray(strings, 1, strings.length));
Element e = settings.get(name).element("defaultFilter");
if (e != null) {
String defIdStr = e.attributeValue("id");
Boolean applyDefault = Boolean.valueOf(e.attributeValue("applyDefault"));
if (!StringUtils.isBlank(defIdStr)) {
UUID defaultId = null;
try {
defaultId = UUID.fromString(defIdStr);
} catch (IllegalArgumentException ex) {
// malformed UUID in settings — treat as if no default is configured
}
if (defaultId != null) {
for (FilterEntity filter : filters) {
if (defaultId.equals(filter.getId())) {
filter.setIsDefault(true);
filter.setApplyDefault(applyDefault);
return filter;
}
}
}
}
}
return null;
}
/**
 * Rebuilds the filters popup button by clearing its actions and re-adding them.
 */
protected void initFiltersPopupButton() {
filtersPopupButton.removeAllActions();
addFiltersPopupActions();
}
/**
 * Populates the filters popup button: a reset action, then up to
 * {@code genericFilterPopupListSize} saved filters sorted alphabetically by
 * caption, and a "show more" action when there are more filters than fit.
 */
protected void addFiltersPopupActions() {
    addResetFilterAction(filtersPopupButton);
    // sort alphabetically by the caption shown to the user
    filterEntities.sort(Comparator.comparing(this::getFilterCaption));
    int popupListSize = clientConfig.getGenericFilterPopupListSize();
    Iterator<FilterEntity> it = filterEntities.iterator();
    int addedEntitiesCount = 0;
    while (it.hasNext() && addedEntitiesCount < popupListSize) {
        final FilterEntity fe = it.next();
        addSetFilterEntityAction(filtersPopupButton, fe);
        addedEntitiesCount++;
    }
    if (filterEntities.size() > popupListSize) {
        addShowMoreFilterEntitiesAction(filtersPopupButton);
    }
}
/**
 * Adds an action to the popup button that switches to the given filter entity
 * (no-op if it is already the current one). The caption is the filter's display caption.
 */
protected void addSetFilterEntityAction(PopupButton popupButton, final FilterEntity fe) {
popupButton.addAction(new AbstractAction("setEntity" + fe.getId()) {
@Override
public void actionPerform(Component component) {
if (fe != filterEntity) {
setFilterEntity(fe);
}
}
@Override
public String getCaption() {
return getFilterCaption(fe);
}
});
}
/**
 * Adds a "show more" action that opens the filter selection dialog; if the
 * dialog is committed, the selected filter entity becomes current.
 */
protected void addShowMoreFilterEntitiesAction(PopupButton popupButton) {
popupButton.addAction(new AbstractAction("showMoreFilterEntities") {
@Override
public void actionPerform(Component component) {
WindowInfo windowInfo = windowConfig.getWindowInfo("filterSelect");
FilterSelectWindow window = (FilterSelectWindow) windowManager.openWindow(windowInfo,
WindowManager.OpenType.DIALOG,
ParamsMap.of("filterEntities", filterEntities));
window.addCloseListener(actionId -> {
if (Window.COMMIT_ACTION_ID.equals(actionId)) {
FilterEntity selectedEntity = window.getFilterEntity();
setFilterEntity(selectedEntity);
}
});
}
@Override
public String getCaption() {
return formatMainMessage("filter.showMore", filterEntities.size());
}
});
}
/**
 * Adds a "reset filter" action that clears the conditions tree and switches
 * back to the ad-hoc filter.
 */
protected void addResetFilterAction(PopupButton popupButton) {
popupButton.addAction(new AbstractAction("resetFilter") {
@Override
public void actionPerform(Component component) {
conditions = new ConditionsTree();
setFilterEntity(adHocFilter);
}
@Override
public String getCaption() {
return getMainMessage("filter.resetFilter");
}
});
}
/**
 * Rebuilds the filters lookup field. Options are the ad-hoc filter followed by
 * all saved filters; the caption of the default filter is suffixed with a
 * "default" marker. The lookup's value-change listener is disabled while the
 * option list is being swapped.
 */
protected void initFiltersLookup() {
    Map<Object, String> captions = new LinkedHashMap<>();
    for (FilterEntity entity : filterEntities) {
        String entityCaption = getFilterCaption(entity);
        if (entity.getIsDefault()) {
            entityCaption += " " + getMainMessage("filter.default");
        }
        captions.put(entity, entityCaption);
    }
    captions.put(adHocFilter, getFilterCaption(adHocFilter));

    filtersLookupListenerEnabled = false;
    //set null to remove previous value from lookup options list
    filtersLookup.setValue(null);
    List<Object> options = new ArrayList<>();
    options.add(adHocFilter);
    options.addAll(filterEntities);
    filtersLookup.setOptionsList(options);
    filterHelper.setLookupCaptions(filtersLookup, captions);
    filtersLookup.setValue(filterEntity);
    filtersLookupListenerEnabled = true;
}
/**
 * Re-initializes whichever filter selection widgets are currently displayed
 * (popup button and/or lookup field).
 */
protected void initFilterSelectComponents() {
    if (filtersPopupDisplayed)
        initFiltersPopupButton();
    if (filtersLookupDisplayed)
        initFiltersLookup();
}
/**
 * Creates the transient ad-hoc filter entity: empty conditions XML, bound to this
 * filter component and the current (or substituted) user, with a localized name.
 */
protected void initAdHocFilter() {
adHocFilter = metadata.create(FilterEntity.class);
String emptyXml = filterParser.getXml(new ConditionsTree(), Param.ValueProperty.VALUE);
adHocFilter.setXml(emptyXml);
adHocFilter.setComponentId(ComponentsHelper.getFilterComponentPath(filter));
adHocFilter.setUser(userSessionSource.getUserSession().getCurrentOrSubstitutedUser());
adHocFilter.setName(getMainMessage("filter.adHocFilter"));
}
/**
 * Appends the last applied filter to the "applied filters" area: a row with the
 * filter's text and a remove link button. The remove button of the previously
 * last row is taken away so only the newest applied filter can be removed.
 * Does nothing if no filter was applied or the same filter is already last.
 */
protected void addAppliedFilter() {
if (lastAppliedFilter == null)
return;
if (!appliedFilters.isEmpty() && appliedFilters.getLast().filter.equals(lastAppliedFilter))
return;
this.layout.add(appliedFiltersLayout, CONDITIONS_LOCATION_TOP.equals(conditionsLocation) ? 0 : 1);
// note: this local 'layout' shadows the instance field (referenced above as this.layout)
BoxLayout layout = componentsFactory.createComponent(HBoxLayout.class);
layout.setSpacing(true);
if (!appliedFilters.isEmpty()) {
// only the most recent applied filter keeps its remove button
AppliedFilterHolder holder = appliedFilters.getLast();
holder.layout.remove(holder.button);
}
Label label = componentsFactory.createComponent(Label.class);
label.setValue(lastAppliedFilter.getText());
layout.add(label);
label.setAlignment(Component.Alignment.MIDDLE_LEFT);
LinkButton button = componentsFactory.createComponent(LinkButton.class);
button.setIcon("icons/item-remove.png");
button.setAction(new AbstractAction("") {
@Override
public void actionPerform(Component component) {
removeAppliedFilter();
}
});
layout.add(button);
addAppliedFilterLayoutHook(layout);
appliedFiltersLayout.add(layout);
appliedFilters.add(new AppliedFilterHolder(lastAppliedFilter, layout, button));
}
/**
 * Extension hook invoked after an applied-filter row is built; subclasses may
 * add extra components to the row. Default implementation does nothing.
 */
protected void addAppliedFilterLayoutHook(Component.Container layout) {
//nothing
}
/**
 * Removes the last applied filter. With a single applied filter it is removed
 * immediately and its pinned query is dropped; with several, the user is asked
 * for confirmation and, on YES, all applied filters are removed at once.
 */
protected void removeAppliedFilter() {
if (!appliedFilters.isEmpty()) {
if (appliedFilters.size() == 1) {
AppliedFilterHolder holder = appliedFilters.removeLast();
appliedFiltersLayout.remove(holder.layout);
((CollectionDatasource.SupportsApplyToSelected) datasource).unpinAllQuery();
this.layout.remove(appliedFiltersLayout);
} else {
windowManager.showOptionDialog(messages.getMainMessage("removeApplied.title"),
messages.getMainMessage("removeApplied.message"), Frame.MessageType.WARNING,
new Action[]{
new DialogAction(Type.YES) {
@Override
public void actionPerform(Component component) {
for (AppliedFilterHolder holder : appliedFilters) {
appliedFiltersLayout.remove(holder.layout);
FilterDelegateImpl.this.layout.remove(appliedFiltersLayout);
}
appliedFilters.clear();
((CollectionDatasource.SupportsApplyToSelected) datasource).unpinAllQuery();
}
},
new DialogAction(Type.NO, Status.PRIMARY)
});
}
}
}
/**
 * Builds the display caption for a filter entity. Coded filters resolve their
 * name from the message pack; folder-backed filters prefer the folder's tab
 * name, then its name, and are prefixed with a "set" or "folder" marker.
 * Returns an empty string for a null entity.
 */
protected String getFilterCaption(FilterEntity filterEntity) {
String name;
if (filterEntity != null) {
if (filterEntity.getCode() == null)
name = InstanceUtils.getInstanceName(filterEntity);
else {
name = messages.getMainMessage(filterEntity.getCode());
}
AbstractSearchFolder folder = filterEntity.getFolder();
if (folder != null) {
if (!StringUtils.isBlank(folder.getTabName()))
name = messages.getMainMessage(folder.getTabName());
else if (!StringUtils.isBlank(folder.getName())) {
name = messages.getMainMessage(folder.getName());
}
if (BooleanUtils.isTrue(filterEntity.getIsSet()))
name = getMainMessage("filter.setPrefix") + " " + name;
else
name = getMainMessage("filter.folderPrefix") + " " + name;
}
} else
name = "";
return name;
}
// Convenience wrapper: formats a message from the main message pack.
protected String formatMainMessage(String key, Object... params) {
return messages.formatMainMessage(key, params);
}
// Convenience wrapper: fetches a message from the main message pack.
protected String getMainMessage(String key) {
return messages.getMainMessage(key);
}
/**
 * Returns the root layout of the filter component (the enclosing groupbox).
 */
@Override
public Component.Container getLayout() {
return groupBoxLayout;
}
/**
 * Binds the filter to a datasource. Remembers the datasource's own query filter
 * so it can be merged with the filter's conditions later. When manual apply is
 * required, installs a denying filter so the datasource is empty until the user
 * explicitly applies the filter. Also configures maxResults handling and hides
 * the FTS switch when full-text search is unavailable.
 */
@Override
public void setDatasource(CollectionDatasource datasource) {
this.datasource = datasource;
this.dsQueryFilter = datasource.getQueryFilter();
if (getResultingManualApplyRequired()) {
// set initial denying condition to get empty datasource before explicit filter applying
QueryFilter queryFilter = new QueryFilter(new DenyingClause(), datasource.getMetaClass().getName());
if (dsQueryFilter != null) {
queryFilter = QueryFilter.merge(dsQueryFilter, queryFilter);
}
datasource.setQueryFilter(queryFilter);
}
// lazy and hierarchical datasources do not support maxResults limiting
if (datasource instanceof CollectionDatasource.Lazy || datasource instanceof HierarchicalDatasource) {
setUseMaxResults(false);
} else if (useMaxResults) {
initMaxResults();
}
if (!isFtsModeEnabled()) {
controlsLayout.remove(ftsSwitch);
}
}
/**
 * Returns the datasource this filter is bound to.
 */
@Override
public CollectionDatasource getDatasource() {
return datasource;
}
/**
 * Initializes the maxResults value: uses the explicitly set value if present,
 * otherwise the datasource's; falls back to the persistence manager's fetchUI
 * default when unset or equal to maxFetchUI. Syncs the value into the
 * maxResults field (extending lookup options if needed) and the datasource.
 */
protected void initMaxResults() {
int maxResults;
if (this.maxResults != -1) {
maxResults = this.maxResults;
} else {
maxResults = datasource.getMaxResults();
}
if (maxResults == 0 || maxResults == persistenceManager.getMaxFetchUI(datasource.getMetaClass().getName())) {
maxResults = persistenceManager.getFetchUI(datasource.getMetaClass().getName());
}
if (maxResultsAddedToLayout) {
if (!textMaxResults) {
// lookup variant: make sure the current value is among the options
List<Integer> optionsList = ((LookupField) maxResultsField).getOptionsList();
if (!optionsList.contains(maxResults)) {
ArrayList<Integer> newOptions = new ArrayList<>(optionsList);
newOptions.add(maxResults);
Collections.sort(newOptions);
((LookupField) maxResultsField).setOptionsList(newOptions);
}
}
maxResultsField.setValue(maxResults);
}
datasource.setMaxResults(maxResults);
}
/**
 * Full-text search mode is available only when FTS is enabled in config, the
 * FTS helper bean is present, a datasource is set, and its entity is indexed.
 */
protected boolean isFtsModeEnabled() {
return ftsConfig.getEnabled()
&& ftsFilterHelper != null
&& datasource != null
&& ftsFilterHelper.isEntityIndexed(datasource.getMetaClass().getName());
}
// Returns the explicitly set maxResults value (-1 means "not set").
@Override
public int getMaxResults() {
return maxResults;
}
// Sets maxResults explicitly and re-syncs the field and datasource.
@Override
public void setMaxResults(int maxResults) {
this.maxResults = maxResults;
initMaxResults();
}
// Enables/disables the maxResults control and updates its visibility.
@Override
public void setUseMaxResults(boolean useMaxResults) {
this.useMaxResults = useMaxResults;
if (maxResultsLayout != null)
maxResultsLayout.setVisible(isMaxResultsLayoutVisible());
}
// The maxResults control is visible only when enabled, permitted and added to the layout.
protected boolean isMaxResultsLayoutVisible() {
return useMaxResults && security.isSpecificPermitted("cuba.gui.filter.maxResults") && maxResultsAddedToLayout;
}
@Override
public boolean getUseMaxResults() {
return useMaxResults;
}
/**
 * Switches the maxResults control between a text field and a lookup field.
 * Only swaps the component when the mode actually changes and the control
 * has been added to the layout.
 */
@Override
public void setTextMaxResults(boolean textMaxResults) {
boolean valueChanged = this.textMaxResults != textMaxResults;
this.textMaxResults = textMaxResults;
if (maxResultsAddedToLayout && valueChanged) {
maxResultsLayout.remove(maxResultsField);
maxResultsField = textMaxResults ? maxResultsTextField : maxResultsLookupField;
maxResultsLayout.add(maxResultsField);
}
}
@Override
public boolean getTextMaxResults() {
return textMaxResults;
}
/**
 * Applies the current filter to the datasource. Optionally validates that at
 * least one condition is filled (when generic filter checking is on) and that
 * all required conditions have values, showing notifications unless this is a
 * newly opened window. On success, merges the conditions into the datasource
 * query filter, applies maxResults and refreshes the datasource.
 *
 * @param isNewWindow true when invoked for a freshly opened window (suppresses notifications)
 * @return true if the filter was applied, false if validation failed
 */
@Override
public boolean apply(boolean isNewWindow) {
if (clientConfig.getGenericFilterChecking()) {
if (filterEntity != null && conditions.getRoots().size() > 0) {
boolean haveCorrectCondition = hasCorrectCondition();
if (!haveCorrectCondition) {
if (!isNewWindow) {
windowManager.showNotification(messages.getMainMessage("filter.emptyConditions"),
Frame.NotificationType.HUMANIZED);
}
return false;
}
}
}
// remember what is being applied so it can be shown in the applied-filters area
if (filterEntity != null) {
lastAppliedFilter = new AppliedFilter(filterEntity, conditions);
} else {
lastAppliedFilter = null;
}
if (filterEntity != null) {
boolean haveRequiredConditions = haveFilledRequiredConditions();
if (!haveRequiredConditions) {
if (!isNewWindow) {
windowManager.showNotification(messages.getMainMessage("filter.emptyRequiredConditions"),
Frame.NotificationType.HUMANIZED);
}
return false;
}
setFilterActionsEnabled();
}
applyDatasourceFilter();
initDatasourceMaxResults();
refreshDatasource();
// applying a regular filter clears any FTS hit tooltips on the bound table
if ((applyTo != null) && (Table.class.isAssignableFrom(applyTo.getClass()))) {
filterHelper.removeTableFtsTooltips((Table) applyTo);
}
return true;
}
/**
 * Applies a full-text search: runs the search for the entered term, replaces
 * the conditions tree with a single FTS condition referencing the search
 * result's query key, installs hit tooltips on the bound table, and refreshes
 * the datasource with the session/query-key parameters.
 */
protected void applyFts() {
if (ftsFilterHelper == null)
return;
String searchTerm = ftsSearchCriteriaField.getValue();
if (Strings.isNullOrEmpty(searchTerm) && clientConfig.getGenericFilterChecking()) {
windowManager.showNotification(getMainMessage("filter.fillSearchCondition"), Frame.NotificationType.TRAY);
return;
}
Map<String, Object> params = new HashMap<>();
if (!Strings.isNullOrEmpty(searchTerm)) {
FtsFilterHelper.FtsSearchResult ftsSearchResult = ftsFilterHelper.search(searchTerm, datasource.getMetaClass().getName());
int queryKey = ftsSearchResult.getQueryKey();
params.put("sessionId", userSessionSource.getUserSession().getId());
params.put("queryKey", queryKey);
CustomCondition ftsCondition = ftsFilterHelper.createFtsCondition(queryKey);
// the FTS condition replaces the whole conditions tree
conditions = new ConditionsTree();
conditions.getRootNodes().add(new Node<>(ftsCondition));
if ((applyTo != null) && (Table.class.isAssignableFrom(applyTo.getClass()))) {
filterHelper.initTableFtsTooltips((Table) applyTo, ftsSearchResult.getHitInfos());
}
} else if ((applyTo != null) && (Table.class.isAssignableFrom(applyTo.getClass()))) {
// empty term: clear any previous hit tooltips
filterHelper.initTableFtsTooltips((Table) applyTo, Collections.<UUID, String>emptyMap());
}
applyDatasourceFilter();
initDatasourceMaxResults();
datasource.refresh(params);
}
/**
 * The method is invoked before search. It sets datasource {@code maxResults} value based on the maxResults
 * field (if visible) or on maxFetchUI value.
 * Method also resets the datasource {@code firstResult} value
 *
 * @throws DevelopmentException if no datasource has been set on this filter
 */
protected void initDatasourceMaxResults() {
if (datasource == null) {
throw new DevelopmentException("Filter datasource is not set");
}
if (this.maxResults != -1) {
// an explicitly configured maxResults takes precedence over the field value
datasource.setMaxResults(maxResults);
} else if (maxResultsAddedToLayout && useMaxResults) {
Integer maxResults = maxResultsField.getValue();
if (maxResults != null && maxResults > 0) {
datasource.setMaxResults(maxResults);
} else {
datasource.setMaxResults(persistenceManager.getMaxFetchUI(datasource.getMetaClass().getName()));
}
}
// start a new search from the first page
if (datasource instanceof CollectionDatasource.SupportsPaging) {
((CollectionDatasource.SupportsPaging) datasource).setFirstResult(0);
}
}
/**
 * Converts the current conditions into a query filter and installs it on the
 * datasource, merged with the datasource's original query filter. With no
 * conditions XML the original query filter is restored.
 */
protected void applyDatasourceFilter() {
if (datasource != null) {
String currentFilterXml = filterParser.getXml(conditions, Param.ValueProperty.VALUE);
if (!Strings.isNullOrEmpty(currentFilterXml)) {
Element element = Dom4j.readDocument(currentFilterXml).getRootElement();
QueryFilter queryFilter = new QueryFilter(element, datasource.getMetaClass().getName());
if (dsQueryFilter != null) {
queryFilter = QueryFilter.merge(dsQueryFilter, queryFilter);
}
datasource.setQueryFilter(queryFilter);
} else {
datasource.setQueryFilter(dsQueryFilter);
}
} else {
log.warn("Unable to apply datasource filter with null datasource");
}
}
/**
 * Checks that every required condition with a parameter has a value.
 *
 * @return false if some required condition's parameter value is missing
 */
protected boolean haveFilledRequiredConditions() {
    for (AbstractCondition condition : conditions.toConditionsList()) {
        boolean missingRequiredValue = condition.getRequired()
                && condition.getParam() != null
                && condition.getParam().getValue() == null;
        if (missingRequiredValue) {
            return false;
        }
    }
    return true;
}
/**
 * Returns true if at least one condition is "correct": it either has a
 * parameter with a value set, or it is a hidden custom condition.
 */
protected boolean hasCorrectCondition() {
    for (AbstractCondition condition : conditions.toConditionsList()) {
        if (condition.getParam() != null && condition.getParam().getValue() != null) {
            return true;
        }
        if (condition instanceof CustomCondition && condition.getHidden()) {
            return true;
        }
    }
    return false;
}
/**
 * extenders should be able to modify the datasource
 * before it will be refreshed
 */
protected void refreshDatasource() {
// suspendable datasources are only refreshed when not suspended
if (datasource instanceof CollectionDatasource.Suspendable)
((CollectionDatasource.Suspendable) datasource).refreshIfNotSuspended();
else
datasource.refresh();
}
@Override
public String getCaption() {
return caption;
}
// Stores the caption and propagates it to the enclosing groupbox.
@Override
public void setCaption(String caption) {
this.caption = caption;
groupBoxLayout.setCaption(caption);
}
@Override
public void setManualApplyRequired(Boolean manualApplyRequired) {
this.manualApplyRequired = manualApplyRequired;
}
@Override
public Boolean getManualApplyRequired() {
return manualApplyRequired;
}
// Component-level setting wins; otherwise fall back to the client config default.
protected boolean getResultingManualApplyRequired() {
return manualApplyRequired != null ? manualApplyRequired : clientConfig.getGenericFilterManualApplyRequired();
}
/**
 * Finds a condition parameter whose component id (the part of the parameter
 * name after the last dot) matches the given id, and returns it wrapped in a
 * {@code ParamWrapper}, or null if no parameter matches.
 */
@Override
public Component getOwnComponent(String id) {
    for (AbstractCondition condition : conditions.toConditionsList()) {
        if (condition.getParam() == null) {
            continue;
        }
        String paramName = condition.getParam().getName();
        String componentName = paramName.substring(paramName.lastIndexOf('.') + 1);
        if (id.equals(componentName)) {
            return new ParamWrapper(condition, condition.getParam());
        }
    }
    return null;
}
/**
 * Resolves a subcomponent by id. Only single-segment ids are supported since
 * the filter exposes a flat component namespace.
 *
 * @throws UnsupportedOperationException if the id contains nested path segments
 */
@Override
@Nullable
public Component getComponent(String id) {
String[] elements = ValuePathHelper.parse(id);
if (elements.length == 1) {
return getOwnComponent(id);
} else {
throw new UnsupportedOperationException("Filter contains only one level of subcomponents");
}
}
/**
 * Restores the groupbox expanded state from the saved settings element, if present.
 */
@Override
public void applySettings(Element element) {
Element groupBoxExpandedEl = element.element("groupBoxExpanded");
if (groupBoxExpandedEl != null) {
Boolean expanded = Boolean.valueOf(groupBoxExpandedEl.getText());
groupBoxLayout.setExpanded(expanded);
}
}
/**
 * Persists filter presentation settings into the given settings element:
 * the id of the default filter, its "apply by default" flag and the groupbox
 * expanded state.
 *
 * @return true if any setting actually changed
 */
@Override
public boolean saveSettings(Element element) {
    boolean changed = false;
    Element e = element.element("defaultFilter");
    if (e == null)
        e = element.addElement("defaultFilter");

    // find the filter currently marked as default, if any
    UUID defaultId = null;
    Boolean applyDefault = false;
    for (FilterEntity filter : filterEntities) {
        if (BooleanUtils.isTrue(filter.getIsDefault())) {
            defaultId = filter.getId();
            applyDefault = filter.getApplyDefault();
            break;
        }
    }

    // sync the "id" attribute with the default filter id
    // (java.util.Objects.equals used for consistency with isFilterModified)
    String newDef = defaultId != null ? defaultId.toString() : null;
    Attribute attr = e.attribute("id");
    String oldDef = attr != null ? attr.getValue() : null;
    if (!Objects.equals(oldDef, newDef)) {
        if (newDef == null && attr != null) {
            e.remove(attr);
        } else {
            if (attr == null)
                e.addAttribute("id", newDef);
            else
                attr.setValue(newDef);
        }
        changed = true;
    }

    // sync the "applyDefault" attribute (missing attribute reads as false)
    Boolean newApplyDef = BooleanUtils.isTrue(applyDefault);
    Attribute applyDefaultAttr = e.attribute("applyDefault");
    Boolean oldApplyDef = applyDefaultAttr != null ? Boolean.valueOf(applyDefaultAttr.getValue()) : false;
    if (!Objects.equals(oldApplyDef, newApplyDef)) {
        if (applyDefaultAttr != null) {
            applyDefaultAttr.setValue(newApplyDef.toString());
        } else {
            e.addAttribute("applyDefault", newApplyDef.toString());
        }
        changed = true;
    }

    // sync the groupbox expanded state
    Element groupBoxExpandedEl = element.element("groupBoxExpanded");
    if (groupBoxExpandedEl == null)
        groupBoxExpandedEl = element.addElement("groupBoxExpanded");
    Boolean oldGroupBoxExpandedValue = Boolean.valueOf(groupBoxExpandedEl.getText());
    Boolean newGroupBoxExpandedValue = groupBoxLayout.isExpanded();
    if (!Objects.equals(oldGroupBoxExpandedValue, newGroupBoxExpandedValue)) {
        groupBoxExpandedEl.setText(newGroupBoxExpandedValue.toString());
        changed = true;
    }
    return changed;
}
// The component (usually a table) this filter is applied to.
@Override
public Component getApplyTo() {
return applyTo;
}
@Override
public void setApplyTo(Component applyTo) {
this.applyTo = applyTo;
}
// Controls whether folder-related actions are available on this filter.
@Override
public void setFolderActionsEnabled(boolean folderActionsEnabled) {
this.folderActionsEnabled = folderActionsEnabled;
}
@Override
public boolean isFolderActionsEnabled() {
return folderActionsEnabled;
}
/**
 * Adds actions of 'Entities Set' functionality to Table component
 *
 * Existing set-related actions/buttons are first removed or hidden, then
 * re-created depending on whether the current filter represents a set:
 * for a set, "add to current set" and "remove from current set" are wired;
 * otherwise a single "add to set" action is wired.
 */
protected void fillTableActions() {
Table table;
if ((applyTo != null) && (Table.class.isAssignableFrom(applyTo.getClass()))) {
table = (Table) applyTo;
} else {
return;
}
ButtonsPanel buttons = table.getButtonsPanel();
if (buttons == null) {
return; // in lookup windows, there is no button panel
}
com.haulmont.cuba.gui.components.Button addToSetBtn = (Button) buttons.getComponent("addToSetBtn");
com.haulmont.cuba.gui.components.Button addToCurSetBtn = (Button) buttons.getComponent("addToCurSetBtn");
com.haulmont.cuba.gui.components.Button removeFromCurSetBtn = (Button) buttons.getComponent("removeFromCurSetBtn");
Action addToSet = table.getAction("filter.addToSet");
Action addToCurrSet = table.getAction("filter.addToCurSet");
Action removeFromCurrSet = table.getAction("filter.removeFromCurSet");
// reset: remove existing actions and hide their buttons before re-wiring
if (addToSet != null)
table.removeAction(addToSet);
if (addToSetBtn != null)
addToSetBtn.setVisible(false);
if (addToCurrSet != null) {
table.removeAction(addToCurrSet);
}
if (addToCurSetBtn != null) {
addToCurSetBtn.setVisible(false);
}
if (removeFromCurrSet != null) {
table.removeAction(removeFromCurrSet);
}
if (removeFromCurSetBtn != null) {
removeFromCurSetBtn.setVisible(false);
}
if ((filterEntity != null) && (BooleanUtils.isTrue(filterEntity.getIsSet()))) {
// current filter is a set: offer add-to/remove-from current set
addToCurrSet = new AddToCurrSetAction();
if (addToCurSetBtn == null) {
addToCurSetBtn = componentsFactory.createComponent(Button.class);
addToCurSetBtn.setId("addToCurSetBtn");
addToCurSetBtn.setCaption(getMainMessage("filter.addToCurSet"));
buttons.add(addToCurSetBtn);
} else {
addToCurSetBtn.setVisible(true);
}
if (StringUtils.isEmpty(addToCurSetBtn.getIcon())) {
addToCurSetBtn.setIcon("icons/join-to-set.png");
}
addToCurSetBtn.setAction(addToCurrSet);
table.addAction(addToCurrSet);
removeFromCurrSet = new RemoveFromSetAction(table);
if (removeFromCurSetBtn == null) {
removeFromCurSetBtn = componentsFactory.createComponent(Button.class);
removeFromCurSetBtn.setId("removeFromCurSetBtn");
removeFromCurSetBtn.setCaption(getMainMessage("filter.removeFromCurSet"));
buttons.add(removeFromCurSetBtn);
} else {
removeFromCurSetBtn.setVisible(true);
}
if (StringUtils.isEmpty(removeFromCurSetBtn.getIcon())) {
removeFromCurSetBtn.setIcon("icons/delete-from-set.png");
}
removeFromCurSetBtn.setAction(removeFromCurrSet);
table.addAction(removeFromCurrSet);
} else {
// not a set: offer creating a new set from the selection
addToSet = new AddToSetAction(table);
if (addToSetBtn == null) {
addToSetBtn = componentsFactory.createComponent(Button.class);
addToSetBtn.setId("addToSetBtn");
addToSetBtn.setCaption(getMainMessage("filter.addToSet"));
buttons.add(addToSetBtn);
} else {
addToSetBtn.setVisible(true);
}
if (StringUtils.isEmpty(addToSetBtn.getIcon())) {
addToSetBtn.setIcon("icons/insert-to-set.png");
}
addToSetBtn.setAction(addToSet);
table.addAction(addToSet);
}
}
/**
 * Sets the editable state of the filter. The "add condition" button is shown
 * only when editable and the user is permitted to edit filters.
 */
@Override
public void setEditable(boolean editable) {
this.editable = editable;
addConditionBtn.setVisible(editable && userCanEditFilers());
//do not process actions if method is invoked from filter loader
if (filterActionsCreated && filterEntity != null) {
setFilterActionsEnabled();
setFilterActionsVisible();
}
}
@Override
public boolean isEditable() {
return editable;
}
/**
 * Reads the value of the filter parameter published under the given name,
 * or null if no such parameter exists or it has no value support.
 */
@Override
public Object getParamValue(String paramName) {
    Component paramComponent = getOwnComponent(paramName);
    return (paramComponent instanceof Component.HasValue)
            ? ((Component.HasValue) paramComponent).getValue()
            : null;
}

/**
 * Sets the value of the filter parameter published under the given name;
 * silently does nothing if no such parameter exists.
 */
@Override
public void setParamValue(String paramName, Object value) {
    Component paramComponent = getOwnComponent(paramName);
    if (paramComponent instanceof Component.HasValue) {
        ((Component.HasValue) paramComponent).setValue(value);
    }
}
// Registers a listener notified when the current filter entity changes.
@Override
public void addFilterEntityChangeListener(Filter.FilterEntityChangeListener listener) {
filterEntityChangeListeners.add(listener);
}
@Override
public List<Filter.FilterEntityChangeListener> getFilterEntityChangeListeners() {
return filterEntityChangeListeners;
}
// Component-level columns count wins; otherwise the client config default is used.
@Override
public Integer getColumnsCount() {
return columnsCount != null ? columnsCount : clientConfig.getGenericFilterColumnsCount();
}
@Override
public void setColumnsCount(int columnsCount) {
this.columnsCount = columnsCount;
}
@Override
public boolean isExpanded() {
return groupBoxLayout.isExpanded();
}
@Override
public void setExpanded(boolean expanded) {
groupBoxLayout.setExpanded(expanded);
}
@Override
public boolean isCollapsable() {
    // Delegates to the surrounding group box component.
    return groupBoxLayout.isCollapsable();
}
@Override
public void setCollapsable(boolean collapsable) {
    // Delegates to the surrounding group box component.
    groupBoxLayout.setCollapsable(collapsable);
}
@Override
public void setModeSwitchVisible(boolean modeSwitchVisible) {
    // The FTS/generic mode switch is shown only when both requested here
    // AND full-text-search mode is enabled for this filter.
    this.modeSwitchVisible = modeSwitchVisible;
    ftsSwitch.setVisible(modeSwitchVisible && isFtsModeEnabled());
}
@Override
public void requestFocus() {
    // Filter not initialized yet: remember the request and honor it later,
    // once the filter entity has been set.
    if (filterEntity == null) {
        delayedFocus = true;
        return;
    }
    // Focus priority: first parameter editor, then the filters lookup,
    // then the filters popup button - whichever is present in the layout.
    if (paramEditComponentToFocus != null) {
        requestFocusToParamEditComponent();
    } else if (filtersLookupDisplayed) {
        filtersLookup.requestFocus();
    } else if (filtersPopupDisplayed) {
        filtersPopupButton.requestFocus();
    }
}
// Focuses the pending parameter edit component. Only ParamEditor and TextField
// are handled; other component types are silently ignored.
protected void requestFocusToParamEditComponent() {
    if (paramEditComponentToFocus instanceof ParamEditor) {
        ((ParamEditor) paramEditComponentToFocus).requestFocus();
    } else if (paramEditComponentToFocus instanceof TextField) {
        ((TextField) paramEditComponentToFocus).requestFocus();
    }
}
@Override
public void addExpandedStateChangeListener(FDExpandedStateChangeListener listener) {
    // The listener list is created lazily on first registration;
    // a listener is never registered twice.
    if (expandedStateChangeListeners == null) {
        expandedStateChangeListeners = new ArrayList<>();
        expandedStateChangeListeners.add(listener);
    } else if (!expandedStateChangeListeners.contains(listener)) {
        expandedStateChangeListeners.add(listener);
    }
}
@Override
public void removeExpandedStateChangeListener(FDExpandedStateChangeListener listener) {
    // Nothing to do when no listener was ever registered (lazy list not created).
    if (expandedStateChangeListeners == null) {
        return;
    }
    expandedStateChangeListeners.remove(listener);
}
// Notifies all registered listeners that the expand/collapse state changed.
// A single event instance carrying the current state is shared by all listeners.
protected void fireExpandStateChange() {
    if (expandedStateChangeListeners != null) {
        FDExpandedStateChangeEvent event = new FDExpandedStateChangeEvent(this, isExpanded());
        for (FDExpandedStateChangeListener listener : expandedStateChangeListeners) {
            listener.expandedStateChanged(event);
        }
    }
}
@Override
public void setFilter(Filter filter) {
    this.filter = filter;
    // Re-create the helper that drives the "add condition" flow. If adding the
    // condition fails, the condition is rolled back from the tree before the
    // exception propagates, keeping the conditions model consistent.
    addConditionHelper = new AddConditionHelper(filter, new AddConditionHelper.Handler() {
        @Override
        public void handle(AbstractCondition condition) {
            try {
                addCondition(condition);
            } catch (Exception e) {
                conditions.removeCondition(condition);
                throw e;
            }
        }
    });
}
// Appends a new root-level condition, rebuilds the conditions layout with focus
// on the last (new) condition, and tracks the "modified" indicator. A listener
// is attached so later parameter changes also refresh the indicator.
protected void addCondition(AbstractCondition condition) {
    conditions.getRootNodes().add(new Node<>(condition));
    fillConditionsLayout(ConditionsFocusType.LAST);
    requestFocusToParamEditComponent();
    updateFilterModifiedIndicator();
    condition.addListener(new AbstractCondition.Listener() {
        @Override
        public void captionChanged() {
            // caption changes do not affect the modified indicator
        }
        @Override
        public void paramChanged(Param oldParam, Param newParam) {
            updateFilterModifiedIndicator();
        }
    });
}
// Registers frame-level keyboard shortcuts for "apply filter" and "open filter
// selection". Each action is added only once per frame (guarded by getAction),
// so multiple filters on the same frame do not duplicate the shortcuts.
protected void initShortcutActions() {
    if (filter.getFrame().getAction(Filter.APPLY_ACTION_ID) == null) {
        filter.getFrame().addAction(new AbstractAction(Filter.APPLY_ACTION_ID, clientConfig.getFilterApplyShortcut()) {
            @Override
            public void actionPerform(Component component) {
                // Apply only when the filter is visible and bound to a datasource;
                // FTS mode uses its own apply path.
                if (isVisible() && datasource != null) {
                    if (filterMode == FilterMode.GENERIC_MODE) {
                        apply(false);
                    } else {
                        applyFts();
                    }
                }
            }
        });
    }
    if (filter.getFrame().getAction(Filter.SELECT_ACTION_ID) == null) {
        filter.getFrame().addAction(new AbstractAction(Filter.SELECT_ACTION_ID, clientConfig.getFilterSelectShortcut()) {
            @Override
            public void actionPerform(Component component) {
                if (isVisible() && datasource != null && filtersPopupButton.isEnabled()) {
                    filtersPopupButton.setPopupVisible(true);
                }
            }
        });
    }
}
// Reflects the currently selected filter in the window caption/description and
// in the group box caption. The ad-hoc filter and FTS mode show no filter title.
protected void updateWindowCaption() {
    Window window = ComponentsHelper.getWindow(filter);
    String filterTitle;
    if (filterMode == FilterMode.GENERIC_MODE && filterEntity != null && filterEntity != adHocFilter) {
        filterTitle = getFilterCaption(filterEntity);
    } else {
        filterTitle = null;
    }
    window.setDescription(filterTitle);
    // Remember the pristine caption once so repeated updates don't accumulate titles.
    if (initialWindowCaption == null) {
        initialWindowCaption = window.getCaption();
    }
    windowManager.setWindowCaption(window, initialWindowCaption, filterTitle);
    groupBoxLayout.setCaption(Strings.isNullOrEmpty(filterTitle) ? caption : caption + ": " + filterTitle);
}
// Factory method - subclasses may override to customize how the controls
// layout is built from the layout description template.
protected ControlsLayoutBuilder createControlsLayoutBuilder(String layoutDescription) {
    return new ControlsLayoutBuilder(layoutDescription);
}
/**
 * Clears the "default" flag on every known filter except the currently
 * selected one, guaranteeing at most one default filter remains.
 */
protected void resetDefaultFilters() {
    for (FilterEntity otherFilter : filterEntities) {
        // Only touch filters that are both different from the current one
        // and actually marked as default.
        if (!ObjectUtils.equals(otherFilter, filterEntity)
                && BooleanUtils.isTrue(otherFilter.getIsDefault())) {
            otherFilter.setIsDefault(false);
        }
    }
}
/**
 * Reacts to the filters lookup selection: applying the chosen filter entity.
 * The {@code filtersLookupListenerEnabled} flag suppresses programmatic updates.
 */
protected class FiltersLookupChangeListener implements Component.ValueChangeListener {
    public FiltersLookupChangeListener() {
    }

    @Override
    public void valueChanged(Component.ValueChangeEvent e) {
        // Apply only when the listener is active and the lookup actually
        // holds a FilterEntity (ignores null / foreign values).
        if (filtersLookupListenerEnabled && e.getValue() instanceof FilterEntity) {
            setFilterEntity((FilterEntity) e.getValue());
        }
    }
}
/**
 * Saves the current filter. For a brand-new filter (not yet persisted and not
 * a set/folder filter) a "save filter" dialog asks for the name first; an
 * existing filter is re-serialized and saved in place.
 */
protected class SaveAction extends AbstractAction {
    public SaveAction() {
        super("save");
    }
    @Override
    public void actionPerform(Component component) {
        if (PersistenceHelper.isNew(filterEntity) && filterEntity.getFolder() == null) {
            WindowInfo windowInfo = windowConfig.getWindowInfo("saveFilter");
            Map<String, Object> params = new HashMap<>();
            // Pre-fill the name field unless it is still the placeholder ad-hoc name.
            if (!getMainMessage("filter.adHocFilter").equals(filterEntity.getName())) {
                params.put("filterName", filterEntity.getName());
            }
            final SaveFilterWindow window = (SaveFilterWindow) windowManager.openWindow(windowInfo, WindowManager.OpenType.DIALOG, params);
            window.addCloseListener(actionId -> {
                if (Window.COMMIT_ACTION_ID.equals(actionId)) {
                    String filterName = window.getFilterName();
                    filterEntity.setName(filterName);
                    filterEntity.setXml(filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE));
                    saveFilterEntity();
                    initAdHocFilter();
                    initFilterSelectComponents();
                    updateWindowCaption();
                    //recreate layout to remove delete conditions buttons
                    initialConditions = conditions.toConditionsList();
                    fillConditionsLayout(ConditionsFocusType.NONE);
                }
            });
        } else {
            // Folder-backed filters persist live values; ordinary filters persist defaults.
            String xml = filterEntity.getFolder() == null ? filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE)
                    : filterParser.getXml(conditions, Param.ValueProperty.VALUE);
            filterEntity.setXml(xml);
            saveFilterEntity();
        }
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.save");
    }
    @Override
    public String getIcon() {
        return "icons/save.png";
    }
}
/**
 * Saves a copy of the current filter under a new name. The copy gets a fresh
 * id (and no code), and becomes the currently selected filter afterwards.
 */
protected class SaveAsAction extends AbstractAction {
    protected SaveAsAction() {
        super("saveAs");
    }
    @Override
    public void actionPerform(Component component) {
        WindowInfo windowInfo = windowConfig.getWindowInfo("saveFilter");
        final SaveFilterWindow window = (SaveFilterWindow) windowManager.openWindow(windowInfo, WindowManager.OpenType.DIALOG);
        window.addCloseListener(actionId -> {
            if (Window.COMMIT_ACTION_ID.equals(actionId)) {
                String filterName = window.getFilterName();
                FilterEntity newFilterEntity = metadata.create(FilterEntity.class);
                metadata.getTools().copy(filterEntity, newFilterEntity);
                newFilterEntity.setCode(null);
                newFilterEntity.setId(UuidProvider.createUuid());
                //if filter was global but current user cannot create global filter then new filter
                //will be connected with current user
                if (newFilterEntity.getUser() == null && !uerCanEditGlobalFilter()) {
                    newFilterEntity.setUser(userSessionSource.getUserSession().getCurrentOrSubstitutedUser());
                }
                // Serialize from the ORIGINAL filter's folder mode before swapping entities.
                String xml = filterEntity.getFolder() == null ? filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE)
                        : filterParser.getXml(conditions, Param.ValueProperty.VALUE);
                filterEntity = newFilterEntity;
                filterEntity.setName(filterName);
                filterEntity.setXml(xml);
                saveFilterEntity();
                initFilterSelectComponents();
                updateWindowCaption();
                //recreate layout to remove delete conditions buttons
                initialConditions = conditions.toConditionsList();
                fillConditionsLayout(ConditionsFocusType.NONE);
            }
        });
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.saveAs");
    }
}
/**
 * Opens the filter editor dialog. On commit, the edited conditions replace the
 * current ones, the filter is re-serialized and saved, and the layout is rebuilt
 * with focus on the first condition.
 */
protected class EditAction extends AbstractAction {
    protected EditAction() {
        super("edit");
    }
    @Override
    public void actionPerform(Component component) {
        WindowInfo windowInfo = windowConfig.getWindowInfo("filterEditor");
        Map<String, Object> params = new HashMap<>();
        params.put("filterEntity", filterEntity);
        params.put("filter", filter);
        params.put("conditions", conditions);
        FilterEditor window = (FilterEditor) windowManager.openWindow(windowInfo, WindowManager.OpenType.DIALOG, params);
        window.addCloseListener(actionId -> {
            if (Window.COMMIT_ACTION_ID.equals(actionId)) {
                conditions = window.getConditions();
                filterEntity.setXml(filterParser.getXml(conditions, Param.ValueProperty.DEFAULT_VALUE));
                // A filter edited to be default displaces any previous default.
                if (filterEntity.getIsDefault()) {
                    resetDefaultFilters();
                }
                saveFilterEntity();
                initAdHocFilter();
                initFilterSelectComponents();
                updateWindowCaption();
                fillConditionsLayout(ConditionsFocusType.FIRST);
                requestFocusToParamEditComponent();
                updateFilterModifiedIndicator();
            } else {
                // Cancelled edit: just restore focus to the parameter editor.
                requestFocusToParamEditComponent();
            }
        });
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.edit");
    }
    @Override
    public String getIcon() {
        return "icons/edit.png";
    }
}
/**
 * Marks the current filter as the default one, clearing the flag on all
 * other filters and refreshing the selection components.
 */
protected class MakeDefaultAction extends AbstractAction {
    public MakeDefaultAction() {
        super("filter.makeDefault");
    }
    @Override
    public void actionPerform(Component component) {
        setDefaultFilter();
    }
    protected void setDefaultFilter() {
        if (filterEntity != null) {
            filterEntity.setIsDefault(true);
        }
        resetDefaultFilters();
        initFilterSelectComponents();
        setFilterActionsEnabled();
    }
}
/**
 * Deletes the current filter after a yes/no confirmation dialog.
 * The built-in ad-hoc filter can never be removed.
 */
protected class RemoveAction extends AbstractAction {
    public RemoveAction() {
        super("remove");
    }
    @Override
    public void actionPerform(Component component) {
        if (filterEntity == adHocFilter) return;
        windowManager.showOptionDialog(
                getMainMessage("filter.removeDialogTitle"),
                getMainMessage("filter.removeDialogMessage"),
                Frame.MessageType.CONFIRMATION,
                new Action[]{
                        new DialogAction(Type.YES) {
                            @Override
                            public void actionPerform(Component component) {
                                removeFilterEntity();
                            }
                        },
                        // "No" is the primary (default) choice to make deletion deliberate.
                        new DialogAction(Type.NO, Status.PRIMARY)
                });
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.remove");
    }
    @Override
    public String getIcon() {
        return "icons/remove.png";
    }
}
// Persists the deletion of the current filter (commit with it in the "removed"
// collection), drops it from the in-memory list, and falls back to the ad-hoc filter.
protected void removeFilterEntity() {
    CommitContext ctx = new CommitContext(Collections.emptyList(), Collections.singletonList(filterEntity));
    dataService.commit(ctx);
    filterEntities.remove(filterEntity);
    setFilterEntity(adHocFilter);
    initFilterSelectComponents();
    updateWindowCaption();
}
/**
 * Pins the currently applied filter: the datasource keeps the current query
 * as a base for subsequent filtering, and the applied-filter chip is shown.
 * Only available for datasources supporting "apply to selected".
 */
protected class PinAppliedAction extends AbstractAction {
    public PinAppliedAction() {
        super("pinApplied");
    }
    @Override
    public void actionPerform(Component component) {
        if (datasource instanceof CollectionDatasource.SupportsApplyToSelected) {
            ((CollectionDatasource.SupportsApplyToSelected) datasource).pinQuery();
            addAppliedFilter();
        }
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter.pinApplied");
    }
    @Override
    public String getIcon() {
        return "icons/pin.png";
    }
}
/**
 * Saves the current filter as an application folder or a search folder,
 * depending on the flag passed at construction. Caption is resolved from
 * the action id ("filter.saveAsAppFolder" / "filter.saveAsSearchFolder").
 */
protected class SaveAsFolderAction extends AbstractAction {
    public static final String SAVE_AS_APP_FOLDER = "saveAsAppFolder";
    public static final String SAVE_AS_SEARCH_FOLDER = "saveAsSearchFolder";

    // true -> application folder; false -> search folder
    protected boolean isAppFolder;

    protected SaveAsFolderAction(boolean isAppFolder) {
        super(isAppFolder ? SAVE_AS_APP_FOLDER : SAVE_AS_SEARCH_FOLDER);
        this.isAppFolder = isAppFolder;
    }
    @Override
    public String getCaption() {
        return getMainMessage("filter." + getId());
    }
    @Override
    public void actionPerform(Component component) {
        saveAsFolder(isAppFolder);
    }
}
/**
 * Table action that creates a new entity set (folder) from the rows currently
 * selected in the owner table, opening the "saveSetInFolder" dialog.
 */
protected class AddToSetAction extends ItemTrackingAction {
    protected AddToSetAction(Table table) {
        super(table, "filter.addToSet");
    }
    @Override
    public String getCaption() {
        return getMainMessage(getId());
    }
    @Override
    public void actionPerform(Component component) {
        Set<Entity> ownerSelection = target.getSelected();
        if (!ownerSelection.isEmpty()) {
            String entityType = target.getDatasource().getMetaClass().getName();
            Map<String, Object> params = new HashMap<>();
            params.put("entityType", entityType);
            params.put("items", ownerSelection);
            params.put("componentPath", ComponentsHelper.getFilterComponentPath(filter));
            // Strip the leading window alias from the component path to get the
            // filter's id relative to its window.
            String[] strings = ValuePathHelper.parse(ComponentsHelper.getFilterComponentPath(filter));
            String componentId = ValuePathHelper.format(Arrays.copyOfRange(strings, 1, strings.length));
            params.put("componentId", componentId);
            params.put("foldersPane", filterHelper.getFoldersPane());
            params.put("entityClass", datasource.getMetaClass().getJavaClass().getName());
            params.put("query", datasource.getQuery());
            filter.getFrame().openWindow("saveSetInFolder",
                    WindowManager.OpenType.DIALOG,
                    params);
        }
    }
}
/**
 * Table action that removes the selected rows from the current entity set.
 * Removing the last remaining row deletes the whole set (filter + folder)
 * and closes the window; otherwise the set's XML is updated and re-applied.
 */
protected class RemoveFromSetAction extends ItemTrackingAction {
    protected RemoveFromSetAction(Table table) {
        super(table, "filter.removeFromCurSet");
    }
    @Override
    public String getCaption() {
        return getMainMessage(getId());
    }
    @Override
    public void actionPerform(Component component) {
        if (filterEntity == null) {
            // todo add notification 'Filter not selected'
            return;
        }
        Set selected = target.getSelected();
        if (selected.isEmpty()) {
            return;
        }
        if (target.getDatasource().getItemIds().size() == 1) {
            // Last item in the set: dispose of the set entirely.
            filterHelper.removeFolderFromFoldersPane(filterEntity.getFolder());
            removeFilterEntity();
            Window window = ComponentsHelper.getWindow(filter);
            windowManager.close(window);
        } else {
            // Rewrite the set's XML without the removed entities and persist the folder.
            String filterXml = filterEntity.getXml();
            filterEntity.setXml(UserSetHelper.removeEntities(filterXml, selected));
            filterEntity.getFolder().setFilterXml(filterEntity.getXml());
            filterEntity.setFolder(saveFolder((filterEntity.getFolder())));
            setFilterEntity(filterEntity);
        }
    }
}
/**
 * Opens a lookup screen for the filtered entity and adds the entities the user
 * picks to the current set, updating the set's XML and backing folder.
 * The lookup screen alias is derived from the browse screen alias
 * (e.g. "sec$User.browse" -> "sec$User.lookup").
 */
protected class AddToCurrSetAction extends BaseAction {
    protected AddToCurrSetAction() {
        super("filter.addToCurSet");
    }
    @Override
    public String getCaption() {
        return getMainMessage(getId());
    }
    @Override
    public void actionPerform(Component component) {
        if (filterEntity == null) {
            // todo add notification 'Filter not selected'
            return;
        }
        Frame frame = filter.getFrame();
        String[] strings = ValuePathHelper.parse(ComponentsHelper.getFilterComponentPath(filter));
        String windowAlias = strings[0];
        StringBuilder lookupAlias = new StringBuilder(windowAlias);
        // Replace the ".browse" suffix with ".lookup" if present.
        if (windowAlias.endsWith(Window.BROWSE_WINDOW_SUFFIX)) {
            int index = lookupAlias.lastIndexOf(Window.BROWSE_WINDOW_SUFFIX);
            lookupAlias.delete(index, lookupAlias.length());
            lookupAlias.append(Window.LOOKUP_WINDOW_SUFFIX);
        }
        frame.openLookup(lookupAlias.toString(), new Window.Lookup.Handler() {
            @Override
            public void handleLookup(Collection items) {
                String filterXml = filterEntity.getXml();
                filterEntity.setXml(UserSetHelper.addEntities(filterXml, items));
                filterEntity.getFolder().setFilterXml(filterEntity.getXml());
                filterEntity.setFolder(saveFolder(filterEntity.getFolder()));
                setFilterEntity(filterEntity);
            }
        }, WindowManager.OpenType.THIS_TAB);
    }
}
/**
 * Immutable triple binding an applied filter to the layout row and the
 * "remove" button that represent it in the applied-filters panel.
 */
protected static class AppliedFilterHolder {
    public final AppliedFilter filter;
    public final Component.Container layout;
    public final Button button;

    protected AppliedFilterHolder(AppliedFilter filter, Component.Container layout, Button button) {
        this.filter = filter;
        this.layout = layout;
        this.button = button;
    }
}
/**
 * Class creates filter controls layout based on template.
 * See template format in documentation for {@link ClientConfig#getGenericFilterControlsLayout()}
 */
protected class ControlsLayoutBuilder {

    // Parsed layout: component name -> its options list, in template order.
    protected Map<String, List<String>> components = new LinkedHashMap<>();
    // Template action names -> the delegate's action instances.
    protected Map<String, AbstractAction> filterActions = new HashMap<>();

    public ControlsLayoutBuilder(String layoutDescription) {
        initFilterActions();
        parseLayoutDescription(layoutDescription);
    }

    // Maps the template's symbolic action names onto the pre-created actions.
    protected void initFilterActions() {
        filterActions.put("save", saveAction);
        filterActions.put("save_as", saveAsAction);
        filterActions.put("edit", editAction);
        filterActions.put("remove", removeAction);
        filterActions.put("pin", pinAppliedAction);
        filterActions.put("save_search_folder", saveAsSearchFolderAction);
        filterActions.put("save_app_folder", saveAsAppFolderAction);
        filterActions.put("make_default", makeDefaultAction);
    }

    // Parses entries of the form "[name | opt1, opt2]" into the components map.
    // The options part (after '|') is optional.
    protected void parseLayoutDescription(String layoutDescription) {
        Pattern panelComponentPattern = Pattern.compile("\\[(.*?)\\]");
        Matcher matcher = panelComponentPattern.matcher(layoutDescription);
        Splitter componentDescriptionSplitter = Splitter.on("|").trimResults();
        Splitter optionsSplitter = Splitter.on(",").trimResults();
        while (matcher.find()) {
            String componentDescription = matcher.group(1);
            Iterable<String> parts = componentDescriptionSplitter.split(componentDescription);
            Iterator<String> iterator = parts.iterator();
            String componentName = iterator.next();
            String componentOptions = iterator.hasNext() ? iterator.next() : "";
            Iterable<String> options = optionsSplitter.split(componentOptions);
            components.put(componentName, Lists.newArrayList(options));
        }
    }

    // Materializes the parsed template into the controls layout, expanding
    // the spacer component so trailing controls are pushed to the edge.
    public void build() {
        for (Map.Entry<String, List<String>> entry : components.entrySet()) {
            Component component = getControlsLayoutComponent(entry.getKey(), entry.getValue());
            if (component == null) {
                log.warn("Filter controls layout component " + entry.getKey() + " not supported");
                continue;
            }
            controlsLayout.add(component);
            if (component == controlsLayoutGap) {
                controlsLayout.expand(component);
            }
        }
    }

    // Resolves a template component name to the concrete UI component,
    // or null for unknown names (caller logs a warning).
    @Nullable
    protected Component getControlsLayoutComponent(String name, List<String> options) {
        switch (name) {
            case "filters_popup":
                filtersPopupDisplayed = true;
                return filtersPopupBox;
            case "filters_lookup":
                filtersLookupDisplayed = true;
                return filtersLookup;
            case "search":
                // Detach first: the search button may already sit inside the popup box.
                searchBtn.setParent(null);
                return searchBtn;
            case "add_condition":
                return addConditionBtn;
            case "settings":
                fillSettingsBtn(options);
                return settingsBtn;
            case "max_results":
                maxResultsAddedToLayout = true;
                return maxResultsLayout;
            case "fts_switch":
                return ftsSwitch;
            case "spacer":
                return controlsLayoutGap;
            case "pin":
            case "save":
            case "save_as":
            case "edit":
            case "remove":
            case "make_default":
            case "save_search_folder":
            case "save_app_folder":
                return createActionBtn(name, options);
        }
        return null;
    }

    // Creates a standalone button for an action name; "no-caption" / "no-icon"
    // options strip the respective attribute (caption is moved to the tooltip).
    protected Button createActionBtn(String actionName, List<String> options) {
        if (!isActionAllowed(actionName)) return null;
        Button button = componentsFactory.createComponent(Button.class);
        button.setAction(filterActions.get(actionName));
        if (options.contains("no-caption")) {
            button.setCaption(null);
            button.setDescription(filterActions.get(actionName).getCaption());
        }
        if (options.contains("no-icon"))
            button.setIcon(null);
        return button;
    }

    // Populates the settings popup button with the listed actions,
    // skipping unknown or disallowed ones.
    protected void fillSettingsBtn(List<String> actionNames) {
        for (String actionName : actionNames) {
            AbstractAction action = filterActions.get(actionName);
            if (action == null) {
                log.warn("Action " + actionName + " cannot be added to settingsBtn");
                continue;
            }
            if (isActionAllowed(actionName)) {
                settingsBtn.addAction(action);
            }
        }
    }

    // Permission gate: "pin" requires query-from-selected support; folder
    // actions require folders to be enabled; everything else is always allowed.
    protected boolean isActionAllowed(String actionName) {
        switch (actionName) {
            case "pin":
                return globalConfig.getAllowQueryFromSelected();
            case "save_search_folder":
            case "save_app_folder":
                return folderActionsEnabled && filterHelper.isFolderActionsEnabled();
            default:
                return true;
        }
    }
}
}
|
PL-6306 Add cuba-id for settings button of Filter
|
modules/gui/src/com/haulmont/cuba/gui/components/filter/FilterDelegateImpl.java
|
PL-6306 Add cuba-id for settings button of Filter
|
|
Java
|
apache-2.0
|
38cc45d9ae1dcc25eb9a0c3b2a0bae274551cdee
| 0
|
michaelknigge/pclbox
|
package mk.pclbox;
import java.nio.charset.Charset;
import java.util.Arrays;
/**
 * A {@link TextCommand} carries a chunk of printable text from a PCL data
 * stream. The payload stays a raw byte[] rather than a String because the
 * correct decoding depends on any "Text Parsing Method" command that may
 * precede it in the stream.
 */
public final class TextCommand extends PrinterCommand {

    private static final Charset ISO_8859_1 = Charset.forName("iso-8859-1");

    final byte[] text;

    /**
     * Constructor of the {@link TextCommand}.
     *
     * @param offset - position within the data stream
     * @param text - the text
     */
    public TextCommand(long offset, final byte[] text) {
        super(offset);
        // Defensive copy: callers cannot mutate our state afterwards.
        this.text = text.clone();
    }

    /**
     * Gets the text that has to be printed by a printer. Note that the text may be encoded
     * as a multi-byte text, depending on a "Text Parsing Method" that may be present in
     * the PCL printer data stream.
     *
     * @return the text as a byte array.
     */
    public byte[] getText() {
        // Defensive copy on the way out as well.
        return this.text.clone();
    }

    @Override
    void accept(PrinterCommandVisitor visitor) {
        visitor.handle(this);
    }

    @Override
    public int hashCode() {
        // Content-based hash of the payload, mixed with the offset hash.
        return this.getOffsetHash() ^ Arrays.hashCode(this.text);
    }

    @Override
    public boolean equals(final Object other) {
        if (!(other instanceof TextCommand)) {
            return false;
        }
        final TextCommand that = (TextCommand) other;
        return this.getOffset() == that.getOffset()
                && Arrays.equals(this.getText(), that.getText());
    }

    @Override
    public String toString() {
        return new String(this.getText(), ISO_8859_1);
    }
}
|
pclbox/src/main/java/mk/pclbox/TextCommand.java
|
package mk.pclbox;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
/**
 * A {@link TextCommand} contains text that has to be printed by the printer. The text
 * is handled as a byte[] and not a string because decoding of the byte[] depends on
 * the text parsing method that (maybe) has been set by a "Text Parsing Method" PCL command
 * in the data stream.
 */
public final class TextCommand extends PrinterCommand {

    final byte[] text;

    /**
     * Constructor of the {@link TextCommand}.
     *
     * @param offset - position within the data stream
     * @param text - the text
     */
    public TextCommand(long offset, final byte[] text) {
        super(offset);
        // Defensive copy: callers cannot mutate our state afterwards.
        this.text = text.clone();
    }

    /**
     * Gets the text that has to be printed by a printer. Note that the text may be encoded
     * as a multi-byte text, depending on a "Text Parsing Method" that may be present in
     * the PCL printer data stream.
     *
     * @return the text as a byte array.
     */
    public byte[] getText() {
        // Defensive copy on the way out as well.
        return this.text.clone();
    }

    @Override
    void accept(PrinterCommandVisitor visitor) {
        visitor.handle(this);
    }

    @Override
    public int hashCode() {
        // Bug fix: the old code called hashCode() directly on the byte[], which is
        // identity-based - two equal TextCommands produced different hashes,
        // violating the equals/hashCode contract. Arrays.hashCode() hashes content.
        return Arrays.hashCode(this.text) ^ this.getOffsetHash();
    }

    @Override
    public boolean equals(final Object other) {
        if (other instanceof TextCommand) {
            final TextCommand o = (TextCommand) other;
            return Arrays.equals(o.getText(), this.getText()) && o.getOffset() == this.getOffset();
        } else {
            return false;
        }
    }

    @Override
    public String toString() {
        // ISO-8859-1 is guaranteed to exist on every JVM; using the
        // StandardCharsets constant removes the impossible
        // UnsupportedEncodingException catch branch of the old code.
        return new String(this.getText(), StandardCharsets.ISO_8859_1) + "@" + this.getOffset();
    }
}
|
Fixed findbugs violations
|
pclbox/src/main/java/mk/pclbox/TextCommand.java
|
Fixed findbugs violations
|
|
Java
|
mit
|
b919a02d3f9edae33558bf6b08c0814c42ad40cd
| 0
|
treeleafj/xMax
|
package org.treeleafj.xmax.bean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
/**
 * Utilities for reading the generic type arguments declared on an
 * object's direct superclass.
 */
public class GenericUtils {

    private static Logger log = LoggerFactory.getLogger(GenericUtils.class);

    /**
     * Returns the first generic type argument of the object's superclass.
     *
     * @param obj instance whose superclass declares type arguments
     * @return the first actual type argument as a raw {@link Class}
     */
    public static Class getGenericFirst(Object obj) {
        // Same lookup as getGeneric with index 0.
        return getGeneric(obj, 0);
    }

    /**
     * Returns the {@code index}-th generic type argument of the object's superclass.
     *
     * @param obj   instance whose superclass declares type arguments
     * @param index zero-based position of the wanted type argument
     * @return the actual type argument as a raw {@link Class}
     */
    public static Class getGeneric(Object obj, int index) {
        ParameterizedType pt = (ParameterizedType) obj.getClass().getGenericSuperclass();
        return (Class) pt.getActualTypeArguments()[index];
    }
}
|
xMax-common/src/main/java/org/treeleafj/xmax/bean/GenericUtils.java
|
package org.treeleafj.xmax.bean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
/**
 * Utilities for reading the generic type arguments declared on an
 * object's direct superclass (e.g. {@code class UserDao extends BaseDao<User>}).
 */
public class GenericUtils {

    /**
     * Returns the first generic type argument of the object's superclass.
     *
     * @param obj instance whose superclass declares type arguments
     * @return the first actual type argument as a raw {@link Class}
     */
    public static Class getGenericFirst(Object obj) {
        return getGeneric(obj, 0);
    }

    /**
     * Returns the {@code index}-th generic type argument of the object's superclass.
     *
     * @param obj   instance whose superclass declares type arguments
     * @param index zero-based position of the wanted type argument
     * @return the actual type argument as a raw {@link Class}
     */
    public static Class getGeneric(Object obj, int index) {
        // getGenericSuperclass() already returns a Type - the old explicit
        // (Type) casts were redundant, and the stray log.info debug print
        // (plus the now-unused private Logger field) has been removed.
        Type type = obj.getClass().getGenericSuperclass();
        ParameterizedType pt = (ParameterizedType) type;
        return (Class) pt.getActualTypeArguments()[index];
    }
}
|
去掉日志打印
|
xMax-common/src/main/java/org/treeleafj/xmax/bean/GenericUtils.java
|
去掉日志打印
|
|
Java
|
mit
|
68bf6a41568b7bd89e9ee80f7ba9ab0662165b2b
| 0
|
drumonii/LeagueTrollBuild,drumonii/LeagueTrollBuild,drumonii/LeagueTrollBuild,drumonii/LeagueTrollBuild
|
package com.drumonii.loltrollbuild.repository;
import com.drumonii.loltrollbuild.model.GameMap;
import com.drumonii.loltrollbuild.model.Item;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.data.rest.core.annotation.RestResource;
import java.util.List;
/**
 * JPA repository to the ITEM table.
 * All read queries are cached in the "items" cache; every mutating operation
 * evicts the whole cache to keep cached query results consistent.
 */
@CacheConfig(cacheNames = "items")
public interface ItemsRepository extends JpaRepository<Item, Integer> {

    /**
     * Gets a {@link List} of upgraded {@link Item} boots from Boots of Speed found only on the specified
     * {@link GameMap}.
     *
     * @param mapId the {@link GameMap}'s ID
     * @return a {@link List} of upgraded {@link Item} boots
     * @see <a href="http://leagueoflegends.wikia.com/wiki/Boots_of_Speed">Boots of Speed</a>
     * @see <a href="http://leagueoflegends.wikia.com/wiki/Advanced_item">Advanced Items</a>
     */
    @Query("select i from Item i join i.from f left join i.maps m " +
           "where i.id <> 1001 and f in ('1001') " +
           "and (key(m) <> :mapId and m = false) " +
           "group by i.id")
    @RestResource(exported = false)
    @Cacheable(key = "{#root.methodName, #mapId}")
    List<Item> boots(@Param("mapId") int mapId);

    /**
     * Gets a {@link List} of basic Trinket {@link Item}s (non Advanced) found only on the specified {@link GameMap}.
     *
     * @param mapId the {@link GameMap}'s ID
     * @return a {@link List} of basic Trinket {@link Item}s
     * @see <a href="http://leagueoflegends.wikia.com/wiki/Trinket">Trinket</a>
     */
    @Query("select i from Item i left join i.maps m " +
           "where (i.name like '%Trinket%' or i.description like '%Trinket%') " +
           "and i.gold.total = 0 and i.gold.purchasable = true " +
           "and (key(m) <> :mapId and m = false) " +
           "group by i.id")
    @RestResource(exported = false)
    @Cacheable(key = "{#root.methodName, #mapId}")
    List<Item> trinkets(@Param("mapId") int mapId);

    /**
     * Gets a {@link List} of Viktor only starting {@link Item}s.
     *
     * @return a {@link List} of Viktor only {@link Item}s
     * @see <a href="http://leagueoflegends.wikia.com/wiki/Viktor">Viktor</a>
     */
    @Query("select i from Item i where i.requiredChampion = 'Viktor'")
    @RestResource(exported = false)
    @Cacheable(key = "#root.methodName")
    List<Item> viktorOnly();

    /**
     * Gets a {@link List} of {@link Item}s eligible for the troll build. That is, all purchasable (excluding items like
     * Muramana or Seraph's Embrace - they are non purchasable), non-consumable, and fully upgraded items found only on
     * the specified {@link GameMap}. This excludes boots, potions, Trinkets, items not requiring a particular champion,
     * jungle related items, Doran's items, and Quick Charge items.
     *
     * @param mapId the {@link GameMap}'s ID
     * @return a {@link List} of {@link Item}s eligible for the troll build
     */
    @Query("select i from Item i left join i.into i_into left join i.maps m " +
           "where i.name is not null and i.description is not null " +
           "and i.gold.purchasable = true and i.consumed is null and (i.group is null or i.group <> 'FlaskGroup') " +
           "and i_into is null and not exists (select m2 from i.maps m2 where key(m2) = :mapId and m2 = false)" +
           "and i.id <> 1001 and i.description not like '%Movement%' " +
           "and (i.name not like '%Potion%' and i.description not like '%Potion%') " +
           "and (i.name not like '%Trinket%' and i.description not like '%Trinket%') " +
           "and i.requiredChampion is null " +
           "and i.name not like 'Enchantment%' and i.name not like 'Doran%' and i.name not like '%(Quick Charge)' " +
           "group by i.id")
    @RestResource(exported = false)
    @Cacheable(key = "{#root.methodName, #mapId}")
    List<Item> forTrollBuild(@Param("mapId") int mapId);

    // Cached read of all items.
    @Cacheable
    @Override
    List<Item> findAll();

    // Bulk save evicts the entire items cache.
    @CacheEvict(allEntries = true)
    @Override
    <S extends Item> List<S> save(Iterable<S> entities);

    // Single save evicts the entire items cache.
    @CacheEvict(allEntries = true)
    @Override
    <S extends Item> S save(S entity);

    // Cached lookup by id.
    @Cacheable
    @Override
    Item findOne(Integer integer);

    // Each delete variant evicts the entire items cache.
    @CacheEvict(allEntries = true)
    @Override
    void delete(Integer integer);

    @CacheEvict(allEntries = true)
    @Override
    void delete(Iterable<? extends Item> entities);

    @CacheEvict(allEntries = true)
    @Override
    void deleteAll();

    // Query-by-example page, cached per example probe + page request.
    @Cacheable(key = "{#example.probe, #pageable}")
    @Override
    <S extends Item> Page<S> findAll(Example<S> example, Pageable pageable);
}
|
src/main/java/com/drumonii/loltrollbuild/repository/ItemsRepository.java
|
package com.drumonii.loltrollbuild.repository;
import com.drumonii.loltrollbuild.model.GameMap;
import com.drumonii.loltrollbuild.model.Item;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.domain.Example;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.data.rest.core.annotation.RestResource;
import java.util.List;
/**
* JPA repository to the ITEM table.
*/
@CacheConfig(cacheNames = "items")
public interface ItemsRepository extends JpaRepository<Item, Integer> {
/**
 * Gets a {@link List} of upgraded {@link Item} boots from Boots of Speed found only on the specified
 * {@link GameMap}.
 *
 * @param mapId the {@link GameMap}'s ID
 * @return a {@link List} of upgraded {@link Item} boots
 * @see <a href="http://leagueoflegends.wikia.com/wiki/Boots_of_Speed">Boots of Speed</a>
 * @see <a href="http://leagueoflegends.wikia.com/wiki/Advanced_item">Advanced Items</a>
 */
// Excludes Boots of Speed itself (id 1001) and keeps only items whose build path ("i.from")
// contains '1001', skipping items flagged unavailable for a map other than :mapId.
@Query("select i from Item i join i.from f left join i.maps m " +
"where i.id <> 1001 and f in ('1001') " +
"and (key(m) <> :mapId and m = false) " +
"group by i.id")
@RestResource(exported = false)
@Cacheable(key = "{#root.methodName, #mapId}")
List<Item> boots(@Param("mapId") int mapId);
/**
 * Gets a {@link List} of basic Trinket {@link Item}s (non Advanced) found only on the specified {@link GameMap}.
 * "Basic" means a total gold cost of 0 while still being purchasable.
 *
 * @param mapId the {@link GameMap}'s ID
 * @return a {@link List} of basic Trinket {@link Item}s
 * @see <a href="http://leagueoflegends.wikia.com/wiki/Trinket">Trinket</a>
 */
@Query("select i from Item i left join i.maps m " +
"where (i.name like '%Trinket%' or i.description like '%Trinket%') " +
"and i.gold.total = 0 and i.gold.purchasable = true " +
"and (key(m) <> :mapId and m = false) " +
"group by i.id")
@RestResource(exported = false)
@Cacheable(key = "{#root.methodName, #mapId}")
List<Item> trinkets(@Param("mapId") int mapId);
/**
 * Gets a {@link List} of Viktor only starting {@link Item}s.
 *
 * @return a {@link List} of Viktor only {@link Item}s
 * @see <a href="http://leagueoflegends.wikia.com/wiki/Viktor">Viktor</a>
 */
@Query("select i from Item i where i.requiredChampion = 'Viktor'")
@RestResource(exported = false)
@Cacheable(key = "#root.methodName")
List<Item> viktorOnly();
/**
 * Gets a {@link List} of {@link Item}s eligible for the troll build. That is, all purchasable (excluding items like
 * Muramana or Seraph's Embrace - they are non purchasable), non-consumable, and fully upgraded items found only on
 * the specified {@link GameMap}. This excludes boots, potions, Trinkets, items not requiring a particular champion,
 * jungle related items, Doran's items, and Quick Charge items.
 *
 * <p>NOTE(review): the fragment ending in {@code "m2 = false)"} has no trailing space before the
 * next {@code "and i.id <> 1001 "} fragment, producing {@code ")and"} in the concatenated JPQL.
 * The parenthesis acts as a token separator so this parses, but adding the space would be
 * safer and consistent with the other fragments — TODO confirm.</p>
 *
 * @param mapId the {@link GameMap}'s ID
 * @return a {@link List} of {@link Item}s eligible for the troll build
 */
@Query("select i from Item i left join i.into i_into left join i.maps m " +
"where i.name is not null and i.description is not null " +
"and i.gold.purchasable = true and i.consumed is null and (i.group is null or i.group <> 'FlaskGroup') " +
"and i_into is null and not exists (select m2 from i.maps m2 where key(m2) = :mapId and m2 = false)" +
"and i.id <> 1001 " +
"and (i.name not like '%Potion%' and i.description not like '%Potion%') " +
"and (i.name not like '%Trinket%' and i.description not like '%Trinket%') " +
"and i.requiredChampion is null " +
"and i.name not like 'Enchantment%' and i.name not like 'Doran%' and i.name not like '%(Quick Charge)' " +
"group by i.id")
@RestResource(exported = false)
@Cacheable(key = "{#root.methodName, #mapId}")
List<Item> forTrollBuild(@Param("mapId") int mapId);
/** Caches the full item list under the "items" cache (default key generation). */
@Cacheable
@Override
List<Item> findAll();
/** Bulk save; evicts every cached entry since any item may have changed. */
@CacheEvict(allEntries = true)
@Override
<S extends Item> List<S> save(Iterable<S> entities);
/** Single save; evicts every cached entry since list-valued caches may be stale. */
@CacheEvict(allEntries = true)
@Override
<S extends Item> S save(S entity);
/** Caches single-item lookups by their default-generated key. */
@Cacheable
@Override
Item findOne(Integer integer);
/** Delete by id; evicts the whole cache. */
@CacheEvict(allEntries = true)
@Override
void delete(Integer integer);
/** Bulk delete; evicts the whole cache. */
@CacheEvict(allEntries = true)
@Override
void delete(Iterable<? extends Item> entities);
/** Deletes every row; evicts the whole cache. */
@CacheEvict(allEntries = true)
@Override
void deleteAll();
/** Caches query-by-example pages, keyed on the example probe plus the page request. */
@Cacheable(key = "{#example.probe, #pageable}")
@Override
<S extends Item> Page<S> findAll(Example<S> example, Pageable pageable);
}
|
Prevent enchanted boots from showing in the Troll Build
|
src/main/java/com/drumonii/loltrollbuild/repository/ItemsRepository.java
|
Prevent enchanted boots from showing in the Troll Build
|
|
Java
|
lgpl-2.1
|
8e01b175d89c58b90749744a040dc188b266b11c
| 0
|
mbatchelor/pentaho-platform-plugin-reporting,mbatchelor/pentaho-platform-plugin-reporting,mbatchelor/pentaho-platform-plugin-reporting
|
/*
* This program is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software
* Foundation.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
* or from the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* Copyright 2008 Pentaho Corporation. All rights reserved.
*/
package org.pentaho.reporting.platform.plugin;
import java.io.FileNotFoundException;
import java.io.InputStream;
import org.pentaho.platform.api.engine.ISolutionFile;
import org.pentaho.platform.api.repository.ISolutionRepository;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.reporting.libraries.resourceloader.ResourceData;
import org.pentaho.reporting.libraries.resourceloader.ResourceKey;
import org.pentaho.reporting.libraries.resourceloader.ResourceLoadingException;
import org.pentaho.reporting.libraries.resourceloader.ResourceManager;
import org.pentaho.reporting.libraries.resourceloader.loader.AbstractResourceData;
/**
* This class is implemented to support loading solution files from the pentaho repository into JFreeReport
*
* @author Will Gorman/Michael D'Amour
*/
public class RepositoryResourceData extends AbstractResourceData {

  public static final String PENTAHO_REPOSITORY_KEY = "pentahoRepositoryKey"; //$NON-NLS-1$

  /** Solution-repository path of the resource, taken from the key's identifier. */
  private String filename;

  /** Resource key identifying the solution file to load. */
  private ResourceKey key;

  /**
   * Creates resource data bound to the given resource key.
   *
   * @param key
   *          resource key; must not be {@code null}
   */
  public RepositoryResourceData(final ResourceKey key) {
    if (key == null) {
      throw new NullPointerException();
    }
    this.key = key;
    this.filename = (String) key.getIdentifier();
  }

  /**
   * Opens an input stream to the solution file identified by the resource key.
   *
   * @param caller
   *          resource manager (unused)
   * @return input stream for the repository resource
   * @throws ResourceLoadingException
   *           if the file cannot be found or access is denied
   */
  public InputStream getResourceAsStream(ResourceManager caller) throws ResourceLoadingException {
    final ISolutionRepository repository = PentahoSystem.get(ISolutionRepository.class);
    final String path = key.getIdentifier().toString();
    try {
      return repository.getResourceInputStream(path, false, ISolutionRepository.ACTION_EXECUTE);
    } catch (FileNotFoundException e) {
      // might be due to access denial
      throw new ResourceLoadingException(e.getLocalizedMessage(), e);
    }
  }

  /**
   * Returns a requested attribute; only the filename attribute is supported.
   *
   * @param lookupKey
   *          attribute requested
   * @return the filename when {@code ResourceData.FILENAME} is requested, otherwise {@code null}
   */
  public Object getAttribute(String lookupKey) {
    return lookupKey.equals(ResourceData.FILENAME) ? filename : null;
  }

  /**
   * Returns the version number: the solution file's last-modified timestamp.
   *
   * @param caller
   *          resource manager (unused)
   * @return last-modified time, or -1 when the file cannot be resolved
   */
  public long getVersion(ResourceManager caller) throws ResourceLoadingException {
    final ISolutionRepository repository = PentahoSystem.get(ISolutionRepository.class);
    final ISolutionFile file =
        repository.getSolutionFile(key.getIdentifier().toString(), ISolutionRepository.ACTION_EXECUTE);
    // a missing file (e.g. FileNotFoundException in getResourceAsStream) yields null; avoid an NPE
    return file == null ? -1 : file.getLastModified();
  }

  /**
   * Returns the resource key this data was created with.
   *
   * @return resource key
   */
  public ResourceKey getKey() {
    return key;
  }
}
|
src/org/pentaho/reporting/platform/plugin/RepositoryResourceData.java
|
/*
* This program is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software
* Foundation.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
* or from the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* Copyright 2008 Pentaho Corporation. All rights reserved.
*/
package org.pentaho.reporting.platform.plugin;
import java.io.FileNotFoundException;
import java.io.InputStream;
import org.pentaho.platform.api.engine.ISolutionFile;
import org.pentaho.platform.api.repository.ISolutionRepository;
import org.pentaho.platform.engine.core.system.PentahoSessionHolder;
import org.pentaho.platform.engine.core.system.PentahoSystem;
import org.pentaho.reporting.libraries.resourceloader.ResourceData;
import org.pentaho.reporting.libraries.resourceloader.ResourceKey;
import org.pentaho.reporting.libraries.resourceloader.ResourceLoadingException;
import org.pentaho.reporting.libraries.resourceloader.ResourceManager;
import org.pentaho.reporting.libraries.resourceloader.loader.AbstractResourceData;
/**
* This class is implemented to support loading solution files from the pentaho repository into JFreeReport
*
* @author Will Gorman/Michael D'Amour
*/
public class RepositoryResourceData extends AbstractResourceData {
public static final String PENTAHO_REPOSITORY_KEY = "pentahoRepositoryKey"; //$NON-NLS-1$
// solution-repository path of the resource, taken from the key's identifier
private String filename;
// resource key identifying the solution file to load
private ResourceKey key;
/**
 * constructor which takes a resource key for data loading specifics
 *
 * @param key
 *          resource key; must not be null
 */
public RepositoryResourceData(final ResourceKey key) {
if (key == null) {
throw new NullPointerException();
}
this.key = key;
this.filename = (String) key.getIdentifier();
}
/**
 * gets a resource stream from the runtime context.
 *
 * @param caller
 *          resource manager (unused)
 * @return input stream for the repository resource
 * @throws ResourceLoadingException if the file cannot be found or access is denied
 */
public InputStream getResourceAsStream(ResourceManager caller) throws ResourceLoadingException {
try {
ISolutionRepository solutionRepository = PentahoSystem.get(ISolutionRepository.class);
return solutionRepository.getResourceInputStream(key.getIdentifier().toString(), false, ISolutionRepository.ACTION_EXECUTE);
} catch (FileNotFoundException e) {
// might be due to access denial
throw new ResourceLoadingException(e.getLocalizedMessage(), e);
}
}
/**
 * returns a requested attribute, currently only supporting filename.
 *
 * @param lookupKey
 *          attribute requested
 * @return the filename when ResourceData.FILENAME is requested, otherwise null
 */
public Object getAttribute(String lookupKey) {
if (lookupKey.equals(ResourceData.FILENAME)) {
return filename;
}
return null;
}
/**
 * return the version number: the solution file's last-modified timestamp.
 *
 * @param caller
 *          resource manager (unused)
 *
 * @return last-modified time, or -1 when the file cannot be resolved
 */
public long getVersion(ResourceManager caller) throws ResourceLoadingException {
ISolutionRepository solutionRepository = PentahoSystem.get(ISolutionRepository.class);
ISolutionFile file = solutionRepository.getSolutionFile(key.getIdentifier().toString(), ISolutionRepository.ACTION_EXECUTE);
// if we got a FileNotFoundException on getResourceInputStream then we will get a null file; avoid NPE
if (file != null) {
return file.getLastModified();
} else {
return -1;
}
}
/**
 * get the resource key
 *
 * @return resource key
 */
public ResourceKey getKey() {
return key;
}
}
|
Import cleaned out
|
src/org/pentaho/reporting/platform/plugin/RepositoryResourceData.java
|
Import cleaned out
|
|
Java
|
apache-2.0
|
1c1cd6c090f6e046c810ac06a263eb262b34ac17
| 0
|
wasn-lab/visual-positioning,TonnyXu/Zxing,Akylas/zxing,Akylas/zxing,Akylas/zxing,smart-make/zxing,wasn-lab/visual-positioning,smart-make/zxing,Akylas/zxing,TonnyXu/Zxing,smart-make/zxing,TonnyXu/Zxing,wasn-lab/visual-positioning,TonnyXu/Zxing,smart-make/zxing,smart-make/zxing,youknowone/zxing,TonnyXu/Zxing,Akylas/zxing,wasn-lab/visual-positioning,wasn-lab/visual-positioning,TonnyXu/Zxing,wasn-lab/visual-positioning,TonnyXu/Zxing,wasn-lab/visual-positioning,smart-make/zxing,smart-make/zxing,wasn-lab/visual-positioning,Akylas/zxing,TonnyXu/Zxing,Akylas/zxing,smart-make/zxing,youknowone/zxing,youknowone/zxing,youknowone/zxing
|
/*
* Copyright (C) 2009 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.history;
import android.app.AlertDialog;
import android.content.ContentValues;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.Cursor;
import android.net.Uri;
import android.os.Environment;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.text.DateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.ArrayList;
import android.preference.PreferenceManager;
import android.util.Log;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.client.android.Intents;
import com.google.zxing.client.android.PreferencesActivity;
import com.google.zxing.client.android.R;
import com.google.zxing.client.android.CaptureActivity;
import com.google.zxing.Result;
/**
* <p>Manages functionality related to scan history.</p>
*
* @author Sean Owen
*/
public final class HistoryManager {
private static final String TAG = HistoryManager.class.getSimpleName();
// upper bound on stored scans; trimHistory() deletes rows beyond the newest MAX_ITEMS
private static final int MAX_ITEMS = 500;
//private static final String[] TEXT_COL_PROJECTION = { DBHelper.TEXT_COL };
// columns fetched when rebuilding Result objects for the history dialog
private static final String[] GET_ITEM_COL_PROJECTION = {
DBHelper.TEXT_COL,
DBHelper.FORMAT_COL,
DBHelper.TIMESTAMP_COL,
};
// columns fetched when exporting the history as CSV (see buildHistory())
private static final String[] EXPORT_COL_PROJECTION = {
DBHelper.TEXT_COL,
DBHelper.DISPLAY_COL,
DBHelper.FORMAT_COL,
DBHelper.TIMESTAMP_COL,
};
// only the row id, used by trimHistory() to delete surplus rows
private static final String[] ID_COL_PROJECTION = { DBHelper.ID_COL };
// NOTE(review): DateFormat instances are not thread-safe; this shared static is fine only
// if buildHistory() is always invoked from a single (UI) thread — TODO confirm.
private static final DateFormat EXPORT_DATE_TIME_FORMAT = DateFormat.getDateTimeInstance();
private final CaptureActivity activity;
public HistoryManager(CaptureActivity activity) {
this.activity = activity;
}
/**
 * Loads every stored scan, newest first, as {@link Result} objects
 * (text, format and timestamp only; raw bytes and result points are null).
 * Returns an empty list if the database cannot be opened.
 */
List<Result> getHistoryItems() {
SQLiteOpenHelper helper = new DBHelper(activity);
List<Result> items = new ArrayList<Result>();
SQLiteDatabase db;
try {
// writable (not readable) so the database file is created on first use — presumably; TODO confirm
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return Collections.emptyList();
}
Cursor cursor = null;
try {
cursor = db.query(DBHelper.TABLE_NAME,
GET_ITEM_COL_PROJECTION,
null, null, null, null,
DBHelper.TIMESTAMP_COL + " DESC");
while (cursor.moveToNext()) {
Result result = new Result(cursor.getString(0),
null,
null,
BarcodeFormat.valueOf(cursor.getString(1)),
cursor.getLong(2));
items.add(result);
}
} finally {
if (cursor != null) {
cursor.close();
}
db.close();
}
return items;
}
/**
 * Builds the history dialog: one entry per stored scan plus two trailing actions,
 * "send history" and "clear history". Click handling is delegated to
 * {@link HistoryClickListener}.
 */
public AlertDialog buildAlert() {
List<Result> items = getHistoryItems();
int size = items.size();
String[] dialogItems = new String[size + 2];
for (int i = 0; i < size; i++) {
dialogItems[i] = items.get(i).getText();
}
Resources res = activity.getResources();
dialogItems[dialogItems.length - 2] = res.getString(R.string.history_send);
dialogItems[dialogItems.length - 1] = res.getString(R.string.history_clear_text);
DialogInterface.OnClickListener clickListener = new HistoryClickListener(this, activity, dialogItems, items);
AlertDialog.Builder builder = new AlertDialog.Builder(activity);
builder.setTitle(R.string.history_title);
builder.setItems(dialogItems, clickListener);
return builder.create();
}
/**
 * Persists one scan result, unless the launching intent opted out of history
 * (Intents.Scan.SAVE_HISTORY = false). When the "remember duplicates" preference
 * is off, any previous row with the same text is removed first. Failures to open
 * the database are logged and swallowed.
 */
public void addHistoryItem(Result result) {
if (!activity.getIntent().getBooleanExtra(Intents.Scan.SAVE_HISTORY, true)) {
return; // Do not save this item to the history.
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(activity);
boolean rememberDuplicates = prefs.getBoolean(PreferencesActivity.KEY_REMEMBER_DUPLICATES, false);
if (!rememberDuplicates) {
deletePrevious(result.getText());
}
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return;
}
try {
// Insert
ContentValues values = new ContentValues();
values.put(DBHelper.TEXT_COL, result.getText());
values.put(DBHelper.FORMAT_COL, result.getBarcodeFormat().toString());
values.put(DBHelper.DISPLAY_COL, result.getText()); // TODO use parsed result display value?
values.put(DBHelper.TIMESTAMP_COL, System.currentTimeMillis());
db.insert(DBHelper.TABLE_NAME, DBHelper.TIMESTAMP_COL, values);
} finally {
db.close();
}
}
/** Deletes every stored scan whose text equals the given text (duplicate suppression). */
private void deletePrevious(String text) {
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return;
}
try {
db.delete(DBHelper.TABLE_NAME, DBHelper.TEXT_COL + "=?", new String[] { text });
} finally {
db.close();
}
}
/**
 * Caps the history size: walks rows newest-first, keeps the first MAX_ITEMS,
 * and deletes every older row one by one.
 */
public void trimHistory() {
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return;
}
Cursor cursor = null;
try {
cursor = db.query(DBHelper.TABLE_NAME,
ID_COL_PROJECTION,
null, null, null, null,
DBHelper.TIMESTAMP_COL + " DESC");
// skip over the MAX_ITEMS newest rows...
int count = 0;
while (count < MAX_ITEMS && cursor.moveToNext()) {
count++;
}
// ...then delete everything older, by row id
while (cursor.moveToNext()) {
db.delete(DBHelper.TABLE_NAME, DBHelper.ID_COL + '=' + cursor.getString(0), null);
}
} finally {
if (cursor != null) {
cursor.close();
}
db.close();
}
}
/**
 * <p>Builds a text representation of the scanning history. Each scan is encoded on one
 * line, terminated by a line break (\r\n). The values in each line are comma-separated,
 * and double-quoted. Double-quotes within values are escaped with a sequence of two
 * double-quotes. The fields output are:</p>
 *
 * <ul>
 * <li>Raw text</li>
 * <li>Display text</li>
 * <li>Format (e.g. QR_CODE)</li>
 * <li>Timestamp</li>
 * <li>Formatted version of timestamp</li>
 * </ul>
 */
CharSequence buildHistory() {
StringBuilder historyText = new StringBuilder(1000);
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return "";
}
Cursor cursor = null;
try {
cursor = db.query(DBHelper.TABLE_NAME,
EXPORT_COL_PROJECTION,
null, null, null, null,
DBHelper.TIMESTAMP_COL + " DESC");
while (cursor.moveToNext()) {
for (int col = 0; col < EXPORT_COL_PROJECTION.length; col++) {
historyText.append('"').append(massageHistoryField(cursor.getString(col))).append("\",");
}
// Add timestamp again, formatted (reads the last projection column, TIMESTAMP_COL)
long timestamp = cursor.getLong(EXPORT_COL_PROJECTION.length - 1);
historyText.append('"').append(massageHistoryField(
EXPORT_DATE_TIME_FORMAT.format(new Date(timestamp)))).append("\"\r\n");
}
} finally {
if (cursor != null) {
cursor.close();
}
db.close();
}
return historyText;
}
/**
 * Writes the CSV history to BarcodeScanner/History on external storage and
 * returns a file:// Uri to it, or null when the directory or file cannot be written.
 */
static Uri saveHistory(String history) {
File bsRoot = new File(Environment.getExternalStorageDirectory(), "BarcodeScanner");
File historyRoot = new File(bsRoot, "History");
if (!historyRoot.exists() && !historyRoot.mkdirs()) {
Log.w(TAG, "Couldn't make dir " + historyRoot);
return null;
}
File historyFile = new File(historyRoot, "history-" + System.currentTimeMillis() + ".csv");
OutputStreamWriter out = null;
try {
out = new OutputStreamWriter(new FileOutputStream(historyFile), Charset.forName("UTF-8"));
out.write(history);
return Uri.parse("file://" + historyFile.getAbsolutePath());
} catch (IOException ioe) {
Log.w(TAG, "Couldn't access file " + historyFile + " due to " + ioe);
return null;
} finally {
if (out != null) {
try {
out.close();
} catch (IOException ioe) {
// do nothing
}
}
}
}
// escapes embedded double quotes for CSV by doubling them
private static String massageHistoryField(String value) {
return value.replace("\"","\"\"");
}
/** Deletes every row from the history table; open failures are logged and swallowed. */
void clearHistory() {
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return;
}
try {
db.delete(DBHelper.TABLE_NAME, null, null);
} finally {
db.close();
}
}
}
|
android/src/com/google/zxing/client/android/history/HistoryManager.java
|
/*
* Copyright (C) 2009 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.history;
import android.app.AlertDialog;
import android.content.ContentValues;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.Cursor;
import android.net.Uri;
import android.os.Environment;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.text.DateFormat;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.ArrayList;
import android.preference.PreferenceManager;
import android.util.Log;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.client.android.Intents;
import com.google.zxing.client.android.PreferencesActivity;
import com.google.zxing.client.android.R;
import com.google.zxing.client.android.CaptureActivity;
import com.google.zxing.Result;
/**
* <p>Manages functionality related to scan history.</p>
*
* @author Sean Owen
*/
public final class HistoryManager {
private static final String TAG = HistoryManager.class.getSimpleName();
// upper bound on stored scans; trimHistory() deletes rows beyond the newest MAX_ITEMS
private static final int MAX_ITEMS = 50;
//private static final String[] TEXT_COL_PROJECTION = { DBHelper.TEXT_COL };
// columns fetched when rebuilding Result objects for the history dialog
private static final String[] GET_ITEM_COL_PROJECTION = {
DBHelper.TEXT_COL,
DBHelper.FORMAT_COL,
DBHelper.TIMESTAMP_COL,
};
// columns fetched when exporting the history as CSV (see buildHistory())
private static final String[] EXPORT_COL_PROJECTION = {
DBHelper.TEXT_COL,
DBHelper.DISPLAY_COL,
DBHelper.FORMAT_COL,
DBHelper.TIMESTAMP_COL,
};
// only the row id, used by trimHistory() to delete surplus rows
private static final String[] ID_COL_PROJECTION = { DBHelper.ID_COL };
// NOTE(review): DateFormat instances are not thread-safe; this shared static is fine only
// if buildHistory() is always invoked from a single (UI) thread — TODO confirm.
private static final DateFormat EXPORT_DATE_TIME_FORMAT = DateFormat.getDateTimeInstance();
private final CaptureActivity activity;
public HistoryManager(CaptureActivity activity) {
this.activity = activity;
}
/**
 * Loads every stored scan, newest first, as {@link Result} objects
 * (text, format and timestamp only; raw bytes and result points are null).
 * Returns an empty list if the database cannot be opened.
 */
List<Result> getHistoryItems() {
SQLiteOpenHelper helper = new DBHelper(activity);
List<Result> items = new ArrayList<Result>();
SQLiteDatabase db;
try {
// writable (not readable) so the database file is created on first use — presumably; TODO confirm
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return Collections.emptyList();
}
Cursor cursor = null;
try {
cursor = db.query(DBHelper.TABLE_NAME,
GET_ITEM_COL_PROJECTION,
null, null, null, null,
DBHelper.TIMESTAMP_COL + " DESC");
while (cursor.moveToNext()) {
Result result = new Result(cursor.getString(0),
null,
null,
BarcodeFormat.valueOf(cursor.getString(1)),
cursor.getLong(2));
items.add(result);
}
} finally {
if (cursor != null) {
cursor.close();
}
db.close();
}
return items;
}
/**
 * Builds the history dialog: one entry per stored scan plus two trailing actions,
 * "send history" and "clear history". Click handling is delegated to
 * {@link HistoryClickListener}.
 */
public AlertDialog buildAlert() {
List<Result> items = getHistoryItems();
int size = items.size();
String[] dialogItems = new String[size + 2];
for (int i = 0; i < size; i++) {
dialogItems[i] = items.get(i).getText();
}
Resources res = activity.getResources();
dialogItems[dialogItems.length - 2] = res.getString(R.string.history_send);
dialogItems[dialogItems.length - 1] = res.getString(R.string.history_clear_text);
DialogInterface.OnClickListener clickListener = new HistoryClickListener(this, activity, dialogItems, items);
AlertDialog.Builder builder = new AlertDialog.Builder(activity);
builder.setTitle(R.string.history_title);
builder.setItems(dialogItems, clickListener);
return builder.create();
}
/**
 * Persists one scan result, unless the launching intent opted out of history
 * (Intents.Scan.SAVE_HISTORY = false). When the "remember duplicates" preference
 * is off, any previous row with the same text is removed first. Failures to open
 * the database are logged and swallowed.
 */
public void addHistoryItem(Result result) {
if (!activity.getIntent().getBooleanExtra(Intents.Scan.SAVE_HISTORY, true)) {
return; // Do not save this item to the history.
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(activity);
boolean rememberDuplicates = prefs.getBoolean(PreferencesActivity.KEY_REMEMBER_DUPLICATES, false);
if (!rememberDuplicates) {
deletePrevious(result.getText());
}
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return;
}
try {
// Insert
ContentValues values = new ContentValues();
values.put(DBHelper.TEXT_COL, result.getText());
values.put(DBHelper.FORMAT_COL, result.getBarcodeFormat().toString());
values.put(DBHelper.DISPLAY_COL, result.getText()); // TODO use parsed result display value?
values.put(DBHelper.TIMESTAMP_COL, System.currentTimeMillis());
db.insert(DBHelper.TABLE_NAME, DBHelper.TIMESTAMP_COL, values);
} finally {
db.close();
}
}
/** Deletes every stored scan whose text equals the given text (duplicate suppression). */
private void deletePrevious(String text) {
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return;
}
try {
db.delete(DBHelper.TABLE_NAME, DBHelper.TEXT_COL + "=?", new String[] { text });
} finally {
db.close();
}
}
/**
 * Caps the history size: walks rows newest-first, keeps the first MAX_ITEMS,
 * and deletes every older row one by one.
 */
public void trimHistory() {
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return;
}
Cursor cursor = null;
try {
cursor = db.query(DBHelper.TABLE_NAME,
ID_COL_PROJECTION,
null, null, null, null,
DBHelper.TIMESTAMP_COL + " DESC");
// skip over the MAX_ITEMS newest rows...
int count = 0;
while (count < MAX_ITEMS && cursor.moveToNext()) {
count++;
}
// ...then delete everything older, by row id
while (cursor.moveToNext()) {
db.delete(DBHelper.TABLE_NAME, DBHelper.ID_COL + '=' + cursor.getString(0), null);
}
} finally {
if (cursor != null) {
cursor.close();
}
db.close();
}
}
/**
 * <p>Builds a text representation of the scanning history. Each scan is encoded on one
 * line, terminated by a line break (\r\n). The values in each line are comma-separated,
 * and double-quoted. Double-quotes within values are escaped with a sequence of two
 * double-quotes. The fields output are:</p>
 *
 * <ul>
 * <li>Raw text</li>
 * <li>Display text</li>
 * <li>Format (e.g. QR_CODE)</li>
 * <li>Timestamp</li>
 * <li>Formatted version of timestamp</li>
 * </ul>
 */
CharSequence buildHistory() {
StringBuilder historyText = new StringBuilder(1000);
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return "";
}
Cursor cursor = null;
try {
cursor = db.query(DBHelper.TABLE_NAME,
EXPORT_COL_PROJECTION,
null, null, null, null,
DBHelper.TIMESTAMP_COL + " DESC");
while (cursor.moveToNext()) {
for (int col = 0; col < EXPORT_COL_PROJECTION.length; col++) {
historyText.append('"').append(massageHistoryField(cursor.getString(col))).append("\",");
}
// Add timestamp again, formatted (reads the last projection column, TIMESTAMP_COL)
long timestamp = cursor.getLong(EXPORT_COL_PROJECTION.length - 1);
historyText.append('"').append(massageHistoryField(
EXPORT_DATE_TIME_FORMAT.format(new Date(timestamp)))).append("\"\r\n");
}
} finally {
if (cursor != null) {
cursor.close();
}
db.close();
}
return historyText;
}
/**
 * Writes the CSV history to BarcodeScanner/History on external storage and
 * returns a file:// Uri to it, or null when the directory or file cannot be written.
 */
static Uri saveHistory(String history) {
File bsRoot = new File(Environment.getExternalStorageDirectory(), "BarcodeScanner");
File historyRoot = new File(bsRoot, "History");
if (!historyRoot.exists() && !historyRoot.mkdirs()) {
Log.w(TAG, "Couldn't make dir " + historyRoot);
return null;
}
File historyFile = new File(historyRoot, "history-" + System.currentTimeMillis() + ".csv");
OutputStreamWriter out = null;
try {
out = new OutputStreamWriter(new FileOutputStream(historyFile), Charset.forName("UTF-8"));
out.write(history);
return Uri.parse("file://" + historyFile.getAbsolutePath());
} catch (IOException ioe) {
Log.w(TAG, "Couldn't access file " + historyFile + " due to " + ioe);
return null;
} finally {
if (out != null) {
try {
out.close();
} catch (IOException ioe) {
// do nothing
}
}
}
}
// escapes embedded double quotes for CSV by doubling them
private static String massageHistoryField(String value) {
return value.replace("\"","\"\"");
}
/** Deletes every row from the history table; open failures are logged and swallowed. */
void clearHistory() {
SQLiteOpenHelper helper = new DBHelper(activity);
SQLiteDatabase db;
try {
db = helper.getWritableDatabase();
} catch (SQLiteException sqle) {
Log.w(TAG, "Error while opening database", sqle);
return;
}
try {
db.delete(DBHelper.TABLE_NAME, null, null);
} finally {
db.close();
}
}
}
|
Issue 679 allow much more history
git-svn-id: d565a5fbffec933846bf643895bf9c245569575b@1700 59b500cc-1b3d-0410-9834-0bbf25fbcc57
|
android/src/com/google/zxing/client/android/history/HistoryManager.java
|
Issue 679 allow much more history
|
|
Java
|
apache-2.0
|
c00fb31c33a621f2b9a5ffd7557f4b7a50f53c5b
| 0
|
amezgin/amezgin,amezgin/amezgin
|
package ru.job4j.frogsleap;
import java.util.HashMap;
import java.util.Map;
/**
* The class FrogsLeap.
*
* @author Alexander Mezgin
* @version 1.0
* @since 14.07.2017
*/
public class FrogsLeap {
/**
 * The field where the jumping frog lives: 16 segments x 10 rings.
 * Cell values: 0 = not yet reached, -1 = tree (blocked), k > 0 = reached after (k - 1) jumps.
 */
private int[][] circle = new int[16][10];
/**
 * The way that makes the frog: maps jump number -> "(segment, ring)" coordinate string.
 * NOTE(review): iteration over a HashMap's values has no guaranteed order, so
 * showPath() may not print the path in jump order — a TreeMap would be deterministic; TODO confirm.
 */
private Map<Integer, String> path = new HashMap<>();
/**
 * This method considers the minimum number of jumps.
 * Coordinates are 1-based "segment, ring" strings; internally they are converted to 0-based indices.
 *
 * @param startSector the start sector.
 * @param finishSector the finish sector.
 * @param countTree the number of trees.
 * @param coordTree the coordinates the trees.
 * @return the minimum number of jumps.
 */
public String countJump(String startSector, String finishSector, int countTree, String... coordTree) {
String[] startCoords = startSector.split(",");
int startSegment = Integer.parseInt(startCoords[0].trim());
int startRing = Integer.parseInt(startCoords[1].trim());
// mark blocked cells before exploring
for (int i = 0; i < countTree; i++) {
setTreeInArray(coordTree[i], -1);
}
// flood-fill jump counts from the start cell (offsets 0,0 process the start itself)
jump(startSegment - 1, startRing - 1, 0, 0);
String[] finishCoords = finishSector.split(",");
int finishSegment = Integer.parseInt(finishCoords[0].trim());
int finishRing = Integer.parseInt(finishCoords[1].trim());
String result;
// <= 0 means unreached (0) or a tree (-1)
if (this.circle[finishSegment - 1][finishRing - 1] <= 0) {
result = "The frog cannot jump into this sector!";
} else {
createPath(finishSegment - 1, finishRing - 1, this.circle[finishSegment - 1][finishRing - 1]);
showPath();
// stored value is jumps + 1, hence the subtraction
result = String.format("The frog can jump into sector (%s) for %d jumps!", finishSector,
this.circle[finishSegment - 1][finishRing - 1] - 1);
}
return result;
}
/**
 * This method set trees on the field.
 *
 * @param coord coordinates trees, as a 1-based "segment, ring" string.
 * @param sign the sign of the tree (callers pass -1).
 */
private void setTreeInArray(String coord, int sign) {
String[] coords = coord.split(",");
int startSegment = Integer.parseInt(coords[0].trim());
int startRing = Integer.parseInt(coords[1].trim());
this.circle[startSegment - 1][startRing - 1] = sign;
}
/**
 * This method describes the jump: a recursive relaxation that records, per cell,
 * (jumps + 1) needed to reach it, wrapping segments modulo 16.
 *
 * <p>NOTE(review): the pruning condition skips only cells already holding a strictly
 * smaller positive value; a cell holding a value equal to the current count is
 * overwritten with count + 1 and re-expanded, which looks like it can temporarily
 * inflate equal-distance cells. Later recursive calls appear to re-relax them,
 * but minimality should be verified — TODO confirm.</p>
 *
 * @param segment the segment on the field.
 * @param ring the ring on the field.
 * @param countSectorJump the number of sectors for jump.
 * @param countRingJump the number of rings for jump.
 */
private void jump(int segment, int ring, int countSectorJump, int countRingJump) {
int countJump = this.circle[segment][ring];
segment = (segment + countSectorJump) % 16;
ring = ring + countRingJump;
// stop at the field edge, at a tree, or when this cell was already reached faster
if (ring > 9 || ring < 0 || this.circle[segment][ring] == -1
|| this.circle[segment][ring] < countJump && this.circle[segment][ring] > 0) {
return;
}
this.circle[segment][ring] = countJump + 1;
// the five legal frog moves: (segments, rings) offsets
jump(segment, ring, 3, 0);
jump(segment, ring, 1, 2);
jump(segment, ring, 1, -2);
jump(segment, ring, 2, 1);
jump(segment, ring, 2, -1);
}
/**
 * This method save the frog path by walking backwards from the finish cell,
 * recording one "(segment, ring)" per jump count.
 *
 * <p>NOTE(review): the displayed segment {@code (finishSegment + 17) % 16} evaluates to 0
 * when the 1-based segment should be 16 (e.g. finishSegment == 15); it looks like it
 * should be {@code (finishSegment + 16) % 16 + 1} — TODO confirm.</p>
 *
 * @param finishSegment the finish segment (0-based, may go negative during recursion).
 * @param finishRing the finish ring.
 * @param jump count jump in definite sector.
 */
private void createPath(int finishSegment, int finishRing, int jump) {
String coord = String.format("(%d, %d)", (finishSegment + 17) % 16, finishRing + 1);
if (finishRing > 9 || finishRing < 0 || this.circle[(finishSegment + 16) % 16][finishRing] == -1 || jump == 0) {
return;
}
if (this.circle[(finishSegment + 16) % 16][finishRing] == jump) {
this.path.put(jump, coord);
}
// try all five moves in reverse
createPath(finishSegment - 3, finishRing, jump - 1);
createPath(finishSegment - 2, finishRing - 1, jump - 1);
createPath(finishSegment - 2, finishRing + 1, jump - 1);
createPath(finishSegment - 1, finishRing - 2, jump - 1);
createPath(finishSegment - 1, finishRing + 2, jump - 1);
}
/**
 * This method show the frog path by printing each recorded coordinate to stdout.
 */
private void showPath() {
for (String value : path.values()) {
System.out.print(value);
}
}
}
|
chapter_004/testtask/src/main/java/ru/job4j/frogsleap/FrogsLeap.java
|
package ru.job4j.frogsleap;
import java.util.ArrayList;
import java.util.List;
/**
 * The class FrogsLeap solves the task of finding the minimum number of
 * jumps a frog needs on a circular field of 16 segments by 10 rings,
 * possibly blocked by trees.
 *
 * @author Alexander Mezgin
 * @version 1.0
 * @since 14.07.2017
 */
public class FrogsLeap {
    /**
     * The field where the jumping frog. A cell holds -1 for a tree, 0 for
     * an unvisited cell, otherwise (minimal jump count from the start + 1).
     */
    private int[][] circle = new int[16][10];
    /**
     * The way that makes the frog, collected finish-to-start and printed
     * in reverse by {@link #showPath()}.
     */
    private List<String> path = new ArrayList<>();
    /**
     * This method considers the minimum number of jumps.
     *
     * @param startSector the start sector, e.g. "1, 1" (1-based).
     * @param finishSector the finish sector, e.g. "4, 1" (1-based).
     * @param countTree the number of trees.
     * @param coordTree the coordinates the trees.
     * @return a message with the minimum number of jumps, or a message that
     *         the finish sector is unreachable.
     */
    public String countJump(String startSector, String finishSector, int countTree, String... coordTree) {
        String[] startCoords = startSector.split(",");
        int startSegment = Integer.parseInt(startCoords[0].trim());
        int startRing = Integer.parseInt(startCoords[1].trim());
        for (int i = 0; i < countTree; i++) {
            setTreeInArray(coordTree[i], -1);
        }
        // Wave fill from the start cell; afterwards each reachable cell
        // holds (minimal jumps + 1), the start cell holds 1.
        jump(startSegment - 1, startRing - 1, 0, 0);
        String[] finishCoords = finishSector.split(",");
        int finishSegment = Integer.parseInt(finishCoords[0].trim());
        int finishRing = Integer.parseInt(finishCoords[1].trim());
        String result;
        if (this.circle[finishSegment - 1][finishRing - 1] <= 0) {
            result = "The frog cannot jump into this sector!";
        } else {
            createPath(startSegment, startRing, finishSegment, finishRing);
            showPath();
            result = String.format("The frog can jump into sector (%s) for %d jumps!", finishSector,
                    this.circle[finishSegment - 1][finishRing - 1] - 1);
        }
        return result;
    }
    /**
     * This method set trees on the field.
     *
     * @param coord coordinates trees, "segment, ring" 1-based.
     * @param sign the sign of the tree (-1).
     */
    private void setTreeInArray(String coord, int sign) {
        String[] coords = coord.split(",");
        int startSegment = Integer.parseInt(coords[0].trim());
        int startRing = Integer.parseInt(coords[1].trim());
        this.circle[startSegment - 1][startRing - 1] = sign;
    }
    /**
     * This method describes the jump: a recursive wave fill that carries the
     * current cell's value one move further, storing (jump count + 1) in
     * every reachable cell.
     *
     * @param segment the segment on the field (0-based, wraps modulo 16).
     * @param ring the ring on the field (0-based, 0..9).
     * @param countSectorJump the number of sectors for jump.
     * @param countRingJump the number of rings for jump (may be negative).
     */
    private void jump(int segment, int ring, int countSectorJump, int countRingJump) {
        int countJump = this.circle[segment][ring];
        segment = (segment + countSectorJump) % 16;
        ring = ring + countRingJump;
        // Stop off-field, on a tree (-1), or when the target already holds a
        // smaller (better) positive jump count.
        if (ring > 9 || ring < 0 || this.circle[segment][ring] == -1
            || this.circle[segment][ring] < countJump && this.circle[segment][ring] > 0) {
            return;
        }
        this.circle[segment][ring] = countJump + 1;
        // The five allowed moves of the frog.
        jump(segment, ring, 3, 0);
        jump(segment, ring, 1, 2);
        jump(segment, ring, 1, -2);
        jump(segment, ring, 2, 1);
        jump(segment, ring, 2, -1);
    }
    /**
     * This method save the frog path by walking backwards from the finish
     * cell: on every iteration it searches for a predecessor cell whose wave
     * value is exactly one less and which is a legal move away.
     * (Renamed from the misspelled "creatPath".)
     *
     * @param startSegment the start segment (1-based).
     * @param startRing the start ring (1-based).
     * @param finishSegment the finish segment (1-based).
     * @param finishRing the finish ring (1-based).
     */
    private void createPath(int startSegment, int startRing, int finishSegment, int finishRing) {
        int countJump = this.circle[finishSegment - 1][finishRing - 1];
        String coord = String.format("The finish coordinates is (%d, %d)", finishSegment, finishRing);
        this.path.add(coord);
        finishSegment = finishSegment - 1;
        int currentRing = finishRing - 1;
        // NOTE(review): terminates only if every iteration finds a
        // predecessor with value (current - 1) at combined distance 3;
        // verify the wave fill always guarantees this, otherwise the loop
        // can spin forever.
        while (countJump != 2) {
            int changeSegment = 0;
            int findStep = 0;
            for (int segmentStep = 1; segmentStep <= 3; segmentStep++) {
                for (int ring = 0; ring < 10; ring++) {
                    if (this.circle[finishSegment][currentRing]
                        - this.circle[(finishSegment - segmentStep + 16) % 16][ring] == 1
                        && (segmentStep + Math.abs(currentRing - ring)) == 3) {
                        currentRing = ring;
                        changeSegment = segmentStep;
                        countJump--;
                        findStep++;
                        coord = String.format("(%d, %d)", (finishSegment - segmentStep + 16) % 16 + 1, ring + 1);
                        this.path.add(coord);
                        break;
                    }
                }
                if (findStep != 0) {
                    break;
                }
            }
            finishSegment = (finishSegment - changeSegment + 16) % 16;
        }
        coord = String.format("The start coordinates is(%d, %d)", startSegment, startRing);
        this.path.add(coord);
    }
    /**
     * This method show the frog path, printing the collected steps in
     * reverse so they read start-to-finish.
     */
    private void showPath() {
        for (int i = this.path.size() - 1; i >= 0; i--) {
            System.out.println(this.path.get(i));
        }
    }
}
|
Задание о минимальном количестве прыжков лягушки. Fixed display of the path of the jumping frog.
|
chapter_004/testtask/src/main/java/ru/job4j/frogsleap/FrogsLeap.java
|
Задание о минимальном количестве прыжков лягушки. Fixed display of the path of the jumping frog.
|
|
Java
|
apache-2.0
|
3eec7af47d50502efa410ca9407554bda418e1f8
| 0
|
EstefaniaGilVaquero/ciceAndroid,EstefaniaGilVaquero/ciceAndroid
|
package com.example.vale.textinputlayout;
import android.app.Activity;
import android.support.design.widget.TextInputLayout;
import android.util.Log;
import android.util.Patterns;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
/**
 * Focus listener that validates the e-mail and phone input fields when they
 * lose focus, showing an error on the wrapping TextInputLayout.
 *
 * Created by vale on 6/06/16.
 */
public class FocusListener implements View.OnFocusChangeListener {
    /** Host activity, used to look up the TextInputLayout wrappers. */
    private Activity actividad;
    /**
     * @param activity the activity that owns the validated views.
     */
    public FocusListener(Activity activity)
    {
        this.actividad = activity;
    }
    /**
     * Ideally this method should live in a Util/Validation class or similar.
     *
     * @param email candidate address.
     * @return true when it matches the platform e-mail pattern.
     */
    private boolean emailValido(String email)
    {
        return (Patterns.EMAIL_ADDRESS.matcher(email).matches());
    }
    /**
     * @param telefono candidate phone number.
     * @return true when it matches the platform phone pattern.
     */
    private boolean telefonoValido(String telefono)
    {
        return Patterns.PHONE.matcher(telefono).matches();
    }
    /**
     * Checks whether the sum of the digits of the phone number is even.
     *
     * @param telefono phone number text; may contain non-digit characters
     *                 such as '+', spaces or dashes.
     * @return true when the digit sum is a multiple of 2.
     */
    public boolean validarDivisible(String telefono) {
        int largo = telefono.length(), sum = 0;
        boolean valido = false;
        for (int i = 0; i < largo; i++) {
            Character character = telefono.charAt(i);
            // Fix: only sum digit characters; previously Integer.parseInt
            // threw NumberFormatException on '+', '-' or spaces.
            if (Character.isDigit(character)) {
                sum += Integer.parseInt(String.valueOf(character));
            }
        }
        if ((sum % 2) == 0) {
            valido = true;
        }
        System.out.println(" Cadena es: " + telefono);
        System.out.println(" suma es: " + sum);
        return valido;
    }
    @Override
    public void onFocusChange(View v, boolean hasFocus) {
        if (!hasFocus) {
            switch (v.getId()) {
                case R.id.cajamail:
                    EditText cajatextomail = (EditText) v;
                    String mailintroducido = cajatextomail.getText().toString();
                    if (!emailValido(mailintroducido)) {
                        TextInputLayout wrapmail = (TextInputLayout) actividad.findViewById(R.id.tilcajamail);
                        wrapmail.setError("Mail Incorrecto");
                    }
                    break;
                case R.id.cajatelf:
                    EditText cajatextotelefono = (EditText) v;
                    String telefonoIntroducido = cajatextotelefono.getText().toString();
                    if (!telefonoValido(telefonoIntroducido)) {
                        TextInputLayout wrapmail = (TextInputLayout) actividad.findViewById(R.id.tilcajatelf);
                        // Fix: this branch checks the phone FORMAT, so it must
                        // show the "invalid phone" message (the two messages
                        // were swapped with the digit-sum check below).
                        wrapmail.setError("Telefono no Valido");
                    }
                    if (!validarDivisible(telefonoIntroducido)) {
                        TextInputLayout wrapmail = (TextInputLayout) actividad.findViewById(R.id.tilcajatelf);
                        wrapmail.setError("Suma no multiplo de 2");
                    }
                    break;
            }
        }
    }
}
|
TextInputLayout/app/src/main/java/com/example/vale/textinputlayout/FocusListener.java
|
package com.example.vale.textinputlayout;
import android.app.Activity;
import android.support.design.widget.TextInputLayout;
import android.util.Log;
import android.util.Patterns;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
/**
 * Focus listener that validates the e-mail and phone input fields when they
 * lose focus, showing an error on the wrapping TextInputLayout.
 *
 * Created by vale on 6/06/16.
 */
public class FocusListener implements View.OnFocusChangeListener {
    /** Host activity, used to look up the TextInputLayout wrappers. */
    private Activity actividad;
    /**
     * @param activity the activity that owns the validated views.
     */
    public FocusListener(Activity activity)
    {
        this.actividad = activity;
    }
    /**
     * Ideally this method should live in a Util/Validation class or similar.
     *
     * @param email candidate address.
     * @return true when it matches the platform e-mail pattern.
     */
    private boolean emailValido(String email)
    {
        return (Patterns.EMAIL_ADDRESS.matcher(email).matches());
    }
    /**
     * @param telefono candidate phone number.
     * @return true when it matches the platform phone pattern.
     */
    private boolean telefonoValido(String telefono)
    {
        return Patterns.PHONE.matcher(telefono).matches();
    }
    /**
     * Checks whether the sum of the digits of the phone number is even;
     * non-digit characters ('+', spaces, dashes) are skipped.
     *
     * @param telefono phone number text.
     * @return true when the digit sum is a multiple of 2.
     */
    public boolean validarDivisible(String telefono) {
        int largo = telefono.length(), sum = 0;
        boolean valido = false;
        for (int i = 0; i < largo; i++) {
            Character character = telefono.charAt(i);
            if (Character.isDigit(character)) {
                sum += Integer.parseInt(String.valueOf(character));
            }
        }
        if ((sum % 2) == 0) {
            valido = true;
        }
        System.out.println(" Cadena es: " + telefono);
        System.out.println(" suma es: " + sum);
        return valido;
    }
    @Override
    public void onFocusChange(View v, boolean hasFocus) {
        if (!hasFocus) {
            switch (v.getId()) {
                case R.id.cajamail:
                    EditText cajatextomail = (EditText) v;
                    String mailintroducido = cajatextomail.getText().toString();
                    if (!emailValido(mailintroducido)) {
                        TextInputLayout wrapmail = (TextInputLayout) actividad.findViewById(R.id.tilcajamail);
                        wrapmail.setError("Mail Incorrecto");
                    }
                    break;
                case R.id.cajatelf:
                    EditText cajatextotelefono = (EditText) v;
                    String telefonoIntroducido = cajatextotelefono.getText().toString();
                    if (!telefonoValido(telefonoIntroducido)) {
                        TextInputLayout wrapmail = (TextInputLayout) actividad.findViewById(R.id.tilcajatelf);
                        // Fix: this branch checks the phone FORMAT, so it must
                        // show the "invalid phone" message (the two messages
                        // were swapped with the digit-sum check below).
                        wrapmail.setError("Telefono no Valido");
                    }
                    if (!validarDivisible(telefonoIntroducido)) {
                        TextInputLayout wrapmail = (TextInputLayout) actividad.findViewById(R.id.tilcajatelf);
                        wrapmail.setError("Suma no multiplo de 2");
                    }
                    break;
            }
        }
    }
}
|
Mejoras
|
TextInputLayout/app/src/main/java/com/example/vale/textinputlayout/FocusListener.java
|
Mejoras
|
|
Java
|
apache-2.0
|
4e2863c5e3a8fd141a9a2778ad4695ead6dc8bc9
| 0
|
joansmith/pdfbox,joansmith/pdfbox,benmccann/pdfbox,veraPDF/veraPDF-pdfbox,veraPDF/veraPDF-pdfbox,BezrukovM/veraPDF-pdfbox,benmccann/pdfbox,ZhenyaM/veraPDF-pdfbox,mathieufortin01/pdfbox,BezrukovM/veraPDF-pdfbox,ChunghwaTelecom/pdfbox,mdamt/pdfbox,ChunghwaTelecom/pdfbox,gavanx/pdflearn,ZhenyaM/veraPDF-pdfbox,gavanx/pdflearn,mathieufortin01/pdfbox,mdamt/pdfbox
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.tools.pdfdebugger.flagbitspane;
/**
* @author Khyrul Bashar
*/
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import javax.swing.Box;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.font.PDFontDescriptor;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAnnotation;
/**
* A class that displays flag bits found in many Flags entry in PDF document's dictionaries
* detail whether a particular bit is set or unset.
*/
public class FlagBitsPane
{
private static final String[] COLUMNNAMES = {"Bit\nPosition", "Name", "Status"};
private JPanel panel;
/**
* Constructor
* @param dictionary COSDictionary instance that contains the flag
* @param flagKey the flag key in the dictionary
*/
public FlagBitsPane(COSDictionary dictionary, COSName flagKey)
{
initUI(dictionary, flagKey);
}
private void initUI(final COSDictionary dictionary, final COSName flagKey)
{
Object[][] flagBits = getFlagBits(dictionary, flagKey);
panel = new JPanel();
panel.setLayout(new GridBagLayout());
panel.setPreferredSize(new Dimension(300, 500));
JLabel flagLabel = new JLabel(getFlagTypeString(dictionary, flagKey) + " Flags");
flagLabel.setAlignmentX(Component.CENTER_ALIGNMENT);
flagLabel.setFont(new Font(Font.MONOSPACED, Font.BOLD, 30));
JPanel flagLabelPanel = new JPanel();
flagLabelPanel.setAlignmentX(Component.LEFT_ALIGNMENT);
flagLabelPanel.add(flagLabel);
JLabel flagValueLabel = new JLabel("Flag value: " + getFlagValue(dictionary, flagKey));
flagValueLabel.setAlignmentX(Component.LEFT_ALIGNMENT);
flagValueLabel.setFont(new Font(Font.MONOSPACED, Font.BOLD, 20));
JTable table = new JTable(flagBits, COLUMNNAMES);
JScrollPane scrollPane = new JScrollPane(table);
table.setFillsViewportHeight(true);
scrollPane.setAlignmentX(Component.LEFT_ALIGNMENT);
Box box = Box.createVerticalBox();
box.add(flagValueLabel);
box.add(scrollPane);
box.setAlignmentX(Component.LEFT_ALIGNMENT);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = 0;
gbc.gridy = 0;
gbc.weighty = 0.05;
gbc.fill = GridBagConstraints.HORIZONTAL;
gbc.anchor = GridBagConstraints.PAGE_START;
panel.add(flagLabelPanel, gbc);
gbc.gridy = 2;
gbc.weighty = 0.9;
gbc.weightx = 1;
gbc.fill = GridBagConstraints.BOTH;
gbc.anchor = GridBagConstraints.BELOW_BASELINE;
panel.add(box, gbc);
}
private String getFlagTypeString(final COSDictionary dictionary, final COSName flagKey)
{
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.FONT_DESC) && flagKey.equals(COSName.FLAGS))
{
return "Font";
}
else if (dictionary.getCOSName(COSName.TYPE).equals(COSName.ANNOT) && flagKey.equals(COSName.F))
{
return "Annot:" + dictionary.getCOSName(COSName.SUBTYPE).getName();
}
else if (flagKey.equals(COSName.FF) && flagKey.equals(COSName.FF))
{
if (dictionary.getCOSName(COSName.FT).equals(COSName.TX))
{
return "Text field";
}
else if (dictionary.getCOSName(COSName.FT).equals(COSName.BTN))
{
return "Button field";
}
else if (dictionary.getCOSName(COSName.FT).equals(COSName.CH))
{
return "Choice field";
}
}
return null;
}
private int getFlagValue(final COSDictionary dictionary, final COSName flagKey)
{
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.FONT_DESC) && flagKey.equals(COSName.FLAGS))
{
return dictionary.getInt(COSName.FLAGS);
}
//TODO Type key is not Required field in the dictionary. So we need a better way to Identify.
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.ANNOT) && flagKey.equals(COSName.F))
{
return dictionary.getInt(COSName.F);
}
if (dictionary.containsKey(COSName.FT))
{
return dictionary.getInt(COSName.FF);
}
return 0;
}
private Object[][] getFlagBits(final COSDictionary dictionary, COSName flagKey)
{
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.FONT_DESC) && flagKey.equals(COSName.FLAGS))
{
return getFontFlagBits(dictionary);
}
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.ANNOT) && flagKey.equals(COSName.F))
{
return getAnnotFlagBits(dictionary);
}
if (dictionary.containsKey(COSName.FT) && flagKey.equals(COSName.FF))
{
return getFieldFlagBits(dictionary, flagKey);
}
return null;
}
private Object[][] getAnnotFlagBits(COSDictionary dictionary)
{
PDAnnotation annotation = new PDAnnotation(dictionary)
{
};
return new Object[][]{
new Object[]{1, "Invisible", annotation.isInvisible()},
new Object[]{2, "Hidden", annotation.isHidden()},
new Object[]{3, "Print", annotation.isPrinted()},
new Object[]{4, "NoZoom", annotation.isNoZoom()},
new Object[]{5, "NoRotate", annotation.isNoRotate()},
new Object[]{6, "NoView", annotation.isNoView()},
new Object[]{7, "ReadOnly", annotation.isReadOnly()},
new Object[]{8, "Locked", annotation.isLocked()},
new Object[]{9, "ToggleNoView", annotation.isToggleNoView()},
new Object[]{10, "LockedContents", annotation.isLocked()}
};
}
private Object[][] getFontFlagBits(final COSDictionary dictionary)
{
PDFontDescriptor fontDesc = new PDFontDescriptor(dictionary);
return new Object[][]{
new Object[]{1, "FixedPitch", fontDesc.isFixedPitch()},
new Object[]{2, "Serif", fontDesc.isSerif()},
new Object[]{3, "Symbolic", fontDesc.isSymbolic()},
new Object[]{4, "Script", fontDesc.isScript()},
new Object[]{6, "NonSymbolic", fontDesc.isNonSymbolic()},
new Object[]{7, "Italic", fontDesc.isItalic()},
new Object[]{17, "AllCap", fontDesc.isAllCap()},
new Object[]{18, "SmallCap", fontDesc.isSmallCap()},
new Object[]{19, "ForceBold", fontDesc.isForceBold()}
};
}
private Object[][] getFieldFlagBits(final COSDictionary dictionary, final COSName flagKey)
{
int flagValue = getFlagValue(dictionary, flagKey);
if (dictionary.getCOSName(COSName.FT).equals(COSName.TX))
{
return getTextFieldFlagBits(flagValue);
}
else if (dictionary.getCOSName(COSName.FT).equals(COSName.BTN))
{
return getButtonFieldFlagBits(flagValue);
}
else if (dictionary.getCOSName(COSName.FT).equals(COSName.CH))
{
return getChoiceFieldFlagBits(flagValue);
}
return null;
}
private Object[][] getTextFieldFlagBits(final int flagValue)
{
return new Object[][]{
new Object[]{1, "ReadOnly", isFlagBitSet(flagValue, 1)},
new Object[]{2, "Required", isFlagBitSet(flagValue, 2)},
new Object[]{3, "NoExport", isFlagBitSet(flagValue, 3)},
new Object[]{13, "Multiline", isFlagBitSet(flagValue, 13)},
new Object[]{14, "Password", isFlagBitSet(flagValue, 14)},
new Object[]{21, "FileSelect", isFlagBitSet(flagValue, 21)},
new Object[]{23, "DoNotSpellCheck", isFlagBitSet(flagValue, 23)},
new Object[]{24, "DoNotScroll", isFlagBitSet(flagValue, 24)},
new Object[]{25, "Comb", isFlagBitSet(flagValue, 25)},
new Object[]{26, "RichText", isFlagBitSet(flagValue, 26)}
};
}
private Object[][] getButtonFieldFlagBits(final int flagValue)
{
return new Object[][]{
new Object[]{1, "ReadOnly", isFlagBitSet(flagValue, 1)},
new Object[]{2, "Required", isFlagBitSet(flagValue, 2)},
new Object[]{3, "NoExport", isFlagBitSet(flagValue, 3)},
new Object[]{15, "NoToggleToOff", isFlagBitSet(flagValue, 15)},
new Object[]{16, "Radio", isFlagBitSet(flagValue, 16)},
new Object[]{17, "Pushbutton", isFlagBitSet(flagValue, 17)},
new Object[]{26, "RadiosInUnison", isFlagBitSet(flagValue, 26)}
};
}
private Object[][] getChoiceFieldFlagBits(final int flagValue)
{
return new Object[][]{
new Object[]{1, "ReadOnly", isFlagBitSet(flagValue, 1)},
new Object[]{2, "Required", isFlagBitSet(flagValue, 2)},
new Object[]{3, "NoExport", isFlagBitSet(flagValue, 3)},
new Object[]{18, "Combo", isFlagBitSet(flagValue, 18)},
new Object[]{19, "Edit", isFlagBitSet(flagValue, 19)},
new Object[]{20, "Sort", isFlagBitSet(flagValue, 20)},
new Object[]{22, "MultiSelect", isFlagBitSet(flagValue, 22)},
new Object[]{23, "DoNotSpellCheck", isFlagBitSet(flagValue, 23)},
new Object[]{27, "CommitOnSelChange", isFlagBitSet(flagValue, 27)}
};
}
/**
* Check the corresponding flag bit if set or not
* @param flagValue the flag integer
* @param bitPosition bit position to check
* @return if set return true else false
*/
private boolean isFlagBitSet(int flagValue, int bitPosition)
{
int binaryFormat = 1 << (bitPosition - 1);
return (flagValue & binaryFormat) == binaryFormat;
}
/**
* @return returns the JPanel instance
*/
public JPanel getPanel()
{
return panel;
}
}
|
tools/src/main/java/org/apache/pdfbox/tools/pdfdebugger/flagbitspane/FlagBitsPane.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.tools.pdfdebugger.flagbitspane;
/**
* @author Khyrul Bashar
*/
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import javax.swing.Box;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.font.PDFontDescriptor;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAnnotation;
/**
* A class that displays flag bits found in many Flags entry in PDF document's dictionaries
* detail whether a particular bit is set or unset.
*/
public class FlagBitsPane
{
private final static String[] columnNames = {"Bit\nPosition", "Name", "Status"};
private JPanel panel;
/**
* Constructor
* @param dictionary COSDictionary instance that contains the flag
* @param flagKey the flag key in the dictionary
*/
public FlagBitsPane(COSDictionary dictionary, COSName flagKey)
{
initUI(dictionary, flagKey);
}
private void initUI(final COSDictionary dictionary, final COSName flagKey)
{
Object[][] flagBits = getFlagBits(dictionary, flagKey);
panel = new JPanel();
panel.setLayout(new GridBagLayout());
panel.setPreferredSize(new Dimension(300, 500));
JLabel flagLabel = new JLabel(getFlagTypeString(dictionary, flagKey) + " Flags");
flagLabel.setAlignmentX(Component.CENTER_ALIGNMENT);
flagLabel.setFont(new Font(Font.MONOSPACED, Font.BOLD, 30));
JPanel flagLabelPanel = new JPanel();
flagLabelPanel.setAlignmentX(Component.LEFT_ALIGNMENT);
flagLabelPanel.add(flagLabel);
JLabel flagValueLabel = new JLabel("Flag value: " + getFlagValue(dictionary, flagKey));
flagValueLabel.setAlignmentX(Component.LEFT_ALIGNMENT);
flagValueLabel.setFont(new Font(Font.MONOSPACED, Font.BOLD, 20));
JTable table = new JTable(flagBits, columnNames);
JScrollPane scrollPane = new JScrollPane(table);
table.setFillsViewportHeight(true);
scrollPane.setAlignmentX(Component.LEFT_ALIGNMENT);
Box box = Box.createVerticalBox();
box.add(flagValueLabel);
box.add(scrollPane);
box.setAlignmentX(Component.LEFT_ALIGNMENT);
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = 0;
gbc.gridy = 0;
gbc.weighty = 0.05;
gbc.fill = GridBagConstraints.HORIZONTAL;
gbc.anchor = GridBagConstraints.PAGE_START;
panel.add(flagLabelPanel, gbc);
gbc.gridy = 2;
gbc.weighty = 0.9;
gbc.weightx = 1;
gbc.fill = GridBagConstraints.BOTH;
gbc.anchor = GridBagConstraints.BELOW_BASELINE;
panel.add(box, gbc);
}
private String getFlagTypeString(final COSDictionary dictionary, final COSName flagKey)
{
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.FONT_DESC) && flagKey.equals(COSName.FLAGS))
{
return "Font";
}
else if (dictionary.getCOSName(COSName.TYPE).equals(COSName.ANNOT) && flagKey.equals(COSName.F))
{
return "Annot:" + dictionary.getCOSName(COSName.SUBTYPE).getName();
}
else if (flagKey.equals(COSName.FF) && flagKey.equals(COSName.FF))
{
if (dictionary.getCOSName(COSName.FT).equals(COSName.TX))
{
return "Text field";
}
else if (dictionary.getCOSName(COSName.FT).equals(COSName.BTN))
{
return "Button field";
}
else if (dictionary.getCOSName(COSName.FT).equals(COSName.CH))
{
return "Choice field";
}
}
return null;
}
private int getFlagValue(final COSDictionary dictionary, final COSName flagKey)
{
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.FONT_DESC) && flagKey.equals(COSName.FLAGS))
{
return dictionary.getInt(COSName.FLAGS);
}
//TODO Type key is not Required field in the dictionary. So we need a better way to Identify.
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.ANNOT) && flagKey.equals(COSName.F))
{
return dictionary.getInt(COSName.F);
}
if (dictionary.containsKey(COSName.FT))
{
return dictionary.getInt(COSName.FF);
}
return 0;
}
private Object[][] getFlagBits(final COSDictionary dictionary, COSName flagKey)
{
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.FONT_DESC) && flagKey.equals(COSName.FLAGS))
{
return getFontFlagBits(dictionary);
}
if (dictionary.getCOSName(COSName.TYPE).equals(COSName.ANNOT) && flagKey.equals(COSName.F))
{
return getAnnotFlagBits(dictionary);
}
if (dictionary.containsKey(COSName.FT) && flagKey.equals(COSName.FF))
{
return getFieldFlagBits(dictionary, flagKey);
}
return null;
}
private Object[][] getAnnotFlagBits(COSDictionary dictionary)
{
PDAnnotation annotation = new PDAnnotation(dictionary)
{
};
return new Object[][]{
new Object[]{1, "Invisible", annotation.isInvisible()},
new Object[]{2, "Hidden", annotation.isHidden()},
new Object[]{3, "Print", annotation.isPrinted()},
new Object[]{4, "NoZoom", annotation.isNoZoom()},
new Object[]{5, "NoRotate", annotation.isNoRotate()},
new Object[]{6, "NoView", annotation.isNoView()},
new Object[]{7, "ReadOnly", annotation.isReadOnly()},
new Object[]{8, "Locked", annotation.isLocked()},
new Object[]{9, "ToggleNoView", annotation.isToggleNoView()},
new Object[]{10, "LockedContents", annotation.isLocked()}
};
}
private Object[][] getFontFlagBits(final COSDictionary dictionary)
{
PDFontDescriptor fontDesc = new PDFontDescriptor(dictionary);
return new Object[][]{
new Object[]{1, "FixedPitch", fontDesc.isFixedPitch()},
new Object[]{2, "Serif", fontDesc.isSerif()},
new Object[]{3, "Symbolic", fontDesc.isSymbolic()},
new Object[]{4, "Script", fontDesc.isScript()},
new Object[]{6, "NonSymbolic", fontDesc.isNonSymbolic()},
new Object[]{7, "Italic", fontDesc.isItalic()},
new Object[]{17, "AllCap", fontDesc.isAllCap()},
new Object[]{18, "SmallCap", fontDesc.isSmallCap()},
new Object[]{19, "ForceBold", fontDesc.isForceBold()}
};
}
private Object[][] getFieldFlagBits(final COSDictionary dictionary, final COSName flagKey)
{
int flagValue = getFlagValue(dictionary, flagKey);
if (dictionary.getCOSName(COSName.FT).equals(COSName.TX))
{
return getTextFieldFlagBits(flagValue);
}
else if (dictionary.getCOSName(COSName.FT).equals(COSName.BTN))
{
return getButtonFieldFlagBits(flagValue);
}
else if (dictionary.getCOSName(COSName.FT).equals(COSName.CH))
{
return getChoiceFieldFlagBits(flagValue);
}
return null;
}
private Object[][] getTextFieldFlagBits(final int flagValue)
{
return new Object[][]{
new Object[]{1, "ReadOnly", isFlagBitSet(flagValue, 1)},
new Object[]{2, "Required", isFlagBitSet(flagValue, 2)},
new Object[]{3, "NoExport", isFlagBitSet(flagValue, 3)},
new Object[]{13, "Multiline", isFlagBitSet(flagValue, 13)},
new Object[]{14, "Password", isFlagBitSet(flagValue, 14)},
new Object[]{21, "FileSelect", isFlagBitSet(flagValue, 21)},
new Object[]{23, "DoNotSpellCheck", isFlagBitSet(flagValue, 23)},
new Object[]{24, "DoNotScroll", isFlagBitSet(flagValue, 24)},
new Object[]{25, "Comb", isFlagBitSet(flagValue, 25)},
new Object[]{26, "RichText", isFlagBitSet(flagValue, 26)}
};
}
private Object[][] getButtonFieldFlagBits(final int flagValue)
{
return new Object[][]{
new Object[]{1, "ReadOnly", isFlagBitSet(flagValue, 1)},
new Object[]{2, "Required", isFlagBitSet(flagValue, 2)},
new Object[]{3, "NoExport", isFlagBitSet(flagValue, 3)},
new Object[]{15, "NoToggleToOff", isFlagBitSet(flagValue, 15)},
new Object[]{16, "Radio", isFlagBitSet(flagValue, 16)},
new Object[]{17, "Pushbutton", isFlagBitSet(flagValue, 17)},
new Object[]{26, "RadiosInUnison", isFlagBitSet(flagValue, 26)}
};
}
private Object[][] getChoiceFieldFlagBits(final int flagValue)
{
return new Object[][]{
new Object[]{1, "ReadOnly", isFlagBitSet(flagValue, 1)},
new Object[]{2, "Required", isFlagBitSet(flagValue, 2)},
new Object[]{3, "NoExport", isFlagBitSet(flagValue, 3)},
new Object[]{18, "Combo", isFlagBitSet(flagValue, 18)},
new Object[]{19, "Edit", isFlagBitSet(flagValue, 19)},
new Object[]{20, "Sort", isFlagBitSet(flagValue, 20)},
new Object[]{22, "MultiSelect", isFlagBitSet(flagValue, 22)},
new Object[]{23, "DoNotSpellCheck", isFlagBitSet(flagValue, 23)},
new Object[]{27, "CommitOnSelChange", isFlagBitSet(flagValue, 27)}
};
}
/**
* Check the corresponding flag bit if set or not
* @param flagValue the flag integer
* @param bitPosition bit position to check
* @return if set return true else false
*/
private boolean isFlagBitSet(int flagValue, int bitPosition)
{
int binaryFormat = 1 << (bitPosition - 1);
return (flagValue & binaryFormat) == binaryFormat;
}
/**
* @return returns the JPanel instance
*/
public JPanel getPanel()
{
return panel;
}
}
|
PDFBOX-2530: set constant all caps
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1687869 13f79535-47bb-0310-9956-ffa450edef68
|
tools/src/main/java/org/apache/pdfbox/tools/pdfdebugger/flagbitspane/FlagBitsPane.java
|
PDFBOX-2530: set constant all caps
|
|
Java
|
apache-2.0
|
bb0ce4b3567fa60a4c2097e4128ccaa0ef9e0a92
| 0
|
agoncharuk/ignite,samaitra/ignite,StalkXT/ignite,chandresh-pancholi/ignite,amirakhmedov/ignite,vadopolski/ignite,akuznetsov-gridgain/ignite,SharplEr/ignite,rfqu/ignite,DoudTechData/ignite,apacheignite/ignite,louishust/incubator-ignite,f7753/ignite,akuznetsov-gridgain/ignite,murador/ignite,ptupitsyn/ignite,ntikhonov/ignite,ascherbakoff/ignite,arijitt/incubator-ignite,VladimirErshov/ignite,dream-x/ignite,amirakhmedov/ignite,afinka77/ignite,nivanov/ignite,vsisko/incubator-ignite,agoncharuk/ignite,daradurvs/ignite,gargvish/ignite,afinka77/ignite,nizhikov/ignite,xtern/ignite,amirakhmedov/ignite,amirakhmedov/ignite,wmz7year/ignite,WilliamDo/ignite,andrey-kuznetsov/ignite,sylentprayer/ignite,avinogradovgg/ignite,nivanov/ignite,xtern/ignite,kidaa/incubator-ignite,ntikhonov/ignite,thuTom/ignite,amirakhmedov/ignite,kromulan/ignite,ilantukh/ignite,ryanzz/ignite,vadopolski/ignite,ashutakGG/incubator-ignite,vladisav/ignite,mcherkasov/ignite,apacheignite/ignite,NSAmelchev/ignite,leveyj/ignite,vladisav/ignite,shurun19851206/ignite,daradurvs/ignite,voipp/ignite,ilantukh/ignite,sk0x50/ignite,vsuslov/incubator-ignite,WilliamDo/ignite,nizhikov/ignite,apache/ignite,nizhikov/ignite,shroman/ignite,vldpyatkov/ignite,sk0x50/ignite,kromulan/ignite,andrey-kuznetsov/ignite,amirakhmedov/ignite,StalkXT/ignite,vsuslov/incubator-ignite,vldpyatkov/ignite,sylentprayer/ignite,svladykin/ignite,sylentprayer/ignite,tkpanther/ignite,leveyj/ignite,StalkXT/ignite,a1vanov/ignite,adeelmahmood/ignite,vsuslov/incubator-ignite,agura/incubator-ignite,DoudTechData/ignite,wmz7year/ignite,BiryukovVA/ignite,alexzaitzev/ignite,daradurvs/ignite,agoncharuk/ignite,ryanzz/ignite,sylentprayer/ignite,vldpyatkov/ignite,irudyak/ignite,VladimirErshov/ignite,leveyj/ignite,zzcclp/ignite,vsuslov/incubator-ignite,dmagda/incubator-ignite,dlnufox/ignite,tkpanther/ignite,vsuslov/incubator-ignite,ptupitsyn/ignite,rfqu/ignite,shroman/ignite,vladisav/ignite,kromulan/ignite,ntikhonov/ignite,andrey-kuznetsov/ignite,nizhikov/ignite,shrom
an/ignite,SomeFire/ignite,vadopolski/ignite,shurun19851206/ignite,alexzaitzev/ignite,vsisko/incubator-ignite,SomeFire/ignite,SharplEr/ignite,f7753/ignite,kidaa/incubator-ignite,avinogradovgg/ignite,NSAmelchev/ignite,vladisav/ignite,ascherbakoff/ignite,SharplEr/ignite,shroman/ignite,StalkXT/ignite,alexzaitzev/ignite,leveyj/ignite,endian675/ignite,mcherkasov/ignite,vadopolski/ignite,nizhikov/ignite,sk0x50/ignite,andrey-kuznetsov/ignite,voipp/ignite,ryanzz/ignite,ashutakGG/incubator-ignite,pperalta/ignite,chandresh-pancholi/ignite,louishust/incubator-ignite,tkpanther/ignite,svladykin/ignite,kidaa/incubator-ignite,tkpanther/ignite,thuTom/ignite,ntikhonov/ignite,murador/ignite,dmagda/incubator-ignite,shurun19851206/ignite,svladykin/ignite,daradurvs/ignite,gargvish/ignite,ascherbakoff/ignite,amirakhmedov/ignite,zzcclp/ignite,NSAmelchev/ignite,arijitt/incubator-ignite,arijitt/incubator-ignite,voipp/ignite,adeelmahmood/ignite,zzcclp/ignite,nivanov/ignite,WilliamDo/ignite,f7753/ignite,pperalta/ignite,a1vanov/ignite,nivanov/ignite,afinka77/ignite,afinka77/ignite,sylentprayer/ignite,f7753/ignite,murador/ignite,zzcclp/ignite,samaitra/ignite,WilliamDo/ignite,samaitra/ignite,vsisko/incubator-ignite,vadopolski/ignite,SomeFire/ignite,samaitra/ignite,shroman/ignite,adeelmahmood/ignite,nizhikov/ignite,irudyak/ignite,alexzaitzev/ignite,ptupitsyn/ignite,murador/ignite,sk0x50/ignite,daradurvs/ignite,kidaa/incubator-ignite,shroman/ignite,voipp/ignite,vldpyatkov/ignite,SharplEr/ignite,nivanov/ignite,pperalta/ignite,wmz7year/ignite,dlnufox/ignite,a1vanov/ignite,avinogradovgg/ignite,voipp/ignite,SomeFire/ignite,ilantukh/ignite,afinka77/ignite,agura/incubator-ignite,tkpanther/ignite,daradurvs/ignite,gargvish/ignite,alexzaitzev/ignite,arijitt/incubator-ignite,wmz7year/ignite,tkpanther/ignite,gargvish/ignite,psadusumilli/ignite,shurun19851206/ignite,pperalta/ignite,BiryukovVA/ignite,DoudTechData/ignite,StalkXT/ignite,samaitra/ignite,mcherkasov/ignite,StalkXT/ignite,dream-x/ignite,xtern/ignite,
StalkXT/ignite,ptupitsyn/ignite,vsisko/incubator-ignite,StalkXT/ignite,vsisko/incubator-ignite,ntikhonov/ignite,apache/ignite,louishust/incubator-ignite,ryanzz/ignite,SomeFire/ignite,akuznetsov-gridgain/ignite,irudyak/ignite,avinogradovgg/ignite,apache/ignite,thuTom/ignite,dmagda/incubator-ignite,sk0x50/ignite,DoudTechData/ignite,sylentprayer/ignite,thuTom/ignite,nizhikov/ignite,dream-x/ignite,NSAmelchev/ignite,psadusumilli/ignite,kromulan/ignite,apache/ignite,ptupitsyn/ignite,psadusumilli/ignite,apache/ignite,ntikhonov/ignite,endian675/ignite,tkpanther/ignite,ryanzz/ignite,thuTom/ignite,SomeFire/ignite,samaitra/ignite,adeelmahmood/ignite,thuTom/ignite,ashutakGG/incubator-ignite,murador/ignite,VladimirErshov/ignite,dream-x/ignite,VladimirErshov/ignite,mcherkasov/ignite,dmagda/incubator-ignite,apacheignite/ignite,samaitra/ignite,apache/ignite,apache/ignite,chandresh-pancholi/ignite,vladisav/ignite,voipp/ignite,nizhikov/ignite,SomeFire/ignite,thuTom/ignite,daradurvs/ignite,irudyak/ignite,arijitt/incubator-ignite,BiryukovVA/ignite,dmagda/incubator-ignite,WilliamDo/ignite,thuTom/ignite,chandresh-pancholi/ignite,agura/incubator-ignite,andrey-kuznetsov/ignite,daradurvs/ignite,vldpyatkov/ignite,xtern/ignite,SharplEr/ignite,SharplEr/ignite,a1vanov/ignite,voipp/ignite,NSAmelchev/ignite,dlnufox/ignite,WilliamDo/ignite,psadusumilli/ignite,ascherbakoff/ignite,irudyak/ignite,samaitra/ignite,ilantukh/ignite,andrey-kuznetsov/ignite,vsisko/incubator-ignite,ashutakGG/incubator-ignite,dmagda/incubator-ignite,BiryukovVA/ignite,WilliamDo/ignite,shroman/ignite,murador/ignite,ryanzz/ignite,ntikhonov/ignite,mcherkasov/ignite,DoudTechData/ignite,murador/ignite,louishust/incubator-ignite,shurun19851206/ignite,NSAmelchev/ignite,BiryukovVA/ignite,sk0x50/ignite,shroman/ignite,ilantukh/ignite,rfqu/ignite,agura/incubator-ignite,xtern/ignite,kromulan/ignite,leveyj/ignite,gargvish/ignite,VladimirErshov/ignite,BiryukovVA/ignite,SharplEr/ignite,ascherbakoff/ignite,pperalta/ignite,f7753/ignite,dlnufo
x/ignite,irudyak/ignite,agoncharuk/ignite,alexzaitzev/ignite,pperalta/ignite,dmagda/incubator-ignite,vsisko/incubator-ignite,DoudTechData/ignite,vsisko/incubator-ignite,NSAmelchev/ignite,BiryukovVA/ignite,andrey-kuznetsov/ignite,vladisav/ignite,zzcclp/ignite,psadusumilli/ignite,StalkXT/ignite,ilantukh/ignite,apacheignite/ignite,svladykin/ignite,BiryukovVA/ignite,vsuslov/incubator-ignite,apache/ignite,leveyj/ignite,ptupitsyn/ignite,zzcclp/ignite,nizhikov/ignite,a1vanov/ignite,f7753/ignite,zzcclp/ignite,ascherbakoff/ignite,avinogradovgg/ignite,andrey-kuznetsov/ignite,sylentprayer/ignite,wmz7year/ignite,psadusumilli/ignite,ashutakGG/incubator-ignite,gargvish/ignite,daradurvs/ignite,ntikhonov/ignite,alexzaitzev/ignite,DoudTechData/ignite,f7753/ignite,endian675/ignite,tkpanther/ignite,afinka77/ignite,ilantukh/ignite,kromulan/ignite,adeelmahmood/ignite,ptupitsyn/ignite,amirakhmedov/ignite,apacheignite/ignite,kidaa/incubator-ignite,ilantukh/ignite,rfqu/ignite,irudyak/ignite,wmz7year/ignite,samaitra/ignite,alexzaitzev/ignite,kidaa/incubator-ignite,rfqu/ignite,NSAmelchev/ignite,vldpyatkov/ignite,leveyj/ignite,vadopolski/ignite,a1vanov/ignite,rfqu/ignite,chandresh-pancholi/ignite,sk0x50/ignite,avinogradovgg/ignite,vldpyatkov/ignite,daradurvs/ignite,xtern/ignite,amirakhmedov/ignite,avinogradovgg/ignite,agura/incubator-ignite,gargvish/ignite,dream-x/ignite,vladisav/ignite,mcherkasov/ignite,ascherbakoff/ignite,SharplEr/ignite,apacheignite/ignite,SomeFire/ignite,arijitt/incubator-ignite,endian675/ignite,shurun19851206/ignite,akuznetsov-gridgain/ignite,a1vanov/ignite,sylentprayer/ignite,chandresh-pancholi/ignite,dlnufox/ignite,adeelmahmood/ignite,vladisav/ignite,afinka77/ignite,agura/incubator-ignite,SomeFire/ignite,gargvish/ignite,ryanzz/ignite,apache/ignite,ilantukh/ignite,rfqu/ignite,ascherbakoff/ignite,xtern/ignite,andrey-kuznetsov/ignite,chandresh-pancholi/ignite,endian675/ignite,vldpyatkov/ignite,chandresh-pancholi/ignite,dream-x/ignite,endian675/ignite,louishust/incubator-i
gnite,akuznetsov-gridgain/ignite,rfqu/ignite,svladykin/ignite,shroman/ignite,BiryukovVA/ignite,dream-x/ignite,mcherkasov/ignite,shurun19851206/ignite,leveyj/ignite,ptupitsyn/ignite,f7753/ignite,dmagda/incubator-ignite,agoncharuk/ignite,kromulan/ignite,SharplEr/ignite,alexzaitzev/ignite,shurun19851206/ignite,dlnufox/ignite,BiryukovVA/ignite,svladykin/ignite,WilliamDo/ignite,shroman/ignite,vadopolski/ignite,sk0x50/ignite,adeelmahmood/ignite,zzcclp/ignite,VladimirErshov/ignite,ptupitsyn/ignite,nivanov/ignite,pperalta/ignite,psadusumilli/ignite,agura/incubator-ignite,dream-x/ignite,irudyak/ignite,nivanov/ignite,agoncharuk/ignite,apacheignite/ignite,ilantukh/ignite,VladimirErshov/ignite,endian675/ignite,SomeFire/ignite,svladykin/ignite,pperalta/ignite,sk0x50/ignite,endian675/ignite,akuznetsov-gridgain/ignite,kromulan/ignite,VladimirErshov/ignite,irudyak/ignite,voipp/ignite,agoncharuk/ignite,dlnufox/ignite,ascherbakoff/ignite,ptupitsyn/ignite,adeelmahmood/ignite,wmz7year/ignite,samaitra/ignite,mcherkasov/ignite,dlnufox/ignite,wmz7year/ignite,ryanzz/ignite,murador/ignite,agoncharuk/ignite,louishust/incubator-ignite,andrey-kuznetsov/ignite,afinka77/ignite,xtern/ignite,a1vanov/ignite,psadusumilli/ignite,ashutakGG/incubator-ignite,vadopolski/ignite,xtern/ignite,voipp/ignite,nivanov/ignite,DoudTechData/ignite,apacheignite/ignite,chandresh-pancholi/ignite,agura/incubator-ignite,NSAmelchev/ignite
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.examples.datagrid;
import org.apache.ignite.*;
import org.apache.ignite.cache.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.examples.*;
import org.apache.ignite.transactions.*;
import java.io.*;
import static org.apache.ignite.transactions.TransactionConcurrency.*;
import static org.apache.ignite.transactions.TransactionIsolation.*;
/**
 * Demonstrates how to use cache transactions.
 * <p>
 * Remote nodes should always be started with special configuration file which
 * enables P2P class loading: {@code 'ignite.{sh|bat} examples/config/example-ignite.xml'}.
 * <p>
 * Alternatively you can run {@link ExampleNodeStartup} in another JVM which will
 * start node with {@code examples/config/example-ignite.xml} configuration.
 */
public class CacheTransactionExample {
    /** Cache name. */
    private static final String CACHE_NAME = CacheTransactionExample.class.getSimpleName();

    /**
     * Executes example.
     *
     * @param args Command line arguments, none required.
     * @throws IgniteException If example execution failed.
     */
    public static void main(String[] args) throws IgniteException {
        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
            System.out.println();
            System.out.println(">>> Cache transaction example started.");

            // Transactions require a TRANSACTIONAL cache; the default ATOMIC mode
            // would reject txStart() calls made in deposit() below.
            CacheConfiguration<Integer, Account> cfg = new CacheConfiguration<>(CACHE_NAME);

            cfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);

            // Cache is auto-closed (destroyed) at the end of the example.
            try (IgniteCache<Integer, Account> cache = ignite.getOrCreateCache(cfg, new NearCacheConfiguration<Integer, Account>())) {
                // Initialize accounts so that each account's ID matches the cache key it is
                // stored under (fixed: account 2 was previously created with ID 1).
                cache.put(1, new Account(1, 100));
                cache.put(2, new Account(2, 200));

                System.out.println();
                System.out.println(">>> Accounts before deposit: ");
                System.out.println(">>> " + cache.get(1));
                System.out.println(">>> " + cache.get(2));

                // Make transactional deposits.
                deposit(cache, 1, 100);
                deposit(cache, 2, 200);

                System.out.println();
                System.out.println(">>> Accounts after transfer: ");
                System.out.println(">>> " + cache.get(1));
                System.out.println(">>> " + cache.get(2));

                System.out.println(">>> Cache transaction example finished.");
            }
        }
    }

    /**
     * Make deposit into specified account.
     *
     * @param cache Cache holding the accounts.
     * @param acctId Account ID.
     * @param amount Amount to deposit.
     * @throws IgniteException If failed.
     */
    private static void deposit(IgniteCache<Integer, Account> cache, int acctId, double amount) throws IgniteException {
        // PESSIMISTIC + REPEATABLE_READ makes the read-modify-write below atomic:
        // the key is locked on the first cache.get() until commit.
        try (Transaction tx = Ignition.ignite().transactions().txStart(PESSIMISTIC, REPEATABLE_READ)) {
            Account acct = cache.get(acctId);

            assert acct != null;

            // Deposit into account.
            acct.update(amount);

            // Store updated account in cache.
            cache.put(acctId, acct);

            tx.commit();
        }

        System.out.println();
        System.out.println(">>> Transferred amount: $" + amount);
    }

    /**
     * Account. Serializable so it can be stored in the distributed cache.
     */
    private static class Account implements Serializable {
        /** Account ID. */
        private int id;

        /** Account balance. */
        private double balance;

        /**
         * @param id Account ID.
         * @param balance Balance.
         */
        Account(int id, double balance) {
            this.id = id;
            this.balance = balance;
        }

        /**
         * Change balance by specified amount.
         *
         * @param amount Amount to add to balance (may be negative).
         */
        void update(double amount) {
            balance += amount;
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return "Account [id=" + id + ", balance=$" + balance + ']';
        }
    }
}
|
examples/src/main/java/org/apache/ignite/examples/datagrid/CacheTransactionExample.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.examples.datagrid;
import org.apache.ignite.*;
import org.apache.ignite.cache.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.examples.*;
import org.apache.ignite.transactions.*;
import java.io.*;
import static org.apache.ignite.transactions.TransactionConcurrency.*;
import static org.apache.ignite.transactions.TransactionIsolation.*;
/**
 * Demonstrates how to use cache transactions.
 * <p>
 * Remote nodes should always be started with special configuration file which
 * enables P2P class loading: {@code 'ignite.{sh|bat} examples/config/example-ignite.xml'}.
 * <p>
 * Alternatively you can run {@link ExampleNodeStartup} in another JVM which will
 * start node with {@code examples/config/example-ignite.xml} configuration.
 */
public class CacheTransactionExample {
    /** Cache name. */
    private static final String CACHE_NAME = CacheTransactionExample.class.getSimpleName();

    /**
     * Executes example.
     *
     * @param args Command line arguments, none required.
     * @throws IgniteException If example execution failed.
     */
    public static void main(String[] args) throws IgniteException {
        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
            System.out.println();
            System.out.println(">>> Cache transaction example started.");

            // Transactions require a TRANSACTIONAL cache; the default ATOMIC mode
            // would reject txStart() calls made in deposit() below.
            CacheConfiguration<Integer, Account> cfg = new CacheConfiguration<>(CACHE_NAME);

            cfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);

            // Explicit type arguments: diamond inference for this argument of the generic
            // getOrCreateCache() fails on the Java 7 compiler targeted by the project.
            try (IgniteCache<Integer, Account> cache = ignite.getOrCreateCache(cfg, new NearCacheConfiguration<Integer, Account>())) {
                // Initialize accounts so that each account's ID matches the cache key it is
                // stored under (fixed: account 2 was previously created with ID 1).
                cache.put(1, new Account(1, 100));
                cache.put(2, new Account(2, 200));

                System.out.println();
                System.out.println(">>> Accounts before deposit: ");
                System.out.println(">>> " + cache.get(1));
                System.out.println(">>> " + cache.get(2));

                // Make transactional deposits.
                deposit(cache, 1, 100);
                deposit(cache, 2, 200);

                System.out.println();
                System.out.println(">>> Accounts after transfer: ");
                System.out.println(">>> " + cache.get(1));
                System.out.println(">>> " + cache.get(2));

                System.out.println(">>> Cache transaction example finished.");
            }
        }
    }

    /**
     * Make deposit into specified account.
     *
     * @param cache Cache holding the accounts.
     * @param acctId Account ID.
     * @param amount Amount to deposit.
     * @throws IgniteException If failed.
     */
    private static void deposit(IgniteCache<Integer, Account> cache, int acctId, double amount) throws IgniteException {
        // PESSIMISTIC + REPEATABLE_READ makes the read-modify-write below atomic:
        // the key is locked on the first cache.get() until commit.
        try (Transaction tx = Ignition.ignite().transactions().txStart(PESSIMISTIC, REPEATABLE_READ)) {
            Account acct = cache.get(acctId);

            assert acct != null;

            // Deposit into account.
            acct.update(amount);

            // Store updated account in cache.
            cache.put(acctId, acct);

            tx.commit();
        }

        System.out.println();
        System.out.println(">>> Transferred amount: $" + amount);
    }

    /**
     * Account. Serializable so it can be stored in the distributed cache.
     */
    private static class Account implements Serializable {
        /** Account ID. */
        private int id;

        /** Account balance. */
        private double balance;

        /**
         * @param id Account ID.
         * @param balance Balance.
         */
        Account(int id, double balance) {
            this.id = id;
            this.balance = balance;
        }

        /**
         * Change balance by specified amount.
         *
         * @param amount Amount to add to balance (may be negative).
         */
        void update(double amount) {
            balance += amount;
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return "Account [id=" + id + ", balance=$" + balance + ']';
        }
    }
}
|
# ignite-sprint-3 fixed compilation
|
examples/src/main/java/org/apache/ignite/examples/datagrid/CacheTransactionExample.java
|
# ignite-sprint-3 fixed compilation
|
|
Java
|
apache-2.0
|
c5908974c6efc7dafc8cb7e1fc2bbde53fa1622f
| 0
|
material-components/material-components-android
|
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.material.internal;
import com.google.android.material.R;
import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
import android.content.Context;
import android.content.res.ColorStateList;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.StateListDrawable;
import androidx.annotation.Dimension;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RestrictTo;
import androidx.core.content.res.ResourcesCompat;
import androidx.core.graphics.drawable.DrawableCompat;
import androidx.core.view.AccessibilityDelegateCompat;
import androidx.core.view.ViewCompat;
import androidx.core.view.accessibility.AccessibilityEventCompat;
import androidx.core.view.accessibility.AccessibilityNodeInfoCompat;
import androidx.core.widget.TextViewCompat;
import androidx.appcompat.view.menu.MenuItemImpl;
import androidx.appcompat.view.menu.MenuView;
import androidx.appcompat.widget.TooltipCompat;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewStub;
import android.widget.CheckedTextView;
import android.widget.FrameLayout;
/** @hide */
@RestrictTo(LIBRARY_GROUP)
public class NavigationMenuItemView extends ForegroundLinearLayout implements MenuView.ItemView {
  /** Extra drawable state merged in while the backing menu item is checkable and checked. */
  private static final int[] CHECKED_STATE_SET = {android.R.attr.state_checked};

  /** Icon bounds (width and height) in pixels; applied in {@link #setIcon(Drawable)}. */
  private int iconSize;

  /** When true, items without an icon get a transparent placeholder so labels stay aligned. */
  private boolean needsEmptyIcon;

  boolean checkable;

  /** Label view; duplicates this view's state so checked/pressed states render on the text. */
  private final CheckedTextView textView;

  /** Lazily inflated container for the item's custom action view; null until first needed. */
  private FrameLayout actionArea;

  /** Menu item this view currently renders; set in {@link #initialize(MenuItemImpl, int)}. */
  private MenuItemImpl itemData;

  private ColorStateList iconTintList;

  private boolean hasIconTintList;

  /** Cached transparent placeholder drawable, created lazily when {@link #needsEmptyIcon} is set. */
  private Drawable emptyDrawable;

  // Reports the current checkable state to accessibility services on behalf of the text view.
  private final AccessibilityDelegateCompat accessibilityDelegate =
      new AccessibilityDelegateCompat() {
        @Override
        public void onInitializeAccessibilityNodeInfo(
            View host, @NonNull AccessibilityNodeInfoCompat info) {
          super.onInitializeAccessibilityNodeInfo(host, info);
          info.setCheckable(checkable);
        }
      };

  public NavigationMenuItemView(@NonNull Context context) {
    this(context, null);
  }

  public NavigationMenuItemView(@NonNull Context context, @Nullable AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public NavigationMenuItemView(
      @NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    setOrientation(HORIZONTAL);
    // Inflate the row layout directly into this LinearLayout.
    LayoutInflater.from(context).inflate(R.layout.design_navigation_menu_item, this, true);
    setIconSize(context.getResources().getDimensionPixelSize(R.dimen.design_navigation_icon_size));
    textView = findViewById(R.id.design_menu_item_text);
    textView.setDuplicateParentStateEnabled(true);
    ViewCompat.setAccessibilityDelegate(textView, accessibilityDelegate);
  }

  /** Binds this view to {@code itemData}, copying id, visibility, state, text, icon and action view. */
  @Override
  public void initialize(@NonNull MenuItemImpl itemData, int menuType) {
    this.itemData = itemData;

    // Mirror the menu item's id onto this view so the row can be located by id.
    setId(itemData.getItemId());

    setVisibility(itemData.isVisible() ? VISIBLE : GONE);

    if (getBackground() == null) {
      ViewCompat.setBackground(this, createDefaultBackground());
    }

    setCheckable(itemData.isCheckable());
    setChecked(itemData.isChecked());
    setEnabled(itemData.isEnabled());
    setTitle(itemData.getTitle());
    setIcon(itemData.getIcon());
    setActionView(itemData.getActionView());
    setContentDescription(itemData.getContentDescription());
    TooltipCompat.setTooltipText(this, itemData.getTooltipText());
    adjustAppearance();
  }

  // True when the item is action-view-only (no title, no icon): the action view fills the row.
  private boolean shouldExpandActionArea() {
    return itemData.getTitle() == null
        && itemData.getIcon() == null
        && itemData.getActionView() != null;
  }

  // Toggles between text-row layout and full-width action-view layout.
  private void adjustAppearance() {
    if (shouldExpandActionArea()) {
      // Expand the actionView area
      textView.setVisibility(View.GONE);
      if (actionArea != null) {
        LayoutParams params = (LayoutParams) actionArea.getLayoutParams();
        params.width = LayoutParams.MATCH_PARENT;
        actionArea.setLayoutParams(params);
      }
    } else {
      textView.setVisibility(View.VISIBLE);
      if (actionArea != null) {
        LayoutParams params = (LayoutParams) actionArea.getLayoutParams();
        params.width = LayoutParams.WRAP_CONTENT;
        actionArea.setLayoutParams(params);
      }
    }
  }

  /** Clears bound content (action views, compound drawables) so the view can be reused. */
  public void recycle() {
    if (actionArea != null) {
      actionArea.removeAllViews();
    }
    textView.setCompoundDrawables(null, null, null, null);
  }

  private void setActionView(@Nullable View actionView) {
    if (actionView != null) {
      if (actionArea == null) {
        // Inflate the action-area stub on first use only.
        actionArea =
            (FrameLayout)
                ((ViewStub) findViewById(R.id.design_menu_item_action_area_stub)).inflate();
      }
      actionArea.removeAllViews();
      actionArea.addView(actionView);
    }
  }

  /**
   * Builds the default background: theme colorControlHighlight when checked, transparent
   * otherwise. Returns null when the theme attribute cannot be resolved.
   */
  @Nullable
  private StateListDrawable createDefaultBackground() {
    TypedValue value = new TypedValue();
    if (getContext()
        .getTheme()
        .resolveAttribute(androidx.appcompat.R.attr.colorControlHighlight, value, true)) {
      StateListDrawable drawable = new StateListDrawable();
      drawable.addState(CHECKED_STATE_SET, new ColorDrawable(value.data));
      drawable.addState(EMPTY_STATE_SET, new ColorDrawable(Color.TRANSPARENT));
      return drawable;
    }
    return null;
  }

  @Override
  public MenuItemImpl getItemData() {
    return itemData;
  }

  @Override
  public void setTitle(CharSequence title) {
    textView.setText(title);
  }

  @Override
  public void setCheckable(boolean checkable) {
    refreshDrawableState();
    if (this.checkable != checkable) {
      this.checkable = checkable;
      // Tell accessibility services the node's checkable property changed.
      accessibilityDelegate.sendAccessibilityEvent(
          textView, AccessibilityEventCompat.TYPE_WINDOW_CONTENT_CHANGED);
    }
  }

  @Override
  public void setChecked(boolean checked) {
    refreshDrawableState();
    textView.setChecked(checked);
  }

  @Override
  public void setShortcut(boolean showShortcut, char shortcutKey) {}

  @Override
  public void setIcon(@Nullable Drawable icon) {
    if (icon != null) {
      if (hasIconTintList) {
        // Wrap and mutate a copy before tinting so shared drawable state is not affected.
        Drawable.ConstantState state = icon.getConstantState();
        icon = DrawableCompat.wrap(state == null ? icon : state.newDrawable()).mutate();
        DrawableCompat.setTintList(icon, iconTintList);
      }
      icon.setBounds(0, 0, iconSize, iconSize);
    } else if (needsEmptyIcon) {
      if (emptyDrawable == null) {
        // Lazily create the transparent placeholder that keeps labels aligned with iconed rows.
        emptyDrawable =
            ResourcesCompat.getDrawable(
                getResources(), R.drawable.navigation_empty_icon, getContext().getTheme());
        if (emptyDrawable != null) {
          emptyDrawable.setBounds(0, 0, iconSize, iconSize);
        }
      }
      icon = emptyDrawable;
    }
    TextViewCompat.setCompoundDrawablesRelative(textView, icon, null, null, null);
  }

  /** Sets icon bounds size in pixels; takes effect on the next {@link #setIcon(Drawable)} call. */
  public void setIconSize(@Dimension int iconSize) {
    this.iconSize = iconSize;
  }

  @Override
  public boolean prefersCondensedTitle() {
    return false;
  }

  @Override
  public boolean showsIcon() {
    return true;
  }

  @Override
  protected int[] onCreateDrawableState(int extraSpace) {
    // Reserve one extra slot so the checked state can be merged in when applicable.
    final int[] drawableState = super.onCreateDrawableState(extraSpace + 1);
    if (itemData != null && itemData.isCheckable() && itemData.isChecked()) {
      mergeDrawableStates(drawableState, CHECKED_STATE_SET);
    }
    return drawableState;
  }

  void setIconTintList(ColorStateList tintList) {
    iconTintList = tintList;
    hasIconTintList = iconTintList != null;
    if (itemData != null) {
      // Update the icon so that the tint takes effect
      setIcon(itemData.getIcon());
    }
  }

  public void setTextAppearance(int textAppearance) {
    TextViewCompat.setTextAppearance(textView, textAppearance);
  }

  public void setTextColor(ColorStateList colors) {
    textView.setTextColor(colors);
  }

  public void setNeedsEmptyIcon(boolean needsEmptyIcon) {
    this.needsEmptyIcon = needsEmptyIcon;
  }

  public void setHorizontalPadding(int padding) {
    setPadding(padding, 0, padding, 0);
  }

  public void setIconPadding(int padding) {
    textView.setCompoundDrawablePadding(padding);
  }

  public void setMaxLines(int maxLines) {
    textView.setMaxLines(maxLines);
  }
}
|
lib/java/com/google/android/material/internal/NavigationMenuItemView.java
|
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.material.internal;
import com.google.android.material.R;
import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
import android.content.Context;
import android.content.res.ColorStateList;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.StateListDrawable;
import androidx.annotation.Dimension;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RestrictTo;
import androidx.core.content.res.ResourcesCompat;
import androidx.core.graphics.drawable.DrawableCompat;
import androidx.core.view.AccessibilityDelegateCompat;
import androidx.core.view.ViewCompat;
import androidx.core.view.accessibility.AccessibilityEventCompat;
import androidx.core.view.accessibility.AccessibilityNodeInfoCompat;
import androidx.core.widget.TextViewCompat;
import androidx.appcompat.view.menu.MenuItemImpl;
import androidx.appcompat.view.menu.MenuView;
import androidx.appcompat.widget.TooltipCompat;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewStub;
import android.widget.CheckedTextView;
import android.widget.FrameLayout;
/** @hide */
@RestrictTo(LIBRARY_GROUP)
public class NavigationMenuItemView extends ForegroundLinearLayout implements MenuView.ItemView {
  /** Extra drawable state merged in while the backing menu item is checkable and checked. */
  private static final int[] CHECKED_STATE_SET = {android.R.attr.state_checked};

  /** Icon bounds (width and height) in pixels; applied in {@link #setIcon(Drawable)}. */
  private int iconSize;

  /** When true, items without an icon get a transparent placeholder so labels stay aligned. */
  private boolean needsEmptyIcon;

  boolean checkable;

  /** Label view; duplicates this view's state so checked/pressed states render on the text. */
  private final CheckedTextView textView;

  /** Lazily inflated container for the item's custom action view; null until first needed. */
  private FrameLayout actionArea;

  /** Menu item this view currently renders; set in {@link #initialize(MenuItemImpl, int)}. */
  private MenuItemImpl itemData;

  private ColorStateList iconTintList;

  private boolean hasIconTintList;

  /** Cached transparent placeholder drawable, created lazily when {@link #needsEmptyIcon} is set. */
  private Drawable emptyDrawable;

  // Reports the current checkable state to accessibility services on behalf of the text view.
  private final AccessibilityDelegateCompat accessibilityDelegate =
      new AccessibilityDelegateCompat() {
        @Override
        public void onInitializeAccessibilityNodeInfo(
            View host, @NonNull AccessibilityNodeInfoCompat info) {
          super.onInitializeAccessibilityNodeInfo(host, info);
          info.setCheckable(checkable);
        }
      };

  public NavigationMenuItemView(@NonNull Context context) {
    this(context, null);
  }

  public NavigationMenuItemView(@NonNull Context context, @Nullable AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public NavigationMenuItemView(
      @NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    setOrientation(HORIZONTAL);
    // Inflate the row layout directly into this LinearLayout.
    LayoutInflater.from(context).inflate(R.layout.design_navigation_menu_item, this, true);
    setIconSize(context.getResources().getDimensionPixelSize(R.dimen.design_navigation_icon_size));
    textView = findViewById(R.id.design_menu_item_text);
    textView.setDuplicateParentStateEnabled(true);
    ViewCompat.setAccessibilityDelegate(textView, accessibilityDelegate);
  }

  /**
   * Binds this view to {@code itemData}, copying visibility, state, text, icon and action view.
   *
   * NOTE(review): the menu item's id is not mirrored onto this view via setId(), so the row
   * cannot be located by item id — consider adding {@code setId(itemData.getItemId())}.
   */
  @Override
  public void initialize(@NonNull MenuItemImpl itemData, int menuType) {
    this.itemData = itemData;

    setVisibility(itemData.isVisible() ? VISIBLE : GONE);

    if (getBackground() == null) {
      ViewCompat.setBackground(this, createDefaultBackground());
    }

    setCheckable(itemData.isCheckable());
    setChecked(itemData.isChecked());
    setEnabled(itemData.isEnabled());
    setTitle(itemData.getTitle());
    setIcon(itemData.getIcon());
    setActionView(itemData.getActionView());
    setContentDescription(itemData.getContentDescription());
    TooltipCompat.setTooltipText(this, itemData.getTooltipText());
    adjustAppearance();
  }

  // True when the item is action-view-only (no title, no icon): the action view fills the row.
  private boolean shouldExpandActionArea() {
    return itemData.getTitle() == null
        && itemData.getIcon() == null
        && itemData.getActionView() != null;
  }

  // Toggles between text-row layout and full-width action-view layout.
  private void adjustAppearance() {
    if (shouldExpandActionArea()) {
      // Expand the actionView area
      textView.setVisibility(View.GONE);
      if (actionArea != null) {
        LayoutParams params = (LayoutParams) actionArea.getLayoutParams();
        params.width = LayoutParams.MATCH_PARENT;
        actionArea.setLayoutParams(params);
      }
    } else {
      textView.setVisibility(View.VISIBLE);
      if (actionArea != null) {
        LayoutParams params = (LayoutParams) actionArea.getLayoutParams();
        params.width = LayoutParams.WRAP_CONTENT;
        actionArea.setLayoutParams(params);
      }
    }
  }

  /** Clears bound content (action views, compound drawables) so the view can be reused. */
  public void recycle() {
    if (actionArea != null) {
      actionArea.removeAllViews();
    }
    textView.setCompoundDrawables(null, null, null, null);
  }

  private void setActionView(@Nullable View actionView) {
    if (actionView != null) {
      if (actionArea == null) {
        // Inflate the action-area stub on first use only.
        actionArea =
            (FrameLayout)
                ((ViewStub) findViewById(R.id.design_menu_item_action_area_stub)).inflate();
      }
      actionArea.removeAllViews();
      actionArea.addView(actionView);
    }
  }

  /**
   * Builds the default background: theme colorControlHighlight when checked, transparent
   * otherwise. Returns null when the theme attribute cannot be resolved.
   */
  @Nullable
  private StateListDrawable createDefaultBackground() {
    TypedValue value = new TypedValue();
    if (getContext()
        .getTheme()
        .resolveAttribute(androidx.appcompat.R.attr.colorControlHighlight, value, true)) {
      StateListDrawable drawable = new StateListDrawable();
      drawable.addState(CHECKED_STATE_SET, new ColorDrawable(value.data));
      drawable.addState(EMPTY_STATE_SET, new ColorDrawable(Color.TRANSPARENT));
      return drawable;
    }
    return null;
  }

  @Override
  public MenuItemImpl getItemData() {
    return itemData;
  }

  @Override
  public void setTitle(CharSequence title) {
    textView.setText(title);
  }

  @Override
  public void setCheckable(boolean checkable) {
    refreshDrawableState();
    if (this.checkable != checkable) {
      this.checkable = checkable;
      // Tell accessibility services the node's checkable property changed.
      accessibilityDelegate.sendAccessibilityEvent(
          textView, AccessibilityEventCompat.TYPE_WINDOW_CONTENT_CHANGED);
    }
  }

  @Override
  public void setChecked(boolean checked) {
    refreshDrawableState();
    textView.setChecked(checked);
  }

  @Override
  public void setShortcut(boolean showShortcut, char shortcutKey) {}

  @Override
  public void setIcon(@Nullable Drawable icon) {
    if (icon != null) {
      if (hasIconTintList) {
        // Wrap and mutate a copy before tinting so shared drawable state is not affected.
        Drawable.ConstantState state = icon.getConstantState();
        icon = DrawableCompat.wrap(state == null ? icon : state.newDrawable()).mutate();
        DrawableCompat.setTintList(icon, iconTintList);
      }
      icon.setBounds(0, 0, iconSize, iconSize);
    } else if (needsEmptyIcon) {
      if (emptyDrawable == null) {
        // Lazily create the transparent placeholder that keeps labels aligned with iconed rows.
        emptyDrawable =
            ResourcesCompat.getDrawable(
                getResources(), R.drawable.navigation_empty_icon, getContext().getTheme());
        if (emptyDrawable != null) {
          emptyDrawable.setBounds(0, 0, iconSize, iconSize);
        }
      }
      icon = emptyDrawable;
    }
    TextViewCompat.setCompoundDrawablesRelative(textView, icon, null, null, null);
  }

  /** Sets icon bounds size in pixels; takes effect on the next {@link #setIcon(Drawable)} call. */
  public void setIconSize(@Dimension int iconSize) {
    this.iconSize = iconSize;
  }

  @Override
  public boolean prefersCondensedTitle() {
    return false;
  }

  @Override
  public boolean showsIcon() {
    return true;
  }

  @Override
  protected int[] onCreateDrawableState(int extraSpace) {
    // Reserve one extra slot so the checked state can be merged in when applicable.
    final int[] drawableState = super.onCreateDrawableState(extraSpace + 1);
    if (itemData != null && itemData.isCheckable() && itemData.isChecked()) {
      mergeDrawableStates(drawableState, CHECKED_STATE_SET);
    }
    return drawableState;
  }

  void setIconTintList(ColorStateList tintList) {
    iconTintList = tintList;
    hasIconTintList = iconTintList != null;
    if (itemData != null) {
      // Update the icon so that the tint takes effect
      setIcon(itemData.getIcon());
    }
  }

  public void setTextAppearance(int textAppearance) {
    TextViewCompat.setTextAppearance(textView, textAppearance);
  }

  public void setTextColor(ColorStateList colors) {
    textView.setTextColor(colors);
  }

  public void setNeedsEmptyIcon(boolean needsEmptyIcon) {
    this.needsEmptyIcon = needsEmptyIcon;
  }

  public void setHorizontalPadding(int padding) {
    setPadding(padding, 0, padding, 0);
  }

  public void setIconPadding(int padding) {
    textView.setCompoundDrawablePadding(padding);
  }

  public void setMaxLines(int maxLines) {
    textView.setMaxLines(maxLines);
  }
}
|
Pass the item ID through to the item view which is created by the RecyclerView.
PiperOrigin-RevId: 296479321
|
lib/java/com/google/android/material/internal/NavigationMenuItemView.java
|
Pass the item ID through to the item view which is created by the RecyclerView.
|
|
Java
|
apache-2.0
|
eb1fe1c8c8c274c7b6cf92c1a33b5a8a509e076f
| 0
|
madanadit/alluxio,calvinjia/tachyon,calvinjia/tachyon,EvilMcJerkface/alluxio,aaudiber/alluxio,Reidddddd/alluxio,calvinjia/tachyon,maobaolong/alluxio,madanadit/alluxio,wwjiang007/alluxio,wwjiang007/alluxio,Alluxio/alluxio,bf8086/alluxio,aaudiber/alluxio,Reidddddd/alluxio,bf8086/alluxio,calvinjia/tachyon,wwjiang007/alluxio,Alluxio/alluxio,calvinjia/tachyon,Reidddddd/alluxio,bf8086/alluxio,maobaolong/alluxio,Alluxio/alluxio,aaudiber/alluxio,bf8086/alluxio,madanadit/alluxio,bf8086/alluxio,aaudiber/alluxio,maobaolong/alluxio,wwjiang007/alluxio,madanadit/alluxio,maobaolong/alluxio,aaudiber/alluxio,maobaolong/alluxio,bf8086/alluxio,maobaolong/alluxio,wwjiang007/alluxio,Alluxio/alluxio,Alluxio/alluxio,Reidddddd/alluxio,Alluxio/alluxio,maobaolong/alluxio,madanadit/alluxio,Reidddddd/alluxio,madanadit/alluxio,calvinjia/tachyon,bf8086/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,Reidddddd/alluxio,aaudiber/alluxio,calvinjia/tachyon,wwjiang007/alluxio,Reidddddd/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,Alluxio/alluxio,madanadit/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,maobaolong/alluxio,madanadit/alluxio,Alluxio/alluxio,wwjiang007/alluxio,aaudiber/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,bf8086/alluxio,Alluxio/alluxio
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master;
import alluxio.AlluxioTestDirectory;
import alluxio.Configuration;
import alluxio.ConfigurationTestUtils;
import alluxio.PropertyKey;
import alluxio.cli.Format;
import alluxio.client.file.FileSystem;
import alluxio.client.file.FileSystemContext;
import alluxio.client.util.ClientTestUtils;
import alluxio.proxy.ProxyProcess;
import alluxio.security.GroupMappingServiceTestUtils;
import alluxio.security.LoginUserTestUtils;
import alluxio.underfs.UnderFileSystem;
import alluxio.util.UnderFileSystemUtils;
import alluxio.util.io.FileUtils;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.worker.WorkerProcess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Random;
import javax.annotation.concurrent.NotThreadSafe;
/**
* Local Alluxio cluster.
*/
@NotThreadSafe
public abstract class AbstractLocalAlluxioCluster {
private static final Logger LOG = LoggerFactory.getLogger(AbstractLocalAlluxioCluster.class);
private static final Random RANDOM_GENERATOR = new Random();
protected ProxyProcess mProxyProcess;
protected Thread mProxyThread;
protected List<WorkerProcess> mWorkers;
protected List<Thread> mWorkerThreads;
protected String mWorkDirectory;
protected String mHostname;
private int mNumWorkers;
/**
* @param numWorkers the number of workers to run
*/
AbstractLocalAlluxioCluster(int numWorkers) {
mProxyProcess = ProxyProcess.Factory.create();
mNumWorkers = numWorkers;
mWorkerThreads = new ArrayList<>();
}
  /**
   * Starts both master and a worker using the configurations in test conf respectively.
   *
   * <p>Ordering matters: masters come up first, then the shared file system context is reset to
   * pick up the master RPC port, and only afterwards are workers and the proxy started.
   */
  public void start() throws Exception {
    // Disable HDFS client caching to avoid file system close() affecting other clients
    System.setProperty("fs.hdfs.impl.disable.cache", "true");

    // NOTE(review): presumably clears client pools left over from a prior test run -- confirm
    // against the implementation elsewhere in this class.
    resetClientPools();

    setupTest();
    startMasters();
    // Reset the file system context to make sure the correct master RPC port is used.
    FileSystemContext.get().reset(Configuration.global());
    startWorkers();
    startProxy();

    // Reset contexts so that they pick up the updated configuration.
    reset();
  }
/**
* Configures and starts the master(s).
*/
protected abstract void startMasters() throws Exception;
/**
* Restarts the master(s).
*/
public void restartMasters() throws Exception {
stopMasters();
startMasters();
}
/**
* Configures and starts the proxy.
*/
private void startProxy() throws Exception {
mProxyProcess = ProxyProcess.Factory.create();
Runnable runProxy = () -> {
try {
mProxyProcess.start();
} catch (InterruptedException e) {
// this is expected
} catch (Exception e) {
// Log the exception as the RuntimeException will be caught and handled silently by JUnit
LOG.error("Start proxy error", e);
throw new RuntimeException(e + " \n Start Proxy Error \n" + e.getMessage(), e);
}
};
mProxyThread = new Thread(runProxy);
mProxyThread.setName("ProxyThread-" + System.identityHashCode(mProxyThread));
mProxyThread.start();
TestUtils.waitForReady(mProxyProcess);
}
/**
* Configures and starts the worker(s).
*/
public void startWorkers() throws Exception {
mWorkers = new ArrayList<>();
for (int i = 0; i < mNumWorkers; i++) {
mWorkers.add(WorkerProcess.Factory.create());
}
for (final WorkerProcess worker : mWorkers) {
Runnable runWorker = () -> {
try {
worker.start();
} catch (InterruptedException e) {
// this is expected
} catch (Exception e) {
// Log the exception as the RuntimeException will be caught and handled silently by
// JUnit
LOG.error("Start worker error", e);
throw new RuntimeException(e + " \n Start Worker Error \n" + e.getMessage(), e);
}
};
Thread thread = new Thread(runWorker);
thread.setName("WorkerThread-" + System.identityHashCode(thread));
mWorkerThreads.add(thread);
thread.start();
}
for (WorkerProcess worker : mWorkers) {
TestUtils.waitForReady(worker);
}
}
/**
* Sets up corresponding directories for tests.
*/
protected void setupTest() throws IOException {
UnderFileSystem ufs = UnderFileSystem.Factory.createForRoot();
String underfsAddress = Configuration.get(PropertyKey.MASTER_MOUNT_TABLE_ROOT_UFS);
// Deletes the ufs dir for this test from to avoid permission problems
UnderFileSystemUtils.deleteDirIfExists(ufs, underfsAddress);
// Creates ufs dir. This must be called before starting UFS with UnderFileSystemCluster.get().
UnderFileSystemUtils.mkdirIfNotExists(ufs, underfsAddress);
// Creates storage dirs for worker
int numLevel = Configuration.getInt(PropertyKey.WORKER_TIERED_STORE_LEVELS);
for (int level = 0; level < numLevel; level++) {
PropertyKey tierLevelDirPath =
PropertyKey.Template.WORKER_TIERED_STORE_LEVEL_DIRS_PATH.format(level);
String[] dirPaths = Configuration.get(tierLevelDirPath).split(",");
for (String dirPath : dirPaths) {
FileUtils.createDir(dirPath);
}
}
// Formats the journal
Format.format(Format.Mode.MASTER);
}
/**
* Stops both the alluxio and underfs service threads.
*/
public void stop() throws Exception {
stopFS();
reset();
LoginUserTestUtils.resetLoginUser();
ConfigurationTestUtils.resetConfiguration();
}
/**
* Stops the alluxio filesystem's service thread only.
*/
public void stopFS() throws Exception {
LOG.info("stop Alluxio filesystem");
stopProxy();
stopWorkers();
stopMasters();
}
/**
* Stops the masters, formats them, and then restarts them. This is useful if a fresh state is
* desired, for example when restoring from a backup.
*/
public void formatAndRestartMasters() throws Exception {
stopMasters();
Format.format(Format.Mode.MASTER);
startMasters();
}
/**
* Stops the masters.
*/
protected abstract void stopMasters() throws Exception;
/**
* Stops the proxy.
*/
protected void stopProxy() throws Exception {
mProxyProcess.stop();
if (mProxyThread != null) {
while (mProxyThread.isAlive()) {
LOG.info("Stopping thread {}.", mProxyThread.getName());
mProxyThread.interrupt();
mProxyThread.join(1000);
}
mProxyThread = null;
}
}
/**
* Stops the workers.
*/
public void stopWorkers() throws Exception {
for (WorkerProcess worker : mWorkers) {
worker.stop();
}
for (Thread thread : mWorkerThreads) {
while (thread.isAlive()) {
LOG.info("Stopping thread {}.", thread.getName());
thread.interrupt();
thread.join(1000);
}
}
mWorkerThreads.clear();
}
/**
* Creates a default {@link Configuration} for testing.
*/
public void initConfiguration() throws IOException {
setAlluxioWorkDirectory();
setHostname();
for (Entry<PropertyKey, String> entry : ConfigurationTestUtils
.testConfigurationDefaults(mHostname, mWorkDirectory).entrySet()) {
Configuration.set(entry.getKey(), entry.getValue());
}
Configuration.set(PropertyKey.TEST_MODE, true);
Configuration.set(PropertyKey.MASTER_RPC_PORT, 0);
Configuration.set(PropertyKey.MASTER_WEB_PORT, 0);
Configuration.set(PropertyKey.PROXY_WEB_PORT, 0);
Configuration.set(PropertyKey.WORKER_RPC_PORT, 0);
Configuration.set(PropertyKey.WORKER_DATA_PORT, 0);
Configuration.set(PropertyKey.WORKER_WEB_PORT, 0);
}
/**
* Returns a {@link FileSystem} client.
*
* @return a {@link FileSystem} client
*/
public abstract FileSystem getClient() throws IOException;
/**
* @param context the FileSystemContext to use
* @return a {@link FileSystem} client, using a specific context
*/
public abstract FileSystem getClient(FileSystemContext context) throws IOException;
/**
* @return the local Alluxio master
*/
protected abstract LocalAlluxioMaster getLocalAlluxioMaster();
/**
* Gets the proxy process.
*
* @return the proxy
*/
public ProxyProcess getProxyProcess() {
return mProxyProcess;
}
/**
* Resets the cluster to original state.
*/
protected void reset() {
ClientTestUtils.resetClient();
GroupMappingServiceTestUtils.resetCache();
}
/**
* Resets the client pools to the original state.
*/
protected void resetClientPools() throws IOException {
Configuration.set(PropertyKey.USER_METRICS_COLLECTION_ENABLED, false);
FileSystemContext.clearCache();
FileSystemContext.get().reset(Configuration.global());
}
/**
* Sets hostname.
*/
protected void setHostname() {
mHostname = NetworkAddressUtils.getLocalHostName(100);
}
/**
* Sets Alluxio work directory.
*/
protected void setAlluxioWorkDirectory() {
mWorkDirectory =
AlluxioTestDirectory.createTemporaryDirectory("test-cluster").getAbsolutePath();
}
}
|
minicluster/src/main/java/alluxio/master/AbstractLocalAlluxioCluster.java
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master;
import alluxio.AlluxioTestDirectory;
import alluxio.Configuration;
import alluxio.ConfigurationTestUtils;
import alluxio.PropertyKey;
import alluxio.cli.Format;
import alluxio.client.file.FileSystem;
import alluxio.client.file.FileSystemContext;
import alluxio.client.util.ClientTestUtils;
import alluxio.proxy.ProxyProcess;
import alluxio.security.GroupMappingServiceTestUtils;
import alluxio.security.LoginUserTestUtils;
import alluxio.underfs.UnderFileSystem;
import alluxio.util.UnderFileSystemUtils;
import alluxio.util.io.FileUtils;
import alluxio.util.network.NetworkAddressUtils;
import alluxio.worker.WorkerProcess;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Random;
import javax.annotation.concurrent.NotThreadSafe;
/**
 * Local Alluxio cluster for testing. Runs master(s), worker(s), and a REST proxy
 * in-process; subclasses supply the concrete master implementation.
 */
@NotThreadSafe
public abstract class AbstractLocalAlluxioCluster {
  private static final Logger LOG = LoggerFactory.getLogger(AbstractLocalAlluxioCluster.class);

  /** The proxy process serving the REST API; recreated by {@link #startProxy()}. */
  protected ProxyProcess mProxyProcess;
  /** Thread running the proxy; {@code null} when the proxy is stopped. */
  protected Thread mProxyThread;

  /** Worker processes of this cluster; populated by {@link #startWorkers()}. */
  protected List<WorkerProcess> mWorkers;
  /** Threads running the workers, parallel to {@link #mWorkers}. */
  protected List<Thread> mWorkerThreads;

  /** Scratch directory used as the cluster work directory. */
  protected String mWorkDirectory;
  /** Hostname used for all cluster services. */
  protected String mHostname;

  /** Number of workers to run; fixed at construction time. */
  private final int mNumWorkers;

  /**
   * @param numWorkers the number of workers to run
   */
  AbstractLocalAlluxioCluster(int numWorkers) {
    mProxyProcess = ProxyProcess.Factory.create();
    mNumWorkers = numWorkers;
    mWorkerThreads = new ArrayList<>();
  }

  /**
   * Starts both master and a worker using the configurations in test conf respectively.
   */
  public void start() throws Exception {
    // Disable HDFS client caching to avoid file system close() affecting other clients
    System.setProperty("fs.hdfs.impl.disable.cache", "true");
    resetClientPools();
    setupTest();
    startMasters();
    // Reset the file system context to make sure the correct master RPC port is used.
    FileSystemContext.get().reset(Configuration.global());
    startWorkers();
    startProxy();
    // Reset contexts so that they pick up the updated configuration.
    reset();
  }

  /**
   * Configures and starts the master(s).
   */
  protected abstract void startMasters() throws Exception;

  /**
   * Restarts the master(s).
   */
  public void restartMasters() throws Exception {
    stopMasters();
    startMasters();
  }

  /**
   * Configures and starts the proxy on a dedicated thread, then blocks until it is ready.
   */
  private void startProxy() throws Exception {
    mProxyProcess = ProxyProcess.Factory.create();
    // Lambda instead of an anonymous Runnable, for consistency with startWorkers().
    Runnable runProxy = () -> {
      try {
        mProxyProcess.start();
      } catch (InterruptedException e) {
        // this is expected
      } catch (Exception e) {
        // Log the exception as the RuntimeException will be caught and handled silently by JUnit
        LOG.error("Start proxy error", e);
        throw new RuntimeException(e + " \n Start Proxy Error \n" + e.getMessage(), e);
      }
    };
    mProxyThread = new Thread(runProxy);
    mProxyThread.setName("ProxyThread-" + System.identityHashCode(mProxyThread));
    mProxyThread.start();
    TestUtils.waitForReady(mProxyProcess);
  }

  /**
   * Configures and starts the worker(s), one thread each, then blocks until all are ready.
   */
  public void startWorkers() throws Exception {
    mWorkers = new ArrayList<>();
    for (int i = 0; i < mNumWorkers; i++) {
      mWorkers.add(WorkerProcess.Factory.create());
    }
    for (final WorkerProcess worker : mWorkers) {
      Runnable runWorker = () -> {
        try {
          worker.start();
        } catch (InterruptedException e) {
          // this is expected
        } catch (Exception e) {
          // Log the exception as the RuntimeException will be caught and handled silently by
          // JUnit
          LOG.error("Start worker error", e);
          throw new RuntimeException(e + " \n Start Worker Error \n" + e.getMessage(), e);
        }
      };
      Thread thread = new Thread(runWorker);
      thread.setName("WorkerThread-" + System.identityHashCode(thread));
      mWorkerThreads.add(thread);
      thread.start();
    }
    for (WorkerProcess worker : mWorkers) {
      TestUtils.waitForReady(worker);
    }
  }

  /**
   * Sets up corresponding directories for tests: the UFS root, the worker tiered-store
   * directories, and a freshly formatted journal.
   */
  protected void setupTest() throws IOException {
    UnderFileSystem ufs = UnderFileSystem.Factory.createForRoot();
    String underfsAddress = Configuration.get(PropertyKey.MASTER_MOUNT_TABLE_ROOT_UFS);
    // Deletes the ufs dir for this test to avoid permission problems
    UnderFileSystemUtils.deleteDirIfExists(ufs, underfsAddress);
    // Creates ufs dir. This must be called before starting UFS with UnderFileSystemCluster.get().
    UnderFileSystemUtils.mkdirIfNotExists(ufs, underfsAddress);
    // Creates storage dirs for worker
    int numLevel = Configuration.getInt(PropertyKey.WORKER_TIERED_STORE_LEVELS);
    for (int level = 0; level < numLevel; level++) {
      PropertyKey tierLevelDirPath =
          PropertyKey.Template.WORKER_TIERED_STORE_LEVEL_DIRS_PATH.format(level);
      String[] dirPaths = Configuration.get(tierLevelDirPath).split(",");
      for (String dirPath : dirPaths) {
        FileUtils.createDir(dirPath);
      }
    }
    // Formats the journal
    Format.format(Format.Mode.MASTER);
  }

  /**
   * Stops both the alluxio and underfs service threads, then resets test state.
   */
  public void stop() throws Exception {
    stopFS();
    reset();
    LoginUserTestUtils.resetLoginUser();
    ConfigurationTestUtils.resetConfiguration();
  }

  /**
   * Stops the alluxio filesystem's service thread only.
   */
  public void stopFS() throws Exception {
    LOG.info("stop Alluxio filesystem");
    stopProxy();
    stopWorkers();
    stopMasters();
  }

  /**
   * Stops the masters, formats them, and then restarts them. This is useful if a fresh state is
   * desired, for example when restoring from a backup.
   */
  public void formatAndRestartMasters() throws Exception {
    stopMasters();
    Format.format(Format.Mode.MASTER);
    startMasters();
  }

  /**
   * Stops the masters.
   */
  protected abstract void stopMasters() throws Exception;

  /**
   * Stops the proxy and joins its thread; interrupts repeatedly until the thread dies.
   */
  protected void stopProxy() throws Exception {
    mProxyProcess.stop();
    if (mProxyThread != null) {
      while (mProxyThread.isAlive()) {
        LOG.info("Stopping thread {}.", mProxyThread.getName());
        mProxyThread.interrupt();
        mProxyThread.join(1000);
      }
      mProxyThread = null;
    }
  }

  /**
   * Stops the workers and joins their threads; interrupts repeatedly until each thread dies.
   */
  public void stopWorkers() throws Exception {
    for (WorkerProcess worker : mWorkers) {
      worker.stop();
    }
    for (Thread thread : mWorkerThreads) {
      while (thread.isAlive()) {
        LOG.info("Stopping thread {}.", thread.getName());
        thread.interrupt();
        thread.join(1000);
      }
    }
    mWorkerThreads.clear();
  }

  /**
   * Creates a default {@link Configuration} for testing. Ports are set to 0 so the OS
   * assigns free ones, avoiding collisions between concurrently running test clusters.
   */
  public void initConfiguration() throws IOException {
    setAlluxioWorkDirectory();
    setHostname();
    for (Entry<PropertyKey, String> entry : ConfigurationTestUtils
        .testConfigurationDefaults(mHostname, mWorkDirectory).entrySet()) {
      Configuration.set(entry.getKey(), entry.getValue());
    }
    Configuration.set(PropertyKey.TEST_MODE, true);
    Configuration.set(PropertyKey.MASTER_RPC_PORT, 0);
    Configuration.set(PropertyKey.MASTER_WEB_PORT, 0);
    Configuration.set(PropertyKey.PROXY_WEB_PORT, 0);
    Configuration.set(PropertyKey.WORKER_RPC_PORT, 0);
    Configuration.set(PropertyKey.WORKER_DATA_PORT, 0);
    Configuration.set(PropertyKey.WORKER_WEB_PORT, 0);
  }

  /**
   * Returns a {@link FileSystem} client.
   *
   * @return a {@link FileSystem} client
   */
  public abstract FileSystem getClient() throws IOException;

  /**
   * @param context the FileSystemContext to use
   * @return a {@link FileSystem} client, using a specific context
   */
  public abstract FileSystem getClient(FileSystemContext context) throws IOException;

  /**
   * @return the local Alluxio master
   */
  protected abstract LocalAlluxioMaster getLocalAlluxioMaster();

  /**
   * Gets the proxy process.
   *
   * @return the proxy
   */
  public ProxyProcess getProxyProcess() {
    return mProxyProcess;
  }

  /**
   * Resets the cluster to original state.
   */
  protected void reset() {
    ClientTestUtils.resetClient();
    GroupMappingServiceTestUtils.resetCache();
  }

  /**
   * Resets the client pools to the original state.
   */
  protected void resetClientPools() throws IOException {
    Configuration.set(PropertyKey.USER_METRICS_COLLECTION_ENABLED, false);
    FileSystemContext.clearCache();
    FileSystemContext.get().reset(Configuration.global());
  }

  /**
   * Sets hostname.
   */
  protected void setHostname() {
    mHostname = NetworkAddressUtils.getLocalHostName(100);
  }

  /**
   * Sets Alluxio work directory to a fresh temporary directory.
   */
  protected void setAlluxioWorkDirectory() {
    mWorkDirectory =
        AlluxioTestDirectory.createTemporaryDirectory("test-cluster").getAbsolutePath();
  }
}
|
[SMALLFIX] Java 8 Improvement: replace anonymous type with lambda in alluxio.master.AbstractLocalAlluxioCluster#startProxy #278 (#8026)
* replace anonymous type with lambda in alluxio.master.AbstractLocalAlluxioCluster#startProxy
* replace anonymous type with lambda in alluxio.master.AbstractLocalAlluxioCluster#startProxy
* replace anonymous type with lambda in alluxio.master.AbstractLocalAlluxioCluster#startProxy
|
minicluster/src/main/java/alluxio/master/AbstractLocalAlluxioCluster.java
|
[SMALLFIX] Java 8 Improvement: replace anonymous type with lambda in alluxio.master.AbstractLocalAlluxioCluster#startProxy #278 (#8026)
|
|
Java
|
apache-2.0
|
27ea403ba474ba696718c506b041b7d31e6b23c6
| 0
|
mproch/apache-ode,apache/ode,mproch/apache-ode,aaronanderson/ode,apache/ode,apache/ode,Subasinghe/ode,Subasinghe/ode,Subasinghe/ode,aaronanderson/ode,apache/ode,aaronanderson/ode,apache/ode,aaronanderson/ode,Subasinghe/ode,Subasinghe/ode
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.bpel.engine;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.common.CorrelationKey;
import org.apache.ode.bpel.common.FaultException;
import org.apache.ode.bpel.common.InvalidMessageException;
import org.apache.ode.bpel.dao.CorrelatorDAO;
import org.apache.ode.bpel.dao.MessageRouteDAO;
import org.apache.ode.bpel.dao.ProcessDAO;
import org.apache.ode.bpel.dao.ProcessInstanceDAO;
import org.apache.ode.bpel.evt.CorrelationMatchEvent;
import org.apache.ode.bpel.evt.CorrelationNoMatchEvent;
import org.apache.ode.bpel.evt.NewProcessInstanceEvent;
import org.apache.ode.bpel.explang.EvaluationException;
import org.apache.ode.bpel.iapi.BpelEngineException;
import org.apache.ode.bpel.iapi.DeploymentUnit;
import org.apache.ode.bpel.iapi.Endpoint;
import org.apache.ode.bpel.iapi.EndpointReference;
import org.apache.ode.bpel.iapi.MessageExchange;
import org.apache.ode.bpel.iapi.MessageExchange.FailureType;
import org.apache.ode.bpel.iapi.MessageExchange.MessageExchangePattern;
import org.apache.ode.bpel.iapi.MessageExchange.Status;
import org.apache.ode.bpel.iapi.MyRoleMessageExchange.CorrelationStatus;
import org.apache.ode.bpel.iapi.PartnerRoleChannel;
import org.apache.ode.bpel.intercept.InterceptorInvoker;
import org.apache.ode.bpel.intercept.MessageExchangeInterceptor;
import org.apache.ode.bpel.o.OBase;
import org.apache.ode.bpel.o.OElementVarType;
import org.apache.ode.bpel.o.OMessageVarType;
import org.apache.ode.bpel.o.OPartnerLink;
import org.apache.ode.bpel.o.OProcess;
import org.apache.ode.bpel.o.OScope;
import org.apache.ode.bpel.runtime.ExpressionLanguageRuntimeRegistry;
import org.apache.ode.bpel.runtime.InvalidProcessException;
import org.apache.ode.bpel.runtime.PROCESS;
import org.apache.ode.bpel.runtime.PropertyAliasEvaluationContext;
import org.apache.ode.jacob.soup.ReplacementMap;
import org.apache.ode.utils.ArrayUtils;
import org.apache.ode.utils.ObjectPrinter;
import org.apache.ode.utils.msg.MessageBundle;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import javax.wsdl.Message;
import javax.wsdl.Operation;
import javax.xml.namespace.QName;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Entry point into the runtime of a BPEL process.
*
* @author mszefler
*/
public class BpelProcess {
static final Log __log = LogFactory.getLog(BpelProcess.class);
private static final Messages __msgs = MessageBundle.getMessages(Messages.class);
private final Map<OPartnerLink, PartnerLinkPartnerRoleImpl> _partnerRoles = new HashMap<OPartnerLink, PartnerLinkPartnerRoleImpl>();
private Map<OPartnerLink, PartnerLinkMyRoleImpl> _myRoles = new HashMap<OPartnerLink, PartnerLinkMyRoleImpl>();
BpelEngineImpl _engine;
DebuggerSupport _debugger;
final OProcess _oprocess;
final ExpressionLanguageRuntimeRegistry _expLangRuntimeRegistry;
final ReplacementMap _replacementMap;
final QName _pid;
/** Mapping from {"Service Name" (QNAME) / port} to a myrole. */
private Map<Endpoint, PartnerLinkMyRoleImpl> _endpointToMyRoleMap = new HashMap<Endpoint, PartnerLinkMyRoleImpl>();
/** {@link MessageExchangeInterceptor}s registered for this process. */
private final List<MessageExchangeInterceptor> _mexInterceptors = new ArrayList<MessageExchangeInterceptor>();
private DeploymentUnit _du;
/**
 * Constructs the runtime representation of a deployed BPEL process, registering a role
 * implementation for each side of every partner link declared in the compiled process.
 *
 * <p>NOTE(review): the {@code debugger} parameter is never stored here — presumably the
 * {@code _debugger} field is initialized elsewhere; confirm before relying on it.
 *
 * @throws IllegalArgumentException if a my-role partner link has no endpoint mapping
 */
public BpelProcess(QName pid, DeploymentUnit du, OProcess oprocess,
        Map<OPartnerLink, Endpoint> myRoleEndpointNames, Map<OPartnerLink, Endpoint> initialPartners,
        BpelEventListener debugger, ExpressionLanguageRuntimeRegistry expLangRuntimeRegistry,
        List<MessageExchangeInterceptor> localMexInterceptors) {
    _pid = pid;
    _du = du;
    _replacementMap = new ReplacementMapImpl(oprocess);
    _oprocess = oprocess;
    _expLangRuntimeRegistry = expLangRuntimeRegistry;
    _mexInterceptors.addAll(localMexInterceptors);

    for (OPartnerLink plink : _oprocess.getAllPartnerLinks()) {
        // A partner link may carry a my-role, a partner-role, or both.
        if (plink.hasMyRole()) {
            Endpoint myRoleEndpoint = myRoleEndpointNames.get(plink);
            if (myRoleEndpoint == null) {
                throw new IllegalArgumentException("No service name for myRole plink " + plink.getName());
            }
            PartnerLinkMyRoleImpl myRoleImpl = new PartnerLinkMyRoleImpl(plink, myRoleEndpoint);
            _myRoles.put(plink, myRoleImpl);
            // Index by endpoint so inbound exchanges can be routed to this role.
            _endpointToMyRoleMap.put(myRoleEndpoint, myRoleImpl);
        }
        if (plink.hasPartnerRole()) {
            _partnerRoles.put(plink, new PartnerLinkPartnerRoleImpl(plink, initialPartners.get(plink)));
        }
    }
}
/** @return a short description of this process: its id and deployment unit. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("BpelProcess[");
    sb.append(_pid).append(" in ").append(_du).append(']');
    return sb.toString();
}
/**
 * Builds the identifier used to correlate message exchanges for a partner link /
 * operation pair, of the form {@code "<partnerLinkName>.<operationName>"}.
 *
 * @param partnerlinkName name of the partner link
 * @param operationName name of the operation
 * @return the message exchange identifier
 */
static String generateMessageExchangeIdentifier(String partnerlinkName, String operationName) {
    // Plain concatenation; the previous StringBuffer added synchronization overhead for no benefit.
    return partnerlinkName + '.' + operationName;
}
/**
 * Entry point for message exchanges aimed at the my role. Resolves the target my-role
 * partner link for the exchange's service, lets interceptors veto the invocation, and
 * then dispatches into the role implementation.
 *
 * @param mex my-role message exchange to dispatch into this process
 */
void invokeProcess(MyRoleMessageExchangeImpl mex) {
    PartnerLinkMyRoleImpl target = getMyRoleForService(mex.getServiceName());
    if (target == null) {
        // No my-role endpoint serves this service: fail the exchange instead of throwing.
        String errmsg = __msgs.msgMyRoleRoutingFailure(mex.getMessageExchangeId());
        __log.error(errmsg);
        mex.setFailure(MessageExchange.FailureType.UNKNOWN_ENDPOINT, errmsg, null);
        return;
    }
    mex.getDAO().setProcess(getProcessDAO());
    mex.setProcess(_oprocess);
    // Interceptors may abort the invocation (e.g. throttling, auditing policies).
    if (!processInterceptors(mex, InterceptorInvoker.__onProcessInvoked)) {
        __log.debug("Aborting processing of mex " + mex + " due to interceptors.");
        return;
    }
    target.invokeMyRole(mex);
}
/**
 * Looks up the my-role partner link implementation registered for a service name.
 *
 * @param serviceName service QName to resolve
 * @return the matching role implementation, or {@code null} if none is registered
 */
private PartnerLinkMyRoleImpl getMyRoleForService(QName serviceName) {
    for (Map.Entry<Endpoint, PartnerLinkMyRoleImpl> entry : _endpointToMyRoleMap.entrySet()) {
        if (entry.getKey().serviceName.equals(serviceName)) {
            return entry.getValue();
        }
    }
    return null;
}
/**
 * Initializes the port type and operation on a my-role message exchange, based on the
 * endpoint registered for the exchange's target service. Logs a warning and leaves the
 * exchange untouched if no endpoint matches.
 *
 * <p>NOTE(review): if several endpoints share a service name, the LAST match in map
 * iteration order wins — presumably service names are unique per process; verify.
 *
 * @param mex my-role message exchange to initialize
 */
void initMyRoleMex(MyRoleMessageExchangeImpl mex) {
    PartnerLinkMyRoleImpl target = null;
    for (Endpoint endpoint : _endpointToMyRoleMap.keySet()) {
        if (endpoint.serviceName.equals(mex.getServiceName()))
            target = _endpointToMyRoleMap.get(endpoint);
    }
    if (target != null) {
        mex.setPortOp(target._plinkDef.myRolePortType, target._plinkDef.getMyRoleOperation(mex.getOperationName()));
    } else {
        __log.warn("Couldn't find endpoint from service " + mex.getServiceName()
                + " when initializing a myRole mex.");
    }
}
/**
 * Extract the value of a BPEL property from a BPEL message variable.
 *
 * <p>The alias's location expression (if any) is evaluated against the message data; the
 * resulting node is flattened to a string: element nodes concatenate their children's
 * values, text nodes return their whole text, anything else yields {@code null}.
 *
 * @param msgData message variable data
 * @param alias property alias to apply
 * @param target description of the data (for error logging only)
 * @return value of the property, or {@code null} if the selected node is neither an
 *         element nor a text node
 * @throws FaultException with {@code qnSelectionFailure} if the location expression fails
 *         to evaluate or selects nothing
 */
String extractProperty(Element msgData, OProcess.OPropertyAlias alias, String target) throws FaultException {
    PropertyAliasEvaluationContext ectx = new PropertyAliasEvaluationContext(msgData, alias);
    Node lValue = ectx.getRootNode();
    // Only evaluate the location path when the alias actually specifies one; otherwise
    // the root node of the evaluation context is used directly.
    if (alias.location != null)
        try {
            lValue = _expLangRuntimeRegistry.evaluateNode(alias.location, ectx);
        } catch (EvaluationException ec) {
            // Expression failure is surfaced as a BPEL selectionFailure fault.
            throw new FaultException(_oprocess.constants.qnSelectionFailure, alias.getDescription());
        }
    if (lValue == null) {
        String errmsg = __msgs.msgPropertyAliasReturnedNullSet(alias.getDescription(), target);
        if (__log.isErrorEnabled()) {
            __log.error(errmsg);
        }
        throw new FaultException(_oprocess.constants.qnSelectionFailure, errmsg);
    }
    if (lValue.getNodeType() == Node.ELEMENT_NODE) {
        // This is a bit hokey, we concatenate all the children's values; we
        // really should be checking to make sure that we are only dealing
        // with
        // text and attribute nodes.
        StringBuffer val = new StringBuffer();
        NodeList nl = lValue.getChildNodes();
        for (int i = 0; i < nl.getLength(); ++i) {
            Node n = nl.item(i);
            val.append(n.getNodeValue());
        }
        return val.toString();
    } else if (lValue.getNodeType() == Node.TEXT_NODE) {
        return ((Text) lValue).getWholeText();
    } else
        return null;
}
/**
 * Get the element name for a given WSDL part. If the part is an <em>element</em> part,
 * the name of that element is returned; if the part is an XML schema typed part, the name
 * of the part itself is returned in the null namespace.
 *
 * @param part WSDL {@link javax.wsdl.Part}
 * @return name of element containing said part
 */
static QName getElementNameForPart(OMessageVarType.Part part) {
    if (part.type instanceof OElementVarType) {
        return ((OElementVarType) part.type).elementType;
    }
    return new QName(null, part.name);
}
/**
 * Create a version-appropriate runtime context.
 *
 * @param dao persisted instance state backing the context
 * @param template root JACOB process abstraction (only for newly created instances)
 * @param instantiatingMessageExchange the exchange that caused instantiation, if any
 * @return a runtime context bound to this process and the given instance
 */
private BpelRuntimeContextImpl createRuntimeContext(ProcessInstanceDAO dao, PROCESS template,
        MyRoleMessageExchangeImpl instantiatingMessageExchange) {
    return new BpelRuntimeContextImpl(this, dao, template, instantiatingMessageExchange);
}
/**
 * Runs the message-exchange interceptors — first the process-local ones, then the
 * engine-global ones — stopping at the first veto.
 *
 * @param mex message exchange
 * @param invoker which interceptor callback to invoke
 * @return {@code true} if execution should continue, {@code false} otherwise
 */
private boolean processInterceptors(MyRoleMessageExchangeImpl mex, InterceptorInvoker invoker) {
    InterceptorContextImpl ictx = new InterceptorContextImpl(_engine._contexts.dao.getConnection(), getProcessDAO());
    for (MessageExchangeInterceptor interceptor : _mexInterceptors) {
        if (!mex.processInterceptor(interceptor, mex, ictx, invoker)) {
            return false;
        }
    }
    for (MessageExchangeInterceptor interceptor : _engine.getGlobalInterceptors()) {
        if (!mex.processInterceptor(interceptor, mex, ictx, invoker)) {
            return false;
        }
    }
    return true;
}
/**
 * Replacement object for serialization of the {@link OBase} (compiled BPEL) objects in
 * the JACOB VPU. Instead of serializing the full compiled object, only its integer id is
 * written; the id is resolved back through the process's replacement map on read.
 */
public static final class OBaseReplacementImpl implements Externalizable {
    private static final long serialVersionUID = 1L;

    // Identifier of the compiled OBase object this replacement stands in for.
    int _id;

    /** No-arg constructor required by {@link Externalizable}. */
    public OBaseReplacementImpl() {
    }

    public OBaseReplacementImpl(int id) {
        _id = id;
    }

    public void readExternal(ObjectInput in) throws IOException {
        _id = in.readInt();
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeInt(_id);
    }
}
/**
 * Base class for the runtime representation of one role (my-role or partner-role) of a
 * partner link, pairing the compiled partner-link definition with its initial EPR.
 */
private abstract class PartnerLinkRoleImpl {
    // Compiled partner-link definition this role belongs to.
    protected OPartnerLink _plinkDef;
    // Initial endpoint reference; set from the integration layer (see getInitialEPR()).
    protected EndpointReference _initialEPR;

    PartnerLinkRoleImpl(OPartnerLink plink) {
        _plinkDef = plink;
    }

    /** @return the name of the underlying partner link */
    String getPartnerLinkName() {
        return _plinkDef.name;
    }

    /**
     * Get the initial value of this role's EPR. This value is obtained from
     * the integration layer when the process is enabled on the server.
     *
     * @return the initial endpoint reference, possibly {@code null} if not yet set
     */
    EndpointReference getInitialEPR() {
        return _initialEPR;
    }
}
class PartnerLinkMyRoleImpl extends PartnerLinkRoleImpl {
/** The local endpoint for this "myrole". */
public Endpoint _endpoint;
PartnerLinkMyRoleImpl(OPartnerLink plink, Endpoint endpoint) {
super(plink);
_endpoint = endpoint;
}
/**
 * @return a human-readable description of this my-role, e.g.
 *         {@code {PartnerLinkRole-plink.role on endpoint}}
 */
public String toString() {
    // StringBuilder instead of StringBuffer: a method-local buffer needs no synchronization.
    StringBuilder buf = new StringBuilder("{PartnerLinkRole-");
    buf.append(_plinkDef.name);
    buf.append('.');
    buf.append(_plinkDef.myRoleName);
    buf.append(" on ");
    buf.append(_endpoint);
    buf.append('}');
    return buf.toString();
}
/**
* Called when an input message has been received.
*
* @param mex
* exchange to which the message is related
*/
public void invokeMyRole(MyRoleMessageExchangeImpl mex) {
if (__log.isTraceEnabled()) {
__log.trace(ObjectPrinter.stringifyMethodEnter(this + ":inputMsgRcvd", new Object[] {
"messageExchange", mex }));
}
Operation operation = getMyRoleOperation(mex.getOperationName());
if (operation == null) {
__log.error(__msgs.msgUnknownOperation(mex.getOperationName(), _plinkDef.myRolePortType.getQName()));
mex.setFailure(FailureType.UNKNOWN_OPERATION, mex.getOperationName(), null);
return;
}
mex.getDAO().setPartnerLinkModelId(_plinkDef.getId());
mex.setPortOp(_plinkDef.myRolePortType, operation);
mex.setPattern(operation.getOutput() == null ? MessageExchangePattern.REQUEST_ONLY
: MessageExchangePattern.REQUEST_RESPONSE);
// Is this a /possible/ createInstance Operation?
boolean isCreateInstnace = _plinkDef.isCreateInstanceOperation(operation);
// now, the tricks begin: when a message arrives we have to see if
// there
// is anyone waiting for it.
// Get the correlator, a persisted communnication-reduction data
// structure
// supporting correlation correlationKey matching!
String correlatorId = genCorrelatorId(_plinkDef, operation.getName());
CorrelatorDAO correlator = getProcessDAO().getCorrelator(correlatorId);
CorrelationKey[] keys;
MessageRouteDAO messageRoute = null;
// We need to compute the correlation keys (based on the operation
// we can
// infer which correlation keys to compute - this is merely a set
// consisting of each correlationKey used in each correlation sets
// that is
// ever
// referenced in an <receive>/<onMessage> on this
// partnerlink/operation.
keys = computeCorrelationKeys(mex);
String mySessionId = mex.getProperty(MessageExchange.PROPERTY_SEP_MYROLE_SESSIONID);
String partnerSessionId = mex.getProperty(MessageExchange.PROPERTY_SEP_PARTNERROLE_SESSIONID);
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": MSG RCVD keys="
+ ArrayUtils.makeCollection(HashSet.class, keys)
+ " mySessionId=" + mySessionId + " partnerSessionId=" + partnerSessionId);
}
CorrelationKey matchedKey = null;
// Try to find a route for one of our keys.
for (CorrelationKey key : keys) {
messageRoute = correlator.findRoute(key);
if (messageRoute != null) {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": ckey " + key + " route is to " + messageRoute);
}
matchedKey = key;
break;
}
}
// Handling the "opaque correlation case": correlation is done on a
// session identifier associated with my epr
if (messageRoute == null) {
String sessionId = mex.getProperty(MessageExchange.PROPERTY_SEP_MYROLE_SESSIONID);
if (sessionId != null) {
CorrelationKey key = new CorrelationKey(-1, new String[] { sessionId });
messageRoute = correlator.findRoute(key);
if (__log.isDebugEnabled())
__log.debug("INPUTMSG: rouing based on session id " + sessionId + " --> " + messageRoute);
}
}
// TODO - ODE-58
// If no luck try to find a default route.
if (messageRoute == null) {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId
+ ": CorrelationKey routing failed; checking default route.");
}
messageRoute = correlator.findRoute(null);
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": default route is to " + messageRoute);
}
}
// If still no luck, and this operation qualifies for
// create-instance
// treatment,
// then create a new process instance.
if (messageRoute == null && isCreateInstnace) {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": default routing failed, CREATING NEW INSTANCE");
}
ProcessDAO processDAO = getProcessDAO();
if (processDAO.isRetired()) {
throw new InvalidProcessException("Process is retired.", InvalidProcessException.RETIRED_CAUSE_CODE);
}
if (!processInterceptors(mex, InterceptorInvoker.__onNewInstanceInvoked)) {
__log.debug("Not creating a new instance for mex " + mex + "; interceptor prevented!");
return;
}
ProcessInstanceDAO newInstance = processDAO.createInstance(correlator);
BpelRuntimeContextImpl instance = createRuntimeContext(newInstance, new PROCESS(_oprocess), mex);
// send process instance event
NewProcessInstanceEvent evt = new NewProcessInstanceEvent(new QName(_oprocess.targetNamespace,
_oprocess.getName()), getProcessDAO().getProcessId(), newInstance.getInstanceId());
evt.setPortType(mex.getPortType().getQName());
evt.setOperation(operation.getName());
evt.setMexId(mex.getMessageExchangeId());
_debugger.onEvent(evt);
newInstance.insertBpelEvent(evt);
mex.setCorrelationStatus(CorrelationStatus.CREATE_INSTANCE);
mex.getDAO().setInstance(newInstance);
instance.execute();
} else if (messageRoute != null) {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": ROUTING to instance "
+ messageRoute.getTargetInstance().getInstanceId());
}
// Reload process instance for DAO.
BpelRuntimeContextImpl instance = createRuntimeContext(messageRoute.getTargetInstance(), null, null);
instance.inputMsgMatch(messageRoute.getGroupId(), messageRoute.getIndex(), mex);
// Kill the route so some new message does not get routed to
// same
// process
// instance.
correlator.removeRoutes(messageRoute.getGroupId(), messageRoute.getTargetInstance());
Long iid = messageRoute.getTargetInstance().getInstanceId();
// send process instance event
CorrelationMatchEvent evt = new CorrelationMatchEvent(new QName(_oprocess.targetNamespace, _oprocess
.getName()), getProcessDAO().getProcessId(), iid, matchedKey);
evt.setPortType(mex.getPortType().getQName());
evt.setOperation(operation.getName());
evt.setMexId(mex.getMessageExchangeId());
_debugger.onEvent(evt);
// store event
getProcessDAO().getInstance(iid).insertBpelEvent(evt);
mex.setCorrelationStatus(CorrelationStatus.MATCHED);
mex.getDAO().setInstance(messageRoute.getTargetInstance());
// run the vpu
instance.execute();
} else {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": SAVING to DB (no match) ");
}
// send event
CorrelationNoMatchEvent evt = new CorrelationNoMatchEvent(mex.getPortType().getQName(), mex
.getOperation().getName(), mex.getMessageExchangeId(), keys);
evt.setProcessId(getProcessDAO().getProcessId());
evt.setProcessName(new QName(_oprocess.targetNamespace, _oprocess.getName()));
_debugger.onEvent(evt);
mex.setCorrelationStatus(CorrelationStatus.QUEUED);
// No match, means we add message exchange to the queue.
correlator.enqueueMessage(mex.getDAO(), keys);
}
// Now we have to update our message exchange status. If the <reply>
// was
// not hit during the invocation, then we will be in the "REQUEST"
// phase
// which means that either this was a one-way or a two-way that
// needs to
// delivery the reply asynchronously.
if (mex.getStatus() == Status.REQUEST) {
switch (mex.getPattern()) {
case REQUEST_ONLY:
mex.setStatus(Status.ONE_WAY);
break;
case REQUEST_RESPONSE:
mex.setStatus(Status.ASYNC);
break;
default:
String errmsg = "BpelProcess: internal error, message exchange pattern not set";
__log.fatal(errmsg);
throw new BpelEngineException(errmsg);
}
}
}
/**
 * Resolves the WSDL {@link Operation} with the given name on this partner
 * link's "my role" port type.
 *
 * @param operationName name of the operation to look up
 * @return the matching operation, or {@code null} if the my-role port type
 *         declares no operation with that name (callers such as
 *         {@code invokeMyRole} treat {@code null} as an unknown operation)
 */
@SuppressWarnings("unchecked")
private Operation getMyRoleOperation(String operationName) {
    // Delegate straight to the compiled partner-link model; the original's
    // intermediate local variable added nothing.
    return _plinkDef.getMyRoleOperation(operationName);
}
/**
 * Computes one correlation key per correlation set that is referenced by any
 * receive/onMessage on this partner-link/operation pair, from the values of
 * the correlation properties found in the request message.
 *
 * @param mex the inbound my-role message exchange carrying the request
 * @return the computed keys (possibly an empty array)
 */
private CorrelationKey[] computeCorrelationKeys(MyRoleMessageExchangeImpl mex) {
    Operation op = mex.getOperation();
    Element payload = mex.getRequest().getMessage();
    Message inputDef = op.getInput().getMessage();
    // Look up the compiled message-type descriptor once; it is the same for
    // every correlation set of this operation.
    OMessageVarType msgType = _oprocess.messageTypes.get(inputDef.getQName());
    List<CorrelationKey> result = new ArrayList<CorrelationKey>();
    for (OScope.CorrelationSet correlationSet : _plinkDef.getCorrelationSetsForOperation(op)) {
        result.add(computeCorrelationKey(correlationSet, msgType, payload));
    }
    return result.toArray(new CorrelationKey[result.size()]);
}
/**
 * Computes the correlation key for a single correlation set by extracting the
 * value of each of its properties from the message.
 *
 * @param cset        the correlation set whose key is being computed
 * @param messagetype compiled type descriptor of the message (used to find
 *                    the property alias)
 * @param msg         the message payload
 * @return the correlation key (cset id + property values, in property order)
 * @throws IllegalArgumentException if a property has no alias for this
 *                                  message type
 * @throws InvalidMessageException  if a property alias cannot be dereferenced
 *                                  against the message
 */
private CorrelationKey computeCorrelationKey(OScope.CorrelationSet cset, OMessageVarType messagetype,
        Element msg) {
    String[] values = new String[cset.properties.size()];
    // Enhanced for-loop over the (raw) property list replaces the original
    // raw-Iterator idiom; the element cast is kept because the list's
    // element type is not declared generically.
    int idx = 0;
    for (Object o : cset.properties) {
        OProcess.OProperty property = (OProcess.OProperty) o;
        OProcess.OPropertyAlias alias = property.getAlias(messagetype);
        if (alias == null) {
            // TODO: Throw a real exception! And catch this at compile
            // time.
            throw new IllegalArgumentException("No alias matching property '" + property.name
                    + "' with message type '" + messagetype + "'");
        }
        String value;
        try {
            value = extractProperty(msg, alias, msg.toString());
        } catch (FaultException fe) {
            String emsg = __msgs.msgPropertyAliasDerefFailedOnMessage(alias.getDescription(), fe.getMessage());
            __log.error(emsg, fe);
            throw new InvalidMessageException(emsg, fe);
        }
        values[idx++] = value;
    }
    return new CorrelationKey(cset.getId(), values);
}
}
/**
 * Runtime representation of the partner-role side of a partner link: holds the
 * initially-configured partner endpoint and the communication channel created
 * for it during {@code activate()}.
 */
private class PartnerLinkPartnerRoleImpl extends PartnerLinkRoleImpl {
    // Endpoint configured at deployment time; may be null if no initial
    // partner was supplied for this link.
    Endpoint _initialPartner;
    // Channel created by the binding context in activate(); public because it
    // is read directly by getPartnerRoleChannel().
    public PartnerRoleChannel _channel;
    private PartnerLinkPartnerRoleImpl(OPartnerLink plink, Endpoint initialPartner) {
        super(plink);
        _initialPartner = initialPartner;
    }
    /**
     * Delivers a partner's response to the waiting process instance and
     * resumes its execution.
     *
     * @param messageExchange the partner-role exchange carrying the response
     */
    public void processPartnerResponse(PartnerRoleMessageExchangeImpl messageExchange) {
        if (__log.isDebugEnabled()) {
            __log.debug("Processing partner's response for partnerLink: " + messageExchange);
        }
        BpelRuntimeContextImpl processInstance = createRuntimeContext(messageExchange.getDAO().getInstance(), null,
                null);
        processInstance.invocationResponse(messageExchange);
        processInstance.execute();
    }
}
/**
 * Entry point for scheduled work events delivered by the engine's scheduler:
 * timer firings, instance resumption requests, and asynchronous invoke
 * responses. Events for instances that no longer exist are silently dropped.
 *
 * @param jobData serialized {@link WorkEvent} payload as stored by the
 *                scheduler
 */
public void handleWorkEvent(Map<String, Object> jobData) {
    ProcessInstanceDAO procInstance;
    if (__log.isDebugEnabled()) {
        __log.debug(ObjectPrinter.stringifyMethodEnter("handleWorkEvent", new Object[] { "jobData", jobData }));
    }
    WorkEvent we = new WorkEvent(jobData);
    procInstance = getProcessDAO().getInstance(we.getIID());
    if (procInstance == null) {
        // Instance may have completed/been purged since the job was queued.
        if (__log.isDebugEnabled()) {
            __log.debug("handleWorkEvent: no ProcessInstance found with iid " + we.getIID() + "; ignoring.");
        }
        return;
    }
    BpelRuntimeContextImpl processInstance = createRuntimeContext(procInstance, null, null);
    // NOTE(review): no default branch — unknown event types are silently
    // ignored; presumably WorkEvent.getType() is a closed enum. Confirm.
    switch (we.getType()) {
    case TIMER:
        if (__log.isDebugEnabled()) {
            __log.debug("handleWorkEvent: TimerWork event for process instance " + processInstance);
        }
        processInstance.timerEvent(we.getChannel());
        break;
    case RESUME:
        if (__log.isDebugEnabled()) {
            __log.debug("handleWorkEvent: ResumeWork event for iid " + we.getIID());
        }
        processInstance.execute();
        break;
    case INVOKE_RESPONSE:
        if (__log.isDebugEnabled()) {
            __log.debug("InvokeResponse event for iid " + we.getIID());
        }
        // Deliver the response to the waiting pick/invoke, then run the VPU.
        processInstance.invocationResponse(we.getMexId(), we.getChannel());
        processInstance.execute();
        break;
    }
}
/**
 * Fetches the {@link ProcessDAO} for this process. Resolved through the
 * engine's DAO connection on every call (no caching), so the returned DAO is
 * bound to the current persistence context/transaction.
 */
ProcessDAO getProcessDAO() {
    return _engine._contexts.dao.getConnection().getProcess(_pid);
}
/**
 * Builds the identifier of the correlator used for a given partner-link /
 * operation pair: {@code <plink-id>.<operation-name>}.
 *
 * @param plink  the partner link model
 * @param opName the operation name
 * @return the correlator identifier
 */
static String genCorrelatorId(OPartnerLink plink, String opName) {
    StringBuffer id = new StringBuffer();
    id.append(plink.getId()).append('.').append(opName);
    return id.toString();
}
/**
 * Get all the services that are implemented by this process.
 *
 * @return set of endpoints corresponding to the myroles
 */
public Set<Endpoint> getServiceNames() {
    // NOTE(review): this exposes the live keySet view of the internal map;
    // callers could mutate it. Consider returning an unmodifiable copy —
    // verify no caller relies on the live view before changing.
    return _endpointToMyRoleMap.keySet();
}
/**
 * Activates this process on the given engine: registers every my-role
 * endpoint with the binding context and creates a partner-role channel for
 * every partner role, capturing the initial EPRs reported by the
 * integration layer.
 *
 * @param engine the engine this process is being activated on
 */
void activate(BpelEngineImpl engine) {
    _engine = engine;
    _debugger = new DebuggerSupport(this);
    __log.debug("Activating " + _pid);
    // Activate all the my-role endpoints.
    for (PartnerLinkMyRoleImpl myrole : _myRoles.values()) {
        myrole._initialEPR = _engine._contexts.bindingContext.activateMyRoleEndpoint(_pid, _du, myrole._endpoint,
                myrole._plinkDef.myRolePortType);
        __log.debug("Activated " + _pid + " myrole " + myrole.getPartnerLinkName()
                + ": EPR is " + myrole._initialEPR);
    }
    // Create outbound channels for all the partner roles.
    for (PartnerLinkPartnerRoleImpl prole : _partnerRoles.values()) {
        PartnerRoleChannel channel = _engine._contexts.bindingContext.createPartnerRoleChannel(_pid, _du,
                prole._plinkDef.partnerRolePortType, prole._initialPartner);
        prole._channel = channel;
        EndpointReference epr = channel.getInitialEndpointReference();
        // Only overwrite the initial EPR when the channel actually has one.
        if (epr != null) {
            prole._initialEPR = epr;
        }
        __log.debug("Activated " + _pid + " partnerrole " + prole.getPartnerLinkName()
                + ": EPR is " + prole._initialEPR);
    }
    __log.debug("Activated " + _pid);
}
/**
 * Deactivates this process: unregisters every my-role endpoint from the
 * binding context. (Partner-role channels are not yet closed here.)
 */
void deactivate() {
    // Undo the my-role endpoint registrations performed in activate().
    for (Map.Entry<Endpoint, PartnerLinkMyRoleImpl> entry : _endpointToMyRoleMap.entrySet()) {
        _engine._contexts.bindingContext.deactivateMyRoleEndpoint(entry.getKey());
    }
    // TODO Deactivate all the partner-role channels
}
/**
 * Returns the initial endpoint reference of the given link's partner role.
 *
 * @param link the partner link model
 * @return the initial partner-role EPR (may be null if none was configured)
 * @throws IllegalStateException if the link is not known to this process
 */
EndpointReference getInitialPartnerRoleEPR(OPartnerLink link) {
    PartnerLinkPartnerRoleImpl role = _partnerRoles.get(link);
    if (role != null) {
        return role.getInitialEPR();
    }
    throw new IllegalStateException("Unknown partner link " + link);
}
/**
 * Returns the initial endpoint reference of the given link's my-role.
 *
 * @param link the partner link model
 * @return the initial my-role EPR as obtained during activation
 * @throws IllegalStateException if the link is not known to this process
 */
EndpointReference getInitialMyRoleEPR(OPartnerLink link) {
    PartnerLinkMyRoleImpl role = _myRoles.get(link);
    if (role != null) {
        return role.getInitialEPR();
    }
    throw new IllegalStateException("Unknown partner link " + link);
}
/** Returns the process identifier (qualified name) of this process. */
QName getPID() {
    return _pid;
}
/**
 * Returns the communication channel created for the given partner link's
 * partner role during activation.
 *
 * @param partnerLink the partner link model
 * @return the partner-role channel (null before {@code activate()} ran)
 * @throws IllegalStateException if the link is not known to this process
 */
PartnerRoleChannel getPartnerRoleChannel(OPartnerLink partnerLink) {
    PartnerLinkPartnerRoleImpl role = _partnerRoles.get(partnerLink);
    if (role != null) {
        return role._channel;
    }
    throw new IllegalStateException("Unknown partner link " + partnerLink);
}
}
|
bpel-runtime/src/main/java/org/apache/ode/bpel/engine/BpelProcess.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.bpel.engine;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.common.CorrelationKey;
import org.apache.ode.bpel.common.FaultException;
import org.apache.ode.bpel.common.InvalidMessageException;
import org.apache.ode.bpel.dao.CorrelatorDAO;
import org.apache.ode.bpel.dao.MessageRouteDAO;
import org.apache.ode.bpel.dao.ProcessDAO;
import org.apache.ode.bpel.dao.ProcessInstanceDAO;
import org.apache.ode.bpel.evt.CorrelationMatchEvent;
import org.apache.ode.bpel.evt.CorrelationNoMatchEvent;
import org.apache.ode.bpel.evt.NewProcessInstanceEvent;
import org.apache.ode.bpel.explang.EvaluationException;
import org.apache.ode.bpel.iapi.BpelEngineException;
import org.apache.ode.bpel.iapi.DeploymentUnit;
import org.apache.ode.bpel.iapi.Endpoint;
import org.apache.ode.bpel.iapi.EndpointReference;
import org.apache.ode.bpel.iapi.MessageExchange;
import org.apache.ode.bpel.iapi.MessageExchange.FailureType;
import org.apache.ode.bpel.iapi.MessageExchange.MessageExchangePattern;
import org.apache.ode.bpel.iapi.MessageExchange.Status;
import org.apache.ode.bpel.iapi.MyRoleMessageExchange.CorrelationStatus;
import org.apache.ode.bpel.iapi.PartnerRoleChannel;
import org.apache.ode.bpel.intercept.InterceptorInvoker;
import org.apache.ode.bpel.intercept.MessageExchangeInterceptor;
import org.apache.ode.bpel.o.OBase;
import org.apache.ode.bpel.o.OElementVarType;
import org.apache.ode.bpel.o.OMessageVarType;
import org.apache.ode.bpel.o.OPartnerLink;
import org.apache.ode.bpel.o.OProcess;
import org.apache.ode.bpel.o.OScope;
import org.apache.ode.bpel.runtime.ExpressionLanguageRuntimeRegistry;
import org.apache.ode.bpel.runtime.InvalidProcessException;
import org.apache.ode.bpel.runtime.PROCESS;
import org.apache.ode.bpel.runtime.PropertyAliasEvaluationContext;
import org.apache.ode.jacob.soup.ReplacementMap;
import org.apache.ode.utils.ArrayUtils;
import org.apache.ode.utils.ObjectPrinter;
import org.apache.ode.utils.msg.MessageBundle;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import javax.wsdl.Message;
import javax.wsdl.Operation;
import javax.xml.namespace.QName;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Entry point into the runtime of a BPEL process.
*
* @author mszefler
*/
public class BpelProcess {
static final Log __log = LogFactory.getLog(BpelProcess.class);
private static final Messages __msgs = MessageBundle.getMessages(Messages.class);
private final Map<OPartnerLink, PartnerLinkPartnerRoleImpl> _partnerRoles = new HashMap<OPartnerLink, PartnerLinkPartnerRoleImpl>();
private Map<OPartnerLink, PartnerLinkMyRoleImpl> _myRoles = new HashMap<OPartnerLink, PartnerLinkMyRoleImpl>();
BpelEngineImpl _engine;
DebuggerSupport _debugger;
final OProcess _oprocess;
final ExpressionLanguageRuntimeRegistry _expLangRuntimeRegistry;
final ReplacementMap _replacementMap;
final QName _pid;
/** Mapping from {"Service Name" (QNAME) / port} to a myrole. */
private Map<Endpoint, PartnerLinkMyRoleImpl> _endpointToMyRoleMap = new HashMap<Endpoint, PartnerLinkMyRoleImpl>();
/** {@link MessageExchangeInterceptor}s registered for this process. */
private final List<MessageExchangeInterceptor> _mexInterceptors = new ArrayList<MessageExchangeInterceptor>();
private DeploymentUnit _du;
public BpelProcess(QName pid, DeploymentUnit du, OProcess oprocess,
Map<OPartnerLink, Endpoint> myRoleEndpointNames, Map<OPartnerLink, Endpoint> initialPartners,
BpelEventListener debugger, ExpressionLanguageRuntimeRegistry expLangRuntimeRegistry,
List<MessageExchangeInterceptor> localMexInterceptors) {
_pid = pid;
_du = du;
_replacementMap = new ReplacementMapImpl(oprocess);
_oprocess = oprocess;
_expLangRuntimeRegistry = expLangRuntimeRegistry;
_mexInterceptors.addAll(localMexInterceptors);
for (OPartnerLink pl : _oprocess.getAllPartnerLinks()) {
if (pl.hasMyRole()) {
Endpoint endpoint = myRoleEndpointNames.get(pl);
if (endpoint == null)
throw new NullPointerException("No service name for plink " + pl);
PartnerLinkMyRoleImpl myRole = new PartnerLinkMyRoleImpl(pl, endpoint);
_myRoles.put(pl, myRole);
_endpointToMyRoleMap.put(endpoint, myRole);
}
if (pl.hasPartnerRole()) {
PartnerLinkPartnerRoleImpl partnerRole = new PartnerLinkPartnerRoleImpl(pl, initialPartners.get(pl));
_partnerRoles.put(pl, partnerRole);
}
}
}
public String toString() {
return "BpelProcess[" + _pid + " in " + _du + "]";
}
static String generateMessageExchangeIdentifier(String partnerlinkName, String operationName) {
StringBuffer sb = new StringBuffer(partnerlinkName);
sb.append('.');
sb.append(operationName);
return sb.toString();
}
/**
* Entry point for message exchanges aimed at the my role.
*
* @param mex
*/
void invokeProcess(MyRoleMessageExchangeImpl mex) {
PartnerLinkMyRoleImpl target = getMyRoleForService(mex.getServiceName());
if (target == null) {
String errmsg = __msgs.msgMyRoleRoutingFailure(mex.getMessageExchangeId());
__log.error(errmsg);
mex.setFailure(MessageExchange.FailureType.UNKNOWN_ENDPOINT, errmsg, null);
return;
}
mex.getDAO().setProcess(getProcessDAO());
mex.setProcess(_oprocess);
if (!processInterceptors(mex, InterceptorInvoker.__onProcessInvoked)) {
__log.debug("Aborting processing of mex " + mex + " due to interceptors.");
return;
}
target.invokeMyRole(mex);
}
private PartnerLinkMyRoleImpl getMyRoleForService(QName serviceName) {
for (Map.Entry<Endpoint,PartnerLinkMyRoleImpl> e : _endpointToMyRoleMap.entrySet()) {
if (e.getKey().serviceName.equals(serviceName))
return e.getValue();
}
return null;
}
void initMyRoleMex(MyRoleMessageExchangeImpl mex) {
PartnerLinkMyRoleImpl target = null;
for (Endpoint endpoint : _endpointToMyRoleMap.keySet()) {
if (endpoint.serviceName.equals(mex.getServiceName()))
target = _endpointToMyRoleMap.get(endpoint);
}
if (target != null) {
mex.setPortOp(target._plinkDef.myRolePortType, target._plinkDef.getMyRoleOperation(mex.getOperationName()));
} else {
__log.warn("Couldn't find endpoint from service " + mex.getServiceName()
+ " when initializing a myRole mex.");
}
}
/**
* Extract the value of a BPEL property from a BPEL messsage variable.
*
* @param msgData
* message variable data
* @param alias
* alias to apply
* @param target
* description of the data (for error logging only)
* @return value of the property
* @throws FaultException
*/
String extractProperty(Element msgData, OProcess.OPropertyAlias alias, String target) throws FaultException {
PropertyAliasEvaluationContext ectx = new PropertyAliasEvaluationContext(msgData, alias);
Node lValue = ectx.getRootNode();
if (alias.location != null)
try {
lValue = _expLangRuntimeRegistry.evaluateNode(alias.location, ectx);
} catch (EvaluationException ec) {
throw new FaultException(_oprocess.constants.qnSelectionFailure, alias.getDescription());
}
if (lValue == null) {
String errmsg = __msgs.msgPropertyAliasReturnedNullSet(alias.getDescription(), target);
if (__log.isErrorEnabled()) {
__log.error(errmsg);
}
throw new FaultException(_oprocess.constants.qnSelectionFailure, errmsg);
}
if (lValue.getNodeType() == Node.ELEMENT_NODE) {
// This is a bit hokey, we concatenate all the children's values; we
// really should be checking to make sure that we are only dealing
// with
// text and attribute nodes.
StringBuffer val = new StringBuffer();
NodeList nl = lValue.getChildNodes();
for (int i = 0; i < nl.getLength(); ++i) {
Node n = nl.item(i);
val.append(n.getNodeValue());
}
return val.toString();
} else if (lValue.getNodeType() == Node.TEXT_NODE) {
return ((Text) lValue).getWholeText();
} else
return null;
}
/**
* Get the element name for a given WSDL part. If the part is an
* <em>element</em> part, the name of that element is returned. If the
* part is an XML schema typed part, then the name of the part is returned
* in the null namespace.
*
* @param part
* WSDL {@link javax.wsdl.Part}
* @return name of element containing said part
*/
static QName getElementNameForPart(OMessageVarType.Part part) {
return (part.type instanceof OElementVarType) ? ((OElementVarType) part.type).elementType : new QName(null,
part.name);
}
/** Create a version-appropriate runtime context. */
private BpelRuntimeContextImpl createRuntimeContext(ProcessInstanceDAO dao, PROCESS template,
MyRoleMessageExchangeImpl instantiatingMessageExchange) {
return new BpelRuntimeContextImpl(this, dao, template, instantiatingMessageExchange);
}
/**
* Process the message-exchange interceptors.
*
* @param mex
* message exchange
* @return <code>true</code> if execution should continue,
* <code>false</code> otherwise
*/
private boolean processInterceptors(MyRoleMessageExchangeImpl mex, InterceptorInvoker invoker) {
InterceptorContextImpl ictx = new InterceptorContextImpl(_engine._contexts.dao.getConnection(), getProcessDAO());
for (MessageExchangeInterceptor i : _mexInterceptors)
if (!mex.processInterceptor(i,mex, ictx, invoker))
return false;
for (MessageExchangeInterceptor i : _engine.getGlobalInterceptors())
if (!mex.processInterceptor(i,mex, ictx, invoker))
return false;
return true;
}
/**
* Replacement object for serializtation of the {@link OBase} (compiled
* BPEL) objects in the JACOB VPU.
*/
public static final class OBaseReplacementImpl implements Externalizable {
private static final long serialVersionUID = 1L;
int _id;
public OBaseReplacementImpl() {
}
public OBaseReplacementImpl(int id) {
_id = id;
}
public void readExternal(ObjectInput in) throws IOException {
_id = in.readInt();
}
public void writeExternal(ObjectOutput out) throws IOException {
out.writeInt(_id);
}
}
private abstract class PartnerLinkRoleImpl {
protected OPartnerLink _plinkDef;
protected EndpointReference _initialEPR;
PartnerLinkRoleImpl(OPartnerLink plink) {
_plinkDef = plink;
}
String getPartnerLinkName() {
return _plinkDef.name;
}
/**
* Get the initial value of this role's EPR. This value is obtained from
* the integration layer when the process is enabled on the server.
*
* @return
*/
EndpointReference getInitialEPR() {
return _initialEPR;
}
}
class PartnerLinkMyRoleImpl extends PartnerLinkRoleImpl {
/** The local endpoint for this "myrole". */
public Endpoint _endpoint;
PartnerLinkMyRoleImpl(OPartnerLink plink, Endpoint endpoint) {
super(plink);
_endpoint = endpoint;
}
public String toString() {
StringBuffer buf = new StringBuffer("{PartnerLinkRole-");
buf.append(_plinkDef.name);
buf.append('.');
buf.append(_plinkDef.myRoleName);
buf.append(" on ");
buf.append(_endpoint);
buf.append('}');
return buf.toString();
}
/**
* Called when an input message has been received.
*
* @param mex
* exchange to which the message is related
*/
public void invokeMyRole(MyRoleMessageExchangeImpl mex) {
if (__log.isTraceEnabled()) {
__log.trace(ObjectPrinter.stringifyMethodEnter(this + ":inputMsgRcvd", new Object[] {
"messageExchange", mex }));
}
Operation operation = getMyRoleOperation(mex.getOperationName());
if (operation == null) {
__log.error(__msgs.msgUnknownOperation(mex.getOperationName(), _plinkDef.myRolePortType.getQName()));
mex.setFailure(FailureType.UNKNOWN_OPERATION, mex.getOperationName(), null);
return;
}
mex.getDAO().setPartnerLinkModelId(_plinkDef.getId());
mex.setPortOp(_plinkDef.myRolePortType, operation);
mex.setPattern(operation.getOutput() == null ? MessageExchangePattern.REQUEST_ONLY
: MessageExchangePattern.REQUEST_RESPONSE);
// Is this a /possible/ createInstance Operation?
boolean isCreateInstnace = _plinkDef.isCreateInstanceOperation(operation);
// now, the tricks begin: when a message arrives we have to see if
// there
// is anyone waiting for it.
// Get the correlator, a persisted communnication-reduction data
// structure
// supporting correlation correlationKey matching!
String correlatorId = genCorrelatorId(_plinkDef, operation.getName());
CorrelatorDAO correlator = getProcessDAO().getCorrelator(correlatorId);
CorrelationKey[] keys;
MessageRouteDAO messageRoute = null;
// We need to compute the correlation keys (based on the operation
// we can
// infer which correlation keys to compute - this is merely a set
// consisting of each correlationKey used in each correlation sets
// that is
// ever
// referenced in an <receive>/<onMessage> on this
// partnerlink/operation.
keys = computeCorrelationKeys(mex);
String mySessionId = mex.getProperty(MessageExchange.PROPERTY_SEP_MYROLE_SESSIONID);
String partnerSessionId = mex.getProperty(MessageExchange.PROPERTY_SEP_PARTNERROLE_SESSIONID);
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": MSG RCVD keys="
+ ArrayUtils.makeCollection(HashSet.class, keys)
+ " mySessionId=" + mySessionId + " partnerSessionId=" + partnerSessionId);
}
CorrelationKey matchedKey = null;
// Try to find a route for one of our keys.
for (CorrelationKey key : keys) {
messageRoute = correlator.findRoute(key);
if (messageRoute != null) {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": ckey " + key + " route is to " + messageRoute);
}
matchedKey = key;
break;
}
}
// Handling the "opaque correlation case": correlation is done on a
// session identifier associated with my epr
if (messageRoute == null) {
String sessionId = mex.getProperty(MessageExchange.PROPERTY_SEP_MYROLE_SESSIONID);
if (sessionId != null) {
CorrelationKey key = new CorrelationKey(-1, new String[] { sessionId });
messageRoute = correlator.findRoute(key);
if (__log.isDebugEnabled())
__log.debug("INPUTMSG: rouing based on session id " + sessionId + " --> " + messageRoute);
}
}
// TODO - ODE-58
// If no luck try to find a default route.
if (messageRoute == null) {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId
+ ": CorrelationKey routing failed; checking default route.");
}
messageRoute = correlator.findRoute(null);
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": default route is to " + messageRoute);
}
}
// If still no luck, and this operation qualifies for
// create-instance
// treatment,
// then create a new process instance.
if (messageRoute == null && isCreateInstnace) {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": default routing failed, CREATING NEW INSTANCE");
}
ProcessDAO processDAO = getProcessDAO();
if (processDAO.isRetired()) {
throw new InvalidProcessException("Process is retired.", InvalidProcessException.RETIRED_CAUSE_CODE);
}
if (!processInterceptors(mex, InterceptorInvoker.__onNewInstanceInvoked)) {
__log.debug("Not creating a new instance for mex " + mex + "; interceptor prevented!");
return;
}
ProcessInstanceDAO newInstance = processDAO.createInstance(correlator);
BpelRuntimeContextImpl instance = createRuntimeContext(newInstance, new PROCESS(_oprocess), mex);
// send process instance event
NewProcessInstanceEvent evt = new NewProcessInstanceEvent(new QName(_oprocess.targetNamespace,
_oprocess.getName()), getProcessDAO().getProcessId(), newInstance.getInstanceId());
evt.setPortType(mex.getPortType().getQName());
evt.setOperation(operation.getName());
evt.setMexId(mex.getMessageExchangeId());
_debugger.onEvent(evt);
newInstance.insertBpelEvent(evt);
mex.setCorrelationStatus(CorrelationStatus.CREATE_INSTANCE);
mex.getDAO().setInstance(newInstance);
instance.execute();
} else if (messageRoute != null) {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": ROUTING to instance "
+ messageRoute.getTargetInstance().getInstanceId());
}
// Reload process instance for DAO.
BpelRuntimeContextImpl instance = createRuntimeContext(messageRoute.getTargetInstance(), null, null);
instance.inputMsgMatch(messageRoute.getGroupId(), messageRoute.getIndex(), mex);
// Kill the route so some new message does not get routed to
// same
// process
// instance.
correlator.removeRoutes(messageRoute.getGroupId(), messageRoute.getTargetInstance());
Long iid = messageRoute.getTargetInstance().getInstanceId();
// send process instance event
CorrelationMatchEvent evt = new CorrelationMatchEvent(new QName(_oprocess.targetNamespace, _oprocess
.getName()), getProcessDAO().getProcessId(), iid, matchedKey);
evt.setPortType(mex.getPortType().getQName());
evt.setOperation(operation.getName());
evt.setMexId(mex.getMessageExchangeId());
_debugger.onEvent(evt);
// store event
getProcessDAO().getInstance(iid).insertBpelEvent(evt);
mex.setCorrelationStatus(CorrelationStatus.MATCHED);
mex.getDAO().setInstance(messageRoute.getTargetInstance());
// run the vpu
instance.execute();
} else {
if (__log.isDebugEnabled()) {
__log.debug("INPUTMSG: " + correlatorId + ": SAVING to DB (no match) ");
}
// send event
CorrelationNoMatchEvent evt = new CorrelationNoMatchEvent(mex.getPortType().getQName(), mex
.getOperation().getName(), mex.getMessageExchangeId(), keys);
evt.setProcessId(getProcessDAO().getProcessId());
evt.setProcessName(new QName(_oprocess.targetNamespace, _oprocess.getName()));
_debugger.onEvent(evt);
mex.setCorrelationStatus(CorrelationStatus.QUEUED);
// No match, means we add message exchange to the queue.
correlator.enqueueMessage(mex.getDAO(), keys);
}
// Now we have to update our message exchange status. If the <reply>
// was
// not hit during the invocation, then we will be in the "REQUEST"
// phase
// which means that either this was a one-way or a two-way that
// needs to
// delivery the reply asynchronously.
if (mex.getStatus() == Status.REQUEST) {
switch (mex.getPattern()) {
case REQUEST_ONLY:
mex.setStatus(Status.ONE_WAY);
break;
case REQUEST_RESPONSE:
mex.setStatus(Status.ASYNC);
break;
default:
String errmsg = "BpelProcess: internal error, message exchange pattern not set";
__log.fatal(errmsg);
throw new BpelEngineException(errmsg);
}
}
}
@SuppressWarnings("unchecked")
private Operation getMyRoleOperation(String operationName) {
Operation op = _plinkDef.getMyRoleOperation(operationName);
return op;
}
private CorrelationKey[] computeCorrelationKeys(MyRoleMessageExchangeImpl mex) {
Operation operation = mex.getOperation();
Element msg = mex.getRequest().getMessage();
Message msgDescription = operation.getInput().getMessage();
List<CorrelationKey> keys = new ArrayList<CorrelationKey>();
Set<OScope.CorrelationSet> csets = _plinkDef.getCorrelationSetsForOperation(operation);
for (OScope.CorrelationSet cset : csets) {
CorrelationKey key = computeCorrelationKey(cset, _oprocess.messageTypes.get(msgDescription.getQName()),
msg);
keys.add(key);
}
return keys.toArray(new CorrelationKey[keys.size()]);
}
private CorrelationKey computeCorrelationKey(OScope.CorrelationSet cset, OMessageVarType messagetype,
Element msg) {
String[] values = new String[cset.properties.size()];
int jIdx = 0;
for (Iterator j = cset.properties.iterator(); j.hasNext(); ++jIdx) {
OProcess.OProperty property = (OProcess.OProperty) j.next();
OProcess.OPropertyAlias alias = property.getAlias(messagetype);
if (alias == null) {
// TODO: Throw a real exception! And catch this at compile
// time.
throw new IllegalArgumentException("No alias matching property '" + property.name
+ "' with message type '" + messagetype + "'");
}
String value;
try {
value = extractProperty(msg, alias, msg.toString());
} catch (FaultException fe) {
String emsg = __msgs.msgPropertyAliasDerefFailedOnMessage(alias.getDescription(), fe.getMessage());
__log.error(emsg, fe);
throw new InvalidMessageException(emsg, fe);
}
values[jIdx] = value;
}
CorrelationKey key = new CorrelationKey(cset.getId(), values);
return key;
}
}
private class PartnerLinkPartnerRoleImpl extends PartnerLinkRoleImpl {
Endpoint _initialPartner;
public PartnerRoleChannel _channel;
private PartnerLinkPartnerRoleImpl(OPartnerLink plink, Endpoint initialPartner) {
super(plink);
_initialPartner = initialPartner;
}
public void processPartnerResponse(PartnerRoleMessageExchangeImpl messageExchange) {
if (__log.isDebugEnabled()) {
__log.debug("Processing partner's response for partnerLink: " + messageExchange);
}
BpelRuntimeContextImpl processInstance = createRuntimeContext(messageExchange.getDAO().getInstance(), null,
null);
processInstance.invocationResponse(messageExchange);
processInstance.execute();
}
}
/**
* @see org.apache.ode.bpel.engine.BpelProcess#handleWorkEvent(java.io.Serializable)
*/
public void handleWorkEvent(Map<String, Object> jobData) {
ProcessInstanceDAO procInstance;
if (__log.isDebugEnabled()) {
__log.debug(ObjectPrinter.stringifyMethodEnter("handleWorkEvent", new Object[] { "jobData", jobData }));
}
WorkEvent we = new WorkEvent(jobData);
procInstance = getProcessDAO().getInstance(we.getIID());
if (procInstance == null) {
if (__log.isDebugEnabled()) {
__log.debug("handleWorkEvent: no ProcessInstance found with iid " + we.getIID() + "; ignoring.");
}
return;
}
BpelRuntimeContextImpl processInstance = createRuntimeContext(procInstance, null, null);
switch (we.getType()) {
case TIMER:
if (__log.isDebugEnabled()) {
__log.debug("handleWorkEvent: TimerWork event for process instance " + processInstance);
}
processInstance.timerEvent(we.getChannel());
break;
case RESUME:
if (__log.isDebugEnabled()) {
__log.debug("handleWorkEvent: ResumeWork event for iid " + we.getIID());
}
processInstance.execute();
break;
case INVOKE_RESPONSE:
if (__log.isDebugEnabled()) {
__log.debug("InvokeResponse event for iid " + we.getIID());
}
processInstance.invocationResponse(we.getMexId(), we.getChannel());
processInstance.execute();
break;
}
}
ProcessDAO getProcessDAO() {
return _engine._contexts.dao.getConnection().getProcess(_pid);
}
static String genCorrelatorId(OPartnerLink plink, String opName) {
return plink.getId() + "." + opName;
}
/**
* Get all the services that are implemented by this process.
*
* @return list of qualified names corresponding to the myroles.
*/
public Set<Endpoint> getServiceNames() {
return _endpointToMyRoleMap.keySet();
}
/**
 * Activates this process within the given engine: registers every my-role
 * endpoint with the binding context and opens a partner-role channel for each
 * partner role, capturing initial endpoint references where available.
 *
 * @param engine the engine that will own this process
 */
void activate(BpelEngineImpl engine) {
    _engine = engine;
    _debugger = new DebuggerSupport(this);
    __log.debug("Activating " + _pid);
    // Activate all the my-role endpoints.
    for (PartnerLinkMyRoleImpl myrole : _myRoles.values()) {
        myrole._initialEPR = _engine._contexts.bindingContext.activateMyRoleEndpoint(_pid, _du, myrole._endpoint,
                myrole._plinkDef.myRolePortType);
        __log.debug("Activated " + _pid + " myrole " + myrole.getPartnerLinkName()
                + ": EPR is " + myrole._initialEPR);
    }
    // Open a channel per partner role; a channel may not supply an initial
    // EPR, in which case any previously set value is left untouched.
    for (PartnerLinkPartnerRoleImpl prole : _partnerRoles.values()) {
        PartnerRoleChannel channel = _engine._contexts.bindingContext.createPartnerRoleChannel(_pid, _du,
                prole._plinkDef.partnerRolePortType, prole._initialPartner);
        prole._channel = channel;
        EndpointReference epr = channel.getInitialEndpointReference();
        if (epr != null) {
            prole._initialEPR = epr;
        }
        __log.debug("Activated " + _pid + " partnerrole " + prole.getPartnerLinkName()
                + ": EPR is " + prole._initialEPR);
    }
    __log.debug("Activated " + _pid);
}
/**
 * Deactivates this process by withdrawing every my-role endpoint from the
 * binding context. Partner-role channels are not yet torn down here.
 */
void deactivate() {
    for (Endpoint endpoint : _endpointToMyRoleMap.keySet()) {
        _engine._contexts.bindingContext.deactivateMyRoleEndpoint(endpoint);
    }
    // TODO Deactivate all the partner-role channels
}
/**
 * Returns the initial endpoint reference for the partner role of the given
 * partner link.
 *
 * @throws IllegalStateException if the link is not a partner role of this process
 */
EndpointReference getInitialPartnerRoleEPR(OPartnerLink link) {
    PartnerLinkPartnerRoleImpl role = _partnerRoles.get(link);
    if (role != null) {
        return role.getInitialEPR();
    }
    throw new IllegalStateException("Unknown partner link " + link);
}
/**
 * Returns the initial endpoint reference for the my-role side of the given
 * partner link.
 *
 * @throws IllegalStateException if the link is not a my-role of this process
 */
EndpointReference getInitialMyRoleEPR(OPartnerLink link) {
    PartnerLinkMyRoleImpl role = _myRoles.get(link);
    if (role != null) {
        return role.getInitialEPR();
    }
    throw new IllegalStateException("Unknown partner link " + link);
}
/** Returns the qualified process id of this process. */
QName getPID() {
    return _pid;
}
/**
 * Returns the communication channel associated with the partner role of the
 * given partner link.
 *
 * @throws IllegalStateException if the partner link is unknown
 */
PartnerRoleChannel getPartnerRoleChannel(OPartnerLink partnerLink) {
    PartnerLinkPartnerRoleImpl role = _partnerRoles.get(partnerLink);
    if (role != null) {
        return role._channel;
    }
    throw new IllegalStateException("Unknown partner link " + partnerLink);
}
}
|
Nicer exception.
git-svn-id: aa171c635fe092da43bc3212e891e07172936fe5@442389 13f79535-47bb-0310-9956-ffa450edef68
|
bpel-runtime/src/main/java/org/apache/ode/bpel/engine/BpelProcess.java
|
Nicer exception.
|
|
Java
|
apache-2.0
|
f00757049f2b0b4f5f6f2665041839a7ae53aeaa
| 0
|
fkeglevich/Raw-Dumper,fkeglevich/Raw-Dumper,fkeglevich/Raw-Dumper
|
/*
* Copyright 2018, Flávio Keglevich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asus.camera.extensions;
import android.hardware.Camera;
import android.util.Log;
import com.fkeglevich.rawdumper.camera.data.Aperture;
import com.fkeglevich.rawdumper.camera.data.Ev;
import com.fkeglevich.rawdumper.camera.data.Iso;
import com.fkeglevich.rawdumper.camera.data.ShutterSpeed;
import com.fkeglevich.rawdumper.util.event.AsyncEventDispatcher;
import com.fkeglevich.rawdumper.util.event.EventDispatcher;
import java.lang.ref.WeakReference;
import androidx.annotation.Keep;
import androidx.annotation.Nullable;
/**
* Extension shim for AsusCameraExtension
*
* Created by Flávio Keglevich on 26/06/18.
*/
@SuppressWarnings({"JniMissingFunction", "unused"})
@Keep
public class AsusCameraExtension
{
    private static final String TAG = "AsusCameraExt";
    private static final String LIBRARY_NAME = "asuscameraext_jni";

    // Native message ids posted by the JNI layer via postEventFromNative.
    private static final int CAPTURE_FRAME_STATUS_MSG = 24577;
    private static final int PRO_DATA_MSG = 24579;

    // True only when the vendor JNI library could be loaded on this device.
    private static final boolean available;

    static
    {
        boolean success = false;
        try
        {
            System.loadLibrary(LIBRARY_NAME);
            success = true;
        }
        catch (UnsatisfiedLinkError ignored)
        { }
        available = success;
    }

    /** @return whether the vendor extension library is present on this device. */
    public static boolean isAvailable()
    {
        return available;
    }

    @SuppressWarnings("FieldCanBeLocal")
    private final Camera mCameraDevice;

    // Accessed from native code by name — do not rename or remove.
    private int mNativeContext;
    private long mNativeContextLong;

    public final EventDispatcher<CaptureFrameData> onGotCaptureFrameData = new AsyncEventDispatcher<>();
    public final EventDispatcher<ProfessionalData> onGotProfessionalData = new AsyncEventDispatcher<>();

    /**
     * Wraps the given camera; a no-op when the extension library is absent.
     *
     * @param camera the open camera the native side should attach to
     */
    public AsusCameraExtension(Camera camera)
    {
        mCameraDevice = camera;
        if (isAvailable())
            native_setup(new WeakReference<>(this), this.mCameraDevice);
    }

    /**
     * Callback invoked from native code; dispatches the message to the
     * matching event dispatcher. Name and signature are fixed by the JNI
     * layer — do not change.
     */
    private static void postEventFromNative(Object extensionRef, int what, int arg1, int arg2, Object obj)
    {
        AsusCameraExtension cameraExtension = (AsusCameraExtension)((WeakReference)extensionRef).get();
        if (cameraExtension == null)
        {
            // The Java peer was garbage collected before this native callback
            // fired; dereferencing it would throw, so drop the message.
            Log.w(TAG, "postEventFromNative: extension was GC'd; dropping message " + what);
            return;
        }
        switch (what)
        {
            case CAPTURE_FRAME_STATUS_MSG:
                cameraExtension.onGotCaptureFrameData.dispatchEvent((CaptureFrameData) obj);
                Log.i(TAG, "frame status: " + obj.toString());
                break;
            case PRO_DATA_MSG:
                cameraExtension.onGotProfessionalData.dispatchEvent((ProfessionalData) obj);
                //Log.i(TAG, "Aperture: " + obj.toString());
                break;
            default:
                Log.i(TAG, "Unknown message type: " + what);
                break;
        }
    }

    /** Capture-frame status payload; fields are populated from native code by name. */
    @Keep
    public static class CaptureFrameData
    {
        private int mStatus;

        private int getStatus()
        {
            return mStatus;
        }

        @Override
        public String toString()
        {
            return "{ status: " + mStatus + " }";
        }
    }

    /** Exposure metadata payload; fields are populated from native code by name. */
    @Keep
    public static class ProfessionalData
    {
        private int mISO;
        private float mShutterSpeed;
        private float mAperture;
        private float mOriginalEV;
        private float mDifferentialEV;

        /** @return the ISO value, or null when the native value is invalid. */
        @Nullable
        public Iso getIso()
        {
            if (Iso.isInvalidIso(mISO)) return null;
            return Iso.create(mISO);
        }

        /** @return the shutter speed, or null when the native value is invalid. */
        @Nullable
        public ShutterSpeed getShutterSpeed()
        {
            if (ShutterSpeed.isInvalidExposure(mShutterSpeed)) return null;
            return ShutterSpeed.create(mShutterSpeed);
        }

        /** @return the aperture, or null when the native value is invalid. */
        @Nullable
        public Aperture getAperture()
        {
            if (Aperture.isInvalidAperture(mAperture)) return null;
            return Aperture.create(mAperture);
        }

        /** @return the original EV, or null when the native value is invalid. */
        @Nullable
        public Ev getOriginalEv()
        {
            if (Ev.isInvalidEv(mOriginalEV)) return null;
            return Ev.create(mOriginalEV);
        }

        /** @return the differential EV, or null when the native value is invalid. */
        @Nullable
        public Ev getDifferentialEv()
        {
            if (Ev.isInvalidEv(mDifferentialEV)) return null;
            return Ev.create(mDifferentialEV);
        }

        /** Copies all raw field values from another instance. */
        public void copyFrom(ProfessionalData other)
        {
            mISO = other.mISO;
            mShutterSpeed = other.mShutterSpeed;
            mAperture = other.mAperture;
            mOriginalEV = other.mOriginalEV;
            mDifferentialEV = other.mDifferentialEV;
        }

        @Override
        public String toString()
        {
            return "{ iso: " + mISO +
                    ", shutterSpeed: " + mShutterSpeed +
                    ", aperture: " + mAperture +
                    ", originalEv: " + mOriginalEV + "" +
                    ", differentialEv: " + mDifferentialEV + " }";
        }
    }

    /** Detaches listeners and releases the native context (if the library is present). */
    public void release()
    {
        onGotCaptureFrameData.removeAllListeners();
        onGotProfessionalData.removeAllListeners();
        if (isAvailable())
        {
            stopQueryingData();
            native_release();
        }
    }

    /**
     * Starts the native polling of capture-frame status and professional data.
     *
     * @param queryInterval polling interval passed to the native layer
     */
    public void startQueryData(int queryInterval)
    {
        if (isAvailable())
        {
            native_startQueryCaptureFrameStatus();
            native_startQueryProfessionalData(queryInterval);
        }
    }

    /** Stops the native polling started by {@link #startQueryData(int)}. */
    public void stopQueryingData()
    {
        if (isAvailable())
        {
            native_stopQueryCaptureFrameStatus();
            native_stopQueryProfessionalData();
        }
    }

    private native void native_release();
    private native void native_setup(Object weakRef, Camera camera);
    private native void native_startQueryCaptureFrameStatus();
    private native void native_startQueryProfessionalData(int interval);
    private native void native_stopQueryCaptureFrameStatus();
    private native void native_stopQueryProfessionalData();
}
|
app/src/main/java/com/asus/camera/extensions/AsusCameraExtension.java
|
/*
* Copyright 2018, Flávio Keglevich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asus.camera.extensions;
import android.hardware.Camera;
import android.util.Log;
import com.fkeglevich.rawdumper.camera.data.Aperture;
import com.fkeglevich.rawdumper.camera.data.Ev;
import com.fkeglevich.rawdumper.camera.data.Iso;
import com.fkeglevich.rawdumper.camera.data.ShutterSpeed;
import com.fkeglevich.rawdumper.util.event.AsyncEventDispatcher;
import com.fkeglevich.rawdumper.util.event.EventDispatcher;
import java.lang.ref.WeakReference;
import androidx.annotation.Keep;
import androidx.annotation.Nullable;
/**
* Extension shim for AsusCameraExtension
*
* Created by Flávio Keglevich on 26/06/18.
*/
@SuppressWarnings({"JniMissingFunction", "unused"})
@Keep
public class AsusCameraExtension
{
    private static final String TAG = "AsusCameraExt";
    private static final String LIBRARY_NAME = "asuscameraext_jni";

    // Native message ids posted by the JNI layer via postEventFromNative.
    private static final int CAPTURE_FRAME_STATUS_MSG = 24577;
    private static final int PRO_DATA_MSG = 24579;

    // True only when the vendor JNI library could be loaded on this device.
    private static final boolean available;

    static
    {
        boolean success = false;
        try
        {
            System.loadLibrary(LIBRARY_NAME);
            success = true;
        }
        catch (UnsatisfiedLinkError ignored)
        { }
        available = success;
    }

    /** Returns whether the vendor extension library is present on this device. */
    public static boolean isAvailable()
    {
        return available;
    }

    @SuppressWarnings("FieldCanBeLocal")
    private final Camera mCameraDevice;

    // Accessed from native code by name — do not rename or remove.
    private int mNativeContext;
    private long mNativeContextLong;

    public final EventDispatcher<CaptureFrameData> onGotCaptureFrameData = new AsyncEventDispatcher<>();
    public final EventDispatcher<ProfessionalData> onGotProfessionalData = new AsyncEventDispatcher<>();

    /** Wraps the given camera; a no-op when the extension library is absent. */
    public AsusCameraExtension(Camera camera)
    {
        mCameraDevice = camera;
        if (isAvailable())
            native_setup(new WeakReference<>(this), this.mCameraDevice);
    }

    /**
     * Callback invoked from native code; dispatches the message to the
     * matching event dispatcher. Name and signature are fixed by JNI.
     * NOTE(review): the weak reference is dereferenced without a null check —
     * if the Java peer was GC'd before the callback fires this throws an NPE.
     */
    private static void postEventFromNative(Object extensionRef, int what, int arg1, int arg2, Object obj)
    {
        AsusCameraExtension cameraExtension = (AsusCameraExtension)((WeakReference)extensionRef).get();
        switch (what)
        {
            case CAPTURE_FRAME_STATUS_MSG:
                cameraExtension.onGotCaptureFrameData.dispatchEvent((CaptureFrameData) obj);
                Log.i(TAG, "frame status: " + obj.toString());
                break;
            case PRO_DATA_MSG:
                cameraExtension.onGotProfessionalData.dispatchEvent((ProfessionalData) obj);
                //Log.i(TAG, "Aperture: " + obj.toString());
                break;
            default:
                Log.i(TAG, "Unknown message type: " + what);
                break;
        }
    }

    // Capture-frame status payload; fields are populated from native code by name.
    public static class CaptureFrameData
    {
        private int mStatus;

        private int getStatus()
        {
            return mStatus;
        }

        @Override
        public String toString()
        {
            return "{ status: " + mStatus + " }";
        }
    }

    // Exposure metadata payload; fields are populated from native code by name.
    public static class ProfessionalData
    {
        private int mISO;
        private float mShutterSpeed;
        private float mAperture;
        private float mOriginalEV;
        private float mDifferentialEV;

        /** Returns the ISO value, or null when the native value is invalid. */
        @Nullable
        public Iso getIso()
        {
            if (Iso.isInvalidIso(mISO)) return null;
            return Iso.create(mISO);
        }

        /** Returns the shutter speed, or null when the native value is invalid. */
        @Nullable
        public ShutterSpeed getShutterSpeed()
        {
            if (ShutterSpeed.isInvalidExposure(mShutterSpeed)) return null;
            return ShutterSpeed.create(mShutterSpeed);
        }

        /** Returns the aperture, or null when the native value is invalid. */
        @Nullable
        public Aperture getAperture()
        {
            if (Aperture.isInvalidAperture(mAperture)) return null;
            return Aperture.create(mAperture);
        }

        /** Returns the original EV, or null when the native value is invalid. */
        @Nullable
        public Ev getOriginalEv()
        {
            if (Ev.isInvalidEv(mOriginalEV)) return null;
            return Ev.create(mOriginalEV);
        }

        /** Returns the differential EV, or null when the native value is invalid. */
        @Nullable
        public Ev getDifferentialEv()
        {
            if (Ev.isInvalidEv(mDifferentialEV)) return null;
            return Ev.create(mDifferentialEV);
        }

        /** Copies all raw field values from another instance. */
        public void copyFrom(ProfessionalData other)
        {
            mISO = other.mISO;
            mShutterSpeed = other.mShutterSpeed;
            mAperture = other.mAperture;
            mOriginalEV = other.mOriginalEV;
            mDifferentialEV = other.mDifferentialEV;
        }

        @Override
        public String toString()
        {
            return "{ iso: " + mISO +
                    ", shutterSpeed: " + mShutterSpeed +
                    ", aperture: " + mAperture +
                    ", originalEv: " + mOriginalEV + "" +
                    ", differentialEv: " + mDifferentialEV + " }";
        }
    }

    /** Detaches listeners and releases the native context (if the library is present). */
    public void release()
    {
        onGotCaptureFrameData.removeAllListeners();
        onGotProfessionalData.removeAllListeners();
        if (isAvailable())
        {
            stopQueryingData();
            native_release();
        }
    }

    /** Starts the native polling of capture-frame status and professional data. */
    public void startQueryData(int queryInterval)
    {
        if (isAvailable())
        {
            native_startQueryCaptureFrameStatus();
            native_startQueryProfessionalData(queryInterval);
        }
    }

    /** Stops the native polling started by {@link #startQueryData(int)}. */
    public void stopQueryingData()
    {
        if (isAvailable())
        {
            native_stopQueryCaptureFrameStatus();
            native_stopQueryProfessionalData();
        }
    }

    private native void native_release();
    private native void native_setup(Object weakRef, Camera camera);
    private native void native_startQueryCaptureFrameStatus();
    private native void native_startQueryProfessionalData(int interval);
    private native void native_stopQueryCaptureFrameStatus();
    private native void native_stopQueryProfessionalData();
}
|
Fixed bug when native code search for these classes
|
app/src/main/java/com/asus/camera/extensions/AsusCameraExtension.java
|
Fixed bug when native code search for these classes
|
|
Java
|
apache-2.0
|
4413a77ebfdbb1d13e255f3e18d716746abc9c94
| 0
|
mortegac/liquibase,iherasymenko/liquibase,vfpfafrf/liquibase,mwaylabs/liquibase,syncron/liquibase,OpenCST/liquibase,foxel/liquibase,vbekiaris/liquibase,cleiter/liquibase,NSIT/liquibase,EVODelavega/liquibase,maberle/liquibase,Willem1987/liquibase,ivaylo5ev/liquibase,dbmanul/dbmanul,russ-p/liquibase,iherasymenko/liquibase,talklittle/liquibase,vfpfafrf/liquibase,vfpfafrf/liquibase,lazaronixon/liquibase,mattbertolini/liquibase,Datical/liquibase,dbmanul/dbmanul,danielkec/liquibase,dbmanul/dbmanul,instantdelay/liquibase,dprguard2000/liquibase,instantdelay/liquibase,mattbertolini/liquibase,FreshGrade/liquibase,FreshGrade/liquibase,balazs-zsoldos/liquibase,pellcorp/liquibase,dprguard2000/liquibase,Willem1987/liquibase,hbogaards/liquibase,fbiville/liquibase,mbreslow/liquibase,pellcorp/liquibase,vast-engineering/liquibase,Datical/liquibase,vast-engineering/liquibase,danielkec/liquibase,Datical/liquibase,Willem1987/liquibase,foxel/liquibase,cleiter/liquibase,iherasymenko/liquibase,syncron/liquibase,mattbertolini/liquibase,NSIT/liquibase,danielkec/liquibase,mbreslow/liquibase,dprguard2000/liquibase,instantdelay/liquibase,OpenCST/liquibase,vbekiaris/liquibase,instantdelay/liquibase,NSIT/liquibase,russ-p/liquibase,liquibase/liquibase,foxel/liquibase,EVODelavega/liquibase,fbiville/liquibase,hbogaards/liquibase,EVODelavega/liquibase,hbogaards/liquibase,maberle/liquibase,mwaylabs/liquibase,FreshGrade/liquibase,syncron/liquibase,lazaronixon/liquibase,fbiville/liquibase,vast-engineering/liquibase,evigeant/liquibase,evigeant/liquibase,vbekiaris/liquibase,vast-engineering/liquibase,klopfdreh/liquibase,fossamagna/liquibase,CoderPaulK/liquibase,balazs-zsoldos/liquibase,jimmycd/liquibase,syncron/liquibase,Willem1987/liquibase,foxel/liquibase,liquibase/liquibase,fossamagna/liquibase,dyk/liquibase,CoderPaulK/liquibase,lazaronixon/liquibase,mortegac/liquibase,lazaronixon/liquibase,talklittle/liquibase,C0mmi3/liquibase,balazs-zsoldos/liquibase,vbekiaris/liquibase,CoderPaulK/liquibase,mbreslow/
liquibase,mwaylabs/liquibase,iherasymenko/liquibase,dyk/liquibase,mwaylabs/liquibase,cleiter/liquibase,dyk/liquibase,C0mmi3/liquibase,FreshGrade/liquibase,C0mmi3/liquibase,jimmycd/liquibase,russ-p/liquibase,hbogaards/liquibase,evigeant/liquibase,evigeant/liquibase,klopfdreh/liquibase,balazs-zsoldos/liquibase,CoderPaulK/liquibase,dbmanul/dbmanul,jimmycd/liquibase,dyk/liquibase,klopfdreh/liquibase,fbiville/liquibase,liquibase/liquibase,vfpfafrf/liquibase,mbreslow/liquibase,talklittle/liquibase,pellcorp/liquibase,maberle/liquibase,C0mmi3/liquibase,pellcorp/liquibase,OpenCST/liquibase,danielkec/liquibase,dprguard2000/liquibase,talklittle/liquibase,maberle/liquibase,mattbertolini/liquibase,Datical/liquibase,ivaylo5ev/liquibase,klopfdreh/liquibase,fossamagna/liquibase,jimmycd/liquibase,mortegac/liquibase,NSIT/liquibase,OpenCST/liquibase,russ-p/liquibase,cleiter/liquibase,EVODelavega/liquibase,mortegac/liquibase
|
package liquibase;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Writer;
import java.text.DateFormat;
import java.util.*;
import liquibase.change.CheckSum;
import liquibase.changelog.*;
import liquibase.changelog.filter.*;
import liquibase.changelog.visitor.*;
import liquibase.database.Database;
import liquibase.database.DatabaseConnection;
import liquibase.database.DatabaseFactory;
import liquibase.database.ObjectQuotingStrategy;
import liquibase.database.core.OracleDatabase;
import liquibase.diff.DiffGeneratorFactory;
import liquibase.diff.DiffResult;
import liquibase.diff.compare.CompareControl;
import liquibase.diff.output.changelog.DiffToChangeLog;
import liquibase.exception.DatabaseException;
import liquibase.exception.LiquibaseException;
import liquibase.exception.LockException;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.executor.Executor;
import liquibase.executor.ExecutorService;
import liquibase.executor.LoggingExecutor;
import liquibase.lockservice.DatabaseChangeLogLock;
import liquibase.lockservice.LockService;
import liquibase.lockservice.LockServiceFactory;
import liquibase.logging.LogFactory;
import liquibase.logging.Logger;
import liquibase.parser.ChangeLogParser;
import liquibase.parser.ChangeLogParserFactory;
import liquibase.resource.ResourceAccessor;
import liquibase.serializer.ChangeLogSerializer;
import liquibase.snapshot.DatabaseSnapshot;
import liquibase.snapshot.InvalidExampleException;
import liquibase.snapshot.SnapshotControl;
import liquibase.snapshot.SnapshotGeneratorFactory;
import liquibase.statement.core.RawSqlStatement;
import liquibase.statement.core.UpdateStatement;
import liquibase.structure.DatabaseObject;
import liquibase.util.LiquibaseUtil;
import liquibase.util.StreamUtil;
import liquibase.util.StringUtils;
import javax.xml.parsers.ParserConfigurationException;
/**
* Primary facade class for interacting with Liquibase.
* The built in command line, Ant, Maven and other ways of running Liquibase are wrappers around methods in this class.
*/
public class Liquibase {
private DatabaseChangeLog databaseChangeLog;
private String changeLogFile;
private ResourceAccessor resourceAccessor;
protected Database database;
private Logger log;
private ChangeLogParameters changeLogParameters;
private ChangeExecListener changeExecListener;
private ChangeLogSyncListener changeLogSyncListener;
private boolean ignoreClasspathPrefix = true;
/**
 * Creates a Liquibase instance for a given DatabaseConnection. The Database instance used will be found with {@link DatabaseFactory#findCorrectDatabaseImplementation(liquibase.database.DatabaseConnection)}
 *
 * @see DatabaseConnection
 * @see Database
 * @see #Liquibase(String, liquibase.resource.ResourceAccessor, liquibase.database.Database)
 * @see ResourceAccessor
 */
public Liquibase(String changeLogFile, ResourceAccessor resourceAccessor, DatabaseConnection conn) throws LiquibaseException {
    this(changeLogFile, resourceAccessor, DatabaseFactory.getInstance().findCorrectDatabaseImplementation(conn));
}
/**
 * Creates a Liquibase instance. The changeLogFile parameter must be a path
 * that can be resolved by the passed ResourceAccessor. Windows-style path
 * separators in changeLogFile are standardized to unix style for better
 * cross-system compatibility.
 *
 * @see DatabaseConnection
 * @see Database
 * @see ResourceAccessor
 */
public Liquibase(String changeLogFile, ResourceAccessor resourceAccessor, Database database) throws LiquibaseException {
    this.log = LogFactory.getLogger();
    this.resourceAccessor = resourceAccessor;
    this.changeLogParameters = new ChangeLogParameters(database);
    this.database = database;
    if (changeLogFile != null) {
        // Normalize to '/' so absolute Windows paths resolve consistently.
        this.changeLogFile = changeLogFile.replace('\\', '/');
    }
}
public Liquibase(DatabaseChangeLog changeLog, ResourceAccessor resourceAccessor, Database database) {
log = LogFactory.getLogger();
this.databaseChangeLog = changeLog;
this.changeLogFile = changeLog.getPhysicalFilePath();
if (this.changeLogFile != null) {
changeLogFile = changeLogFile.replace('\\', '/'); //convert to standard / if using absolute path on windows
}
this.resourceAccessor = resourceAccessor;
this.database = database;
this.changeLogParameters = new ChangeLogParameters(database);
}
/**
 * Return the change log file used by this Liquibase instance.
 */
public String getChangeLogFile() {
    return changeLogFile;
}

/**
 * Return the log used by this Liquibase instance.
 */
public Logger getLog() {
    return log;
}

/**
 * Returns the ChangeLogParameters container used by this Liquibase instance.
 */
public ChangeLogParameters getChangeLogParameters() {
    return changeLogParameters;
}

/**
 * Returns the Database used by this Liquibase instance.
 */
public Database getDatabase() {
    return database;
}

/**
 * Return ResourceAccessor used by this Liquibase instance.
 * @deprecated use the newer-terminology version {@link #getResourceAccessor()}
 */
public ResourceAccessor getFileOpener() {
    return resourceAccessor;
}

/**
 * Return ResourceAccessor used by this Liquibase instance.
 */
public ResourceAccessor getResourceAccessor() {
    return resourceAccessor;
}

/**
 * Use this function to override the current date/time function used to insert dates into the database.
 * Especially useful when using an unsupported database.
 *
 * @deprecated Should call {@link Database#setCurrentDateTimeFunction(String)} directly
 */
public void setCurrentDateTimeFunction(String currentDateTimeFunction) {
    this.database.setCurrentDateTimeFunction(currentDateTimeFunction);
}
/**
 * Convenience method for {@link #update(Contexts)} that constructs the Contexts object from the passed string.
 */
public void update(String contexts) throws LiquibaseException {
    this.update(new Contexts(contexts));
}

/**
 * Executes Liquibase "update" logic which ensures that the configured {@link Database} is up to date according to the configured changelog file.
 * To run in "no context mode", pass a null or empty context object.
 */
public void update(Contexts contexts) throws LiquibaseException {
    update(contexts, new LabelExpression());
}
/**
 * Core update: acquires the changelog lock, parses and validates the
 * changelog, then runs every change set that passes the standard filters.
 *
 * @param contexts        contexts to run under; null/empty means "no context mode"
 * @param labelExpression label expression restricting which change sets run
 * @throws LiquibaseException on parse, validation, or execution failure
 */
public void update(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
    LockService lockService = LockServiceFactory.getInstance().getLockService(database);
    lockService.waitForLock();
    changeLogParameters.setContexts(contexts);
    changeLogParameters.setLabels(labelExpression);
    try {
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        checkLiquibaseTables(true, changeLog, contexts, labelExpression);
        changeLog.validate(database, contexts, labelExpression);
        ChangeLogIterator changeLogIterator = getStandardChangelogIterator(contexts, labelExpression, changeLog);
        changeLogIterator.run(createUpdateVisitor(), new RuntimeEnvironment(database, contexts, labelExpression));
    } finally {
        // Restore quoting and release the lock even if the run failed; a
        // failed unlock is logged rather than masking the original exception.
        database.setObjectQuotingStrategy(ObjectQuotingStrategy.LEGACY);
        try {
            lockService.releaseLock();
        } catch (LockException e) {
            log.severe("Could not release lock", e);
        }
        resetServices();
    }
}
/**
 * Returns the parsed changelog, parsing it lazily on first access.
 *
 * @throws LiquibaseException if the changelog file cannot be parsed
 */
public DatabaseChangeLog getDatabaseChangeLog() throws LiquibaseException {
    if (databaseChangeLog != null) {
        return databaseChangeLog;
    }
    ChangeLogParser parser = ChangeLogParserFactory.getInstance().getParser(changeLogFile, resourceAccessor);
    databaseChangeLog = parser.parse(changeLogFile, changeLogParameters, resourceAccessor);
    return databaseChangeLog;
}
/** Creates the visitor used to apply change sets during update runs, wired to the registered ChangeExecListener (may be null). */
protected UpdateVisitor createUpdateVisitor() {
    return new UpdateVisitor(database, changeExecListener);
}
/**
 * Builds the iterator used by plain updates: only change sets that should
 * run, and that match the given contexts, labels, and current DBMS.
 */
protected ChangeLogIterator getStandardChangelogIterator(Contexts contexts, LabelExpression labelExpression, DatabaseChangeLog changeLog) throws DatabaseException {
    ShouldRunChangeSetFilter shouldRunFilter = new ShouldRunChangeSetFilter(database, ignoreClasspathPrefix);
    ContextChangeSetFilter contextFilter = new ContextChangeSetFilter(contexts);
    LabelChangeSetFilter labelFilter = new LabelChangeSetFilter(labelExpression);
    DbmsChangeSetFilter dbmsFilter = new DbmsChangeSetFilter(database);
    return new ChangeLogIterator(changeLog, shouldRunFilter, contextFilter, labelFilter, dbmsFilter);
}
/** Convenience overload of {@link #update(Contexts, Writer)} that parses the context string. */
public void update(String contexts, Writer output) throws LiquibaseException {
    this.update(new Contexts(contexts), output);
}

/** Delegates to {@link #update(Contexts, LabelExpression, Writer)} with an empty label expression. */
public void update(Contexts contexts, Writer output) throws LiquibaseException {
    update(contexts, new LabelExpression(), output);
}
/**
 * "Update SQL" mode: runs the update logic against a LoggingExecutor so the
 * SQL is written to {@code output} instead of being executed.
 *
 * Fix: the original only restored the real executor on the success path; any
 * exception from {@link #update(Contexts, LabelExpression)} left the logging
 * executor installed, so later operations would keep writing SQL to the
 * (possibly closed) writer. The restore and resetServices now run in finally.
 *
 * @throws LiquibaseException on update failure or if the writer cannot be flushed
 */
public void update(Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
    changeLogParameters.setContexts(contexts);
    changeLogParameters.setLabels(labelExpression);
    Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
    LoggingExecutor loggingExecutor = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
    ExecutorService.getInstance().setExecutor(database, loggingExecutor);
    outputHeader("Update Database Script");
    LockService lockService = LockServiceFactory.getInstance().getLockService(database);
    lockService.waitForLock();
    try {
        update(contexts, labelExpression);
        output.flush();
    } catch (IOException e) {
        throw new LiquibaseException(e);
    } finally {
        // Always put the real executor back, even when the simulated run fails.
        ExecutorService.getInstance().setExecutor(database, oldTemplate);
        resetServices();
    }
}
/** Applies at most {@code changesToApply} pending change sets; context string form of {@link #update(int, Contexts, LabelExpression)}. */
public void update(int changesToApply, String contexts) throws LiquibaseException {
    update(changesToApply, new Contexts(contexts), new LabelExpression());
}
/**
 * Applies at most {@code changesToApply} pending change sets that match the
 * given contexts and labels, under the changelog lock.
 *
 * @throws LiquibaseException on parse, validation, or execution failure
 */
public void update(int changesToApply, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
    changeLogParameters.setContexts(contexts);
    changeLogParameters.setLabels(labelExpression);
    LockService lockService = LockServiceFactory.getInstance().getLockService(database);
    lockService.waitForLock();
    try {
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        checkLiquibaseTables(true, changeLog, contexts, labelExpression);
        changeLog.validate(database, contexts, labelExpression);
        // Same filter stack as a plain update, plus a count cap.
        ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                new ShouldRunChangeSetFilter(database, ignoreClasspathPrefix),
                new ContextChangeSetFilter(contexts),
                new LabelChangeSetFilter(labelExpression),
                new DbmsChangeSetFilter(database),
                new CountChangeSetFilter(changesToApply));
        logIterator.run(createUpdateVisitor(), new RuntimeEnvironment(database, contexts, labelExpression));
    } finally {
        lockService.releaseLock();
        resetServices();
    }
}
/** "Update SQL" form of {@link #update(int, String)}: writes the SQL for at most {@code changesToApply} change sets to {@code output}. */
public void update(int changesToApply, String contexts, Writer output) throws LiquibaseException {
    this.update(changesToApply, new Contexts(contexts), new LabelExpression(), output);
}
/**
 * "Update SQL" mode for a capped number of change sets: installs a
 * LoggingExecutor so SQL is written to {@code output} instead of executed.
 *
 * Fix: the original restored the real executor only after a successful run;
 * a failure in {@link #update(int, Contexts, LabelExpression)} left the
 * logging executor installed. Restore and resetServices now run in finally.
 *
 * @throws LiquibaseException on update failure or if the writer cannot be flushed
 */
public void update(int changesToApply, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
    changeLogParameters.setContexts(contexts);
    changeLogParameters.setLabels(labelExpression);
    Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
    LoggingExecutor loggingExecutor = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
    ExecutorService.getInstance().setExecutor(database, loggingExecutor);
    outputHeader("Update " + changesToApply + " Change Sets Database Script");
    try {
        update(changesToApply, contexts, labelExpression);
        output.flush();
    } catch (IOException e) {
        throw new LiquibaseException(e);
    } finally {
        resetServices();
        ExecutorService.getInstance().setExecutor(database, oldTemplate);
    }
}
/**
 * Emits the standard comment banner at the top of generated SQL scripts:
 * the given message, changelog path, timestamp, connection info, and version.
 *
 * @param message one-line description of the script being generated
 */
private void outputHeader(String message) throws DatabaseException {
    Executor executor = ExecutorService.getInstance().getExecutor(database);
    executor.comment("*********************************************************************");
    executor.comment(message);
    executor.comment("*********************************************************************");
    executor.comment("Change Log: " + changeLogFile);
    executor.comment("Ran at: " + DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT).format(new Date()));
    DatabaseConnection connection = getDatabase().getConnection();
    if (connection != null) {
        executor.comment("Against: " + connection.getConnectionUserName() + "@" + connection.getURL());
    }
    executor.comment("Liquibase version: " + LiquibaseUtil.getBuildVersion());
    executor.comment("*********************************************************************" + StreamUtil.getLineSeparator());
    // Oracle scripts would otherwise treat '&' in SQL as a substitution variable.
    if (database instanceof OracleDatabase) {
        executor.execute(new RawSqlStatement("SET DEFINE OFF;"));
    }
}
/** "Rollback SQL" form of {@link #rollback(int, String)}: context-string overload. */
public void rollback(int changesToRollback, String contexts, Writer output) throws LiquibaseException {
    rollback(changesToRollback, new Contexts(contexts), output);
}

/** Delegates to {@link #rollback(int, Contexts, LabelExpression, Writer)} with an empty label expression. */
public void rollback(int changesToRollback, Contexts contexts, Writer output) throws LiquibaseException {
    rollback(changesToRollback, contexts, new LabelExpression(), output);
}
/**
 * "Rollback SQL" mode: writes the SQL to roll back the last
 * {@code changesToRollback} change sets to {@code output} instead of
 * executing it.
 *
 * Fix: the original restored the real executor only on the success path; any
 * exception from {@link #rollback(int, Contexts, LabelExpression)} left the
 * logging executor installed. Restore and resetServices now run in finally.
 *
 * @throws LiquibaseException on rollback failure or if the writer cannot be flushed
 */
public void rollback(int changesToRollback, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
    changeLogParameters.setContexts(contexts);
    changeLogParameters.setLabels(labelExpression);
    Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
    ExecutorService.getInstance().setExecutor(database, new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database));
    outputHeader("Rollback " + changesToRollback + " Change(s) Script");
    try {
        rollback(changesToRollback, contexts, labelExpression);
        output.flush();
    } catch (IOException e) {
        throw new LiquibaseException(e);
    } finally {
        ExecutorService.getInstance().setExecutor(database, oldTemplate);
        resetServices();
    }
}
/** Rolls back the last {@code changesToRollback} change sets; context-string form. */
public void rollback(int changesToRollback, String contexts) throws LiquibaseException {
    rollback(changesToRollback, new Contexts(contexts), new LabelExpression());
}
/**
 * Rolls back the last {@code changesToRollback} already-ran change sets that
 * match the given contexts and labels, under the changelog lock.
 *
 * @throws LiquibaseException on parse, validation, or rollback failure
 */
public void rollback(int changesToRollback, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
    changeLogParameters.setContexts(contexts);
    changeLogParameters.setLabels(labelExpression);
    LockService lockService = LockServiceFactory.getInstance().getLockService(database);
    lockService.waitForLock();
    try {
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        checkLiquibaseTables(false, changeLog, contexts, labelExpression);
        changeLog.validate(database, contexts, labelExpression);
        changeLog.setIgnoreClasspathPrefix(ignoreClasspathPrefix);
        // Iterate only change sets that have already run, newest first per
        // the ran-changeset list, capped at the requested count.
        ChangeLogIterator logIterator = new ChangeLogIterator(database.getRanChangeSetList(), changeLog,
                new AlreadyRanChangeSetFilter(database.getRanChangeSetList(), ignoreClasspathPrefix),
                new ContextChangeSetFilter(contexts),
                new LabelChangeSetFilter(labelExpression),
                new DbmsChangeSetFilter(database),
                new CountChangeSetFilter(changesToRollback));
        logIterator.run(new RollbackVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
    } finally {
        // A failed unlock is logged so it cannot mask the original exception.
        try {
            lockService.releaseLock();
        } catch (LockException e) {
            log.severe("Error releasing lock", e);
        }
        resetServices();
    }
}
/** "Rollback SQL" form of rolling back to a tag; context-string overload. */
public void rollback(String tagToRollBackTo, String contexts, Writer output) throws LiquibaseException {
    rollback(tagToRollBackTo, new Contexts(contexts), output);
}

/** Delegates to {@link #rollback(String, Contexts, LabelExpression, Writer)} with an empty label expression. */
public void rollback(String tagToRollBackTo, Contexts contexts, Writer output) throws LiquibaseException {
    rollback(tagToRollBackTo, contexts, new LabelExpression(), output);
}
/**
 * "Rollback SQL" mode: writes the SQL to roll the database back to the given
 * tag to {@code output} instead of executing it.
 *
 * Fix: the original restored the real executor only on the success path; any
 * exception from {@link #rollback(String, Contexts, LabelExpression)} left
 * the logging executor installed. Restore and resetServices now run in finally.
 *
 * @throws LiquibaseException on rollback failure or if the writer cannot be flushed
 */
public void rollback(String tagToRollBackTo, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
    changeLogParameters.setContexts(contexts);
    changeLogParameters.setLabels(labelExpression);
    Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
    ExecutorService.getInstance().setExecutor(database, new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database));
    outputHeader("Rollback to '" + tagToRollBackTo + "' Script");
    try {
        rollback(tagToRollBackTo, contexts, labelExpression);
        output.flush();
    } catch (IOException e) {
        throw new LiquibaseException(e);
    } finally {
        ExecutorService.getInstance().setExecutor(database, oldTemplate);
        resetServices();
    }
}
/** Rolls back to the given tag; context-string form. */
public void rollback(String tagToRollBackTo, String contexts) throws LiquibaseException {
    rollback(tagToRollBackTo, new Contexts(contexts));
}

/** Delegates to {@link #rollback(String, Contexts, LabelExpression)} with an empty label expression. */
public void rollback(String tagToRollBackTo, Contexts contexts) throws LiquibaseException {
    rollback(tagToRollBackTo, contexts, new LabelExpression());
}
    /**
     * Rolls the database back to the state it was in when the given tag was applied.
     * Holds the changelog lock for the duration of the rollback.
     */
    public void rollback(String tagToRollBackTo, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(false, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            changeLog.setIgnoreClasspathPrefix(ignoreClasspathPrefix);
            List<RanChangeSet> ranChangeSetList = database.getRanChangeSetList();
            // Only change sets executed after the tag, and actually ran, are candidates for rollback.
            ChangeLogIterator logIterator = new ChangeLogIterator(ranChangeSetList, changeLog,
                    new AfterTagChangeSetFilter(tagToRollBackTo, ranChangeSetList),
                    new AlreadyRanChangeSetFilter(ranChangeSetList, ignoreClasspathPrefix),
                    new ContextChangeSetFilter(contexts),
                    new LabelChangeSetFilter(labelExpression),
                    new DbmsChangeSetFilter(database));
            logIterator.run(new RollbackVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            lockService.releaseLock();
        }
        resetServices();
    }
public void rollback(Date dateToRollBackTo, String contexts, Writer output) throws LiquibaseException {
rollback(dateToRollBackTo, new Contexts(contexts), new LabelExpression(), output);
}
public void rollback(Date dateToRollBackTo, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
ExecutorService.getInstance().setExecutor(database, new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database));
outputHeader("Rollback to " + dateToRollBackTo + " Script");
rollback(dateToRollBackTo, contexts, labelExpression);
try {
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
ExecutorService.getInstance().setExecutor(database, oldTemplate);
resetServices();
}
public void rollback(Date dateToRollBackTo, String contexts) throws LiquibaseException {
rollback(dateToRollBackTo, new Contexts(contexts), new LabelExpression());
}
    /**
     * Rolls the database back to its state at the given date.
     * Holds the changelog lock for the duration of the rollback.
     */
    public void rollback(Date dateToRollBackTo, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(false, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            changeLog.setIgnoreClasspathPrefix(ignoreClasspathPrefix);
            List<RanChangeSet> ranChangeSetList = database.getRanChangeSetList();
            // Only change sets executed after the cutoff date, and actually ran, are rolled back.
            ChangeLogIterator logIterator = new ChangeLogIterator(ranChangeSetList, changeLog,
                    new ExecutedAfterChangeSetFilter(dateToRollBackTo, ranChangeSetList),
                    new AlreadyRanChangeSetFilter(ranChangeSetList, ignoreClasspathPrefix),
                    new ContextChangeSetFilter(contexts),
                    new LabelChangeSetFilter(labelExpression),
                    new DbmsChangeSetFilter(database));
            logIterator.run(new RollbackVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            lockService.releaseLock();
        }
        resetServices();
    }
public void changeLogSync(String contexts, Writer output) throws LiquibaseException {
changeLogSync(new Contexts(contexts), new LabelExpression(), output);
}
public void changeLogSync(Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
LoggingExecutor outputTemplate = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
ExecutorService.getInstance().setExecutor(database, outputTemplate);
outputHeader("SQL to add all changesets to database history table");
changeLogSync(contexts, labelExpression);
try {
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
ExecutorService.getInstance().setExecutor(database, oldTemplate);
resetServices();
}
public void changeLogSync(String contexts) throws LiquibaseException {
changeLogSync(new Contexts(contexts), new LabelExpression());
}
    /**
     * Marks all unrun change sets as executed without running them, applying no label filtering.
     *
     * @deprecated use version with LabelExpression
     */
    public void changeLogSync(Contexts contexts) throws LiquibaseException {
        changeLogSync(contexts, new LabelExpression());
    }
    /**
     * Marks all unrun change sets as executed without running them.
     * Holds the changelog lock for the duration of the operation.
     */
    public void changeLogSync(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(true, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            // Visit every change set that has not yet been recorded as ran.
            ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                    new NotRanChangeSetFilter(database.getRanChangeSetList()),
                    new ContextChangeSetFilter(contexts),
                    new LabelChangeSetFilter(labelExpression),
                    new DbmsChangeSetFilter(database));
            logIterator.run(new ChangeLogSyncVisitor(database, changeLogSyncListener), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            lockService.releaseLock();
            resetServices();
        }
    }
public void markNextChangeSetRan(String contexts, Writer output) throws LiquibaseException {
markNextChangeSetRan(new Contexts(contexts), new LabelExpression(), output);
}
public void markNextChangeSetRan(Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
LoggingExecutor outputTemplate = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
ExecutorService.getInstance().setExecutor(database, outputTemplate);
outputHeader("SQL to add all changesets to database history table");
markNextChangeSetRan(contexts, labelExpression);
try {
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
ExecutorService.getInstance().setExecutor(database, oldTemplate);
resetServices();
}
public void markNextChangeSetRan(String contexts) throws LiquibaseException {
markNextChangeSetRan(new Contexts(contexts), new LabelExpression());
}
    /**
     * Marks the next unrun change set as executed without running it.
     * Holds the changelog lock for the duration of the operation.
     */
    public void markNextChangeSetRan(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(false, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            // CountChangeSetFilter(1) limits the sync to the single next unrun change set.
            ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                    new NotRanChangeSetFilter(database.getRanChangeSetList()),
                    new ContextChangeSetFilter(contexts),
                    new LabelChangeSetFilter(labelExpression),
                    new DbmsChangeSetFilter(database),
                    new CountChangeSetFilter(1));
            logIterator.run(new ChangeLogSyncVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            lockService.releaseLock();
            resetServices();
        }
    }
public void futureRollbackSQL(String contexts, Writer output) throws LiquibaseException {
futureRollbackSQL(null, contexts, output);
}
public void futureRollbackSQL(Integer count, String contexts, Writer output) throws LiquibaseException {
futureRollbackSQL(count, new Contexts(contexts), new LabelExpression(), output);
}
    /**
     * Writes the SQL needed to roll back currently unrun change sets to {@code output}.
     *
     * @param count if null, all unrun change sets are considered; otherwise only the next
     *              {@code count} change sets that would be executed are considered
     */
    public void futureRollbackSQL(Integer count, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        // Route all generated SQL to the writer instead of executing it.
        LoggingExecutor outputTemplate = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
        Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
        ExecutorService.getInstance().setExecutor(database, outputTemplate);
        outputHeader("SQL to roll back currently unexecuted changes");
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(false, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            ChangeLogIterator logIterator;
            if (count == null) {
                // No limit: roll back every unrun change set.
                logIterator = new ChangeLogIterator(changeLog,
                        new NotRanChangeSetFilter(database.getRanChangeSetList()),
                        new ContextChangeSetFilter(contexts),
                        new LabelChangeSetFilter(labelExpression),
                        new DbmsChangeSetFilter(database));
            } else {
                // First do a forward pass to discover which change sets an update of
                // 'count' steps would run, then restrict the rollback to exactly those.
                ChangeLogIterator forwardIterator = new ChangeLogIterator(changeLog,
                        new NotRanChangeSetFilter(database.getRanChangeSetList()),
                        new ContextChangeSetFilter(contexts),
                        new LabelChangeSetFilter(labelExpression),
                        new DbmsChangeSetFilter(database),
                        new CountChangeSetFilter(count));
                final ListVisitor listVisitor = new ListVisitor();
                forwardIterator.run(listVisitor, new RuntimeEnvironment(database, contexts, labelExpression));
                logIterator = new ChangeLogIterator(changeLog,
                        new NotRanChangeSetFilter(database.getRanChangeSetList()),
                        new ContextChangeSetFilter(contexts),
                        new LabelChangeSetFilter(labelExpression),
                        new DbmsChangeSetFilter(database),
                        new ChangeSetFilter() {
                            @Override
                            public ChangeSetFilterResult accepts(ChangeSet changeSet) {
                                // Accept only change sets seen during the forward pass.
                                return new ChangeSetFilterResult(listVisitor.getSeenChangeSets().contains(changeSet), null, null);
                            }
                        });
            }
            logIterator.run(new RollbackVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            lockService.releaseLock();
            ExecutorService.getInstance().setExecutor(database, oldTemplate);
            resetServices();
        }
        try {
            output.flush();
        } catch (IOException e) {
            throw new LiquibaseException(e);
        }
    }
    /**
     * Resets all cached per-database service state (lock services, changelog history
     * services, executors) so the next operation starts from a clean slate.
     */
    protected void resetServices() {
        LockServiceFactory.getInstance().resetAll();
        ChangeLogHistoryServiceFactory.getInstance().resetAll();
        ExecutorService.getInstance().reset();
    }
    /**
     * Drops all database objects owned by the current user, in the connection's
     * default catalog and schema.
     */
    public final void dropAll() throws DatabaseException, LockException {
        dropAll(new CatalogAndSchema(getDatabase().getDefaultCatalogName(), getDatabase().getDefaultSchemaName()));
    }
    /**
     * Drops all database objects owned by the current user in each of the given schemas.
     * Acquires the changelog lock, then destroys the lock service afterwards since the
     * lock table itself may have been dropped.
     */
    public final void dropAll(CatalogAndSchema... schemas) throws DatabaseException {
        try {
            LockServiceFactory.getInstance().getLockService(database).waitForLock();
            for (CatalogAndSchema schema : schemas) {
                log.info("Dropping Database Objects in schema: " + schema);
                checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());
                getDatabase().dropDatabaseObjects(schema);
            }
        } catch (DatabaseException e) {
            throw e;
        } catch (Exception e) {
            // Wrap any other failure so callers only need to handle DatabaseException.
            throw new DatabaseException(e);
        } finally {
            LockServiceFactory.getInstance().getLockService(database).destroy();
            resetServices();
        }
    }
    /**
     * 'Tags' the current state of the database so it can be rolled back to later
     * with {@link #rollback(String, Contexts)}.
     */
    public void tag(String tagString) throws LiquibaseException {
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());
            getDatabase().tag(tagString);
        } finally {
            lockService.releaseLock();
        }
    }
public void updateTestingRollback(String contexts) throws LiquibaseException {
updateTestingRollback(new Contexts(contexts), new LabelExpression());
}
    /**
     * Runs update, rolls everything executed since {@code baseDate} back, then updates
     * again — a smoke test that the change sets' rollback logic actually works.
     */
    public void updateTestingRollback(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        // Timestamp taken before the first update so the rollback targets exactly that update.
        Date baseDate = new Date();
        update(contexts, labelExpression);
        rollback(baseDate, contexts, labelExpression);
        update(contexts, labelExpression);
    }
    /**
     * Ensures the DATABASECHANGELOG and DATABASECHANGELOGLOCK tables exist and are up to date.
     *
     * @param updateExistingNullChecksums if true, recompute and store checksums for ran
     *                                    change sets whose stored checksum is null
     */
    public void checkLiquibaseTables(boolean updateExistingNullChecksums, DatabaseChangeLog databaseChangeLog, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        ChangeLogHistoryService changeLogHistoryService = ChangeLogHistoryServiceFactory.getInstance().getChangeLogService(getDatabase());
        changeLogHistoryService.init();
        if (updateExistingNullChecksums) {
            changeLogHistoryService.upgradeChecksums(databaseChangeLog, contexts, labelExpression);
        }
        LockServiceFactory.getInstance().getLockService(getDatabase()).init();
    }
    /**
     * Returns true if it is "safe" to migrate the database.
     * Currently, "safe" is defined as running in an output-sql mode or against a database on localhost.
     * It is fine to run Liquibase against a "non-safe" database, the method is mainly used to determine if the user
     * should be prompted before continuing.
     */
    public boolean isSafeToRunUpdate() throws DatabaseException {
        return getDatabase().isSafeToRunUpdate();
    }
    /**
     * Returns the current change log lock records, creating the Liquibase tables if needed.
     */
    public DatabaseChangeLogLock[] listLocks() throws LiquibaseException {
        checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());
        return LockServiceFactory.getInstance().getLockService(database).listLocks();
    }
public void reportLocks(PrintStream out) throws LiquibaseException {
DatabaseChangeLogLock[] locks = listLocks();
out.println("Database change log locks for " + getDatabase().getConnection().getConnectionUserName() + "@" + getDatabase().getConnection().getURL());
if (locks.length == 0) {
out.println(" - No locks");
}
for (DatabaseChangeLogLock lock : locks) {
out.println(" - " + lock.getLockedBy() + " at " + DateFormat.getDateTimeInstance().format(lock.getLockGranted()));
}
}
    /**
     * Forcibly releases the change log lock, regardless of who holds it.
     * Use with care: another process may still be mid-update.
     */
    public void forceReleaseLocks() throws LiquibaseException {
        checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());
        LockServiceFactory.getInstance().getLockService(database).forceReleaseLock();
    }
    /**
     * Returns the change sets that have not yet been applied, applying no label filtering.
     *
     * @deprecated use version with LabelExpression
     */
    public List<ChangeSet> listUnrunChangeSets(Contexts contexts) throws LiquibaseException {
        return listUnrunChangeSets(contexts, new LabelExpression());
    }
    /**
     * Returns the change sets that have not yet been applied to the database,
     * in the order they would be run.
     */
    public List<ChangeSet> listUnrunChangeSets(Contexts contexts, LabelExpression labels) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labels);
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        checkLiquibaseTables(true, changeLog, contexts, labels);
        changeLog.validate(database, contexts, labels);
        ChangeLogIterator logIterator = getStandardChangelogIterator(contexts, labels, changeLog);
        ListVisitor visitor = new ListVisitor();
        logIterator.run(visitor, new RuntimeEnvironment(database, contexts, labels));
        return visitor.getSeenChangeSets();
    }
    /**
     * Returns the status of every change set, applying no label filtering.
     *
     * @deprecated use version with LabelExpression
     */
    public List<ChangeSetStatus> getChangeSetStatuses(Contexts contexts) throws LiquibaseException {
        return getChangeSetStatuses(contexts, new LabelExpression());
    }
    /**
     * Returns the ChangeSetStatuses of all changesets in the change log file and history in the order they would be ran.
     */
    public List<ChangeSetStatus> getChangeSetStatuses(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        checkLiquibaseTables(true, changeLog, contexts, labelExpression);
        changeLog.validate(database, contexts, labelExpression);
        ChangeLogIterator logIterator = getStandardChangelogIterator(contexts, labelExpression, changeLog);
        StatusVisitor visitor = new StatusVisitor(database);
        logIterator.run(visitor, new RuntimeEnvironment(database, contexts, labelExpression));
        return visitor.getStatuses();
    }
public void reportStatus(boolean verbose, String contexts, Writer out) throws LiquibaseException {
reportStatus(verbose, new Contexts(contexts), new LabelExpression(), out);
}
public void reportStatus(boolean verbose, Contexts contexts, Writer out) throws LiquibaseException {
reportStatus(verbose, contexts, new LabelExpression(), out);
}
    /**
     * Writes a human-readable report of unapplied change sets to {@code out}.
     *
     * @param verbose if true, each unrun change set is listed individually
     */
    public void reportStatus(boolean verbose, Contexts contexts, LabelExpression labels, Writer out) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labels);
        try {
            List<ChangeSet> unrunChangeSets = listUnrunChangeSets(contexts, labels);
            if (unrunChangeSets.size() == 0) {
                out.append(getDatabase().getConnection().getConnectionUserName());
                out.append("@");
                out.append(getDatabase().getConnection().getURL());
                out.append(" is up to date");
                out.append(StreamUtil.getLineSeparator());
            } else {
                out.append(String.valueOf(unrunChangeSets.size()));
                out.append(" change sets have not been applied to ");
                out.append(getDatabase().getConnection().getConnectionUserName());
                out.append("@");
                out.append(getDatabase().getConnection().getURL());
                out.append(StreamUtil.getLineSeparator());
                if (verbose) {
                    for (ChangeSet changeSet : unrunChangeSets) {
                        out.append("     ").append(changeSet.toString(false)).append(StreamUtil.getLineSeparator());
                    }
                }
            }
            out.flush();
        } catch (IOException e) {
            throw new LiquibaseException(e);
        }
    }
public Collection<RanChangeSet> listUnexpectedChangeSets(String contexts) throws LiquibaseException {
return listUnexpectedChangeSets(new Contexts(contexts), new LabelExpression());
}
    /**
     * Returns ran change sets recorded in the database history that are no longer
     * present in the change log file.
     */
    public Collection<RanChangeSet> listUnexpectedChangeSets(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        changeLog.validate(database, contexts, labelExpression);
        ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                new ContextChangeSetFilter(contexts),
                new LabelChangeSetFilter(labelExpression),
                new DbmsChangeSetFilter(database));
        // The visitor collects ran change sets that the iterator never visits, i.e. removed ones.
        ExpectedChangesVisitor visitor = new ExpectedChangesVisitor(database.getRanChangeSetList());
        logIterator.run(visitor, new RuntimeEnvironment(database, contexts, labelExpression));
        return visitor.getUnexpectedChangeSets();
    }
public void reportUnexpectedChangeSets(boolean verbose, String contexts, Writer out) throws LiquibaseException {
reportUnexpectedChangeSets(verbose, new Contexts(contexts), new LabelExpression(), out);
}
    /**
     * Writes a human-readable report of change sets that ran against the database but
     * are no longer in the change log to {@code out}.
     *
     * @param verbose if true, each unexpected change set is listed individually
     */
    public void reportUnexpectedChangeSets(boolean verbose, Contexts contexts, LabelExpression labelExpression, Writer out) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        try {
            Collection<RanChangeSet> unexpectedChangeSets = listUnexpectedChangeSets(contexts, labelExpression);
            if (unexpectedChangeSets.size() == 0) {
                out.append(getDatabase().getConnection().getConnectionUserName());
                out.append("@");
                out.append(getDatabase().getConnection().getURL());
                out.append(" contains no unexpected changes!");
                out.append(StreamUtil.getLineSeparator());
            } else {
                out.append(String.valueOf(unexpectedChangeSets.size()));
                out.append(" unexpected changes were found in ");
                out.append(getDatabase().getConnection().getConnectionUserName());
                out.append("@");
                out.append(getDatabase().getConnection().getURL());
                out.append(StreamUtil.getLineSeparator());
                if (verbose) {
                    for (RanChangeSet ranChangeSet : unexpectedChangeSets) {
                        out.append("     ").append(ranChangeSet.toString()).append(StreamUtil.getLineSeparator());
                    }
                }
            }
            out.flush();
        } catch (IOException e) {
            throw new LiquibaseException(e);
        }
    }
    /**
     * Sets all stored change set checksums to null so they will be repopulated on the next run.
     * Useful after intentionally editing an already-ran change set.
     */
    public void clearCheckSums() throws LiquibaseException {
        log.info("Clearing database change log checksums");
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());
            // Null out MD5SUM for every row in the change log history table.
            UpdateStatement updateStatement = new UpdateStatement(getDatabase().getLiquibaseCatalogName(), getDatabase().getLiquibaseSchemaName(), getDatabase().getDatabaseChangeLogTableName());
            updateStatement.addNewColumnValue("MD5SUM", null);
            ExecutorService.getInstance().getExecutor(database).execute(updateStatement);
            getDatabase().commit();
        } finally {
            lockService.releaseLock();
        }
        resetServices();
    }
public final CheckSum calculateCheckSum(final String changeSetIdentifier) throws LiquibaseException {
if (changeSetIdentifier == null) {
throw new LiquibaseException(new IllegalArgumentException("changeSetIdentifier"));
}
final List<String> parts = StringUtils.splitAndTrim(changeSetIdentifier, "::");
if (parts == null || parts.size() < 3) {
throw new LiquibaseException(new IllegalArgumentException("Invalid changeSet identifier: " + changeSetIdentifier));
}
return this.calculateCheckSum(parts.get(0), parts.get(1), parts.get(2));
}
    /**
     * Calculates the checksum for the change set with the given file path, id and author
     * by re-parsing the change log.
     *
     * @throws LiquibaseException if no matching change set exists in the change log
     */
    public CheckSum calculateCheckSum(final String filename, final String id, final String author) throws LiquibaseException {
        log.info(String.format("Calculating checksum for changeset %s::%s::%s", filename, id, author));
        final ChangeLogParameters changeLogParameters = this.getChangeLogParameters();
        final ResourceAccessor resourceAccessor = this.getResourceAccessor();
        final DatabaseChangeLog changeLog = ChangeLogParserFactory.getInstance().getParser(this.changeLogFile, resourceAccessor).parse(this.changeLogFile, changeLogParameters, resourceAccessor);
        // TODO: validate?
        final ChangeSet changeSet = changeLog.getChangeSet(filename, author, id);
        if (changeSet == null) {
            throw new LiquibaseException(new IllegalArgumentException("No such changeSet: " + filename + "::" + id + "::" + author));
        }
        return changeSet.generateCheckSum();
    }
public void generateDocumentation(String outputDirectory) throws LiquibaseException {
// call without context
generateDocumentation(outputDirectory, new Contexts(), new LabelExpression());
}
public void generateDocumentation(String outputDirectory, String contexts) throws LiquibaseException {
generateDocumentation(outputDirectory, new Contexts(contexts), new LabelExpression());
}
    /**
     * Generates DBDoc HTML documentation for the change log into {@code outputDirectory}.
     * Holds the changelog lock while reading change log and history state.
     */
    public void generateDocumentation(String outputDirectory, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        log.info("Generating Database Documentation");
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(false, changeLog, new Contexts(), new LabelExpression());
            changeLog.validate(database, contexts, labelExpression);
            // Only filter by DBMS: documentation covers change sets from every context/label.
            ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                    new DbmsChangeSetFilter(database));
            DBDocVisitor visitor = new DBDocVisitor(database);
            logIterator.run(visitor, new RuntimeEnvironment(database, contexts, labelExpression));
            visitor.writeHTML(new File(outputDirectory), resourceAccessor);
        } catch (IOException e) {
            throw new LiquibaseException(e);
        } finally {
            lockService.releaseLock();
        }
    }
public DiffResult diff(Database referenceDatabase, Database targetDatabase, CompareControl compareControl) throws LiquibaseException {
return DiffGeneratorFactory.getInstance().compare(referenceDatabase, targetDatabase, compareControl);
}
    /**
     * Checks the changelog for bad MD5Sums and failing preconditions before attempting
     * a migration, without touching the database schema.
     */
    public void validate() throws LiquibaseException {
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        changeLog.validate(database);
    }
    /** Sets a single changelog parameter available for substitution during parsing. */
    public void setChangeLogParameter(String key, Object value) {
        this.changeLogParameters.set(key, value);
    }
    /**
     * Add safe database properties as changelog parameters.<br/>
     * Safe properties are the ones that don't have side effects on liquibase state and also don't change during the liquibase execution.
     * @param database Database whose properties are put in the changelog
     * @throws DatabaseException
     */
    private void setDatabasePropertiesAsChangelogParameters(Database database) throws DatabaseException {
        setChangeLogParameter("database.autoIncrementClause", database.getAutoIncrementClause(null, null));
        setChangeLogParameter("database.currentDateTimeFunction", database.getCurrentDateTimeFunction());
        setChangeLogParameter("database.databaseChangeLogLockTableName", database.getDatabaseChangeLogLockTableName());
        setChangeLogParameter("database.databaseChangeLogTableName", database.getDatabaseChangeLogTableName());
        setChangeLogParameter("database.databaseMajorVersion", database.getDatabaseMajorVersion());
        setChangeLogParameter("database.databaseMinorVersion", database.getDatabaseMinorVersion());
        setChangeLogParameter("database.databaseProductName", database.getDatabaseProductName());
        setChangeLogParameter("database.databaseProductVersion", database.getDatabaseProductVersion());
        setChangeLogParameter("database.defaultCatalogName", database.getDefaultCatalogName());
        setChangeLogParameter("database.defaultSchemaName", database.getDefaultSchemaName());
        // Empty when no default schema is set; otherwise the schema name with a leading dot.
        setChangeLogParameter("database.defaultSchemaNamePrefix", StringUtils.trimToNull(database.getDefaultSchemaName())==null?"":"."+database.getDefaultSchemaName());
        setChangeLogParameter("database.lineComment", database.getLineComment());
        setChangeLogParameter("database.liquibaseSchemaName", database.getLiquibaseSchemaName());
        setChangeLogParameter("database.liquibaseTablespaceName", database.getLiquibaseTablespaceName());
        setChangeLogParameter("database.typeName", database.getShortName());
        setChangeLogParameter("database.isSafeToRunUpdate", database.isSafeToRunUpdate());
        setChangeLogParameter("database.requiresPassword", database.requiresPassword());
        setChangeLogParameter("database.requiresUsername", database.requiresUsername());
        setChangeLogParameter("database.supportsForeignKeyDisable", database.supportsForeignKeyDisable());
        setChangeLogParameter("database.supportsInitiallyDeferrableColumns", database.supportsInitiallyDeferrableColumns());
        setChangeLogParameter("database.supportsRestrictForeignKeys", database.supportsRestrictForeignKeys());
        setChangeLogParameter("database.supportsSchemas", database.supportsSchemas());
        setChangeLogParameter("database.supportsSequences", database.supportsSequences());
        setChangeLogParameter("database.supportsTablespaces", database.supportsTablespaces());
    }
    /** Returns the lock service for this instance's database. */
    private LockService getLockService() {
        return LockServiceFactory.getInstance().getLockService(database);
    }
    /** Sets the listener notified as change sets are executed. */
    public void setChangeExecListener(ChangeExecListener listener) {
        this.changeExecListener = listener;
    }
    /** Sets the listener notified as change sets are marked ran during changeLogSync. */
    public void setChangeLogSyncListener(ChangeLogSyncListener changeLogSyncListener) {
        this.changeLogSyncListener = changeLogSyncListener;
    }
    /** Controls whether "classpath:" prefixes are ignored when matching ran change set paths. */
    public void setIgnoreClasspathPrefix(boolean ignoreClasspathPrefix) {
        this.ignoreClasspathPrefix = ignoreClasspathPrefix;
    }
    /** Returns whether "classpath:" prefixes are ignored when matching ran change set paths. */
    public boolean isIgnoreClasspathPrefix() {
        return ignoreClasspathPrefix;
    }
    /**
     * Generates a change log for the given schema using the default changelog serializer.
     *
     * @param snapshotTypes optional restriction of the database object types to snapshot
     */
    public void generateChangeLog(CatalogAndSchema catalogAndSchema, DiffToChangeLog changeLogWriter, PrintStream outputStream, Class<? extends DatabaseObject>... snapshotTypes) throws DatabaseException, IOException, ParserConfigurationException {
        generateChangeLog(catalogAndSchema, changeLogWriter, outputStream, null, snapshotTypes);
    }
    /**
     * Generates a change log describing the given schema by diffing a snapshot of the
     * database against an empty snapshot, and prints it to {@code outputStream}.
     *
     * @param changeLogSerializer serializer to use; if null the writer's default is used
     * @param snapshotTypes optional restriction of the database object types to snapshot
     */
    public void generateChangeLog(CatalogAndSchema catalogAndSchema, DiffToChangeLog changeLogWriter, PrintStream outputStream, ChangeLogSerializer changeLogSerializer, Class<? extends DatabaseObject>... snapshotTypes) throws DatabaseException, IOException, ParserConfigurationException {
        Set<Class<? extends DatabaseObject>> finalCompareTypes = null;
        if (snapshotTypes != null && snapshotTypes.length > 0) {
            finalCompareTypes = new HashSet<Class<? extends DatabaseObject>>(Arrays.asList(snapshotTypes));
        }
        SnapshotControl snapshotControl = new SnapshotControl(this.getDatabase(), snapshotTypes);
        CompareControl compareControl = new CompareControl(new CompareControl.SchemaComparison[]{new CompareControl.SchemaComparison(catalogAndSchema, catalogAndSchema)}, finalCompareTypes);
        //        compareControl.addStatusListener(new OutDiffStatusListener());
        DatabaseSnapshot originalDatabaseSnapshot = null;
        try {
            originalDatabaseSnapshot = SnapshotGeneratorFactory.getInstance().createSnapshot(compareControl.getSchemas(CompareControl.DatabaseRole.REFERENCE), getDatabase(), snapshotControl);
            // Diff the live snapshot against an empty one (null database) so every object
            // appears as "missing" and is emitted as a create change.
            DiffResult diffResult = DiffGeneratorFactory.getInstance().compare(originalDatabaseSnapshot, SnapshotGeneratorFactory.getInstance().createSnapshot(compareControl.getSchemas(CompareControl.DatabaseRole.REFERENCE), null, snapshotControl), compareControl);
            changeLogWriter.setDiffResult(diffResult);
            if(changeLogSerializer != null) {
                changeLogWriter.print(outputStream, changeLogSerializer);
            } else {
                changeLogWriter.print(outputStream);
            }
        } catch (InvalidExampleException e) {
            throw new UnexpectedLiquibaseException(e);
        }
    }
}
|
liquibase-core/src/main/java/liquibase/Liquibase.java
|
package liquibase;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Writer;
import java.text.DateFormat;
import java.util.*;
import liquibase.change.CheckSum;
import liquibase.changelog.*;
import liquibase.changelog.filter.*;
import liquibase.changelog.visitor.*;
import liquibase.database.Database;
import liquibase.database.DatabaseConnection;
import liquibase.database.DatabaseFactory;
import liquibase.database.ObjectQuotingStrategy;
import liquibase.database.core.OracleDatabase;
import liquibase.diff.DiffGeneratorFactory;
import liquibase.diff.DiffResult;
import liquibase.diff.compare.CompareControl;
import liquibase.diff.output.changelog.DiffToChangeLog;
import liquibase.exception.DatabaseException;
import liquibase.exception.LiquibaseException;
import liquibase.exception.LockException;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.executor.Executor;
import liquibase.executor.ExecutorService;
import liquibase.executor.LoggingExecutor;
import liquibase.lockservice.DatabaseChangeLogLock;
import liquibase.lockservice.LockService;
import liquibase.lockservice.LockServiceFactory;
import liquibase.logging.LogFactory;
import liquibase.logging.Logger;
import liquibase.parser.ChangeLogParser;
import liquibase.parser.ChangeLogParserFactory;
import liquibase.resource.ResourceAccessor;
import liquibase.serializer.ChangeLogSerializer;
import liquibase.snapshot.DatabaseSnapshot;
import liquibase.snapshot.InvalidExampleException;
import liquibase.snapshot.SnapshotControl;
import liquibase.snapshot.SnapshotGeneratorFactory;
import liquibase.statement.core.RawSqlStatement;
import liquibase.statement.core.UpdateStatement;
import liquibase.structure.DatabaseObject;
import liquibase.util.LiquibaseUtil;
import liquibase.util.StreamUtil;
import liquibase.util.StringUtils;
import javax.xml.parsers.ParserConfigurationException;
/**
* Primary facade class for interacting with Liquibase.
* The built in command line, Ant, Maven and other ways of running Liquibase are wrappers around methods in this class.
*/
public class Liquibase {
    private DatabaseChangeLog databaseChangeLog; // parsed change log, set directly or via parsing changeLogFile
    private String changeLogFile; // path to the change log, normalized to '/' separators
    private ResourceAccessor resourceAccessor; // used to resolve the change log and included resources
    protected Database database;
    private Logger log;
    private ChangeLogParameters changeLogParameters;
    private ChangeExecListener changeExecListener; // optional callback for change set execution events
    private ChangeLogSyncListener changeLogSyncListener; // optional callback for changeLogSync events
    private boolean ignoreClasspathPrefix = true; // whether "classpath:" prefixes are ignored when matching ran change sets
    /**
     * Creates a Liquibase instance for a given DatabaseConnection. The Database instance used will be found with {@link DatabaseFactory#findCorrectDatabaseImplementation(liquibase.database.DatabaseConnection)}
     *
     * @see DatabaseConnection
     * @see Database
     * @see #Liquibase(String, liquibase.resource.ResourceAccessor, liquibase.database.Database)
     * @see ResourceAccessor
     */
    public Liquibase(String changeLogFile, ResourceAccessor resourceAccessor, DatabaseConnection conn) throws LiquibaseException {
        this(changeLogFile, resourceAccessor, DatabaseFactory.getInstance().findCorrectDatabaseImplementation(conn));
    }
/**
* Creates a Liquibase instance. The changeLogFile parameter must be a path that can be resolved by the passed ResourceAccessor.
* If windows style path separators are used for the changeLogFile, they will be standardized to unix style for better cross-system compatib.
*
* @See DatabaseConnection
* @See Database
* @see ResourceAccessor
*/
public Liquibase(String changeLogFile, ResourceAccessor resourceAccessor, Database database) throws LiquibaseException {
log = LogFactory.getLogger();
if (changeLogFile != null) {
this.changeLogFile = changeLogFile.replace('\\', '/'); //convert to standard / if using absolute path on windows
}
this.resourceAccessor = resourceAccessor;
this.changeLogParameters = new ChangeLogParameters(database);
this.database = database;
}
public Liquibase(DatabaseChangeLog changeLog, ResourceAccessor resourceAccessor, Database database) {
log = LogFactory.getLogger();
this.databaseChangeLog = changeLog;
this.changeLogFile = changeLog.getPhysicalFilePath();
if (this.changeLogFile != null) {
changeLogFile = changeLogFile.replace('\\', '/'); //convert to standard / if using absolute path on windows
}
this.resourceAccessor = resourceAccessor;
this.database = database;
this.changeLogParameters = new ChangeLogParameters(database);
}
    /**
     * Return the change log file path used by this Liquibase instance (normalized to '/' separators).
     */
    public String getChangeLogFile() {
        return changeLogFile;
    }
    /**
     * Return the log used by this Liquibase instance.
     */
    public Logger getLog() {
        return log;
    }
    /**
     * Returns the ChangeLogParameters container used by this Liquibase instance.
     */
    public ChangeLogParameters getChangeLogParameters() {
        return changeLogParameters;
    }
    /**
     * Returns the Database used by this Liquibase instance.
     */
    public Database getDatabase() {
        return database;
    }
    /**
     * Return the ResourceAccessor used by this Liquibase instance.
     * @deprecated use the newer-terminology version {@link #getResourceAccessor()}
     */
    public ResourceAccessor getFileOpener() {
        return resourceAccessor;
    }
    /**
     * Return the ResourceAccessor used by this Liquibase instance.
     */
    public ResourceAccessor getResourceAccessor() {
        return resourceAccessor;
    }
    /**
     * Use this function to override the current date/time function used to insert dates into the database.
     * Especially useful when using an unsupported database.
     *
     * @deprecated Should call {@link Database#setCurrentDateTimeFunction(String)} directly
     */
    public void setCurrentDateTimeFunction(String currentDateTimeFunction) {
        this.database.setCurrentDateTimeFunction(currentDateTimeFunction);
    }
/**
* Convience method for {@link #update(Contexts)} that constructs the Context object from the passed string.
*/
public void update(String contexts) throws LiquibaseException {
this.update(new Contexts(contexts));
}
/**
* Executes Liquibase "update" logic which ensures that the configured {@link Database} is up to date according to the configured changelog file.
* To run in "no context mode", pass a null or empty context object.
*/
public void update(Contexts contexts) throws LiquibaseException {
update(contexts, new LabelExpression());
}
    /**
     * Runs the "update" operation under the database changelog lock: parses and validates the
     * changelog, then executes every pending changeset matching the given contexts and labels.
     */
    public void update(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            // true: also upgrade any null checksums in the history table to current values
            checkLiquibaseTables(true, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            ChangeLogIterator changeLogIterator = getStandardChangelogIterator(contexts, labelExpression, changeLog);
            changeLogIterator.run(createUpdateVisitor(), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            database.setObjectQuotingStrategy(ObjectQuotingStrategy.LEGACY);
            try {
                lockService.releaseLock();
            } catch (LockException e) {
                // Log rather than rethrow so a lock-release failure does not mask an update failure
                log.severe("Could not release lock", e);
            }
            resetServices();
        }
    }
public DatabaseChangeLog getDatabaseChangeLog() throws LiquibaseException {
if (databaseChangeLog == null) {
ChangeLogParser parser = ChangeLogParserFactory.getInstance().getParser(changeLogFile, resourceAccessor);
databaseChangeLog = parser.parse(changeLogFile, changeLogParameters, resourceAccessor);
}
return databaseChangeLog;
}
protected UpdateVisitor createUpdateVisitor() {
return new UpdateVisitor(database, changeExecListener);
}
protected ChangeLogIterator getStandardChangelogIterator(Contexts contexts, LabelExpression labelExpression, DatabaseChangeLog changeLog) throws DatabaseException {
return new ChangeLogIterator(changeLog,
new ShouldRunChangeSetFilter(database, ignoreClasspathPrefix),
new ContextChangeSetFilter(contexts),
new LabelChangeSetFilter(labelExpression),
new DbmsChangeSetFilter(database));
}
public void update(String contexts, Writer output) throws LiquibaseException {
this.update(new Contexts(contexts), output);
}
public void update(Contexts contexts, Writer output) throws LiquibaseException {
update(contexts, new LabelExpression(), output);
}
public void update(Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
LoggingExecutor loggingExecutor = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
ExecutorService.getInstance().setExecutor(database, loggingExecutor);
outputHeader("Update Database Script");
LockService lockService = LockServiceFactory.getInstance().getLockService(database);
lockService.waitForLock();
try {
update(contexts, labelExpression);
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
ExecutorService.getInstance().setExecutor(database, oldTemplate);
resetServices();
}
public void update(int changesToApply, String contexts) throws LiquibaseException {
update(changesToApply, new Contexts(contexts), new LabelExpression());
}
    /**
     * Applies up to changesToApply pending changesets matching the given contexts and labels,
     * in changelog order, under the database changelog lock.
     */
    public void update(int changesToApply, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(true, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            // CountChangeSetFilter caps how many changesets the iterator will accept
            ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                    new ShouldRunChangeSetFilter(database, ignoreClasspathPrefix),
                    new ContextChangeSetFilter(contexts),
                    new LabelChangeSetFilter(labelExpression),
                    new DbmsChangeSetFilter(database),
                    new CountChangeSetFilter(changesToApply));
            logIterator.run(createUpdateVisitor(), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            lockService.releaseLock();
            resetServices();
        }
    }
public void update(int changesToApply, String contexts, Writer output) throws LiquibaseException {
this.update(changesToApply, new Contexts(contexts), new LabelExpression(), output);
}
public void update(int changesToApply, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
LoggingExecutor loggingExecutor = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
ExecutorService.getInstance().setExecutor(database, loggingExecutor);
outputHeader("Update " + changesToApply + " Change Sets Database Script");
update(changesToApply, contexts, labelExpression);
try {
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
resetServices();
ExecutorService.getInstance().setExecutor(database, oldTemplate);
}
    // Writes a standard comment banner (message, changelog path, timestamp, connection, Liquibase
    // version) through the current executor; used when generating SQL output instead of executing.
    private void outputHeader(String message) throws DatabaseException {
        Executor executor = ExecutorService.getInstance().getExecutor(database);
        executor.comment("*********************************************************************");
        executor.comment(message);
        executor.comment("*********************************************************************");
        executor.comment("Change Log: " + changeLogFile);
        executor.comment("Ran at: " + DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT).format(new Date()));
        DatabaseConnection connection = getDatabase().getConnection();
        if (connection != null) {
            executor.comment("Against: " + connection.getConnectionUserName() + "@" + connection.getURL());
        }
        executor.comment("Liquibase version: " + LiquibaseUtil.getBuildVersion());
        executor.comment("*********************************************************************" + StreamUtil.getLineSeparator());
        if (database instanceof OracleDatabase) {
            // Prevent SQL*Plus from prompting for '&' substitution variables in the generated script
            executor.execute(new RawSqlStatement("SET DEFINE OFF;"));
        }
    }
public void rollback(int changesToRollback, String contexts, Writer output) throws LiquibaseException {
rollback(changesToRollback, new Contexts(contexts), output);
}
public void rollback(int changesToRollback, Contexts contexts, Writer output) throws LiquibaseException {
rollback(changesToRollback, contexts, new LabelExpression(), output);
}
public void rollback(int changesToRollback, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
ExecutorService.getInstance().setExecutor(database, new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database));
outputHeader("Rollback " + changesToRollback + " Change(s) Script");
rollback(changesToRollback, contexts, labelExpression);
try {
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
ExecutorService.getInstance().setExecutor(database, oldTemplate);
resetServices();
}
public void rollback(int changesToRollback, String contexts) throws LiquibaseException {
rollback(changesToRollback, new Contexts(contexts), new LabelExpression());
}
    /**
     * Rolls back the last changesToRollback executed changesets (most recent first) that match the
     * given contexts and labels, under the database changelog lock.
     */
    public void rollback(int changesToRollback, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            // false: do not rewrite null checksums during a rollback
            checkLiquibaseTables(false, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            changeLog.setIgnoreClasspathPrefix(ignoreClasspathPrefix);
            // Iterate only changesets already recorded as run, capped at changesToRollback
            ChangeLogIterator logIterator = new ChangeLogIterator(database.getRanChangeSetList(), changeLog,
                    new AlreadyRanChangeSetFilter(database.getRanChangeSetList(), ignoreClasspathPrefix),
                    new ContextChangeSetFilter(contexts),
                    new LabelChangeSetFilter(labelExpression),
                    new DbmsChangeSetFilter(database),
                    new CountChangeSetFilter(changesToRollback));
            logIterator.run(new RollbackVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            try {
                lockService.releaseLock();
            } catch (LockException e) {
                // Log rather than rethrow so a lock-release failure does not mask a rollback failure
                log.severe("Error releasing lock", e);
            }
            resetServices();
        }
    }
public void rollback(String tagToRollBackTo, String contexts, Writer output) throws LiquibaseException {
rollback(tagToRollBackTo, new Contexts(contexts), output);
}
public void rollback(String tagToRollBackTo, Contexts contexts, Writer output) throws LiquibaseException {
rollback(tagToRollBackTo, contexts, new LabelExpression(), output);
}
public void rollback(String tagToRollBackTo, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
ExecutorService.getInstance().setExecutor(database, new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database));
outputHeader("Rollback to '" + tagToRollBackTo + "' Script");
rollback(tagToRollBackTo, contexts, labelExpression);
try {
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
ExecutorService.getInstance().setExecutor(database, oldTemplate);
resetServices();
}
public void rollback(String tagToRollBackTo, String contexts) throws LiquibaseException {
rollback(tagToRollBackTo, new Contexts(contexts));
}
public void rollback(String tagToRollBackTo, Contexts contexts) throws LiquibaseException {
rollback(tagToRollBackTo, contexts, new LabelExpression());
}
public void rollback(String tagToRollBackTo, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
LockService lockService = LockServiceFactory.getInstance().getLockService(database);
lockService.waitForLock();
try {
DatabaseChangeLog changeLog = getDatabaseChangeLog();
checkLiquibaseTables(false, changeLog, contexts, labelExpression);
changeLog.validate(database, contexts, labelExpression);
changeLog.setIgnoreClasspathPrefix(ignoreClasspathPrefix);
List<RanChangeSet> ranChangeSetList = database.getRanChangeSetList();
ChangeLogIterator logIterator = new ChangeLogIterator(ranChangeSetList, changeLog,
new AfterTagChangeSetFilter(tagToRollBackTo, ranChangeSetList),
new AlreadyRanChangeSetFilter(ranChangeSetList, ignoreClasspathPrefix),
new ContextChangeSetFilter(contexts),
new LabelChangeSetFilter(labelExpression),
new DbmsChangeSetFilter(database));
logIterator.run(new RollbackVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
} finally {
lockService.releaseLock();
}
resetServices();
}
public void rollback(Date dateToRollBackTo, String contexts, Writer output) throws LiquibaseException {
rollback(dateToRollBackTo, new Contexts(contexts), new LabelExpression(), output);
}
public void rollback(Date dateToRollBackTo, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
ExecutorService.getInstance().setExecutor(database, new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database));
outputHeader("Rollback to " + dateToRollBackTo + " Script");
rollback(dateToRollBackTo, contexts, labelExpression);
try {
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
ExecutorService.getInstance().setExecutor(database, oldTemplate);
resetServices();
}
public void rollback(Date dateToRollBackTo, String contexts) throws LiquibaseException {
rollback(dateToRollBackTo, new Contexts(contexts), new LabelExpression());
}
public void rollback(Date dateToRollBackTo, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
LockService lockService = LockServiceFactory.getInstance().getLockService(database);
lockService.waitForLock();
try {
DatabaseChangeLog changeLog = getDatabaseChangeLog();
checkLiquibaseTables(false, changeLog, contexts, labelExpression);
changeLog.validate(database, contexts, labelExpression);
changeLog.setIgnoreClasspathPrefix(ignoreClasspathPrefix);
List<RanChangeSet> ranChangeSetList = database.getRanChangeSetList();
ChangeLogIterator logIterator = new ChangeLogIterator(ranChangeSetList, changeLog,
new ExecutedAfterChangeSetFilter(dateToRollBackTo, ranChangeSetList),
new AlreadyRanChangeSetFilter(ranChangeSetList, ignoreClasspathPrefix),
new ContextChangeSetFilter(contexts),
new LabelChangeSetFilter(labelExpression),
new DbmsChangeSetFilter(database));
logIterator.run(new RollbackVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
} finally {
lockService.releaseLock();
}
resetServices();
}
public void changeLogSync(String contexts, Writer output) throws LiquibaseException {
changeLogSync(new Contexts(contexts), new LabelExpression(), output);
}
public void changeLogSync(Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
LoggingExecutor outputTemplate = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
ExecutorService.getInstance().setExecutor(database, outputTemplate);
outputHeader("SQL to add all changesets to database history table");
changeLogSync(contexts, labelExpression);
try {
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
ExecutorService.getInstance().setExecutor(database, oldTemplate);
resetServices();
}
public void changeLogSync(String contexts) throws LiquibaseException {
changeLogSync(new Contexts(contexts), new LabelExpression());
}
/**
* @deprecated use version with LabelExpression
*/
public void changeLogSync(Contexts contexts) throws LiquibaseException {
changeLogSync(contexts, new LabelExpression());
}
    /**
     * Marks every not-yet-run changeset matching the given contexts and labels as executed in the
     * database history table, without running its changes, under the database changelog lock.
     */
    public void changeLogSync(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(true, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                    new NotRanChangeSetFilter(database.getRanChangeSetList()),
                    new ContextChangeSetFilter(contexts),
                    new LabelChangeSetFilter(labelExpression),
                    new DbmsChangeSetFilter(database));
            // ChangeLogSyncVisitor records changesets as run without executing them
            logIterator.run(new ChangeLogSyncVisitor(database, changeLogSyncListener), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            lockService.releaseLock();
            resetServices();
        }
    }
public void markNextChangeSetRan(String contexts, Writer output) throws LiquibaseException {
markNextChangeSetRan(new Contexts(contexts), new LabelExpression(), output);
}
public void markNextChangeSetRan(Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
changeLogParameters.setContexts(contexts);
changeLogParameters.setLabels(labelExpression);
LoggingExecutor outputTemplate = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
ExecutorService.getInstance().setExecutor(database, outputTemplate);
outputHeader("SQL to add all changesets to database history table");
markNextChangeSetRan(contexts, labelExpression);
try {
output.flush();
} catch (IOException e) {
throw new LiquibaseException(e);
}
ExecutorService.getInstance().setExecutor(database, oldTemplate);
resetServices();
}
public void markNextChangeSetRan(String contexts) throws LiquibaseException {
markNextChangeSetRan(new Contexts(contexts), new LabelExpression());
}
    /**
     * Marks the next not-yet-run changeset matching the given contexts and labels as executed in
     * the database history table, without running its changes, under the database changelog lock.
     */
    public void markNextChangeSetRan(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(false, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            // CountChangeSetFilter(1) restricts the sync to a single changeset
            ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                    new NotRanChangeSetFilter(database.getRanChangeSetList()),
                    new ContextChangeSetFilter(contexts),
                    new LabelChangeSetFilter(labelExpression),
                    new DbmsChangeSetFilter(database),
                    new CountChangeSetFilter(1));
            logIterator.run(new ChangeLogSyncVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            lockService.releaseLock();
            resetServices();
        }
    }
public void futureRollbackSQL(String contexts, Writer output) throws LiquibaseException {
futureRollbackSQL(null, contexts, output);
}
public void futureRollbackSQL(Integer count, String contexts, Writer output) throws LiquibaseException {
futureRollbackSQL(count, new Contexts(contexts), new LabelExpression(), output);
}
    /**
     * Writes the SQL that would roll back all (or, if count is non-null, the next count) currently
     * unexecuted changesets to the given output, without touching the database.
     */
    public void futureRollbackSQL(Integer count, Contexts contexts, LabelExpression labelExpression, Writer output) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        LoggingExecutor outputTemplate = new LoggingExecutor(ExecutorService.getInstance().getExecutor(database), output, database);
        Executor oldTemplate = ExecutorService.getInstance().getExecutor(database);
        ExecutorService.getInstance().setExecutor(database, outputTemplate);
        outputHeader("SQL to roll back currently unexecuted changes");
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            DatabaseChangeLog changeLog = getDatabaseChangeLog();
            checkLiquibaseTables(false, changeLog, contexts, labelExpression);
            changeLog.validate(database, contexts, labelExpression);
            ChangeLogIterator logIterator;
            if (count == null) {
                // No count: roll back every not-yet-run changeset matching the filters
                logIterator = new ChangeLogIterator(changeLog,
                        new NotRanChangeSetFilter(database.getRanChangeSetList()),
                        new ContextChangeSetFilter(contexts),
                        new LabelChangeSetFilter(labelExpression),
                        new DbmsChangeSetFilter(database));
            } else {
                // First collect the next `count` pending changesets with a forward pass, then
                // build an iterator that accepts exactly that captured set.
                ChangeLogIterator forwardIterator = new ChangeLogIterator(changeLog,
                        new NotRanChangeSetFilter(database.getRanChangeSetList()),
                        new ContextChangeSetFilter(contexts),
                        new LabelChangeSetFilter(labelExpression),
                        new DbmsChangeSetFilter(database),
                        new CountChangeSetFilter(count));
                final ListVisitor listVisitor = new ListVisitor();
                forwardIterator.run(listVisitor, new RuntimeEnvironment(database, contexts, labelExpression));
                logIterator = new ChangeLogIterator(changeLog,
                        new NotRanChangeSetFilter(database.getRanChangeSetList()),
                        new ContextChangeSetFilter(contexts),
                        new LabelChangeSetFilter(labelExpression),
                        new DbmsChangeSetFilter(database),
                        new ChangeSetFilter() {
                            @Override
                            public ChangeSetFilterResult accepts(ChangeSet changeSet) {
                                // Accept only changesets captured by the forward pass above
                                return new ChangeSetFilterResult(listVisitor.getSeenChangeSets().contains(changeSet), null, null);
                            }
                        });
            }
            logIterator.run(new RollbackVisitor(database), new RuntimeEnvironment(database, contexts, labelExpression));
        } finally {
            lockService.releaseLock();
            ExecutorService.getInstance().setExecutor(database, oldTemplate);
            resetServices();
        }
        try {
            output.flush();
        } catch (IOException e) {
            throw new LiquibaseException(e);
        }
    }
    // Clears cached per-database state held by the lock, changelog-history, and executor services
    // so the next operation starts from a clean slate.
    protected void resetServices() {
        LockServiceFactory.getInstance().resetAll();
        ChangeLogHistoryServiceFactory.getInstance().resetAll();
        ExecutorService.getInstance().reset();
    }
/**
* Drops all database objects owned by the current user.
*/
public final void dropAll() throws DatabaseException, LockException {
dropAll(new CatalogAndSchema(getDatabase().getDefaultCatalogName(), getDatabase().getDefaultSchemaName()));
}
/**
* Drops all database objects owned by the current user.
*/
public final void dropAll(CatalogAndSchema... schemas) throws DatabaseException {
try {
LockServiceFactory.getInstance().getLockService(database).waitForLock();
for (CatalogAndSchema schema : schemas) {
log.info("Dropping Database Objects in schema: " + schema);
checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());
getDatabase().dropDatabaseObjects(schema);
}
} catch (DatabaseException e) {
throw e;
} catch (Exception e) {
throw new DatabaseException(e);
} finally {
try {
LockServiceFactory.getInstance().getLockService(database).releaseLock();
} catch (LockException e) {
log.severe("Unable to release lock: " + e.getMessage());
}
resetServices();
}
}
    /**
     * 'Tags' the current database state so it can be rolled back to with
     * {@link #rollback(String, Contexts, LabelExpression)}. Runs under the database changelog lock.
     */
    public void tag(String tagString) throws LiquibaseException {
        LockService lockService = LockServiceFactory.getInstance().getLockService(database);
        lockService.waitForLock();
        try {
            checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());
            getDatabase().tag(tagString);
        } finally {
            lockService.releaseLock();
        }
    }
public void updateTestingRollback(String contexts) throws LiquibaseException {
updateTestingRollback(new Contexts(contexts), new LabelExpression());
}
    /**
     * Tests rollback support by running a normal update, rolling back to the pre-update timestamp,
     * then updating again. Leaves the database fully updated on success.
     */
    public void updateTestingRollback(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        // Capture "now" before updating so the rollback targets only this run's changesets
        Date baseDate = new Date();
        update(contexts, labelExpression);
        rollback(baseDate, contexts, labelExpression);
        update(contexts, labelExpression);
    }
    /**
     * Initializes the changelog history and lock services for this database (creating/updating
     * their backing tables as needed) and, when updateExistingNullChecksums is true, upgrades any
     * null checksums in the history for the given changelog.
     */
    public void checkLiquibaseTables(boolean updateExistingNullChecksums, DatabaseChangeLog databaseChangeLog, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        ChangeLogHistoryService changeLogHistoryService = ChangeLogHistoryServiceFactory.getInstance().getChangeLogService(getDatabase());
        changeLogHistoryService.init();
        if (updateExistingNullChecksums) {
            changeLogHistoryService.upgradeChecksums(databaseChangeLog, contexts, labelExpression);
        }
        LockServiceFactory.getInstance().getLockService(getDatabase()).init();
    }
    /**
     * Returns true if it is "safe" to migrate the database.
     * Currently, "safe" is defined as running in an output-sql mode or against a database on localhost.
     * It is fine to run Liquibase against a "non-safe" database; the method is mainly used to determine if the user
     * should be prompted before continuing.
     */
    public boolean isSafeToRunUpdate() throws DatabaseException {
        return getDatabase().isSafeToRunUpdate();
    }
/**
* Display change log lock information.
*/
public DatabaseChangeLogLock[] listLocks() throws LiquibaseException {
checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());
return LockServiceFactory.getInstance().getLockService(database).listLocks();
}
public void reportLocks(PrintStream out) throws LiquibaseException {
DatabaseChangeLogLock[] locks = listLocks();
out.println("Database change log locks for " + getDatabase().getConnection().getConnectionUserName() + "@" + getDatabase().getConnection().getURL());
if (locks.length == 0) {
out.println(" - No locks");
}
for (DatabaseChangeLogLock lock : locks) {
out.println(" - " + lock.getLockedBy() + " at " + DateFormat.getDateTimeInstance().format(lock.getLockGranted()));
}
}
public void forceReleaseLocks() throws LiquibaseException {
checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());
LockServiceFactory.getInstance().getLockService(database).forceReleaseLock();
}
/**
* @deprecated use version with LabelExpression
*/
public List<ChangeSet> listUnrunChangeSets(Contexts contexts) throws LiquibaseException {
return listUnrunChangeSets(contexts, new LabelExpression());
}
    /**
     * Returns the changesets that would run on the next update, in changelog order, for the given
     * contexts and labels. Does not modify the database beyond initializing Liquibase's tables.
     */
    public List<ChangeSet> listUnrunChangeSets(Contexts contexts, LabelExpression labels) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labels);
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        checkLiquibaseTables(true, changeLog, contexts, labels);
        changeLog.validate(database, contexts, labels);
        ChangeLogIterator logIterator = getStandardChangelogIterator(contexts, labels, changeLog);
        // ListVisitor records each visited changeset instead of executing it
        ListVisitor visitor = new ListVisitor();
        logIterator.run(visitor, new RuntimeEnvironment(database, contexts, labels));
        return visitor.getSeenChangeSets();
    }
/**
* @deprecated use version with LabelExpression
*/
public List<ChangeSetStatus> getChangeSetStatuses(Contexts contexts) throws LiquibaseException {
return getChangeSetStatuses(contexts, new LabelExpression());
}
    /**
     * Returns the ChangeSetStatus of every changeset in the changelog file and history, in the
     * order they would be run, for the given contexts and labels.
     */
    public List<ChangeSetStatus> getChangeSetStatuses(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        checkLiquibaseTables(true, changeLog, contexts, labelExpression);
        changeLog.validate(database, contexts, labelExpression);
        ChangeLogIterator logIterator = getStandardChangelogIterator(contexts, labelExpression, changeLog);
        // StatusVisitor collects run/pending status instead of executing changesets
        StatusVisitor visitor = new StatusVisitor(database);
        logIterator.run(visitor, new RuntimeEnvironment(database, contexts, labelExpression));
        return visitor.getStatuses();
    }
public void reportStatus(boolean verbose, String contexts, Writer out) throws LiquibaseException {
reportStatus(verbose, new Contexts(contexts), new LabelExpression(), out);
}
public void reportStatus(boolean verbose, Contexts contexts, Writer out) throws LiquibaseException {
reportStatus(verbose, contexts, new LabelExpression(), out);
}
    /**
     * Writes a human-readable summary of how many changesets are pending (and, when verbose, which
     * ones) to the given writer for the given contexts and labels.
     */
    public void reportStatus(boolean verbose, Contexts contexts, LabelExpression labels, Writer out) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labels);
        try {
            List<ChangeSet> unrunChangeSets = listUnrunChangeSets(contexts, labels);
            if (unrunChangeSets.size() == 0) {
                out.append(getDatabase().getConnection().getConnectionUserName());
                out.append("@");
                out.append(getDatabase().getConnection().getURL());
                out.append(" is up to date");
                out.append(StreamUtil.getLineSeparator());
            } else {
                out.append(String.valueOf(unrunChangeSets.size()));
                out.append(" change sets have not been applied to ");
                out.append(getDatabase().getConnection().getConnectionUserName());
                out.append("@");
                out.append(getDatabase().getConnection().getURL());
                out.append(StreamUtil.getLineSeparator());
                if (verbose) {
                    // List each pending changeset on its own line
                    for (ChangeSet changeSet : unrunChangeSets) {
                        out.append("     ").append(changeSet.toString(false)).append(StreamUtil.getLineSeparator());
                    }
                }
            }
            out.flush();
        } catch (IOException e) {
            throw new LiquibaseException(e);
        }
    }
public Collection<RanChangeSet> listUnexpectedChangeSets(String contexts) throws LiquibaseException {
return listUnexpectedChangeSets(new Contexts(contexts), new LabelExpression());
}
    /**
     * Returns history-table entries recorded as run that are not present in the current changelog
     * file for the given contexts and labels ("unexpected" changesets).
     */
    public Collection<RanChangeSet> listUnexpectedChangeSets(Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
        changeLogParameters.setContexts(contexts);
        changeLogParameters.setLabels(labelExpression);
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        changeLog.validate(database, contexts, labelExpression);
        ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                new ContextChangeSetFilter(contexts),
                new LabelChangeSetFilter(labelExpression),
                new DbmsChangeSetFilter(database));
        // Marks each changelog changeset as "expected"; anything left in the ran list is unexpected
        ExpectedChangesVisitor visitor = new ExpectedChangesVisitor(database.getRanChangeSetList());
        logIterator.run(visitor, new RuntimeEnvironment(database, contexts, labelExpression));
        return visitor.getUnexpectedChangeSets();
    }
/**
 * Convenience overload: parses the comma-separated context string and reports with an
 * empty label expression.
 */
public void reportUnexpectedChangeSets(boolean verbose, String contexts, Writer out) throws LiquibaseException {
    reportUnexpectedChangeSets(verbose, new Contexts(contexts), new LabelExpression(), out);
}

/**
 * Writes a human-readable report of change sets that were ran against the database but
 * are no longer present in the change log.
 *
 * @param verbose when true, each unexpected change set is listed individually
 * @param out destination for the report; it is flushed but intentionally not closed,
 *            because the caller owns the writer
 * @throws LiquibaseException if listing fails, or wrapping any IOException raised while
 *                            writing the report
 */
public void reportUnexpectedChangeSets(boolean verbose, Contexts contexts, LabelExpression labelExpression, Writer out) throws LiquibaseException {
    changeLogParameters.setContexts(contexts);
    changeLogParameters.setLabels(labelExpression);

    try {
        Collection<RanChangeSet> unexpectedChangeSets = listUnexpectedChangeSets(contexts, labelExpression);
        if (unexpectedChangeSets.isEmpty()) { // idiomatic emptiness check instead of size() == 0
            out.append(getDatabase().getConnection().getConnectionUserName());
            out.append("@");
            out.append(getDatabase().getConnection().getURL());
            out.append(" contains no unexpected changes!");
            out.append(StreamUtil.getLineSeparator());
        } else {
            out.append(String.valueOf(unexpectedChangeSets.size()));
            out.append(" unexpected changes were found in ");
            out.append(getDatabase().getConnection().getConnectionUserName());
            out.append("@");
            out.append(getDatabase().getConnection().getURL());
            out.append(StreamUtil.getLineSeparator());
            if (verbose) {
                for (RanChangeSet ranChangeSet : unexpectedChangeSets) {
                    out.append("     ").append(ranChangeSet.toString()).append(StreamUtil.getLineSeparator());
                }
            }
        }
        out.flush();
    } catch (IOException e) {
        // Preserve the cause so callers can still see the underlying I/O failure.
        throw new LiquibaseException(e);
    }
}
/**
 * Sets every stored MD5SUM in the DATABASECHANGELOG table to null so that checksums are
 * recomputed on the next run. Performed under the change log lock; the lock is always
 * released, even if the update fails.
 */
public void clearCheckSums() throws LiquibaseException {
    log.info("Clearing database change log checksums");

    LockService lockService = LockServiceFactory.getInstance().getLockService(database);
    lockService.waitForLock();
    try {
        checkLiquibaseTables(false, null, new Contexts(), new LabelExpression());

        // One UPDATE that nulls the MD5SUM column for every row of the change log table.
        UpdateStatement clearSums = new UpdateStatement(
                getDatabase().getLiquibaseCatalogName(),
                getDatabase().getLiquibaseSchemaName(),
                getDatabase().getDatabaseChangeLogTableName());
        clearSums.addNewColumnValue("MD5SUM", null);
        ExecutorService.getInstance().getExecutor(database).execute(clearSums);
        getDatabase().commit();
    } finally {
        lockService.releaseLock();
    }
    resetServices();
}
/**
 * Calculates the checksum of the change set identified by a combined
 * "filePath::id::author" identifier.
 *
 * @throws LiquibaseException if the identifier is null or does not have three parts
 */
public final CheckSum calculateCheckSum(final String changeSetIdentifier) throws LiquibaseException {
    if (changeSetIdentifier == null) {
        throw new LiquibaseException(new IllegalArgumentException("changeSetIdentifier"));
    }
    final List<String> parts = StringUtils.splitAndTrim(changeSetIdentifier, "::");
    final boolean wellFormed = parts != null && parts.size() >= 3;
    if (!wellFormed) {
        throw new LiquibaseException(new IllegalArgumentException("Invalid changeSet identifier: " + changeSetIdentifier));
    }
    return this.calculateCheckSum(parts.get(0), parts.get(1), parts.get(2));
}

/**
 * Parses the change log and computes the checksum of the change set matching the given
 * file name, id and author.
 *
 * @throws LiquibaseException if no matching change set exists in the change log
 */
public CheckSum calculateCheckSum(final String filename, final String id, final String author) throws LiquibaseException {
    log.info(String.format("Calculating checksum for changeset %s::%s::%s", filename, id, author));

    final ChangeLogParameters parameters = this.getChangeLogParameters();
    final ResourceAccessor accessor = this.getResourceAccessor();
    final DatabaseChangeLog changeLog = ChangeLogParserFactory.getInstance()
            .getParser(this.changeLogFile, accessor)
            .parse(this.changeLogFile, parameters, accessor);

    // TODO: validate?
    final ChangeSet changeSet = changeLog.getChangeSet(filename, author, id);
    if (changeSet == null) {
        throw new LiquibaseException(new IllegalArgumentException("No such changeSet: " + filename + "::" + id + "::" + author));
    }
    return changeSet.generateCheckSum();
}
/** Generates dbDoc documentation for the full change log with no context restriction. */
public void generateDocumentation(String outputDirectory) throws LiquibaseException {
    // call without context
    generateDocumentation(outputDirectory, new Contexts(), new LabelExpression());
}

/** Generates dbDoc documentation restricted to the given comma-separated contexts. */
public void generateDocumentation(String outputDirectory, String contexts) throws LiquibaseException {
    generateDocumentation(outputDirectory, new Contexts(contexts), new LabelExpression());
}

/**
 * Generates "dbDoc" HTML documentation describing the change log and the current state
 * of the database, written under {@code outputDirectory}. Runs under the change log
 * lock, which is always released.
 *
 * @throws LiquibaseException on validation failure, or wrapping any IOException raised
 *                            while writing the HTML output
 */
public void generateDocumentation(String outputDirectory, Contexts contexts, LabelExpression labelExpression) throws LiquibaseException {
    log.info("Generating Database Documentation");
    changeLogParameters.setContexts(contexts);
    changeLogParameters.setLabels(labelExpression);

    LockService lockService = LockServiceFactory.getInstance().getLockService(database);
    lockService.waitForLock();
    try {
        DatabaseChangeLog changeLog = getDatabaseChangeLog();
        checkLiquibaseTables(false, changeLog, new Contexts(), new LabelExpression());
        changeLog.validate(database, contexts, labelExpression);

        // Only the dbms filter is applied: documentation covers all contexts/labels.
        ChangeLogIterator logIterator = new ChangeLogIterator(changeLog,
                new DbmsChangeSetFilter(database));

        DBDocVisitor visitor = new DBDocVisitor(database);
        logIterator.run(visitor, new RuntimeEnvironment(database, contexts, labelExpression));
        visitor.writeHTML(new File(outputDirectory), resourceAccessor);
    } catch (IOException e) {
        throw new LiquibaseException(e);
    } finally {
        lockService.releaseLock();
    }
    // NOTE: a long-dead commented-out implementation (pre-LockService refactor) was
    // removed here; it duplicated the try/finally lock handling above.
}
/**
 * Compares two databases and returns the differences found between them, as configured
 * by the given {@link CompareControl}.
 */
public DiffResult diff(Database referenceDatabase, Database targetDatabase, CompareControl compareControl) throws LiquibaseException {
    return DiffGeneratorFactory.getInstance().compare(referenceDatabase, targetDatabase, compareControl);
}

/**
 * Checks changelogs for bad MD5Sums and preconditions before attempting a migration
 */
public void validate() throws LiquibaseException {
    DatabaseChangeLog changeLog = getDatabaseChangeLog();
    changeLog.validate(database);
}

/** Registers a named parameter value for substitution while parsing the change log. */
public void setChangeLogParameter(String key, Object value) {
    this.changeLogParameters.set(key, value);
}
/**
 * Add safe database properties as changelog parameters.<br/>
 * Safe properties are the ones that don't have side effects on Liquibase state and that
 * don't change during the Liquibase execution.
 * @param database Database whose properties are put in the changelog
 * @throws DatabaseException
 */
private void setDatabasePropertiesAsChangelogParameters(Database database) throws DatabaseException {
    setChangeLogParameter("database.autoIncrementClause", database.getAutoIncrementClause(null, null));
    setChangeLogParameter("database.currentDateTimeFunction", database.getCurrentDateTimeFunction());
    setChangeLogParameter("database.databaseChangeLogLockTableName", database.getDatabaseChangeLogLockTableName());
    setChangeLogParameter("database.databaseChangeLogTableName", database.getDatabaseChangeLogTableName());
    setChangeLogParameter("database.databaseMajorVersion", database.getDatabaseMajorVersion());
    setChangeLogParameter("database.databaseMinorVersion", database.getDatabaseMinorVersion());
    setChangeLogParameter("database.databaseProductName", database.getDatabaseProductName());
    setChangeLogParameter("database.databaseProductVersion", database.getDatabaseProductVersion());
    setChangeLogParameter("database.defaultCatalogName", database.getDefaultCatalogName());
    setChangeLogParameter("database.defaultSchemaName", database.getDefaultSchemaName());
    // Prefix is "." + schema when a default schema is set, otherwise empty — lets change
    // logs write "${database.defaultSchemaNamePrefix}TABLE" without a dangling dot.
    setChangeLogParameter("database.defaultSchemaNamePrefix", StringUtils.trimToNull(database.getDefaultSchemaName())==null?"":"."+database.getDefaultSchemaName());
    setChangeLogParameter("database.lineComment", database.getLineComment());
    setChangeLogParameter("database.liquibaseSchemaName", database.getLiquibaseSchemaName());
    setChangeLogParameter("database.liquibaseTablespaceName", database.getLiquibaseTablespaceName());
    setChangeLogParameter("database.typeName", database.getShortName());
    setChangeLogParameter("database.isSafeToRunUpdate", database.isSafeToRunUpdate());
    setChangeLogParameter("database.requiresPassword", database.requiresPassword());
    setChangeLogParameter("database.requiresUsername", database.requiresUsername());
    setChangeLogParameter("database.supportsForeignKeyDisable", database.supportsForeignKeyDisable());
    setChangeLogParameter("database.supportsInitiallyDeferrableColumns", database.supportsInitiallyDeferrableColumns());
    setChangeLogParameter("database.supportsRestrictForeignKeys", database.supportsRestrictForeignKeys());
    setChangeLogParameter("database.supportsSchemas", database.supportsSchemas());
    setChangeLogParameter("database.supportsSequences", database.supportsSequences());
    setChangeLogParameter("database.supportsTablespaces", database.supportsTablespaces());
}
/** Shorthand for the lock service bound to this instance's database. */
private LockService getLockService() {
    return LockServiceFactory.getInstance().getLockService(database);
}

/** Sets the listener notified as individual change sets are executed. */
public void setChangeExecListener(ChangeExecListener listener) {
    this.changeExecListener = listener;
}

/** Sets the listener notified during changelog-sync operations. */
public void setChangeLogSyncListener(ChangeLogSyncListener changeLogSyncListener) {
    this.changeLogSyncListener = changeLogSyncListener;
}

// When true, "classpath:" prefixes are presumably ignored when comparing change log
// file paths — TODO confirm against the code that reads this flag (not visible here).
public void setIgnoreClasspathPrefix(boolean ignoreClasspathPrefix) {
    this.ignoreClasspathPrefix = ignoreClasspathPrefix;
}

public boolean isIgnoreClasspathPrefix() {
    return ignoreClasspathPrefix;
}
/** Generates a change log for the given schema using the default serializer. */
public void generateChangeLog(CatalogAndSchema catalogAndSchema, DiffToChangeLog changeLogWriter, PrintStream outputStream, Class<? extends DatabaseObject>... snapshotTypes) throws DatabaseException, IOException, ParserConfigurationException {
    generateChangeLog(catalogAndSchema, changeLogWriter, outputStream, null, snapshotTypes);
}

/**
 * Snapshots the given schema and writes a change log that would recreate it.
 *
 * @param changeLogSerializer serializer used to render the change log; when null the
 *                            writer's default format is used
 * @param snapshotTypes optional restriction of the object types included in the snapshot
 */
public void generateChangeLog(CatalogAndSchema catalogAndSchema, DiffToChangeLog changeLogWriter, PrintStream outputStream, ChangeLogSerializer changeLogSerializer, Class<? extends DatabaseObject>... snapshotTypes) throws DatabaseException, IOException, ParserConfigurationException {
    // Only a non-empty snapshotTypes array restricts the comparison types; null means
    // "compare everything".
    Set<Class<? extends DatabaseObject>> finalCompareTypes = null;
    if (snapshotTypes != null && snapshotTypes.length > 0) {
        finalCompareTypes = new HashSet<Class<? extends DatabaseObject>>(Arrays.asList(snapshotTypes));
    }
    SnapshotControl snapshotControl = new SnapshotControl(this.getDatabase(), snapshotTypes);
    // The schema is compared against itself: reference and target are the same
    // CatalogAndSchema, so the diff is "this schema vs. nothing".
    CompareControl compareControl = new CompareControl(new CompareControl.SchemaComparison[]{new CompareControl.SchemaComparison(catalogAndSchema, catalogAndSchema)}, finalCompareTypes);
    //        compareControl.addStatusListener(new OutDiffStatusListener());
    DatabaseSnapshot originalDatabaseSnapshot = null;
    try {
        originalDatabaseSnapshot = SnapshotGeneratorFactory.getInstance().createSnapshot(compareControl.getSchemas(CompareControl.DatabaseRole.REFERENCE), getDatabase(), snapshotControl);
        // NOTE(review): the second snapshot is taken against a null database —
        // presumably representing an empty database so the diff produces "create
        // everything" change sets; confirm with SnapshotGeneratorFactory semantics.
        DiffResult diffResult = DiffGeneratorFactory.getInstance().compare(originalDatabaseSnapshot, SnapshotGeneratorFactory.getInstance().createSnapshot(compareControl.getSchemas(CompareControl.DatabaseRole.REFERENCE), null, snapshotControl), compareControl);
        changeLogWriter.setDiffResult(diffResult);
        if(changeLogSerializer != null) {
            changeLogWriter.print(outputStream, changeLogSerializer);
        } else {
            changeLogWriter.print(outputStream);
        }
    } catch (InvalidExampleException e) {
        throw new UnexpectedLiquibaseException(e);
    }
}
}
|
CORE-2240 setDropFirst(true) still broken on empty database
|
liquibase-core/src/main/java/liquibase/Liquibase.java
|
CORE-2240 setDropFirst(true) still broken on empty database
|
|
Java
|
apache-2.0
|
624ca88f38924990f4080a149b9e725be540d5ad
| 0
|
indeedeng/proctor-webapp-library,indeedeng/proctor,indeedeng/proctor,indeedeng/proctor-webapp-library
|
package com.indeed.proctor.webapp.controllers;
import com.google.common.collect.Lists;
import com.indeed.proctor.webapp.extensions.AfterBackgroundJobExecute;
import com.indeed.proctor.webapp.extensions.BeforeBackgroundJobExecute;
import org.apache.log4j.Logger;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
/**
*/
/**
 * Base class for asynchronous jobs run via an executor: subclasses implement
 * {@link #execute()} plus before/after hooks, while this class tracks status, an
 * append-only text log, result URLs and any failure cause.
 */
public abstract class BackgroundJob<T> implements Callable<T> {
    private static final Logger LOGGER = Logger.getLogger(BackgroundJob.class);
    private Future<T> future;
    private JobStatus status = JobStatus.PENDING;
    protected final StringBuilder logBuilder = new StringBuilder();
    private Long id;
    private final long createdTime = System.currentTimeMillis();
    // URL to direct users to upon completion
    private final List<ResultUrl> urls = Lists.newArrayList();
    private String endMessage = "";
    private Throwable error = null;
    // Set (in a finally block) once execute() has returned or thrown; used by
    // getStatus() to distinguish DONE from still-running.
    private boolean executeFinished = false;
    // Appends one line to the job's text log (newline added automatically).
    public void log(final String message) {
        logBuilder.append(message).append("\n");
    }
    public String getLog() {
        return logBuilder.toString();
    }
    // Derives the current status lazily: while still PENDING, promote to CANCELLED,
    // FAILED or DONE based on the future, recorded error and executeFinished flag.
    // Order matters: cancellation wins over failure, failure over completion.
    public JobStatus getStatus() {
        if (future != null && status == JobStatus.PENDING) {
            if (future.isCancelled()) {
                setStatus(JobStatus.CANCELLED);
            } else if(error != null) {
                setStatus(JobStatus.FAILED);
            } else if (executeFinished){
                setStatus(JobStatus.DONE);
            }
        }
        return status;
    }
    public void setStatus(final JobStatus status) {
        this.status = status;
    }
    public Future<T> getFuture() {
        return future;
    }
    public void setFuture(final Future<T> future) {
        this.future = future;
    }
    public void setId(final long id) {
        this.id = id;
    }
    public Long getId() {
        return id;
    }
    public List<ResultUrl> getUrls() {
        return urls;
    }
    public void addUrl(final String url, final String text) {
        this.addUrl(url, text, "");
    }
    public void addUrl(final String url, final String text, final String target) {
        this.addUrl(new ResultUrl(url, text, target));
    }
    public void addUrl(final ResultUrl url) {
        this.urls.add(url);
    }
    public String getEndMessage() {
        return endMessage;
    }
    public void setEndMessage(final String endMessage) {
        this.endMessage = endMessage;
    }
    public Throwable getError() {
        return error;
    }
    public void setError(final Throwable error) {
        this.error = error;
    }
    public long getCreatedTime() {
        return createdTime;
    }
    public String toString() {
        return id + ": " + status;
    }
    // "Running" includes the not-yet-submitted state (future == null).
    public boolean isRunning() {
        return future == null || (!future.isDone() && !future.isCancelled());
    }
    public abstract String getTitle();
    public JobType getJobType() {
        return JobType.UNKNOWN;
    }
    // Logs the throwable and its cause chain (indented with "-- " per level) into the
    // job log, and records the error unless the job was cancelled or already finished.
    // NOTE(review): dereferences future without a null check — assumes this is only
    // called after the job was submitted; confirm callers.
    public void logFailedJob(final Throwable t) {
        log("Failed:");
        Throwable cause = t;
        final StringBuilder level = new StringBuilder(10);
        while (cause != null) {
            log(level.toString() + cause.getMessage());
            cause = cause.getCause();
            level.append("-- ");
        }
        if (!future.isCancelled() && !executeFinished) {
            setError(t);
        }
    }
    /** Immutable link (href/text/target) shown to the user when the job completes. */
    public static class ResultUrl {
        private final String href;
        private final String text;
        private final String target;
        public ResultUrl(final String href,
                         final String text,
                         final String target) {
            this.href = href;
            this.text = text;
            this.target = target;
        }
        public String getHref() {
            return href;
        }
        public String getTarget() {
            return target;
        }
        public String getText() {
            return text;
        }
    }
    protected abstract List<BeforeBackgroundJobExecute> getBeforeBackgroundJobExecutes();
    protected abstract List<AfterBackgroundJobExecute> getAfterBackgroundJobExecutes();
    protected abstract T execute() throws Exception;
    // Lifecycle: before-hooks (abort on failure), execute() (always marks
    // executeFinished, even on exception), then after-hooks. Hook and execute failures
    // are logged via logFailedJob rather than propagated.
    @Override
    public T call() throws Exception {
        T result = null;
        try {
            for (final BeforeBackgroundJobExecute beforeBackgroundJobExecute : getBeforeBackgroundJobExecutes()) {
                beforeBackgroundJobExecute.beforeExecute(this);
            }
        } catch (final Exception e) {
            LOGGER.error("BeforeBackgroundJobExecute Failed: " + getTitle(), e);
            logFailedJob(e);
            return null;
        }
        try {
            result = execute();
        } catch (final Exception e) {
            LOGGER.error("Background Job Failed: " + getTitle(), e);
            logFailedJob(e);
        } finally {
            executeFinished = true;
        }
        try {
            for (final AfterBackgroundJobExecute afterBackgroundJobExecute : getAfterBackgroundJobExecutes()) {
                afterBackgroundJobExecute.afterExecute(this, result);
            }
        } catch (final Exception e) {
            LOGGER.error("AfterBackgroundJobExecute Failed: " + getTitle(), e);
            logFailedJob(e);
        }
        return result;
    }
    public enum JobType {
        TEST_CREATION("test-creation"),
        TEST_EDIT("test-edit"),
        TEST_DELETION("test-deletion"),
        TEST_PROMOTION("test-promotion"),
        WORKING_DIRECTORY_CLEANING("working-directory-cleaning"),
        JOB_TEST("job-test"),
        UNKNOWN("unknown");
        private final String name;
        JobType(final String name) {
            this.name = name;
        }
        public String getName() {
            return name;
        }
    }
    public enum JobStatus {
        PENDING("PENDING"),
        DONE("DONE"),
        CANCELLED("CANCELLED"),
        FAILED("FAILED");
        private final String name;
        JobStatus(final String name) {
            this.name = name;
        }
        public String getName() {
            return name;
        }
        @Override
        public String toString() {
            return name;
        }
    }
}
|
src/main/java/com/indeed/proctor/webapp/controllers/BackgroundJob.java
|
package com.indeed.proctor.webapp.controllers;
import com.google.common.collect.Lists;
import com.indeed.proctor.webapp.extensions.AfterBackgroundJobExecute;
import com.indeed.proctor.webapp.extensions.BeforeBackgroundJobExecute;
import org.apache.log4j.Logger;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
/**
*/
public abstract class BackgroundJob<T> implements Callable<T> {
private static final Logger LOGGER = Logger.getLogger(BackgroundJob.class);
private Future<T> future;
private JobStatus status = JobStatus.PENDING;
protected final StringBuilder logBuilder = new StringBuilder();
private Long id;
private final long createdTime = System.currentTimeMillis();
// URL to direct users to upon completion
private final List<ResultUrl> urls = Lists.newArrayList();
private String endMessage = "";
private Throwable error = null;
public void log(final String message) {
logBuilder.append(message).append("\n");
}
public String getLog() {
return logBuilder.toString();
}
public JobStatus getStatus() {
return status;
}
public void setStatus(final JobStatus status) {
this.status = status;
}
public Future<T> getFuture() {
return future;
}
public void setFuture(final Future<T> future) {
this.future = future;
}
public void setId(final long id) {
this.id = id;
}
public Long getId() {
return id;
}
public List<ResultUrl> getUrls() {
return urls;
}
public void addUrl(final String url, final String text) {
this.addUrl(url, text, "");
}
public void addUrl(final String url, final String text, final String target) {
this.addUrl(new ResultUrl(url, text, target));
}
public void addUrl(final ResultUrl url) {
this.urls.add(url);
}
public String getEndMessage() {
return endMessage;
}
public void setEndMessage(final String endMessage) {
this.endMessage = endMessage;
}
public Throwable getError() {
return error;
}
public void setError(final Throwable error) {
this.error = error;
}
public long getCreatedTime() {
return createdTime;
}
public String toString() {
return id + ": " + status;
}
public boolean isRunning() {
return future == null || (!future.isDone() && !future.isCancelled());
}
public abstract String getTitle();
public JobType getJobType() {
return JobType.UNKNOWN;
}
public void logFailedJob(final Throwable t) {
log("Failed:");
JobStatus status = JobStatus.FAILED;
Throwable cause = t;
final StringBuilder level = new StringBuilder(10);
while (cause != null) {
log(level.toString() + cause.getMessage());
cause = cause.getCause();
if (cause instanceof InterruptedException) {
status = JobStatus.CANCELLED;
}
level.append("-- ");
}
setError(t);
setStatus(status);
}
public static class ResultUrl {
private final String href;
private final String text;
private final String target;
public ResultUrl(final String href,
final String text,
final String target) {
this.href = href;
this.text = text;
this.target = target;
}
public String getHref() {
return href;
}
public String getTarget() {
return target;
}
public String getText() {
return text;
}
}
protected abstract List<BeforeBackgroundJobExecute> getBeforeBackgroundJobExecutes();
protected abstract List<AfterBackgroundJobExecute> getAfterBackgroundJobExecutes();
protected abstract T execute() throws Exception;
@Override
public T call() throws Exception {
T result = null;
try {
for (final BeforeBackgroundJobExecute beforeBackgroundJobExecute : getBeforeBackgroundJobExecutes()) {
beforeBackgroundJobExecute.beforeExecute(this);
}
} catch (final Exception e) {
LOGGER.error("BeforeBackgroundJobExecute Failed: " + getTitle(), e);
logFailedJob(e);
return null;
}
try {
result = execute();
setStatus(JobStatus.DONE);
} catch (final Exception e) {
LOGGER.error("Background Job Failed: " + getTitle(), e);
logFailedJob(e);
}
try {
for (final AfterBackgroundJobExecute afterBackgroundJobExecute : getAfterBackgroundJobExecutes()) {
afterBackgroundJobExecute.afterExecute(this, result);
}
} catch (final Exception e) {
LOGGER.error("AfterBackgroundJobExecute Failed: " + getTitle(), e);
logFailedJob(e);
}
return result;
}
public enum JobType {
TEST_CREATION("test-creation"),
TEST_EDIT("test-edit"),
TEST_DELETION("test-deletion"),
TEST_PROMOTION("test-promotion"),
WORKING_DIRECTORY_CLEANING("working-directory-cleaning"),
JOB_TEST("job-test"),
UNKNOWN("unknown");
private final String name;
JobType(final String name) {
this.name = name;
}
public String getName() {
return name;
}
}
public enum JobStatus {
PENDING("PENDING"),
DONE("DONE"),
CANCELLED("CANCELLED"),
FAILED("FAILED");
private final String name;
JobStatus(final String name) {
this.name = name;
}
public String getName() {
return name;
}
@Override
public String toString() {
return name;
}
}
}
|
PROW-313: Fix a bug in the logic to detect job status
|
src/main/java/com/indeed/proctor/webapp/controllers/BackgroundJob.java
|
PROW-313: Fix a bug in the logic to detect job status
|
|
Java
|
apache-2.0
|
cc5a4467d92854d7f41e3554a308706658e0c962
| 0
|
wyona/yanel,wyona/yanel,wyona/yanel,baszero/yanel,wyona/yanel,wyona/yanel,baszero/yanel,baszero/yanel,baszero/yanel,baszero/yanel,baszero/yanel,wyona/yanel
|
/*
* Merge global resource-types config with realm specific resource-types configs
*/
package org.wyona.yanel.ant;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.Path;
import java.io.File;
import org.wyona.yanel.core.ResourceTypeDefinition;
import org.wyona.yanel.core.map.Realm;
import org.wyona.yanel.core.map.RealmContextConfig;
import org.wyona.yanel.core.map.RealmManagerConfig;
import org.wyona.commons.xml.XMLHelper;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.apache.log4j.Logger;
/**
* Merge resource-types.xml config files of the various realms and core
*/
/**
 * Ant task that merges each realm's resource-types.xml into the global resource-types
 * configuration file, skipping resource types already present (matched by "src"
 * attribute) and, for binary releases, auto-filling "package" attributes.
 */
public class MergeResourceTypesConfigsTask extends Task {
    private static Logger log = Logger.getLogger(MergeResourceTypesConfigsTask.class);
    private Path realmsConfigFile;
    private Path globalResourceTypesConfigFile;
    private boolean isBinaryRelease;
    // Yanel configuration namespace used for all created/queried elements.
    private String NAMESPACE = "http://www.wyona.org/yanel/1.0";
    /**
     * Entry point invoked by Ant: reads the realms config, then merges each realm's
     * resource-types into the global config file (in place).
     * @throws BuildException if reading a realm configuration fails
     */
    public void execute() throws BuildException {
        log("INFO: Realms config file: " + realmsConfigFile);
        File realmsConfig = new File(realmsConfigFile.toString());
        log("INFO: Global resource-types config directory: " + globalResourceTypesConfigFile);
        File globalResourceTypesConfig = new File(globalResourceTypesConfigFile.toString());
        // For binary releases the global config is patched first so that every entry
        // carries an explicit package attribute.
        if (isBinaryRelease) {
            insertPackageAttribute(globalResourceTypesConfig);
        }
        RealmManagerConfig realmManagerConfig = new RealmManagerConfig();
        try {
            RealmContextConfig[] realmContextConfigs;
            if (realmsConfig.isFile()) {
                realmContextConfigs = realmManagerConfig.getRealmContextConfigs(realmsConfig);
                log.info("Merge ...");
                log("Number of realms: " + realmContextConfigs.length);
                for (int i = 0; i < realmContextConfigs.length; i++) {
                    log("Realm context config: " + realmContextConfigs[i]);
                    mergeResourceTypesOfRealm(realmContextConfigs[i].getUnresolvedConfigurationFile(), globalResourceTypesConfig);
                }
            } else {
                log("ERROR: No such realms config '" + realmsConfig.getAbsolutePath() + "' exists!");
            }
        } catch (Exception e) {
            log("ERROR: " + e.getMessage());
            throw new BuildException(e.getMessage(), e);
        }
    }
    /**
     * Ant file task attribute realmsconfigfile
     */
    public void setRealmsConfigFile(Path realmsConfigFile) {
        this.realmsConfigFile = realmsConfigFile;
    }
    /**
     * Ant file task attribute globalresourcetypesconfigfile
     */
    public void setGlobalResourceTypesConfigFile(Path globalResourceTypesConfigFile) {
        this.globalResourceTypesConfigFile = globalResourceTypesConfigFile;
    }
    /**
     * Ant file task attribute isbinaryrelease
     */
    public void setIsBinaryRelease(boolean isBinaryRelease) {
        this.isBinaryRelease = isBinaryRelease;
    }
    /**
     * Merges one realm's resource-types.xml into the global config: each realm entry
     * not already present (by src) is appended, then the global file is overwritten.
     */
    private void mergeResourceTypesOfRealm(File unresolvedRealmConfig, File globalResourceTypesConfig) {
        File realmDir;
        if (unresolvedRealmConfig.isDirectory()) {
            realmDir = unresolvedRealmConfig;
        } else if (unresolvedRealmConfig.isFile()) {
            realmDir = new File(unresolvedRealmConfig.getParent());
        } else {
            log.error("Neither file nor directory: " + unresolvedRealmConfig);
            return;
        }
        log("INFO: Realm directory: " + realmDir);
        File resourceTypesConfigOfRealm = new File(realmDir, "resource-types.xml");
        if (resourceTypesConfigOfRealm.isFile()) {
            log("INFO: Realm has specific resource-types configured: " + resourceTypesConfigOfRealm);
            try {
                Document globalDoc = XMLHelper.readDocument(new java.io.FileInputStream(globalResourceTypesConfig));
                Document realmDoc = XMLHelper.readDocument(new java.io.FileInputStream(resourceTypesConfigOfRealm));
                Element rootElement = globalDoc.getDocumentElement();
                rootElement.appendChild(globalDoc.createComment("Realm specific resource-types (" + resourceTypesConfigOfRealm + "):")); // Only formatting
                Element[] resourceTypeElements = XMLHelper.getChildElements(realmDoc.getDocumentElement(), "resource-type", NAMESPACE);
                for (int i = 0; i < resourceTypeElements.length; i++) {
                    String srcAttr = resourceTypeElements[i].getAttribute("src");
                    if (srcAttr != null) {
                        srcAttr = srcAttr.replace("@REALM_SRC_DIR@", resourceTypesConfigOfRealm.getParent().replace(File.separator, "/")); // NOTE: Enforce forward slashes in the case of Windows!
                    }
                    // TODO: Check for duplicated resource-types also based re package attribute!
                    if (!resourceTypeExists(srcAttr, rootElement)) {
                        rootElement.appendChild(globalDoc.createTextNode("\n  ")); // Only formatting
                        Element rtElement = globalDoc.createElementNS(NAMESPACE, "resource-type");
                        //Element rtElement = globalDoc.createElementNS(namespace, "todo");
                        // Copy only non-empty attributes (DOM getAttribute returns ""
                        // for missing attributes, never null).
                        if (srcAttr != null && !srcAttr.equals("")) {
                            rtElement.setAttribute("src", srcAttr);
                        }
                        String packageAttr = resourceTypeElements[i].getAttribute("package");
                        if (packageAttr != null && !packageAttr.equals("")) {
                            rtElement.setAttribute("package", packageAttr);
                        }
                        String compileAttr = resourceTypeElements[i].getAttribute("compile");
                        if (compileAttr != null && !compileAttr.equals("")) {
                            rtElement.setAttribute("compile", compileAttr);
                        }
                        String copyDirNameAttr = resourceTypeElements[i].getAttribute("copy-dir-name");
                        if (copyDirNameAttr != null && !copyDirNameAttr.equals("")) {
                            rtElement.setAttribute("copy-dir-name", copyDirNameAttr);
                            log("WARN: copy-dir-name attribute is deprecated (Resource '" + srcAttr + "')!");
                            log.warn("copy-dir-name attribute is deprecated!");
                        }
                        if (isBinaryRelease) {
                            //log("DEBUG: This is a binary release! (Resource: " + srcAttr + ")");
                            //log.warn("DEBUG: This is a binary release!");
                            // Derive the package from the resource's resource.xml when
                            // neither copy-dir-name nor package is set.
                            if (copyDirNameAttr.equals("") && packageAttr.equals("")) {
                                log("INFO: Insert the package name of the resource '" + srcAttr + "' automatically.");
                                rtElement.setAttribute("package", getJavaPackageOfResourceType(srcAttr));
                            }
                        }
                        rootElement.appendChild(rtElement);
                    }
                }
                rootElement.appendChild(globalDoc.createTextNode("\n\n")); // Only formatting
                XMLHelper.writeDocument(globalDoc, new java.io.FileOutputStream(globalResourceTypesConfig));
            } catch(Exception e) {
                log.error(e, e);
            }
        } else {
            log("INFO: Realm has no specific resource-types configured.");
        }
    }
    /**
     * Returns true if the global config root already contains a resource-type element
     * with the given src attribute.
     */
    private boolean resourceTypeExists(String src, Element rootElement) throws Exception {
        Element[] elements = XMLHelper.getElements(rootElement, "resource-type", "src", src);
        if (elements.length > 0) return true;
        return false;
    }
    /**
     * @param resourceHomePath Source directory of resource type
     */
    private String getJavaPackageOfResourceType(String resourceHomePath) throws Exception {
        String classname = new ResourceTypeDefinition(new File(resourceHomePath, "resource.xml")).getResourceTypeClassname();
        //return Class.forName(classname).getPackage().getName(); // NOTE: This doesn't work, because java classloader check
        return classname.substring(0, classname.lastIndexOf(".")); // NOTE: This doesn't work in the case of ...
    }
    /**
     * Insert package attributes automatically if binary release and no copy-dir-name attribute exists
     * @param globalResourceTypesConfig Copied resource types configuration file (see build/classes/resource-types.xml)
     */
    private void insertPackageAttribute(File globalResourceTypesConfig) {
        log.warn("DEBUG: Patch file: " + globalResourceTypesConfig);
        try {
            Document doc = XMLHelper.readDocument(new java.io.FileInputStream(globalResourceTypesConfig));
            Element[] resourceTypeElements = XMLHelper.getChildElements(doc.getDocumentElement(), "resource-type", NAMESPACE);
            for (int i = 0; i < resourceTypeElements.length; i++) {
                if (!resourceTypeElements[i].hasAttribute("copy-dir-name")) {
                    if (!resourceTypeElements[i].hasAttribute("package")) {
                        String srcAttr = resourceTypeElements[i].getAttribute("src");
                        log.warn("DEBUG: Set package automatically for resource type: " + srcAttr);
                        resourceTypeElements[i].setAttribute("package", getJavaPackageOfResourceType(srcAttr));
                    }
                }
            }
            log.warn("DEBUG: Overwrite file: " + globalResourceTypesConfig);
            XMLHelper.writeDocument(doc, new java.io.FileOutputStream(globalResourceTypesConfig));
        } catch(Exception e) {
            log.error(e, e);
        }
    }
}
|
src/build/java/org/wyona/yanel/ant/MergeResourceTypesConfigsTask.java
|
/*
* Merge global resource-types config with realm specific resource-types configs
*/
package org.wyona.yanel.ant;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.Path;
import java.io.File;
import org.wyona.yanel.core.ResourceTypeDefinition;
import org.wyona.yanel.core.map.Realm;
import org.wyona.yanel.core.map.RealmContextConfig;
import org.wyona.yanel.core.map.RealmManagerConfig;
import org.wyona.commons.xml.XMLHelper;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.apache.log4j.Logger;
/**
* Merge resource-types.xml config files of the various realms and core
*/
public class MergeResourceTypesConfigsTask extends Task {

    private static Logger log = Logger.getLogger(MergeResourceTypesConfigsTask.class);

    // Configured via the Ant build file (see setter methods below)
    private Path realmsConfigFile;
    private Path globalResourceTypesConfigFile;
    private boolean isBinaryRelease;

    /**
     * Reads the realms configuration and merges the realm-specific
     * resource-types of every realm into the global resource-types
     * configuration file.
     *
     * @throws BuildException if the realms configuration cannot be read or merged
     */
    public void execute() throws BuildException {
        log("INFO: Realms config file: " + realmsConfigFile);
        File realmsConfig = new File(realmsConfigFile.toString());
        log("INFO: Global resource-types config directory: " + globalResourceTypesConfigFile);
        File globalResourceTypesConfig = new File(globalResourceTypesConfigFile.toString());

        // TODO: Insert the package name automatically if binary release and no package and copy-dir-name specified
        RealmManagerConfig realmManagerConfig = new RealmManagerConfig();
        try {
            if (realmsConfig.isFile()) {
                RealmContextConfig[] realmContextConfigs = realmManagerConfig.getRealmContextConfigs(realmsConfig);
                log.info("Merge ...");
                log("Number of realms: " + realmContextConfigs.length);
                for (int i = 0; i < realmContextConfigs.length; i++) {
                    log("Realm context config: " + realmContextConfigs[i]);
                    mergeResourceTypesOfRealm(realmContextConfigs[i].getUnresolvedConfigurationFile(), globalResourceTypesConfig);
                }
            } else {
                log("ERROR: No such realms config '" + realmsConfig.getAbsolutePath() + "' exists!");
            }
        } catch (Exception e) {
            log("ERROR: " + e.getMessage());
            throw new BuildException(e.getMessage(), e);
        }
    }

    /**
     * Ant file task attribute realmsconfigfile
     */
    public void setRealmsConfigFile(Path realmsConfigFile) {
        this.realmsConfigFile = realmsConfigFile;
    }

    /**
     * Ant file task attribute globalresourcetypesconfigfile
     */
    public void setGlobalResourceTypesConfigFile(Path globalResourceTypesConfigFile) {
        this.globalResourceTypesConfigFile = globalResourceTypesConfigFile;
    }

    /**
     * Ant file task attribute isbinaryrelease
     */
    public void setIsBinaryRelease(boolean isBinaryRelease) {
        this.isBinaryRelease = isBinaryRelease;
    }

    /**
     * Merges the resource-types of a single realm into the global configuration
     * file (which is rewritten in place). Resource-types whose 'src' already
     * exists in the global configuration are skipped.
     *
     * @param unresolvedRealmConfig Realm configuration file or realm directory
     * @param globalResourceTypesConfig Global resource-types configuration file
     */
    private void mergeResourceTypesOfRealm(File unresolvedRealmConfig, File globalResourceTypesConfig) {
        File realmDir;
        if (unresolvedRealmConfig.isDirectory()) {
            realmDir = unresolvedRealmConfig;
        } else if (unresolvedRealmConfig.isFile()) {
            realmDir = new File(unresolvedRealmConfig.getParent());
        } else {
            log.error("Neither file nor directory: " + unresolvedRealmConfig);
            return;
        }
        log("INFO: Realm directory: " + realmDir);
        File resourceTypesConfigOfRealm = new File(realmDir, "resource-types.xml");
        if (!resourceTypesConfigOfRealm.isFile()) {
            log("INFO: Realm has no specific resource-types configured.");
            return;
        }
        log("INFO: Realm has specific resource-types configured: " + resourceTypesConfigOfRealm);

        // Streams are held in locals so they can be closed reliably (the
        // original implementation leaked them on every invocation).
        java.io.FileInputStream globalIn = null;
        java.io.FileInputStream realmIn = null;
        java.io.FileOutputStream globalOut = null;
        try {
            globalIn = new java.io.FileInputStream(globalResourceTypesConfig);
            Document globalDoc = XMLHelper.readDocument(globalIn);
            realmIn = new java.io.FileInputStream(resourceTypesConfigOfRealm);
            Document realmDoc = XMLHelper.readDocument(realmIn);

            Element rootElement = globalDoc.getDocumentElement();
            rootElement.appendChild(globalDoc.createComment("Realm specific resource-types (" + resourceTypesConfigOfRealm + "):")); // Only formatting
            String namespace = "http://www.wyona.org/yanel/1.0";
            Element[] resourceTypeElements = XMLHelper.getChildElements(realmDoc.getDocumentElement(), "resource-type", namespace);
            for (int i = 0; i < resourceTypeElements.length; i++) {
                String srcAttr = resourceTypeElements[i].getAttribute("src");
                if (srcAttr != null) {
                    srcAttr = srcAttr.replace("@REALM_SRC_DIR@", resourceTypesConfigOfRealm.getParent().replace(File.separator, "/")); // NOTE: Enforce forward slashes in the case of Windows!
                }
                // TODO: Check for duplicated resource-types also based re package attribute!
                if (!resourceTypeExists(srcAttr, rootElement)) {
                    rootElement.appendChild(globalDoc.createTextNode("\n ")); // Only formatting
                    Element rtElement = globalDoc.createElementNS(namespace, "resource-type");
                    if (srcAttr != null && !srcAttr.equals("")) {
                        rtElement.setAttribute("src", srcAttr);
                    }
                    String packageAttr = resourceTypeElements[i].getAttribute("package");
                    if (packageAttr != null && !packageAttr.equals("")) {
                        rtElement.setAttribute("package", packageAttr);
                    }
                    String compileAttr = resourceTypeElements[i].getAttribute("compile");
                    if (compileAttr != null && !compileAttr.equals("")) {
                        rtElement.setAttribute("compile", compileAttr);
                    }
                    String copyDirNameAttr = resourceTypeElements[i].getAttribute("copy-dir-name");
                    if (copyDirNameAttr != null && !copyDirNameAttr.equals("")) {
                        rtElement.setAttribute("copy-dir-name", copyDirNameAttr);
                        log("WARN: copy-dir-name attribute is deprecated (Resource '" + srcAttr + "')!");
                        log.warn("copy-dir-name attribute is deprecated!");
                    }
                    if (isBinaryRelease) {
                        // Insert the package name automatically when neither a package
                        // nor a (deprecated) copy-dir-name has been configured.
                        // Null-safe checks for consistency with the guards above.
                        boolean noCopyDirName = copyDirNameAttr == null || copyDirNameAttr.equals("");
                        boolean noPackage = packageAttr == null || packageAttr.equals("");
                        if (noCopyDirName && noPackage) {
                            log("INFO: Insert the package name of the resource '" + srcAttr + "' automatically.");
                            rtElement.setAttribute("package", getJavaPackageOfResourceType(srcAttr));
                        }
                    }
                    rootElement.appendChild(rtElement);
                }
            }
            rootElement.appendChild(globalDoc.createTextNode("\n\n")); // Only formatting
            globalOut = new java.io.FileOutputStream(globalResourceTypesConfig);
            XMLHelper.writeDocument(globalDoc, globalOut);
        } catch(Exception e) {
            log.error(e, e);
        } finally {
            closeQuietly(globalIn);
            closeQuietly(realmIn);
            closeQuietly(globalOut);
        }
    }

    /**
     * Closes a stream if not null, ignoring any error (best effort cleanup).
     */
    private void closeQuietly(java.io.Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (Exception e) {
                log.warn("Could not close stream: " + e.getMessage());
            }
        }
    }

    /**
     * Checks whether a resource-type with the given 'src' attribute already
     * exists below the root element of the global configuration.
     */
    private boolean resourceTypeExists(String src, Element rootElement) throws Exception {
        Element[] elements = XMLHelper.getElements(rootElement, "resource-type", "src", src);
        return elements.length > 0;
    }

    /**
     * Determines the Java package of a resource type from its resource.xml.
     *
     * @param resourceHomePath Source directory of resource type
     */
    private String getJavaPackageOfResourceType(String resourceHomePath) throws Exception {
        String classname = new ResourceTypeDefinition(new File(resourceHomePath, "resource.xml")).getResourceTypeClassname();
        //return Class.forName(classname).getPackage().getName(); // NOTE: This doesn't work, because java classloader check
        return classname.substring(0, classname.lastIndexOf(".")); // NOTE: This doesn't work in the case of ...
    }
}
|
insert packages automatically
|
src/build/java/org/wyona/yanel/ant/MergeResourceTypesConfigsTask.java
|
insert packages automatically
|
|
Java
|
bsd-2-clause
|
0ff91107b7c1d4a2c507d2ee3aa5b497c3c3f3ac
| 0
|
abelbriggs1/runelite,abelbriggs1/runelite,runelite/runelite,l2-/runelite,runelite/runelite,runelite/runelite,abelbriggs1/runelite,l2-/runelite,Sethtroll/runelite,KronosDesign/runelite,Sethtroll/runelite,KronosDesign/runelite
|
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.ui.overlay;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
/**
 * Tests for overlay equality semantics and the rendering order imposed by
 * {@code OverlayManager.OVERLAY_COMPARATOR}.
 */
public class OverlayManagerTest
{
	/**
	 * Minimal overlay stub with configurable position and priority.
	 * Declared static (consistent with OverlayA/OverlayB below) so no hidden
	 * reference to the enclosing test instance is retained.
	 */
	private static class TestOverlay extends Overlay
	{
		TestOverlay(OverlayPosition position, OverlayPriority priority)
		{
			setPosition(position);
			setPriority(priority);
		}

		@Override
		public Dimension render(Graphics2D graphics)
		{
			// Rendering is never exercised by these tests
			throw new UnsupportedOperationException("Not supported yet.");
		}
	}

	// Two distinct Overlay subclasses for the equality test
	private static class OverlayA extends Overlay
	{
		@Override
		public Dimension render(Graphics2D graphics)
		{
			return null;
		}
	}

	private static class OverlayB extends Overlay
	{
		@Override
		public Dimension render(Graphics2D graphics)
		{
			return null;
		}
	}

	@Test
	public void testEquality()
	{
		Overlay a1 = new OverlayA();
		Overlay a2 = new OverlayA();
		Overlay b = new OverlayB();

		// The same instance of the same overlay should be equal
		assertTrue(a1.equals(a1));
		// A different instance of the same overlay should not be equal by default
		assertFalse(a1.equals(a2));
		// A different instance of a different overlay should not be equal
		assertFalse(a1.equals(b));
	}

	@Test
	public void testSort()
	{
		// High priorities overlays render first
		Overlay tlh = new TestOverlay(OverlayPosition.TOP_LEFT, OverlayPriority.HIGH);
		Overlay tll = new TestOverlay(OverlayPosition.TOP_LEFT, OverlayPriority.LOW);
		List<Overlay> overlays = Arrays.asList(tlh, tll);
		overlays.sort(OverlayManager.OVERLAY_COMPARATOR);
		assertEquals(tlh, overlays.get(0));
		assertEquals(tll, overlays.get(1));
	}

	@Test
	public void testSortDynamic()
	{
		// Dynamic overlays render before static overlays
		Overlay tlh = new TestOverlay(OverlayPosition.TOP_LEFT, OverlayPriority.HIGH);
		Overlay dyn = new TestOverlay(OverlayPosition.DYNAMIC, OverlayPriority.HIGH);
		List<Overlay> overlays = Arrays.asList(tlh, dyn);
		overlays.sort(OverlayManager.OVERLAY_COMPARATOR);
		assertEquals(dyn, overlays.get(0));
		assertEquals(tlh, overlays.get(1));
	}

	@Test
	public void testTooltips()
	{
		// Tooltip overlay renders after everything
		Overlay t = new TestOverlay(OverlayPosition.TOOLTIP, OverlayPriority.HIGH);
		Overlay dyn = new TestOverlay(OverlayPosition.DYNAMIC, OverlayPriority.HIGH);
		Overlay tlh = new TestOverlay(OverlayPosition.TOP_LEFT, OverlayPriority.HIGH);
		List<Overlay> overlays = Arrays.asList(t, dyn, tlh);
		overlays.sort(OverlayManager.OVERLAY_COMPARATOR);
		assertEquals(dyn, overlays.get(0));
		assertEquals(tlh, overlays.get(1));
		assertEquals(t, overlays.get(2));
	}
}
|
runelite-client/src/test/java/net/runelite/client/ui/overlay/OverlayManagerTest.java
|
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.ui.overlay;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
/**
 * Tests for the rendering order imposed by
 * {@code OverlayManager.OVERLAY_COMPARATOR}.
 */
public class OverlayManagerTest
{
	/**
	 * Minimal overlay stub with configurable position and priority.
	 * Declared static so no hidden reference to the enclosing test instance
	 * is retained.
	 */
	private static class TestOverlay extends Overlay
	{
		TestOverlay(OverlayPosition position, OverlayPriority priority)
		{
			setPosition(position);
			setPriority(priority);
		}

		@Override
		public Dimension render(Graphics2D graphics)
		{
			// Rendering is never exercised by these tests
			throw new UnsupportedOperationException("Not supported yet.");
		}
	}

	@Test
	public void testSort()
	{
		// High priorities overlays render first
		Overlay tlh = new TestOverlay(OverlayPosition.TOP_LEFT, OverlayPriority.HIGH);
		Overlay tll = new TestOverlay(OverlayPosition.TOP_LEFT, OverlayPriority.LOW);
		List<Overlay> overlays = Arrays.asList(tlh, tll);
		overlays.sort(OverlayManager.OVERLAY_COMPARATOR);
		assertEquals(tlh, overlays.get(0));
		assertEquals(tll, overlays.get(1));
	}

	@Test
	public void testSortDynamic()
	{
		// Dynamic overlays render before static overlays
		Overlay tlh = new TestOverlay(OverlayPosition.TOP_LEFT, OverlayPriority.HIGH);
		Overlay dyn = new TestOverlay(OverlayPosition.DYNAMIC, OverlayPriority.HIGH);
		List<Overlay> overlays = Arrays.asList(tlh, dyn);
		overlays.sort(OverlayManager.OVERLAY_COMPARATOR);
		assertEquals(dyn, overlays.get(0));
		assertEquals(tlh, overlays.get(1));
	}

	@Test
	public void testTooltips()
	{
		// Tooltip overlay renders after everything
		Overlay t = new TestOverlay(OverlayPosition.TOOLTIP, OverlayPriority.HIGH);
		Overlay dyn = new TestOverlay(OverlayPosition.DYNAMIC, OverlayPriority.HIGH);
		Overlay tlh = new TestOverlay(OverlayPosition.TOP_LEFT, OverlayPriority.HIGH);
		List<Overlay> overlays = Arrays.asList(t, dyn, tlh);
		overlays.sort(OverlayManager.OVERLAY_COMPARATOR);
		assertEquals(dyn, overlays.get(0));
		assertEquals(tlh, overlays.get(1));
		assertEquals(t, overlays.get(2));
	}
}
|
runelite-client: Test Overlay equality
If Overlay::equals returns true for different subclasses it breaks the OverlayManager, because it can no longer remove overlays correctly.
|
runelite-client/src/test/java/net/runelite/client/ui/overlay/OverlayManagerTest.java
|
runelite-client: Test Overlay equality
|
|
Java
|
bsd-3-clause
|
15807cc544077b5ba64d4780d558c471625eaa4c
| 0
|
dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk
|
/*
* Copyright (c) 2017, University of Oslo
*
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.calls;
import android.support.annotation.NonNull;
import org.hisp.dhis.android.core.category.Category;
import org.hisp.dhis.android.core.category.CategoryComboHandler;
import org.hisp.dhis.android.core.category.CategoryEndpointCall;
import org.hisp.dhis.android.core.category.CategoryCombo;
import org.hisp.dhis.android.core.category.CategoryComboEndpointCall;
import org.hisp.dhis.android.core.category.CategoryComboQuery;
import org.hisp.dhis.android.core.category.CategoryComboService;
import org.hisp.dhis.android.core.category.CategoryHandler;
import org.hisp.dhis.android.core.category.CategoryQuery;
import org.hisp.dhis.android.core.category.CategoryService;
import org.hisp.dhis.android.core.category.ResponseValidator;
import org.hisp.dhis.android.core.common.Payload;
import org.hisp.dhis.android.core.data.database.DatabaseAdapter;
import org.hisp.dhis.android.core.data.database.Transaction;
import org.hisp.dhis.android.core.dataelement.DataElementStore;
import org.hisp.dhis.android.core.option.OptionSetCall;
import org.hisp.dhis.android.core.option.OptionSetService;
import org.hisp.dhis.android.core.option.OptionSetStore;
import org.hisp.dhis.android.core.option.OptionStore;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnit;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitCall;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitProgramLinkStore;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitService;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitStore;
import org.hisp.dhis.android.core.program.Program;
import org.hisp.dhis.android.core.program.ProgramCall;
import org.hisp.dhis.android.core.program.ProgramIndicatorStore;
import org.hisp.dhis.android.core.program.ProgramRuleActionStore;
import org.hisp.dhis.android.core.program.ProgramRuleStore;
import org.hisp.dhis.android.core.program.ProgramRuleVariableStore;
import org.hisp.dhis.android.core.program.ProgramService;
import org.hisp.dhis.android.core.program.ProgramStage;
import org.hisp.dhis.android.core.program.ProgramStageDataElement;
import org.hisp.dhis.android.core.program.ProgramStageDataElementStore;
import org.hisp.dhis.android.core.program.ProgramStageSectionProgramIndicatorLinkStore;
import org.hisp.dhis.android.core.program.ProgramStageSectionStore;
import org.hisp.dhis.android.core.program.ProgramStageStore;
import org.hisp.dhis.android.core.program.ProgramStore;
import org.hisp.dhis.android.core.program.ProgramTrackedEntityAttribute;
import org.hisp.dhis.android.core.program.ProgramTrackedEntityAttributeStore;
import org.hisp.dhis.android.core.relationship.RelationshipTypeStore;
import org.hisp.dhis.android.core.resource.ResourceHandler;
import org.hisp.dhis.android.core.resource.ResourceStore;
import org.hisp.dhis.android.core.systeminfo.SystemInfo;
import org.hisp.dhis.android.core.systeminfo.SystemInfoCall;
import org.hisp.dhis.android.core.systeminfo.SystemInfoService;
import org.hisp.dhis.android.core.systeminfo.SystemInfoStore;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityAttributeStore;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityCall;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityService;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityStore;
import org.hisp.dhis.android.core.user.User;
import org.hisp.dhis.android.core.user.UserCall;
import org.hisp.dhis.android.core.user.UserCredentialsStore;
import org.hisp.dhis.android.core.user.UserOrganisationUnitLinkStore;
import org.hisp.dhis.android.core.user.UserRole;
import org.hisp.dhis.android.core.user.UserRoleProgramLinkStore;
import org.hisp.dhis.android.core.user.UserRoleStore;
import org.hisp.dhis.android.core.user.UserService;
import org.hisp.dhis.android.core.user.UserStore;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import retrofit2.Response;
@SuppressWarnings({"PMD.ExcessiveImports", "PMD.TooManyFields", "PMD.CyclomaticComplexity",
        "PMD.ModifiedCyclomaticComplexity", "PMD.StdCyclomaticComplexity"})
public class MetadataCall implements Call<Response> {
    private final DatabaseAdapter databaseAdapter;
    private final SystemInfoService systemInfoService;
    private final UserService userService;
    private final ProgramService programService;
    private final OrganisationUnitService organisationUnitService;
    private final TrackedEntityService trackedEntityService;
    private final OptionSetService optionSetService;
    private final SystemInfoStore systemInfoStore;
    private final ResourceStore resourceStore;
    private final UserStore userStore;
    private final UserCredentialsStore userCredentialsStore;
    private final UserRoleStore userRoleStore;
    private final UserRoleProgramLinkStore userRoleProgramLinkStore;
    private final OrganisationUnitStore organisationUnitStore;
    private final UserOrganisationUnitLinkStore userOrganisationUnitLinkStore;
    private final ProgramStore programStore;
    private final TrackedEntityAttributeStore trackedEntityAttributeStore;
    private final ProgramTrackedEntityAttributeStore programTrackedEntityAttributeStore;
    private final ProgramRuleVariableStore programRuleVariableStore;
    private final ProgramIndicatorStore programIndicatorStore;
    private final ProgramStageSectionProgramIndicatorLinkStore
            programStageSectionProgramIndicatorLinkStore;
    private final ProgramRuleActionStore programRuleActionStore;
    private final ProgramRuleStore programRuleStore;
    private final OptionStore optionStore;
    private final OptionSetStore optionSetStore;
    private final DataElementStore dataElementStore;
    private final ProgramStageDataElementStore programStageDataElementStore;
    private final ProgramStageSectionStore programStageSectionStore;
    private final ProgramStageStore programStageStore;
    private final RelationshipTypeStore relationshipStore;
    private final TrackedEntityStore trackedEntityStore;
    private final CategoryQuery categoryQuery;
    private final CategoryComboQuery categoryComboQuery;
    private final CategoryService categoryService;
    private final CategoryComboService categoryComboService;
    private final CategoryHandler categoryHandler;
    private final CategoryComboHandler categoryComboHandler;
    // Guards against executing this one-shot call more than once (see call())
    private boolean isExecuted;
    private final OrganisationUnitProgramLinkStore organisationUnitProgramLinkStore;

    public MetadataCall(@NonNull DatabaseAdapter databaseAdapter,
            @NonNull SystemInfoService systemInfoService,
            @NonNull UserService userService,
            @NonNull ProgramService programService,
            @NonNull OrganisationUnitService organisationUnitService,
            @NonNull TrackedEntityService trackedEntityService,
            @NonNull OptionSetService optionSetService,
            @NonNull SystemInfoStore systemInfoStore,
            @NonNull ResourceStore resourceStore,
            @NonNull UserStore userStore,
            @NonNull UserCredentialsStore userCredentialsStore,
            @NonNull UserRoleStore userRoleStore,
            @NonNull UserRoleProgramLinkStore userRoleProgramLinkStore,
            @NonNull OrganisationUnitStore organisationUnitStore,
            @NonNull UserOrganisationUnitLinkStore userOrganisationUnitLinkStore,
            @NonNull ProgramStore programStore,
            @NonNull TrackedEntityAttributeStore trackedEntityAttributeStore,
            @NonNull ProgramTrackedEntityAttributeStore programTrackedEntityAttributeStore,
            @NonNull ProgramRuleVariableStore programRuleVariableStore,
            @NonNull ProgramIndicatorStore programIndicatorStore,
            @NonNull ProgramStageSectionProgramIndicatorLinkStore
                    programStageSectionProgramIndicatorLinkStore,
            @NonNull ProgramRuleActionStore programRuleActionStore,
            @NonNull ProgramRuleStore programRuleStore,
            @NonNull OptionStore optionStore,
            @NonNull OptionSetStore optionSetStore,
            @NonNull DataElementStore dataElementStore,
            @NonNull ProgramStageDataElementStore programStageDataElementStore,
            @NonNull ProgramStageSectionStore programStageSectionStore,
            @NonNull ProgramStageStore programStageStore,
            @NonNull RelationshipTypeStore relationshipStore,
            @NonNull TrackedEntityStore trackedEntityStore,
            @NonNull OrganisationUnitProgramLinkStore organisationUnitProgramLinkStore,
            @NonNull CategoryQuery categoryQuery,
            @NonNull CategoryService categoryService,
            @NonNull CategoryHandler categoryHandler,
            @NonNull CategoryComboQuery categoryComboQuery,
            @NonNull CategoryComboService categoryComboService,
            @NonNull CategoryComboHandler categoryComboHandler) {
        this.databaseAdapter = databaseAdapter;
        this.systemInfoService = systemInfoService;
        this.userService = userService;
        this.programService = programService;
        this.organisationUnitService = organisationUnitService;
        this.trackedEntityService = trackedEntityService;
        this.optionSetService = optionSetService;
        this.systemInfoStore = systemInfoStore;
        this.resourceStore = resourceStore;
        this.userStore = userStore;
        this.userCredentialsStore = userCredentialsStore;
        this.userRoleStore = userRoleStore;
        this.userRoleProgramLinkStore = userRoleProgramLinkStore;
        this.organisationUnitStore = organisationUnitStore;
        this.userOrganisationUnitLinkStore = userOrganisationUnitLinkStore;
        this.programStore = programStore;
        this.trackedEntityAttributeStore = trackedEntityAttributeStore;
        this.programTrackedEntityAttributeStore = programTrackedEntityAttributeStore;
        this.programRuleVariableStore = programRuleVariableStore;
        this.programIndicatorStore = programIndicatorStore;
        this.programStageSectionProgramIndicatorLinkStore =
                programStageSectionProgramIndicatorLinkStore;
        this.programRuleActionStore = programRuleActionStore;
        this.programRuleStore = programRuleStore;
        this.optionStore = optionStore;
        this.optionSetStore = optionSetStore;
        this.dataElementStore = dataElementStore;
        this.programStageDataElementStore = programStageDataElementStore;
        this.programStageSectionStore = programStageSectionStore;
        this.programStageStore = programStageStore;
        this.relationshipStore = relationshipStore;
        this.trackedEntityStore = trackedEntityStore;
        this.organisationUnitProgramLinkStore = organisationUnitProgramLinkStore;
        this.categoryQuery = categoryQuery;
        this.categoryService = categoryService;
        this.categoryHandler = categoryHandler;
        this.categoryComboQuery = categoryComboQuery;
        this.categoryComboService = categoryComboService;
        this.categoryComboHandler = categoryComboHandler;
    }

    @Override
    public boolean isExecuted() {
        synchronized (this) {
            return isExecuted;
        }
    }

    /**
     * Downloads all metadata in dependency order (system info, user,
     * organisation units, categories, programs, tracked entities, option
     * sets) inside a single database transaction. The transaction is only
     * marked successful when every download succeeded; on any unsuccessful
     * HTTP response that response is returned and the transaction rolls back.
     *
     * @throws IllegalStateException if invoked more than once
     */
    @SuppressWarnings("PMD.NPathComplexity")
    @Override
    public Response call() throws Exception {
        synchronized (this) {
            if (isExecuted) {
                throw new IllegalStateException("Already executed");
            }
            isExecuted = true;
        }

        Response response = null;
        Transaction transaction = databaseAdapter.beginNewTransaction();
        try {
            response = new SystemInfoCall(
                    databaseAdapter, systemInfoStore,
                    systemInfoService, resourceStore
            ).call();
            if (!response.isSuccessful()) {
                return response;
            }
            SystemInfo systemInfo = (SystemInfo) response.body();
            // The server date stamps every downloaded resource for later syncs
            Date serverDate = systemInfo.serverDate();

            response = new UserCall(
                    userService,
                    databaseAdapter,
                    userStore,
                    userCredentialsStore,
                    userRoleStore,
                    resourceStore,
                    serverDate,
                    userRoleProgramLinkStore
            ).call();
            if (!response.isSuccessful()) {
                return response;
            }
            User user = (User) response.body();

            response = new OrganisationUnitCall(
                    user, organisationUnitService, databaseAdapter, organisationUnitStore,
                    resourceStore, serverDate, userOrganisationUnitLinkStore,
                    organisationUnitProgramLinkStore).call();
            if (!response.isSuccessful()) {
                return response;
            }

            response = downloadCategories(serverDate);
            if (!response.isSuccessful()) {
                return response;
            }

            response = downloadCategoryCombos(serverDate);
            if (!response.isSuccessful()) {
                return response;
            }

            // Only programs assigned to the user (via roles or org units) are fetched
            Set<String> programUids = getAssignedProgramUids(user);
            response = new ProgramCall(
                    programService, databaseAdapter, resourceStore, programUids, programStore,
                    serverDate,
                    trackedEntityAttributeStore, programTrackedEntityAttributeStore,
                    programRuleVariableStore,
                    programIndicatorStore, programStageSectionProgramIndicatorLinkStore,
                    programRuleActionStore,
                    programRuleStore, optionStore, optionSetStore, dataElementStore,
                    programStageDataElementStore,
                    programStageSectionStore, programStageStore, relationshipStore
            ).call();
            if (!response.isSuccessful()) {
                return response;
            }
            // Unchecked but safe: ProgramCall returns a Response<Payload<Program>>
            List<Program> programs = ((Response<Payload<Program>>) response).body().items();

            Set<String> trackedEntityUids = getAssignedTrackedEntityUids(programs);
            response = new TrackedEntityCall(
                    trackedEntityUids, databaseAdapter, trackedEntityStore,
                    resourceStore, trackedEntityService, serverDate
            ).call();
            if (!response.isSuccessful()) {
                return response;
            }

            Set<String> optionSetUids = getAssignedOptionSetUids(programs);
            response = new OptionSetCall(
                    optionSetService, optionSetStore, databaseAdapter, resourceStore,
                    optionSetUids, serverDate, optionStore
            ).call();
            if (!response.isSuccessful()) {
                return response;
            }

            transaction.setSuccessful();
            return response;
        } finally {
            transaction.end();
        }
    }

    // Utility methods:

    /**
     * Collects the uids of all option sets referenced by the given programs
     * (via tracked entity attributes and program stage data elements).
     * Returns null when programs is null.
     */
    private Set<String> getAssignedOptionSetUids(List<Program> programs) {
        if (programs == null) {
            return null;
        }
        Set<String> uids = new HashSet<>();
        for (Program program : programs) {
            getOptionSetUidsForAttributes(uids, program);
            getOptionSetUidsForDataElements(uids, program);
        }
        return uids;
    }

    /** Adds the option set uids of all data elements of the program's stages. */
    private void getOptionSetUidsForDataElements(Set<String> uids, Program program) {
        for (ProgramStage programStage : program.programStages()) {
            for (ProgramStageDataElement programStageDataElement :
                    programStage.programStageDataElements()) {
                if (programStageDataElement.dataElement() != null &&
                        programStageDataElement.dataElement().optionSet() != null) {
                    uids.add(programStageDataElement.dataElement().optionSet().uid());
                }
            }
        }
    }

    /** Adds the option set uids of the program's tracked entity attributes. */
    private void getOptionSetUidsForAttributes(Set<String> uids, Program program) {
        for (ProgramTrackedEntityAttribute programTrackedEntityAttribute :
                program.programTrackedEntityAttributes()) {
            if (programTrackedEntityAttribute.trackedEntityAttribute() != null &&
                    programTrackedEntityAttribute.trackedEntityAttribute().optionSet() != null) {
                uids.add(programTrackedEntityAttribute.trackedEntityAttribute().optionSet().uid());
            }
        }
    }

    /**
     * Collects the uids of the tracked entities referenced by the given
     * programs. Returns null when programs is null.
     */
    private Set<String> getAssignedTrackedEntityUids(List<Program> programs) {
        if (programs == null) {
            return null;
        }
        Set<String> uids = new HashSet<>();
        for (Program program : programs) {
            if (program.trackedEntity() != null) {
                uids.add(program.trackedEntity().uid());
            }
        }
        return uids;
    }

    /**
     * Collects the uids of all programs assigned to the user, either through
     * user roles or through the user's organisation units. Returns null when
     * the user (or its credentials/roles) is null.
     */
    private Set<String> getAssignedProgramUids(User user) {
        if (user == null || user.userCredentials() == null
                || user.userCredentials().userRoles() == null) {
            return null;
        }
        Set<String> programUids = new HashSet<>();
        getProgramUidsFromUserRoles(user, programUids);
        getProgramUidsFromOrganisationUnits(user, programUids);
        return programUids;
    }

    /** Adds the program uids of all of the user's organisation units. */
    private void getProgramUidsFromOrganisationUnits(User user, Set<String> programUids) {
        List<OrganisationUnit> organisationUnits = user.organisationUnits();
        if (organisationUnits != null) {
            for (OrganisationUnit organisationUnit : organisationUnits) {
                for (Program program : organisationUnit.programs()) {
                    programUids.add(program.uid());
                }
            }
        }
    }

    /** Adds the program uids of all of the user's roles. */
    private void getProgramUidsFromUserRoles(User user, Set<String> programUids) {
        List<UserRole> userRoles = user.userCredentials().userRoles();
        if (userRoles != null) {
            for (UserRole userRole : userRoles) {
                for (Program program : userRole.programs()) {
                    programUids.add(program.uid());
                }
            }
        }
    }

    /** Downloads and persists all categories, stamped with the server date. */
    private Response<Payload<Category>> downloadCategories(Date serverDate) throws Exception {
        ResponseValidator<Category> validator = new ResponseValidator<>();
        return new CategoryEndpointCall(categoryQuery, categoryService, validator,
                categoryHandler,
                new ResourceHandler(resourceStore), databaseAdapter, serverDate).call();
    }

    /** Downloads and persists all category combos, stamped with the server date. */
    private Response<Payload<CategoryCombo>> downloadCategoryCombos(Date serverDate)
            throws Exception {
        ResponseValidator<CategoryCombo> comboValidator = new ResponseValidator<>();
        return new CategoryComboEndpointCall(categoryComboQuery, categoryComboService,
                comboValidator, categoryComboHandler,
                new ResourceHandler(resourceStore), databaseAdapter, serverDate).call();
    }
}
|
core/src/main/java/org/hisp/dhis/android/core/calls/MetadataCall.java
|
/*
* Copyright (c) 2017, University of Oslo
*
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.calls;
import android.support.annotation.NonNull;
import org.hisp.dhis.android.core.category.Category;
import org.hisp.dhis.android.core.category.CategoryComboHandler;
import org.hisp.dhis.android.core.category.CategoryEndpointCall;
import org.hisp.dhis.android.core.category.CategoryCombo;
import org.hisp.dhis.android.core.category.CategoryComboEndpointCall;
import org.hisp.dhis.android.core.category.CategoryComboQuery;
import org.hisp.dhis.android.core.category.CategoryComboService;
import org.hisp.dhis.android.core.category.CategoryHandler;
import org.hisp.dhis.android.core.category.CategoryQuery;
import org.hisp.dhis.android.core.category.CategoryService;
import org.hisp.dhis.android.core.category.ResponseValidator;
import org.hisp.dhis.android.core.common.Payload;
import org.hisp.dhis.android.core.data.database.DatabaseAdapter;
import org.hisp.dhis.android.core.data.database.Transaction;
import org.hisp.dhis.android.core.dataelement.DataElementStore;
import org.hisp.dhis.android.core.option.OptionSetCall;
import org.hisp.dhis.android.core.option.OptionSetService;
import org.hisp.dhis.android.core.option.OptionSetStore;
import org.hisp.dhis.android.core.option.OptionStore;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnit;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitCall;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitProgramLinkStore;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitService;
import org.hisp.dhis.android.core.organisationunit.OrganisationUnitStore;
import org.hisp.dhis.android.core.program.Program;
import org.hisp.dhis.android.core.program.ProgramCall;
import org.hisp.dhis.android.core.program.ProgramIndicatorStore;
import org.hisp.dhis.android.core.program.ProgramRuleActionStore;
import org.hisp.dhis.android.core.program.ProgramRuleStore;
import org.hisp.dhis.android.core.program.ProgramRuleVariableStore;
import org.hisp.dhis.android.core.program.ProgramService;
import org.hisp.dhis.android.core.program.ProgramStage;
import org.hisp.dhis.android.core.program.ProgramStageDataElement;
import org.hisp.dhis.android.core.program.ProgramStageDataElementStore;
import org.hisp.dhis.android.core.program.ProgramStageSectionProgramIndicatorLinkStore;
import org.hisp.dhis.android.core.program.ProgramStageSectionStore;
import org.hisp.dhis.android.core.program.ProgramStageStore;
import org.hisp.dhis.android.core.program.ProgramStore;
import org.hisp.dhis.android.core.program.ProgramTrackedEntityAttribute;
import org.hisp.dhis.android.core.program.ProgramTrackedEntityAttributeStore;
import org.hisp.dhis.android.core.relationship.RelationshipTypeStore;
import org.hisp.dhis.android.core.resource.ResourceHandler;
import org.hisp.dhis.android.core.resource.ResourceStore;
import org.hisp.dhis.android.core.systeminfo.SystemInfo;
import org.hisp.dhis.android.core.systeminfo.SystemInfoCall;
import org.hisp.dhis.android.core.systeminfo.SystemInfoService;
import org.hisp.dhis.android.core.systeminfo.SystemInfoStore;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityAttributeStore;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityCall;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityService;
import org.hisp.dhis.android.core.trackedentity.TrackedEntityStore;
import org.hisp.dhis.android.core.user.User;
import org.hisp.dhis.android.core.user.UserCall;
import org.hisp.dhis.android.core.user.UserCredentialsStore;
import org.hisp.dhis.android.core.user.UserOrganisationUnitLinkStore;
import org.hisp.dhis.android.core.user.UserRole;
import org.hisp.dhis.android.core.user.UserRoleProgramLinkStore;
import org.hisp.dhis.android.core.user.UserRoleStore;
import org.hisp.dhis.android.core.user.UserService;
import org.hisp.dhis.android.core.user.UserStore;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import retrofit2.Response;
@SuppressWarnings({"PMD.ExcessiveImports", "PMD.TooManyFields", "PMD.CyclomaticComplexity",
"PMD.ModifiedCyclomaticComplexity", "PMD.StdCyclomaticComplexity"})
public class MetadataCall implements Call<Response> {
private final DatabaseAdapter databaseAdapter;
private final SystemInfoService systemInfoService;
private final UserService userService;
private final ProgramService programService;
private final OrganisationUnitService organisationUnitService;
private final TrackedEntityService trackedEntityService;
private final OptionSetService optionSetService;
private final SystemInfoStore systemInfoStore;
private final ResourceStore resourceStore;
private final UserStore userStore;
private final UserCredentialsStore userCredentialsStore;
private final UserRoleStore userRoleStore;
private final UserRoleProgramLinkStore userRoleProgramLinkStore;
private final OrganisationUnitStore organisationUnitStore;
private final UserOrganisationUnitLinkStore userOrganisationUnitLinkStore;
private final ProgramStore programStore;
private final TrackedEntityAttributeStore trackedEntityAttributeStore;
private final ProgramTrackedEntityAttributeStore programTrackedEntityAttributeStore;
private final ProgramRuleVariableStore programRuleVariableStore;
private final ProgramIndicatorStore programIndicatorStore;
private final ProgramStageSectionProgramIndicatorLinkStore
programStageSectionProgramIndicatorLinkStore;
private final ProgramRuleActionStore programRuleActionStore;
private final ProgramRuleStore programRuleStore;
private final OptionStore optionStore;
private final OptionSetStore optionSetStore;
private final DataElementStore dataElementStore;
private final ProgramStageDataElementStore programStageDataElementStore;
private final ProgramStageSectionStore programStageSectionStore;
private final ProgramStageStore programStageStore;
private final RelationshipTypeStore relationshipStore;
private final TrackedEntityStore trackedEntityStore;
private final CategoryQuery categoryQuery;
private final CategoryComboQuery categoryComboQuery;
private final CategoryService categoryService;
private final CategoryComboService categoryComboService;
private final CategoryHandler categoryHandler;
private final CategoryComboHandler categoryComboHandler;
private boolean isExecuted;
private final OrganisationUnitProgramLinkStore organisationUnitProgramLinkStore;
public MetadataCall(@NonNull DatabaseAdapter databaseAdapter,
@NonNull SystemInfoService systemInfoService,
@NonNull UserService userService,
@NonNull ProgramService programService,
@NonNull OrganisationUnitService organisationUnitService,
@NonNull TrackedEntityService trackedEntityService,
@NonNull OptionSetService optionSetService,
@NonNull SystemInfoStore systemInfoStore,
@NonNull ResourceStore resourceStore,
@NonNull UserStore userStore,
@NonNull UserCredentialsStore userCredentialsStore,
@NonNull UserRoleStore userRoleStore,
@NonNull UserRoleProgramLinkStore userRoleProgramLinkStore,
@NonNull OrganisationUnitStore organisationUnitStore,
@NonNull UserOrganisationUnitLinkStore userOrganisationUnitLinkStore,
@NonNull ProgramStore programStore,
@NonNull TrackedEntityAttributeStore trackedEntityAttributeStore,
@NonNull ProgramTrackedEntityAttributeStore programTrackedEntityAttributeStore,
@NonNull ProgramRuleVariableStore programRuleVariableStore,
@NonNull ProgramIndicatorStore programIndicatorStore,
@NonNull ProgramStageSectionProgramIndicatorLinkStore
programStageSectionProgramIndicatorLinkStore,
@NonNull ProgramRuleActionStore programRuleActionStore,
@NonNull ProgramRuleStore programRuleStore,
@NonNull OptionStore optionStore,
@NonNull OptionSetStore optionSetStore,
@NonNull DataElementStore dataElementStore,
@NonNull ProgramStageDataElementStore programStageDataElementStore,
@NonNull ProgramStageSectionStore programStageSectionStore,
@NonNull ProgramStageStore programStageStore,
@NonNull RelationshipTypeStore relationshipStore,
@NonNull TrackedEntityStore trackedEntityStore,
@NonNull OrganisationUnitProgramLinkStore organisationUnitProgramLinkStore,
@NonNull CategoryQuery categoryQuery,
@NonNull CategoryService categoryService,
@NonNull CategoryHandler categoryHandler,
@NonNull CategoryComboQuery categoryComboQuery,
@NonNull CategoryComboService categoryComboService,
@NonNull CategoryComboHandler categoryComboHandler) {
this.databaseAdapter = databaseAdapter;
this.systemInfoService = systemInfoService;
this.userService = userService;
this.programService = programService;
this.organisationUnitService = organisationUnitService;
this.trackedEntityService = trackedEntityService;
this.optionSetService = optionSetService;
this.systemInfoStore = systemInfoStore;
this.resourceStore = resourceStore;
this.userStore = userStore;
this.userCredentialsStore = userCredentialsStore;
this.userRoleStore = userRoleStore;
this.userRoleProgramLinkStore = userRoleProgramLinkStore;
this.organisationUnitStore = organisationUnitStore;
this.userOrganisationUnitLinkStore = userOrganisationUnitLinkStore;
this.programStore = programStore;
this.trackedEntityAttributeStore = trackedEntityAttributeStore;
this.programTrackedEntityAttributeStore = programTrackedEntityAttributeStore;
this.programRuleVariableStore = programRuleVariableStore;
this.programIndicatorStore = programIndicatorStore;
this.programStageSectionProgramIndicatorLinkStore =
programStageSectionProgramIndicatorLinkStore;
this.programRuleActionStore = programRuleActionStore;
this.programRuleStore = programRuleStore;
this.optionStore = optionStore;
this.optionSetStore = optionSetStore;
this.dataElementStore = dataElementStore;
this.programStageDataElementStore = programStageDataElementStore;
this.programStageSectionStore = programStageSectionStore;
this.programStageStore = programStageStore;
this.relationshipStore = relationshipStore;
this.trackedEntityStore = trackedEntityStore;
this.organisationUnitProgramLinkStore = organisationUnitProgramLinkStore;
this.categoryQuery = categoryQuery;
this.categoryService = categoryService;
this.categoryHandler = categoryHandler;
this.categoryComboQuery = categoryComboQuery;
this.categoryComboService = categoryComboService;
this.categoryComboHandler = categoryComboHandler;
}
@Override
public boolean isExecuted() {
synchronized (this) {
return isExecuted;
}
}
@SuppressWarnings("PMD.NPathComplexity")
@Override
public Response call() throws Exception {
synchronized (this) {
if (isExecuted) {
throw new IllegalStateException("Already executed");
}
isExecuted = true;
}
Response response = null;
Transaction transaction = databaseAdapter.beginNewTransaction();
try {
response = new SystemInfoCall(
databaseAdapter, systemInfoStore,
systemInfoService, resourceStore
).call();
if (isNotValid(response)) {
return response;
}
SystemInfo systemInfo = (SystemInfo) response.body();
Date serverDate = systemInfo.serverDate();
response = new UserCall(
userService,
databaseAdapter,
userStore,
userCredentialsStore,
userRoleStore,
resourceStore,
serverDate,
userRoleProgramLinkStore
).call();
if (isNotValid(response)) {
return response;
}
User user = (User) response.body();
response = new OrganisationUnitCall(
user, organisationUnitService, databaseAdapter, organisationUnitStore,
resourceStore, serverDate, userOrganisationUnitLinkStore,
organisationUnitProgramLinkStore).call();
if (isNotValid(response)) {
return response;
}
response = downloadCategories(serverDate);
if (isNotValid(response)) {
return response;
}
response = downloadCategoryCombos(serverDate);
if (isNotValid(response)) {
return response;
}
Set<String> programUids = getAssignedProgramUids(user);
response = new ProgramCall(
programService, databaseAdapter, resourceStore, programUids, programStore,
serverDate,
trackedEntityAttributeStore, programTrackedEntityAttributeStore,
programRuleVariableStore,
programIndicatorStore, programStageSectionProgramIndicatorLinkStore,
programRuleActionStore,
programRuleStore, optionStore, optionSetStore, dataElementStore,
programStageDataElementStore,
programStageSectionStore, programStageStore, relationshipStore
).call();
if (isNotValid(response)) {
return response;
}
List<Program> programs = ((Response<Payload<Program>>) response).body().items();
Set<String> trackedEntityUids = getAssignedTrackedEntityUids(programs);
response = new TrackedEntityCall(
trackedEntityUids, databaseAdapter, trackedEntityStore,
resourceStore, trackedEntityService, serverDate
).call();
if (isNotValid(response)) {
return response;
}
Set<String> optionSetUids = getAssignedOptionSetUids(programs);
response = new OptionSetCall(
optionSetService, optionSetStore, databaseAdapter, resourceStore,
optionSetUids, serverDate, optionStore
).call();
if (isNotValid(response)) {
return response;
}
transaction.setSuccessful();
return response;
} finally {
transaction.end();
}
}
private boolean isNotValid(Response response) {
return !response.isSuccessful();
}
/// Utilty methods:
private Set<String> getAssignedOptionSetUids(List<Program> programs) {
if (programs == null) {
return null;
}
Set<String> uids = new HashSet<>();
int size = programs.size();
for (int i = 0; i < size; i++) {
Program program = programs.get(i);
getOptionSetUidsForAttributes(uids, program);
getOptionSetUidsForDataElements(uids, program);
}
return uids;
}
private void getOptionSetUidsForDataElements(Set<String> uids, Program program) {
List<ProgramStage> programStages = program.programStages();
int programStagesSize = programStages.size();
for (int j = 0; j < programStagesSize; j++) {
ProgramStage programStage = programStages.get(j);
List<ProgramStageDataElement> programStageDataElements =
programStage.programStageDataElements();
int programStageDataElementSize = programStageDataElements.size();
for (int k = 0; k < programStageDataElementSize; k++) {
ProgramStageDataElement programStageDataElement = programStageDataElements.get(k);
if (programStageDataElement.dataElement() != null &&
programStageDataElement.dataElement().optionSet() != null) {
uids.add(programStageDataElement.dataElement().optionSet().uid());
}
}
}
}
private void getOptionSetUidsForAttributes(Set<String> uids, Program program) {
int programTrackedEntityAttributeSize = program.programTrackedEntityAttributes().size();
List<ProgramTrackedEntityAttribute> programTrackedEntityAttributes =
program.programTrackedEntityAttributes();
for (int j = 0; j < programTrackedEntityAttributeSize; j++) {
ProgramTrackedEntityAttribute programTrackedEntityAttribute =
programTrackedEntityAttributes.get(j);
if (programTrackedEntityAttribute.trackedEntityAttribute() != null &&
programTrackedEntityAttribute.trackedEntityAttribute().optionSet() != null) {
uids.add(programTrackedEntityAttribute.trackedEntityAttribute().optionSet().uid());
}
}
}
private Set<String> getAssignedTrackedEntityUids(List<Program> programs) {
if (programs == null) {
return null;
}
Set<String> uids = new HashSet<>();
int size = programs.size();
for (int i = 0; i < size; i++) {
Program program = programs.get(i);
if (program.trackedEntity() != null) {
uids.add(program.trackedEntity().uid());
}
}
return uids;
}
private Set<String> getAssignedProgramUids(User user) {
if (user == null || user.userCredentials() == null
|| user.userCredentials().userRoles() == null) {
return null;
}
Set<String> programUids = new HashSet<>();
getProgramUidsFromUserRoles(user, programUids);
getProgramUidsFromOrganisationUnits(user, programUids);
return programUids;
}
private void getProgramUidsFromOrganisationUnits(User user, Set<String> programUids) {
List<OrganisationUnit> organisationUnits = user.organisationUnits();
if (organisationUnits != null) {
int size = organisationUnits.size();
for (int i = 0; i < size; i++) {
OrganisationUnit organisationUnit = organisationUnits.get(i);
int programSize = organisationUnit.programs().size();
for (int j = 0; j < programSize; j++) {
Program program = organisationUnit.programs().get(j);
programUids.add(program.uid());
}
}
}
}
private void getProgramUidsFromUserRoles(User user, Set<String> programUids) {
List<UserRole> userRoles = user.userCredentials().userRoles();
if (userRoles != null) {
int size = userRoles.size();
for (int i = 0; i < size; i++) {
UserRole userRole = userRoles.get(i);
int programSize = userRole.programs().size();
for (int j = 0; j < programSize; j++) {
Program program = userRole.programs().get(j);
programUids.add(program.uid());
}
}
}
}
private Response<Payload<Category>> downloadCategories(Date serverDate) throws Exception {
ResponseValidator<Category> validator = new ResponseValidator<>();
return new CategoryEndpointCall(categoryQuery, categoryService, validator,
categoryHandler,
new ResourceHandler(resourceStore), databaseAdapter, serverDate).call();
}
private Response<Payload<CategoryCombo>> downloadCategoryCombos(Date serverDate)
throws Exception {
ResponseValidator<CategoryCombo> comboValidator = new ResponseValidator<>();
return new CategoryComboEndpointCall(categoryComboQuery, categoryComboService,
comboValidator, categoryComboHandler,
new ResourceHandler(resourceStore), databaseAdapter, serverDate).call();
}
}
|
remove isNotValid function from D2 (function with 1 parameter vs negative condition)
|
core/src/main/java/org/hisp/dhis/android/core/calls/MetadataCall.java
|
remove isNotValid function from D2 (function with 1 parameter vs negative condition)
|
|
Java
|
bsd-3-clause
|
dcd2d44e0964e28f91b3997b776673c1ca11b5f2
| 0
|
vivo-project/Vitro,vivo-project/Vitro,vivo-project/Vitro,vivo-project/Vitro
|
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.ontology.update;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaIngestUtils;
/**
* Performs knowledge base updates if necessary to align with a
* new ontology version.
*
* @author bjl23
*
*/
public class KnowledgeBaseUpdater {
private final Log log = LogFactory.getLog(KnowledgeBaseUpdater.class);
private UpdateSettings settings;
private ChangeLogger logger;
private ChangeRecord record;
public KnowledgeBaseUpdater(UpdateSettings settings) {
this.settings = settings;
this.logger = null;
this.record = new SimpleChangeRecord(settings.getAddedDataFile(), settings.getRemovedDataFile());
}
public void update() throws IOException {
if (this.logger == null) {
this.logger = new SimpleChangeLogger(settings.getLogFile(), settings.getErrorLogFile());
}
long startTime = System.currentTimeMillis();
System.out.println("Migrating the knowledge base");
log.info("Migrating the knowledge base");
logger.log("Started knowledge base migration");
try {
performUpdate();
} catch (Exception e) {
logger.logError(e.getMessage());
e.printStackTrace();
}
if (!logger.errorsWritten()) {
// add assertions to the knowledge base showing that the
// update was successful, so we don't need to run it again.
assertSuccess();
}
record.writeChanges();
logger.closeLogs();
long elapsedSecs = (System.currentTimeMillis() - startTime)/1000;
System.out.println("Finished knowledge base migration in " + elapsedSecs + " second" + (elapsedSecs != 1 ? "s" : ""));
log.info("Finished knowledge base migration in " + elapsedSecs + " second" + (elapsedSecs != 1 ? "s" : ""));
return;
}
private void performUpdate() throws IOException {
log.info("\tperforming SPARQL construct additions (abox)");
performSparqlConstructAdditions(settings.getSparqlConstructAdditionsDir(), settings.getAssertionOntModelSelector().getABoxModel());
log.info("\tperforming SPARQL construct deletions (infenences)");
performSparqlConstructRetractions(settings.getSparqlConstructDeletionsDir(), settings.getInferenceOntModelSelector().getABoxModel());
List<AtomicOntologyChange> rawChanges = getAtomicOntologyChanges();
AtomicOntologyChangeLists changes = new AtomicOntologyChangeLists(rawChanges,settings.getNewTBoxModel(),settings.getOldTBoxModel());
//process the TBox before the ABox
log.info("\tupdating tbox annotations");
updateTBoxAnnotations();
log.info("\tupdating the abox");
updateABox(changes);
}
private void performSparqlConstructAdditions(String sparqlConstructDir, OntModel aboxModel) throws IOException {
Model anonModel = performSparqlConstructs(sparqlConstructDir, aboxModel, true);
if (anonModel == null) {
return;
}
aboxModel.enterCriticalSection(Lock.WRITE);
try {
JenaIngestUtils jiu = new JenaIngestUtils();
Model additions = jiu.renameBNodes(anonModel, settings.getDefaultNamespace() + "n", aboxModel);
Model actualAdditions = ModelFactory.createDefaultModel();
StmtIterator stmtIt = additions.listStatements();
while (stmtIt.hasNext()) {
Statement stmt = stmtIt.nextStatement();
if (!aboxModel.contains(stmt)) {
actualAdditions.add(stmt);
}
}
aboxModel.add(actualAdditions);
record.recordAdditions(actualAdditions);
} finally {
aboxModel.leaveCriticalSection();
}
}
private void performSparqlConstructRetractions(String sparqlConstructDir, OntModel model) throws IOException {
Model retractions = performSparqlConstructs(sparqlConstructDir, model, false);
if (retractions == null) {
return;
}
model.enterCriticalSection(Lock.WRITE);
try {
model.remove(retractions);
record.recordRetractions(retractions);
} finally {
model.leaveCriticalSection();
}
}
/**
* Performs a set of arbitrary SPARQL CONSTRUCT queries on the
* data, for changes that cannot be expressed as simple property
* or class additions, deletions, or renamings.
* Blank nodes created by the queries are given random URIs.
* @param sparqlConstructDir
* @param aboxModel
*/
private Model performSparqlConstructs(String sparqlConstructDir,
OntModel aboxModel, boolean add) throws IOException {
Model anonModel = ModelFactory.createDefaultModel();
File sparqlConstructDirectory = new File(sparqlConstructDir);
if (!sparqlConstructDirectory.isDirectory()) {
logger.logError(this.getClass().getName() +
"performSparqlConstructs() expected to find a directory " +
" at " + sparqlConstructDir + ". Unable to execute " +
" SPARQL CONSTRUCTS.");
return null;
}
File[] sparqlFiles = sparqlConstructDirectory.listFiles();
for (int i = 0; i < sparqlFiles.length; i ++) {
File sparqlFile = sparqlFiles[i];
try {
BufferedReader reader =
new BufferedReader(new FileReader(sparqlFile));
StringBuffer fileContents = new StringBuffer();
String ln;
while ( (ln = reader.readLine()) != null) {
fileContents.append(ln).append('\n');
}
try {
log.debug("\t\tprocessing SPARQL construct query from file " + sparqlFiles[i].getName());
Query q = QueryFactory.create(fileContents.toString(), Syntax.syntaxARQ);
aboxModel.enterCriticalSection(Lock.WRITE);
try {
QueryExecution qe = QueryExecutionFactory.create(q, aboxModel);
long numBefore = anonModel.size();
qe.execConstruct(anonModel);
long numAfter = anonModel.size();
long num = numAfter - numBefore;
if (num > 0) {
logger.log((add ? "Added " : "Removed ") + num + (add ? "" : " inferred") +
" statement" + ((num > 1) ? "s" : "") +
" using the SPARQL construct query from file " + sparqlFiles[i].getName());
}
qe.close();
} finally {
aboxModel.leaveCriticalSection();
}
} catch (Exception e) {
logger.logError(this.getClass().getName() +
".performSparqlConstructs() unable to execute " +
"query at " + sparqlFile + ". Error message is: " + e.getMessage());
}
} catch (FileNotFoundException fnfe) {
logger.log("WARNING: performSparqlConstructs() could not find " +
" SPARQL CONSTRUCT file " + sparqlFile + ". Skipping.");
}
}
return anonModel;
}
private List<AtomicOntologyChange> getAtomicOntologyChanges()
throws IOException {
return (new OntologyChangeParser(logger)).parseFile(settings.getDiffFile());
}
private void updateABox(AtomicOntologyChangeLists changes)
throws IOException {
OntModel oldTBoxModel = settings.getOldTBoxModel();
OntModel newTBoxModel = settings.getNewTBoxModel();
OntModel ABoxModel = settings.getAssertionOntModelSelector().getABoxModel();
ABoxUpdater aboxUpdater = new ABoxUpdater(
oldTBoxModel, newTBoxModel, ABoxModel,
settings.getNewTBoxAnnotationsModel(), logger, record);
aboxUpdater.processPropertyChanges(changes.getAtomicPropertyChanges());
aboxUpdater.processClassChanges(changes.getAtomicClassChanges());
}
private void updateTBoxAnnotations() throws IOException {
TBoxUpdater tboxUpdater = new TBoxUpdater(settings.getOldTBoxAnnotationsModel(),
settings.getNewTBoxAnnotationsModel(),
settings.getAssertionOntModelSelector().getTBoxModel(), logger, record);
tboxUpdater.updateDefaultAnnotationValues();
//tboxUpdater.updateAnnotationModel();
}
/**
* Executes a SPARQL ASK query to determine whether the knowledge base
* needs to be updated to conform to a new ontology version
*/
public boolean updateRequired() throws IOException {
boolean required = false;
String sparqlQueryStr = loadSparqlQuery(settings.getAskUpdatedQueryFile());
if (sparqlQueryStr == null) {
return required;
}
Model abox = settings.getAssertionOntModelSelector().getABoxModel();
Query query = QueryFactory.create(sparqlQueryStr);
QueryExecution isUpdated = QueryExecutionFactory.create(query, abox);
// if the ASK query DOES have a solution (i.e. the assertions exist
// showing that the update has already been performed), then the update
// is NOT required.
if (isUpdated.execAsk()) {
required = false;
} else {
required = true;
if (JenaDataSourceSetupBase.isFirstStartup()) {
assertSuccess();
log.info("The application is starting with an empty DB, an indication will be added to the DB that a knowledge base migration to the current version is not required.");
required = false;
}
}
return required;
}
/**
* loads a SPARQL ASK query from a text file
* @param filePath
* @return the query string or null if file not found
*/
public static String loadSparqlQuery(String filePath) throws IOException {
File file = new File(filePath);
if (!file.exists()) {
return null;
}
BufferedReader reader = new BufferedReader(new FileReader(file));
StringBuffer fileContents = new StringBuffer();
String ln;
while ((ln = reader.readLine()) != null) {
fileContents.append(ln).append('\n');
}
return fileContents.toString();
}
private void assertSuccess() throws FileNotFoundException, IOException {
try {
//Model m = settings.getAssertionOntModelSelector().getApplicationMetadataModel();
Model m = settings.getAssertionOntModelSelector().getABoxModel();
File successAssertionsFile = new File(settings.getSuccessAssertionsFile());
InputStream inStream = new FileInputStream(successAssertionsFile);
m.enterCriticalSection(Lock.WRITE);
try {
m.read(inStream, null, settings.getSuccessRDFFormat());
if (logger != null) {
logger.logWithDate("Finished knowledge base migration");
}
} finally {
m.leaveCriticalSection();
}
} catch (Exception e) {
if (logger != null) {
logger.logError(" unable to make RDF assertions about successful " +
" update to new ontology version: " + e.getMessage());
}
}
}
/**
* A class that allows to access two different ontology change lists,
* one for class changes and the other for property changes. The
* constructor will split a list containing both types of changes.
* @author bjl23
*
*/
private class AtomicOntologyChangeLists {
private List<AtomicOntologyChange> atomicClassChanges =
new ArrayList<AtomicOntologyChange>();
private List<AtomicOntologyChange> atomicPropertyChanges =
new ArrayList<AtomicOntologyChange>();
public AtomicOntologyChangeLists (
List<AtomicOntologyChange> changeList, OntModel newTboxModel,
OntModel oldTboxModel) throws IOException {
Iterator<AtomicOntologyChange> listItr = changeList.iterator();
while(listItr.hasNext()) {
AtomicOntologyChange changeObj = listItr.next();
if (changeObj.getSourceURI() != null){
if (oldTboxModel.getOntProperty(changeObj.getSourceURI()) != null){
atomicPropertyChanges.add(changeObj);
} else if (oldTboxModel.getOntClass(changeObj.getSourceURI()) != null) {
atomicClassChanges.add(changeObj);
} else if ("Prop".equals(changeObj.getNotes())) {
atomicPropertyChanges.add(changeObj);
} else if ("Class".equals(changeObj.getNotes())) {
atomicClassChanges.add(changeObj);
} else{
logger.log("WARNING: Source URI is neither a Property" +
" nor a Class. " + "Change Object skipped for sourceURI: " + changeObj.getSourceURI());
}
} else if(changeObj.getDestinationURI() != null){
if (newTboxModel.getOntProperty(changeObj.getDestinationURI()) != null) {
atomicPropertyChanges.add(changeObj);
} else if(newTboxModel.getOntClass(changeObj.
getDestinationURI()) != null) {
atomicClassChanges.add(changeObj);
} else{
logger.log("WARNING: Destination URI is neither a Property" +
" nor a Class. " + "Change Object skipped for destinationURI: " + changeObj.getDestinationURI());
}
} else{
logger.log("WARNING: Source and Destination URI can't be null. " + "Change Object skipped" );
}
}
//logger.log("Property and Class change Object lists have been created");
}
public List<AtomicOntologyChange> getAtomicClassChanges() {
return atomicClassChanges;
}
public List<AtomicOntologyChange> getAtomicPropertyChanges() {
return atomicPropertyChanges;
}
}
}
|
webapp/src/edu/cornell/mannlib/vitro/webapp/ontology/update/KnowledgeBaseUpdater.java
|
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.ontology.update;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaIngestUtils;
/**
* Performs knowledge base updates if necessary to align with a
* new ontology version.
*
* @author bjl23
*
*/
public class KnowledgeBaseUpdater {

    private final Log log = LogFactory.getLog(KnowledgeBaseUpdater.class);

    // Configuration (file locations, models) for this migration run.
    private UpdateSettings settings;
    // Lazily created in update(); may be null until then.
    private ChangeLogger logger;
    // Records every triple added/removed so the migration is auditable.
    private ChangeRecord record;

    public KnowledgeBaseUpdater(UpdateSettings settings) {
        this.settings = settings;
        this.logger = null;
        this.record = new SimpleChangeRecord(settings.getAddedDataFile(), settings.getRemovedDataFile());
    }

    /**
     * Migrates the knowledge base to align with the new ontology version.
     * Exceptions from the migration itself are logged (not propagated); if
     * no errors were logged, success assertions are written into the ABox
     * so the migration is not re-run on the next startup.
     *
     * @throws IOException if the change log or change record cannot be written
     */
    public void update() throws IOException {
        if (this.logger == null) {
            this.logger = new SimpleChangeLogger(settings.getLogFile(), settings.getErrorLogFile());
        }
        long startTime = System.currentTimeMillis();
        System.out.println("Migrating the knowledge base");
        log.info("Migrating the knowledge base");
        logger.log("Started knowledge base migration");
        try {
            performUpdate();
        } catch (Exception e) {
            logger.logError(e.getMessage());
            e.printStackTrace();
        }
        if (!logger.errorsWritten()) {
            // add assertions to the knowledge base showing that the
            // update was successful, so we don't need to run it again.
            assertSuccess();
        }
        record.writeChanges();
        logger.closeLogs();
        long elapsedSecs = (System.currentTimeMillis() - startTime) / 1000;
        System.out.println("Finished knowledge base migration in " + elapsedSecs + " second" + (elapsedSecs != 1 ? "s" : ""));
        log.info("Finished knowledge base migration in " + elapsedSecs + " second" + (elapsedSecs != 1 ? "s" : ""));
        return;
    }

    /**
     * Runs the individual migration steps in order: SPARQL CONSTRUCT
     * additions/retractions, then TBox annotation updates, then ABox updates
     * driven by the parsed atomic ontology change list.
     */
    private void performUpdate() throws IOException {
        log.info("\tperforming SPARQL construct additions (abox)");
        performSparqlConstructAdditions(settings.getSparqlConstructAdditionsDir(), settings.getAssertionOntModelSelector().getABoxModel());
        // fixed typo: "infenences" -> "inferences"
        log.info("\tperforming SPARQL construct deletions (inferences)");
        performSparqlConstructRetractions(settings.getSparqlConstructDeletionsDir(), settings.getInferenceOntModelSelector().getABoxModel());
        List<AtomicOntologyChange> rawChanges = getAtomicOntologyChanges();
        AtomicOntologyChangeLists changes = new AtomicOntologyChangeLists(rawChanges, settings.getNewTBoxModel(), settings.getOldTBoxModel());
        //process the TBox before the ABox
        log.info("\tupdating tbox annotations");
        updateTBoxAnnotations();
        log.info("\tupdating the abox");
        updateABox(changes);
    }

    /**
     * Executes the CONSTRUCT queries in the additions directory and adds
     * any genuinely new statements (blank nodes renamed to URIs) to the ABox.
     */
    private void performSparqlConstructAdditions(String sparqlConstructDir, OntModel aboxModel) throws IOException {
        Model anonModel = performSparqlConstructs(sparqlConstructDir, aboxModel, true);
        if (anonModel == null) {
            return;
        }
        aboxModel.enterCriticalSection(Lock.WRITE);
        try {
            JenaIngestUtils jiu = new JenaIngestUtils();
            Model additions = jiu.renameBNodes(anonModel, settings.getDefaultNamespace() + "n", aboxModel);
            Model actualAdditions = ModelFactory.createDefaultModel();
            StmtIterator stmtIt = additions.listStatements();
            // Only record statements that are not already asserted.
            while (stmtIt.hasNext()) {
                Statement stmt = stmtIt.nextStatement();
                if (!aboxModel.contains(stmt)) {
                    actualAdditions.add(stmt);
                }
            }
            aboxModel.add(actualAdditions);
            record.recordAdditions(actualAdditions);
        } finally {
            aboxModel.leaveCriticalSection();
        }
    }

    /**
     * Executes the CONSTRUCT queries in the deletions directory and removes
     * the constructed statements from the given model.
     */
    private void performSparqlConstructRetractions(String sparqlConstructDir, OntModel model) throws IOException {
        Model retractions = performSparqlConstructs(sparqlConstructDir, model, false);
        if (retractions == null) {
            return;
        }
        model.enterCriticalSection(Lock.WRITE);
        try {
            model.remove(retractions);
            record.recordRetractions(retractions);
        } finally {
            model.leaveCriticalSection();
        }
    }

    /**
     * Performs a set of arbitrary SPARQL CONSTRUCT queries on the
     * data, for changes that cannot be expressed as simple property
     * or class additions, deletions, or renamings.
     * Blank nodes created by the queries are given random URIs.
     *
     * @param sparqlConstructDir directory containing one query per file
     * @param aboxModel model the queries are executed against
     * @param add true if the results will be added, false if removed
     *            (affects log wording only)
     * @return the union of all constructed statements, or null if the
     *         directory does not exist
     */
    private Model performSparqlConstructs(String sparqlConstructDir,
            OntModel aboxModel, boolean add) throws IOException {
        Model anonModel = ModelFactory.createDefaultModel();
        File sparqlConstructDirectory = new File(sparqlConstructDir);
        if (!sparqlConstructDirectory.isDirectory()) {
            // fixed message: was missing the "." between class name and method
            logger.logError(this.getClass().getName() +
                    ".performSparqlConstructs() expected to find a directory " +
                    " at " + sparqlConstructDir + ". Unable to execute " +
                    " SPARQL CONSTRUCTS.");
            return null;
        }
        File[] sparqlFiles = sparqlConstructDirectory.listFiles();
        for (int i = 0; i < sparqlFiles.length; i++) {
            File sparqlFile = sparqlFiles[i];
            try {
                StringBuffer fileContents = new StringBuffer();
                BufferedReader reader = new BufferedReader(new FileReader(sparqlFile));
                try {
                    String ln;
                    while ((ln = reader.readLine()) != null) {
                        fileContents.append(ln).append('\n');
                    }
                } finally {
                    // was leaked in the original: close the reader even on error
                    reader.close();
                }
                try {
                    log.debug("\t\tprocessing SPARQL construct query from file " + sparqlFiles[i].getName());
                    Query q = QueryFactory.create(fileContents.toString(), Syntax.syntaxARQ);
                    aboxModel.enterCriticalSection(Lock.WRITE);
                    try {
                        QueryExecution qe = QueryExecutionFactory.create(q, aboxModel);
                        try {
                            long numBefore = anonModel.size();
                            qe.execConstruct(anonModel);
                            long numAfter = anonModel.size();
                            long num = numAfter - numBefore;
                            if (num > 0) {
                                logger.log((add ? "Added " : "Removed ") + num + (add ? "" : " inferred") +
                                        " statement" + ((num > 1) ? "s" : "") +
                                        " using the SPARQL construct query from file " + sparqlFiles[i].getName());
                            }
                        } finally {
                            // was leaked on exception in the original
                            qe.close();
                        }
                    } finally {
                        aboxModel.leaveCriticalSection();
                    }
                } catch (Exception e) {
                    logger.logError(this.getClass().getName() +
                            ".performSparqlConstructs() unable to execute " +
                            "query at " + sparqlFile + ". Error message is: " + e.getMessage());
                }
            } catch (FileNotFoundException fnfe) {
                logger.log("WARNING: performSparqlConstructs() could not find " +
                        " SPARQL CONSTRUCT file " + sparqlFile + ". Skipping.");
            }
        }
        return anonModel;
    }

    /** Parses the diff file into a flat list of atomic ontology changes. */
    private List<AtomicOntologyChange> getAtomicOntologyChanges()
            throws IOException {
        return (new OntologyChangeParser(logger)).parseFile(settings.getDiffFile());
    }

    /** Applies the property and class changes to the assertion ABox. */
    private void updateABox(AtomicOntologyChangeLists changes)
            throws IOException {
        OntModel oldTBoxModel = settings.getOldTBoxModel();
        OntModel newTBoxModel = settings.getNewTBoxModel();
        OntModel ABoxModel = settings.getAssertionOntModelSelector().getABoxModel();
        ABoxUpdater aboxUpdater = new ABoxUpdater(
                oldTBoxModel, newTBoxModel, ABoxModel,
                settings.getNewTBoxAnnotationsModel(), logger, record);
        aboxUpdater.processPropertyChanges(changes.getAtomicPropertyChanges());
        aboxUpdater.processClassChanges(changes.getAtomicClassChanges());
    }

    /** Updates default annotation values in the assertion TBox. */
    private void updateTBoxAnnotations() throws IOException {
        TBoxUpdater tboxUpdater = new TBoxUpdater(settings.getOldTBoxAnnotationsModel(),
                settings.getNewTBoxAnnotationsModel(),
                settings.getAssertionOntModelSelector().getTBoxModel(), logger, record);
        tboxUpdater.updateDefaultAnnotationValues();
        //tboxUpdater.updateAnnotationModel();
    }

    /**
     * Executes a SPARQL ASK query to determine whether the knowledge base
     * needs to be updated to conform to a new ontology version
     *
     * @return true if the migration still needs to run, false otherwise
     */
    public boolean updateRequired() throws IOException {
        boolean required = false;
        String sparqlQueryStr = loadSparqlQuery(settings.getAskUpdatedQueryFile());
        if (sparqlQueryStr == null) {
            return required;
        }
        Model abox = settings.getAssertionOntModelSelector().getABoxModel();
        Query query = QueryFactory.create(sparqlQueryStr);
        QueryExecution isUpdated = QueryExecutionFactory.create(query, abox);
        try {
            // if the ASK query DOES have a solution (i.e. the assertions exist
            // showing that the update has already been performed), then the update
            // is NOT required.
            if (isUpdated.execAsk()) {
                required = false;
            } else {
                required = true;
                if (JenaDataSourceSetupBase.isFirstStartup()) {
                    assertSuccess();
                    log.info("The application is starting with a fresh DB, an indication will be added to the DB that a knowledge base migration to the current version is not required.");
                    required = false;
                }
            }
        } finally {
            // was leaked in the original: release the query execution
            isUpdated.close();
        }
        return required;
    }

    /**
     * loads a SPARQL ASK query from a text file
     * @param filePath
     * @return the query string or null if file not found
     */
    public static String loadSparqlQuery(String filePath) throws IOException {
        File file = new File(filePath);
        if (!file.exists()) {
            return null;
        }
        BufferedReader reader = new BufferedReader(new FileReader(file));
        try {
            StringBuffer fileContents = new StringBuffer();
            String ln;
            while ((ln = reader.readLine()) != null) {
                fileContents.append(ln).append('\n');
            }
            return fileContents.toString();
        } finally {
            // was leaked in the original: close the reader even on error
            reader.close();
        }
    }

    /**
     * Reads the "migration succeeded" assertions file into the ABox so that
     * updateRequired() returns false on subsequent startups. Failures are
     * logged rather than propagated.
     */
    private void assertSuccess() throws FileNotFoundException, IOException {
        try {
            //Model m = settings.getAssertionOntModelSelector().getApplicationMetadataModel();
            Model m = settings.getAssertionOntModelSelector().getABoxModel();
            File successAssertionsFile = new File(settings.getSuccessAssertionsFile());
            InputStream inStream = new FileInputStream(successAssertionsFile);
            try {
                m.enterCriticalSection(Lock.WRITE);
                try {
                    m.read(inStream, null, settings.getSuccessRDFFormat());
                    if (logger != null) {
                        logger.logWithDate("Finished knowledge base migration");
                    }
                } finally {
                    m.leaveCriticalSection();
                }
            } finally {
                // was leaked in the original: close the stream even on error
                inStream.close();
            }
        } catch (Exception e) {
            if (logger != null) {
                logger.logError(" unable to make RDF assertions about successful " +
                        " update to new ontology version: " + e.getMessage());
            }
        }
    }

    /**
     * A class that allows to access two different ontology change lists,
     * one for class changes and the other for property changes. The
     * constructor will split a list containing both types of changes.
     * @author bjl23
     *
     */
    private class AtomicOntologyChangeLists {

        private List<AtomicOntologyChange> atomicClassChanges =
                new ArrayList<AtomicOntologyChange>();
        private List<AtomicOntologyChange> atomicPropertyChanges =
                new ArrayList<AtomicOntologyChange>();

        public AtomicOntologyChangeLists(
                List<AtomicOntologyChange> changeList, OntModel newTboxModel,
                OntModel oldTboxModel) throws IOException {
            Iterator<AtomicOntologyChange> listItr = changeList.iterator();
            while (listItr.hasNext()) {
                AtomicOntologyChange changeObj = listItr.next();
                if (changeObj.getSourceURI() != null) {
                    // Classify by what the URI denotes in the OLD TBox,
                    // falling back to the "Prop"/"Class" notes hint.
                    if (oldTboxModel.getOntProperty(changeObj.getSourceURI()) != null) {
                        atomicPropertyChanges.add(changeObj);
                    } else if (oldTboxModel.getOntClass(changeObj.getSourceURI()) != null) {
                        atomicClassChanges.add(changeObj);
                    } else if ("Prop".equals(changeObj.getNotes())) {
                        atomicPropertyChanges.add(changeObj);
                    } else if ("Class".equals(changeObj.getNotes())) {
                        atomicClassChanges.add(changeObj);
                    } else {
                        logger.log("WARNING: Source URI is neither a Property" +
                                " nor a Class. " + "Change Object skipped for sourceURI: " + changeObj.getSourceURI());
                    }
                } else if (changeObj.getDestinationURI() != null) {
                    // Pure additions: classify by the NEW TBox.
                    if (newTboxModel.getOntProperty(changeObj.getDestinationURI()) != null) {
                        atomicPropertyChanges.add(changeObj);
                    } else if (newTboxModel.getOntClass(changeObj.
                            getDestinationURI()) != null) {
                        atomicClassChanges.add(changeObj);
                    } else {
                        logger.log("WARNING: Destination URI is neither a Property" +
                                " nor a Class. " + "Change Object skipped for destinationURI: " + changeObj.getDestinationURI());
                    }
                } else {
                    logger.log("WARNING: Source and Destination URI can't be null. " + "Change Object skipped" );
                }
            }
            //logger.log("Property and Class change Object lists have been created");
        }

        public List<AtomicOntologyChange> getAtomicClassChanges() {
            return atomicClassChanges;
        }

        public List<AtomicOntologyChange> getAtomicPropertyChanges() {
            return atomicPropertyChanges;
        }
    }
}
|
updating log message
|
webapp/src/edu/cornell/mannlib/vitro/webapp/ontology/update/KnowledgeBaseUpdater.java
|
updating log message
|
|
Java
|
bsd-3-clause
|
db957801cb9ace70f84d36d260460defa6f22d5c
| 0
|
skill-lang/jvmCommon
|
/* ___ _ ___ _ _ *\
* / __| |/ (_) | | Your SKilL Scala Binding *
* \__ \ ' <| | | |__ generated: 19.11.2014 *
* |___/_|\_\_|_|____| by: Timm Felden *
\* */
package de.ust.skill.common.jvm.streams;
import java.nio.ByteBuffer;
/**
* Implementations of this class are used to turn a byte stream into a stream of integers and floats.
*
* @author Timm Felden
*/
public abstract class InStream {
protected final ByteBuffer input;
protected InStream(ByteBuffer input) {
this.input = input;
}
/**
* @return take an f64 from the stream
*/
final public double f64() {
return input.getDouble();
}
/**
* @return take an f32 from the stream
*/
public final float f32() {
return input.getFloat();
}
/**
* @return take an v64 from the stream
*/
public final long v64() {
long rval;
return (0 != ((rval = i8()) & 0x80)) ? multiByteV64(rval) : rval;
}
/**
* multi byte v64 values are treated in a different function to enable inlining of more common single byte v64
* values
*/
@SuppressWarnings("all")
private long multiByteV64(long rval) {
long r;
rval = (rval & 0x7f) | ((r = i8()) & 0x7f) << 7;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 14;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 21;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 28;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 35;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 42;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 49;
if (0 != (r & 0x80)) {
rval |= i8() << 56;
}
}
}
}
}
}
}
return rval;
}
/**
* @return take an i64 from the stream
*/
public final long i64() {
return input.getLong();
}
/**
* @return take an i32 from the stream
* @throws UnexpectedEOF
* if there is no i32 in the stream
*/
public final int i32() {
return input.getInt();
}
/**
* @return take an i16 from the stream
*/
public final short i16() {
return input.getShort();
}
/**
* @return take an i8 from the stream
*/
public final byte i8() {
return input.get();
}
/**
* @return take a bool from the stream
*/
public final boolean bool() {
return input.get() != 0;
}
/**
* @return raw byte array taken from the stream
*/
public final byte[] bytes(long length) {
final byte[] rval = new byte[(int) length];
input.get(rval);
return rval;
}
/**
* @return true iff there are at least n bytes left in the stream
*/
public final boolean has(int n) {
return input.limit() >= n + input.position();
}
/**
* @return true iff at the end of file (or stream)
*/
public final boolean eof() {
return input.limit() == input.position();
}
/**
* use with care!
*
* @param position
* jump to target position, without the ability to restore the old position
*/
public abstract void jump(long position);
final public long position() {
return input.position();
}
}
|
src/de/ust/skill/common/jvm/streams/InStream.java
|
/* ___ _ ___ _ _ *\
* / __| |/ (_) | | Your SKilL Scala Binding *
* \__ \ ' <| | | |__ generated: 19.11.2014 *
* |___/_|\_\_|_|____| by: Timm Felden *
\* */
package de.ust.skill.common.jvm.streams;
import java.nio.ByteBuffer;
/**
* Implementations of this class are used to turn a byte stream into a stream of integers and floats.
*
* @author Timm Felden
*/
public abstract class InStream {
protected final ByteBuffer input;
protected InStream(ByteBuffer input) {
this.input = input;
}
/**
* @return take an f64 from the stream
*/
final public double f64() {
return input.getDouble();
}
/**
* @return take an f32 from the stream
*/
public final float f32() {
return input.getFloat();
}
/**
* @return take an v64 from the stream
*/
public final long v64() {
long rval;
return (0 != ((rval = i8()) & 0x80)) ? multiByteV64(rval) : rval;
}
/**
* multi byte v64 values are treated in a different function to enable inlining of more common single byte v64
* values
*/
@SuppressWarnings("all")
private long multiByteV64(long rval) {
long r;
rval |= ((r = i8()) & 0x7f) << 7;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 14;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 21;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 28;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 35;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 42;
if (0 != (r & 0x80)) {
rval |= ((r = i8()) & 0x7f) << 49;
if (0 != (r & 0x80)) {
rval |= i8() << 56;
}
}
}
}
}
}
}
return rval;
}
/**
* @return take an i64 from the stream
*/
public final long i64() {
return input.getLong();
}
/**
* @return take an i32 from the stream
* @throws UnexpectedEOF
* if there is no i32 in the stream
*/
public final int i32() {
return input.getInt();
}
/**
* @return take an i16 from the stream
*/
public final short i16() {
return input.getShort();
}
/**
* @return take an i8 from the stream
*/
public final byte i8() {
return input.get();
}
/**
* @return take a bool from the stream
*/
public final boolean bool() {
return input.get() != 0;
}
/**
* @return raw byte array taken from the stream
*/
public final byte[] bytes(long length) {
final byte[] rval = new byte[(int) length];
input.get(rval);
return rval;
}
/**
* @return true iff there are at least n bytes left in the stream
*/
public final boolean has(int n) {
return input.limit() >= n + input.position();
}
/**
* @return true iff at the end of file (or stream)
*/
public final boolean eof() {
return input.limit() == input.position();
}
/**
* use with care!
*
* @param position
* jump to target position, without the ability to restore the old position
*/
public abstract void jump(long position);
final public long position() {
return input.position();
}
}
|
fixed v64 decoding
|
src/de/ust/skill/common/jvm/streams/InStream.java
|
fixed v64 decoding
|
|
Java
|
mit
|
062da0375fb56bfb98d4242d2c29e5131db23ded
| 0
|
sniffy/sniffy,sniffy/sniffy,bedrin/jdbc-sniffer,sniffy/sniffy
|
package io.sniffy.sql;
import io.sniffy.*;
import org.junit.Test;
/**
 * Tests for the SqlQueries fluent-expectation API, combining row-count
 * constraints (noneRows/atMostOneRow/exactRows/minRows/maxRows/rowsBetween)
 * with query-count constraints, statement kinds (select/insert/update/
 * delete/merge/other) and thread scopes (currentThread/otherThreads/
 * anyThreads) in various chaining orders.
 *
 * Helpers such as executeStatement(...) come from BaseTest (defined
 * elsewhere); each Spy verifies its expectations on close at the end of
 * the try-with-resources block.
 */
public class SqlQueries_Rows_Test extends BaseTest {

    @Test
    public void testNoneRows() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.noneRows())) {
            executeStatement(Query.DELETE);
        }
    }

    @Test
    public void testOneQueryNoRows() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().delete().noneRows())) {
            executeStatement(Query.DELETE);
        }
    }

    @Test
    public void testOneMergeRow() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().currentThread().merge())) {
            executeStatement(Query.MERGE);
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testOneOtherRow_Exception() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(1).other())) {
            executeStatement(Query.OTHER);
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testTwoQueryMinTwoRows_Exception() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).minRows(2).delete().anyThreads())) {
            executeStatement(Query.DELETE);
            executeStatement(Query.DELETE);
        }
    }

    @Test
    public void testTwoQueryMaxTwoMergeRowsOtherThreads() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).maxRows(2).otherThreads().merge())) {
            executeStatementsInOtherThread(2, Query.MERGE);
        }
    }

    @Test
    public void testMinMaxRows() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.minRows(2).maxRows(3))) {
            executeStatement();
            executeStatement();
            executeStatement();
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testMaxMinRows_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxRows(5).minRows(4))) {
            executeStatement();
            executeStatement();
            executeStatement();
        }
    }

    // minRows above maxRows must be rejected when the range is built.
    @Test(expected = IllegalArgumentException.class)
    public void testMaxMinRows_IllegalArgumentException() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxRows(5).minRows(6))) {
            executeStatement();
            executeStatement();
            executeStatement();
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testMinMaxRows_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.minRows(2).maxRows(3))) {
            executeStatement();
            executeStatement();
            executeStatement();
            executeStatement();
        }
    }

    @Test
    public void testMinMaxRowsOtherThreads() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxRows(3).otherThreads())) {
            executeStatements(4);
            executeStatementsInOtherThread(2);
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testMinMaxRowsOtherThreads_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.minRows(5).otherThreads())) {
            executeStatements(6);
            executeStatementsInOtherThread(4);
        }
    }

    @Test
    public void testMaxRowsInserted() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxRows(3).insert())) {
            executeStatements(4, Query.SELECT);
            executeStatements(2, Query.INSERT);
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testAtMostOneRowUpdated_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().update())) {
            executeStatements(2, Query.INSERT);
            executeStatements(2, Query.UPDATE);
            executeStatements(1, Query.DELETE);
        }
    }

    @Test
    public void testExactThreeRowsDeleteOtherThreads() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).delete().otherThreads())) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(1, Query.DELETE);
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testBetweenNineAndElevenRowSelectedAllQueries_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.rowsBetween(9,11).select().anyThreads())) {
            executeStatements(2, Query.INSERT);
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatements(1, Query.SELECT);
            executeStatementsInOtherThread(1, Query.SELECT);
        }
    }

    @Test
    public void testExactThreeRowsDeleteOtherThreadsMinMaxQueries() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).delete().otherThreads().minQueries(5).maxQueries(7))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(6, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }

    @Test(expected = WrongNumberOfQueriesError.class)
    public void testExactThreeRowsDeleteOtherThreadsMaxMinQueries_Exception() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).otherThreads().delete().minQueries(5).maxQueries(7))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(4, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }

    @Test
    public void testExactQueriesAnyThreadsDeleteMaxMinRows() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(8).anyThreads().delete().maxRows(4).minRows(2))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(6, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testExactQueriesAnyThreadsDeleteMinMaxRows_Exception() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(8).anyThreads().delete().minRows(1).maxRows(2))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(6, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }

    @Test
    public void testExactQueriesExactRows() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).exactRows(3))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatements(2, Query.DELETE);
        }
    }

    @Test
    public void testExactQueriesBetweenRowsAnyThreads() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(5).exactRows(6).anyThreads())) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatements(2, Query.DELETE);
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testExactQueriesMaxMinRows_Exception() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).maxRows(10).minRows(9))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatements(2, Query.DELETE);
        }
    }

    @Test
    public void testExactQueriesAtMostOneRow() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).atMostOneRow().otherThreads())) {
            executeStatements(3, Query.INSERT);
            executeStatements(2, Query.DELETE);
            executeStatementsInOtherThread(2, Query.DELETE);
        }
    }

    // NOTE(review): the name ends in "_Exception" but no expected= is declared
    // on the @Test annotation — confirm whether an expected error is missing
    // or the method should be renamed.
    @Test
    public void testExactQueriesNoRows_Exception() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).noneRows())) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(2, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testOneRowAnyThreads_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().anyThreads())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }

    @Test(expected = WrongNumberOfQueriesError.class)
    public void testOneRowNoneQueries_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().noneQueries().otherThreads().insert())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }

    @Test
    public void testOneRowOneQueryInsertOtherThreads() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().atMostOneQuery().insert().otherThreads())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }

    @Test
    public void testTwoRowsMinMaxQueries() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.rowsBetween(2,2).minQueries(1).maxQueries(2).anyThreads().insert())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }

    @Test(expected = WrongNumberOfQueriesError.class)
    public void testTwoRowsMaxMinQueries_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.rowsBetween(2,2).maxQueries(4).minQueries(3).insert().anyThreads())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }

    @Test
    public void testOneRowMerge() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().merge().currentThread())) {
            executeStatement(Query.MERGE);
        }
    }

    @Test
    public void testOneRowOneQuerySelectOtherThread() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().atMostOneQuery().select().otherThreads())) {
            executeStatementInOtherThread(Query.SELECT);
        }
    }

    @Test
    public void testTwoRowsTwoQueriesUpdateAnyThreads() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.queriesBetween(2,2).exactRows(2).update().anyThreads())) {
            executeStatement(Query.UPDATE);
            executeStatementInOtherThread(Query.UPDATE);
        }
    }

    @Test
    public void testTwoRowsTwoQueriesMergeAnyThreads() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxQueries(2).minQueries(2).minRows(2).maxRows(2).merge().anyThreads())) {
            executeStatement(Query.MERGE);
            executeStatementInOtherThread(Query.MERGE);
        }
    }

    @Test
    public void testOneRowOneQueryOtherCurrentThread() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().atMostOneRow().other().currentThread())) {
            executeStatement(Query.OTHER);
        }
    }

    @Test
    public void testOneRowOneQueryCurrentThreadOther() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().atMostOneRow().currentThread().other())) {
            executeStatement(Query.OTHER);
        }
    }

    @Test
    public void testAtMostOneSelectQueryReturnsAtMostOneRow() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().select().atMostOneRow())) {
            executeStatement(Query.SELECT);
        }
    }

    @Test
    public void testAtMostOneUpdateQueryReturnsValidMinMaxRows() {
        executeStatement(Query.DELETE);
        executeStatements(3, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().merge().minRows(1).maxRows(1).currentThread())) {
            executeStatement(Query.MERGE);
        }
    }

    @Test
    public void testAtMostOneDeleteQueryReturnsValidMaxMinRows() {
        executeStatement(Query.DELETE);
        executeStatements(3, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().delete().maxRows(4).minRows(2).otherThreads())) {
            executeStatementInOtherThread(Query.DELETE);
        }
    }

    @Test
    public void testOneRowOneSelectQuery() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().select().atMostOneQuery().currentThread())) {
            executeStatement(Query.SELECT);
        }
    }

    @Test
    public void testNoneRowsSelectNoneQueries() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.noneRows().select().noneQueries().otherThreads())) {
            executeStatementInOtherThread(Query.OTHER);
        }
    }

    @Test
    public void testNoneRowsOtherQueriesBetweenTwoAndThree() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.noneRows().other().queriesBetween(2,3).anyThreads())) {
            executeStatement(Query.OTHER);
            executeStatementInOtherThread(Query.OTHER);
        }
    }

    @Test
    public void testThreeRowsDeleteQueriesMinMax() {
        executeStatement(Query.DELETE);
        executeStatements(3, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).delete().minQueries(1).maxQueries(2))) {
            executeStatement(Query.DELETE);
            executeStatement(Query.DELETE);
        }
    }

    @Test(expected = WrongNumberOfQueriesError.class)
    public void testThreeRowsDeleteQueriesMaxMin_Exception() {
        executeStatement(Query.DELETE);
        executeStatements(3, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).delete().maxQueries(4).minQueries(3))) {
            executeStatement(Query.DELETE);
            executeStatement(Query.DELETE);
        }
    }

    @Test
    public void testOneSelectQueryOtherThreadOneRow() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().otherThreads().atMostOneRow())) {
            executeStatement(Query.SELECT);
        }
    }

    @Test
    public void testOneDeleteQueryAnyThreadNoneRows() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().anyThreads().noneRows())) {
            executeStatement(Query.DELETE);
        }
    }

    @Test
    public void testOneDeleteQueryCurrentThreadBetweenTwoAndThreeRows() {
        executeStatement(Query.DELETE);
        executeStatements(2, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().currentThread().rowsBetween(2,3))) {
            executeStatement(Query.DELETE);
        }
    }

    @Test
    public void testOneDeleteQueryCurrentThreadBetweenMinMaxRows() {
        executeStatement(Query.DELETE);
        executeStatements(2, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().currentThread().minRows(2).maxRows(3))) {
            executeStatement(Query.DELETE);
        }
    }

    @Test(expected = WrongNumberOfRowsError.class)
    public void testOneDeleteQueryCurrentThreadBetweenMaxMinRows_Exception() {
        executeStatement(Query.DELETE);
        executeStatements(2, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().currentThread().maxRows(5).minRows(4))) {
            executeStatement(Query.DELETE);
        }
    }

    @Test
    public void testOneRowCurrentThreadSelectMaxMinQueries() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().currentThread().select().maxQueries(2).minQueries(1))) {
            executeStatement(Query.SELECT);
        }
    }

    @Test
    public void testOneRowOtherThreadInsertMinMaxQueries() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().otherThreads().insert().minQueries(1).maxQueries(2))) {
            executeStatementInOtherThread(Query.INSERT);
        }
    }

    @Test
    public void testTwoRowsAnyThreadUpdateMinMaxQueries() {
        executeStatement(Query.DELETE);
        executeStatements(2, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxRows(2).minRows(2).anyThreads().update().atMostOneQuery())) {
            executeStatementInOtherThread(Query.UPDATE);
        }
    }
}
|
src/test/java/io/sniffy/sql/SqlQueries_Rows_Test.java
|
package io.sniffy.sql;
import io.sniffy.*;
import org.junit.Test;
/**
 * Unit tests for the {@code SqlQueries} expectations builder, focused on
 * expectations about the number of ROWS affected or returned by SQL
 * statements, optionally combined with query-count bounds, statement type
 * (select/insert/update/delete/merge/other) and thread scope
 * (currentThread/otherThreads/anyThreads).
 *
 * Pattern used throughout: statements executed BEFORE the try block are
 * setup only (clearing/seeding the table); the expectation is registered via
 * {@code Sniffer.expect(...)} and validated when the {@code Spy} resource is
 * closed. Tests whose name ends in {@code _Exception} declare the error they
 * intentionally trigger. Statement helpers ({@code executeStatement},
 * {@code executeStatements}, {@code executeStatementInOtherThread}, ...) are
 * inherited from {@code BaseTest}.
 */
public class SqlQueries_Rows_Test extends BaseTest {
    // --- bare row-count expectations ------------------------------------

    // DELETE on an emptied table affects zero rows.
    @Test
    public void testNoneRows() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.noneRows())) {
            executeStatement(Query.DELETE);
        }
    }
    // Query-count and row-count expectations combined on a DELETE.
    @Test
    public void testOneQueryNoRows() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().delete().noneRows())) {
            executeStatement(Query.DELETE);
        }
    }
    // MERGE against a single seeded row counts as one affected row.
    @Test
    public void testOneMergeRow() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().currentThread().merge())) {
            executeStatement(Query.MERGE);
        }
    }
    // "other" statement types are expected to report a row count != 1 here.
    @Test(expected = WrongNumberOfRowsError.class)
    public void testOneOtherRow_Exception() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(1).other())) {
            executeStatement(Query.OTHER);
        }
    }
    // Two DELETEs on an empty table affect 0 rows, violating minRows(2).
    @Test(expected = WrongNumberOfRowsError.class)
    public void testTwoQueryMinTwoRows_Exception() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).minRows(2).delete().anyThreads())) {
            executeStatement(Query.DELETE);
            executeStatement(Query.DELETE);
        }
    }
    @Test
    public void testTwoQueryMaxTwoMergeRowsOtherThreads() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).maxRows(2).otherThreads().merge())) {
            executeStatementsInOtherThread(2, Query.MERGE);
        }
    }
    // min/max row windows around the default statement (assumed 1 row each
    // per BaseTest helper — TODO confirm against BaseTest).
    @Test
    public void testMinMaxRows() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.minRows(2).maxRows(3))) {
            executeStatement();
            executeStatement();
            executeStatement();
        }
    }
    @Test(expected = WrongNumberOfRowsError.class)
    public void testMaxMinRows_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxRows(5).minRows(4))) {
            executeStatement();
            executeStatement();
            executeStatement();
        }
    }
    // minRows greater than an already-set maxRows must be rejected eagerly.
    @Test(expected = IllegalArgumentException.class)
    public void testMaxMinRows_IllegalArgumentException() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxRows(5).minRows(6))) {
            executeStatement();
            executeStatement();
            executeStatement();
        }
    }
    @Test(expected = WrongNumberOfRowsError.class)
    public void testMinMaxRows_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.minRows(2).maxRows(3))) {
            executeStatement();
            executeStatement();
            executeStatement();
            executeStatement();
        }
    }
    // --- thread-scoped row counts ---------------------------------------

    // Only the rows produced by OTHER threads count toward the bound.
    @Test
    public void testMinMaxRowsOtherThreads() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxRows(3).otherThreads())) {
            executeStatements(4);
            executeStatementsInOtherThread(2);
        }
    }
    @Test(expected = WrongNumberOfRowsError.class)
    public void testMinMaxRowsOtherThreads_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.minRows(5).otherThreads())) {
            executeStatements(6);
            executeStatementsInOtherThread(4);
        }
    }
    // Only INSERTed rows count; the SELECTs are ignored by the filter.
    @Test
    public void testMaxRowsInserted() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxRows(3).insert())) {
            executeStatements(4, Query.SELECT);
            executeStatements(2, Query.INSERT);
        }
    }
    @Test(expected = WrongNumberOfRowsError.class)
    public void testAtMostOneRowUpdated_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().update())) {
            executeStatements(2, Query.INSERT);
            executeStatements(2, Query.UPDATE);
            executeStatements(1, Query.DELETE);
        }
    }
    @Test
    public void testExactThreeRowsDeleteOtherThreads() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).delete().otherThreads())) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(1, Query.DELETE);
        }
    }
    @Test(expected = WrongNumberOfRowsError.class)
    public void testBetweenNineAndElevenRowSelectedAllQueries_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.rowsBetween(9,11).select().anyThreads())) {
            executeStatements(2, Query.INSERT);
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatements(1, Query.SELECT);
            executeStatementsInOtherThread(1, Query.SELECT);
        }
    }
    // --- row counts combined with query-count windows -------------------

    @Test
    public void testExactThreeRowsDeleteOtherThreadsMinMaxQueries() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).delete().otherThreads().minQueries(5).maxQueries(7))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(6, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }
    // Only 4 other-thread DELETEs run, below minQueries(5) → query-count error.
    @Test(expected = WrongNumberOfQueriesError.class)
    public void testExactThreeRowsDeleteOtherThreadsMaxMinQueries_Exception() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).otherThreads().delete().minQueries(5).maxQueries(7))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(4, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }
    @Test
    public void testExactQueriesAnyThreadsDeleteMaxMinRows() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(8).anyThreads().delete().maxRows(4).minRows(2))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(6, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }
    @Test(expected = WrongNumberOfRowsError.class)
    public void testExactQueriesAnyThreadsDeleteMinMaxRows_Exception() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(8).anyThreads().delete().minRows(1).maxRows(2))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(6, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }
    @Test
    public void testExactQueriesExactRows() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).exactRows(3))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatements(2, Query.DELETE);
        }
    }
    @Test
    public void testExactQueriesBetweenRowsAnyThreads() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(5).exactRows(6).anyThreads())) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatements(2, Query.DELETE);
        }
    }
    @Test(expected = WrongNumberOfRowsError.class)
    public void testExactQueriesMaxMinRows_Exception() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).maxRows(10).minRows(9))) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatements(2, Query.DELETE);
        }
    }
    @Test
    public void testExactQueriesAtMostOneRow() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).atMostOneRow().otherThreads())) {
            executeStatements(3, Query.INSERT);
            executeStatements(2, Query.DELETE);
            executeStatementsInOtherThread(2, Query.DELETE);
        }
    }
    // NOTE(review): name suggests an expected exception, but no
    // expected=... is declared on the annotation — confirm intent.
    @Test
    public void testExactQueriesNoRows_Exception() {
        executeStatementsInOtherThread(1, Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactQueries(2).noneRows())) {
            executeStatementsInOtherThread(3, Query.INSERT);
            executeStatementsInOtherThread(2, Query.DELETE);
            executeStatements(2, Query.DELETE);
        }
    }
    @Test(expected = WrongNumberOfRowsError.class)
    public void testOneRowAnyThreads_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().anyThreads())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }
    // noneQueries() is violated by the other-thread INSERT → query-count error.
    @Test(expected = WrongNumberOfQueriesError.class)
    public void testOneRowNoneQueries_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().noneQueries().otherThreads().insert())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }
    @Test
    public void testOneRowOneQueryInsertOtherThreads() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().atMostOneQuery().insert().otherThreads())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }
    @Test
    public void testTwoRowsMinMaxQueries() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.rowsBetween(2,2).minQueries(1).maxQueries(2).anyThreads().insert())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }
    // Only 2 INSERT queries run, below minQueries(3) → query-count error.
    @Test(expected = WrongNumberOfQueriesError.class)
    public void testTwoRowsMaxMinQueries_Exception() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.rowsBetween(2,2).maxQueries(4).minQueries(3).insert().anyThreads())) {
            executeStatement(Query.INSERT);
            executeStatementInOtherThread(Query.INSERT);
        }
    }
    // --- builder-order permutations (row/query/type/thread in any order) -

    @Test
    public void testOneRowMerge() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().merge().currentThread())) {
            executeStatement(Query.MERGE);
        }
    }
    @Test
    public void testOneRowOneQuerySelectOtherThread() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().atMostOneQuery().select().otherThreads())) {
            executeStatementInOtherThread(Query.SELECT);
        }
    }
    @Test
    public void testTwoRowsTwoQueriesUpdateAnyThreads() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.queriesBetween(2,2).exactRows(2).update().anyThreads())) {
            executeStatement(Query.UPDATE);
            executeStatementInOtherThread(Query.UPDATE);
        }
    }
    @Test
    public void testTwoRowsTwoQueriesMergeAnyThreads() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.maxQueries(2).minQueries(2).minRows(2).maxRows(2).merge().anyThreads())) {
            executeStatement(Query.MERGE);
            executeStatementInOtherThread(Query.MERGE);
        }
    }
    @Test
    public void testOneRowOneQueryOtherCurrentThread() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().atMostOneRow().other().currentThread())) {
            executeStatement(Query.OTHER);
        }
    }
    @Test
    public void testOneRowOneQueryCurrentThreadOther() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().atMostOneRow().currentThread().other())) {
            executeStatement(Query.OTHER);
        }
    }
    @Test
    public void testAtMostOneSelectQueryReturnsAtMostOneRow() {
        executeStatement(Query.DELETE);
        executeStatement(Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().select().atMostOneRow())) {
            executeStatement(Query.SELECT);
        }
    }
    // NOTE(review): name says "Update" but the expectation and statement
    // are MERGE — confirm whether the name or the query type is intended.
    @Test
    public void testAtMostOneUpdateQueryReturnsValidMinMaxRows() {
        executeStatement(Query.DELETE);
        executeStatements(3, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().merge().minRows(1).maxRows(1).currentThread())) {
            executeStatement(Query.MERGE);
        }
    }
    @Test
    public void testAtMostOneDeleteQueryReturnsValidMaxMinRows() {
        executeStatement(Query.DELETE);
        executeStatements(3, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().delete().maxRows(4).minRows(2).otherThreads())) {
            executeStatementInOtherThread(Query.DELETE);
        }
    }
    @Test
    public void testOneRowOneSelectQuery() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneRow().select().atMostOneQuery().currentThread())) {
            executeStatement(Query.SELECT);
        }
    }
    // The OTHER statement does not match the select() filter, so both
    // noneRows() and noneQueries() hold for the filtered scope.
    @Test
    public void testNoneRowsSelectNoneQueries() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.noneRows().select().noneQueries().otherThreads())) {
            executeStatementInOtherThread(Query.OTHER);
        }
    }
    @Test
    public void testNoneRowsOtherQueriesBetweenTwoAndThree() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.noneRows().other().queriesBetween(2,3).anyThreads())) {
            executeStatement(Query.OTHER);
            executeStatementInOtherThread(Query.OTHER);
        }
    }
    @Test
    public void testThreeRowsDeleteQueriesMinMax() {
        executeStatement(Query.DELETE);
        executeStatements(3, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).delete().minQueries(1).maxQueries(2))) {
            executeStatement(Query.DELETE);
            executeStatement(Query.DELETE);
        }
    }
    // Only 2 DELETEs run, below minQueries(3) → query-count error.
    @Test(expected = WrongNumberOfQueriesError.class)
    public void testThreeRowsDeleteQueriesMaxMin_Exception() {
        executeStatement(Query.DELETE);
        executeStatements(3, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.exactRows(3).delete().maxQueries(4).minQueries(3))) {
            executeStatement(Query.DELETE);
            executeStatement(Query.DELETE);
        }
    }
    // SELECT runs on the current thread, so the otherThreads() scope sees
    // no queries and no rows — within the at-most-one bounds.
    @Test
    public void testOneSelectQueryOtherThreadOneRow() {
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().otherThreads().atMostOneRow())) {
            executeStatement(Query.SELECT);
        }
    }
    @Test
    public void testOneDeleteQueryAnyThreadNoneRows() {
        executeStatement(Query.DELETE);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().anyThreads().noneRows())) {
            executeStatement(Query.DELETE);
        }
    }
    @Test
    public void testOneDeleteQueryCurrentThreadBetweenTwoAndThreeRows() {
        executeStatement(Query.DELETE);
        executeStatements(2, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().currentThread().rowsBetween(2,3))) {
            executeStatement(Query.DELETE);
        }
    }
    @Test
    public void testOneDeleteQueryCurrentThreadBetweenMinMaxRows() {
        executeStatement(Query.DELETE);
        executeStatements(2, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().currentThread().minRows(2).maxRows(3))) {
            executeStatement(Query.DELETE);
        }
    }
    // The DELETE removes only 2 rows, below minRows(4) → row-count error.
    @Test(expected = WrongNumberOfRowsError.class)
    public void testOneDeleteQueryCurrentThreadBetweenMaxMinRows_Exception() {
        executeStatement(Query.DELETE);
        executeStatements(2, Query.INSERT);
        try (@SuppressWarnings("unused") Spy $= Sniffer.expect(SqlQueries.atMostOneQuery().currentThread().maxRows(5).minRows(4))) {
            executeStatement(Query.DELETE);
        }
    }
}
|
Unit tests for SqlQueries expectations builder
|
src/test/java/io/sniffy/sql/SqlQueries_Rows_Test.java
|
Unit tests for SqlQueries expectations builder
|
|
Java
|
mit
|
9556446f1277574cf5e772f9458fe3f982dc4252
| 0
|
hres/cfg-task-service,hres/cfg-task-service,hres/cfg-task-service
|
package ca.gc.ip346.classification.resource;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Updates.combine;
import static com.mongodb.client.model.Updates.currentDate;
import static com.mongodb.client.model.Updates.set;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import javax.ws.rs.BeanParam;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.jaxrs.annotation.JacksonFeatures;
import com.google.common.net.HttpHeaders;
import com.google.gson.GsonBuilder;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
// import ca.gc.ip346.classification.model.Added;
import ca.gc.ip346.classification.model.CanadaFoodGuideDataset;
import ca.gc.ip346.classification.model.CfgFilter;
import ca.gc.ip346.classification.model.CfgTier;
import ca.gc.ip346.classification.model.ContainsAdded;
import ca.gc.ip346.classification.model.Dataset;
import ca.gc.ip346.classification.model.Missing;
import ca.gc.ip346.classification.model.PseudoBoolean;
import ca.gc.ip346.classification.model.PseudoDouble;
import ca.gc.ip346.classification.model.PseudoInteger;
import ca.gc.ip346.classification.model.PseudoString;
import ca.gc.ip346.classification.model.RecipeRolled;
import ca.gc.ip346.util.ClassificationProperties;
import ca.gc.ip346.util.DBConnection;
import ca.gc.ip346.util.MongoClientFactory;
import ca.gc.ip346.util.RequestURI;
@Path("/datasets")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
public class FoodsResource {
private static final Logger logger = LogManager.getLogger(FoodsResource.class);
private Connection conn = null;
private DatabaseMetaData meta = null;
private MongoClient mongoClient = null;
private MongoCollection<Document> collection = null;
/**
 * Wires the resource to its data stores: a Mongo collection for saved
 * datasets and a relational connection used by the SQL-backed search.
 * NOTE(review): a failed SQL connection is only logged, leaving
 * {@code conn} null — later SQL use would then NPE; confirm intended.
 */
public FoodsResource() {
    mongoClient = MongoClientFactory.getMongoClient();
    collection = mongoClient.getDatabase(MongoClientFactory.getDatabase()).getCollection(MongoClientFactory.getCollection());
    try {
        conn = DBConnection.getConnections();
    } catch(Exception e) {
        // TODO: proper response to handle exceptions
        e.printStackTrace();
    }
}
/**
 * Searches the Canada Food Guide dataset using the query-string filter.
 * The SQL template is attached to the filter and the actual search is
 * delegated to {@code doSearchCriteria}.
 *
 * @param search filter bean populated from the request's query parameters
 * @return the search response produced by {@code doSearchCriteria}
 */
@GET
@Path("/search")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public /* List<CanadaFoodGuideDataset> */ Response getFoodList(@BeanParam CfgFilter search) {
    // Mongo is not used by this endpoint; release the client up front.
    mongoClient.close();
    // Load the dataset query template and hand it to the filter.
    search.setSql(ContentHandler.read("canada_food_guide_dataset.sql", getClass()));
    return doSearchCriteria(search);
}
/**
 * Persists a new dataset document in Mongo.
 *
 * On success returns 201 CREATED with the new document id in the body;
 * when a required field is missing returns 400 BAD_REQUEST listing the
 * missing field names. CORS headers are attached to every response.
 *
 * @param dataset payload carrying data, name, env, owner, status, comments
 * @return JAX-RS response as described above
 */
@POST
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
public /* Map<String, Object> */ Response saveDataset(Dataset dataset) {
    ResponseBuilder response = null;
    Map<String, Object> map = new HashMap<String, Object>();
    // Bug fix: the error branch reports "env" as a required field, but the
    // guard previously did not check it, so a null env was silently stored.
    if (dataset.getData() != null && dataset.getName() != null && dataset.getEnv() != null && dataset.getComments() != null) {
        Document doc = new Document()
            .append("data", dataset.getData())
            .append("name", dataset.getName())
            .append("env", dataset.getEnv())
            .append("owner", dataset.getOwner())
            .append("status", dataset.getStatus())
            .append("comments", dataset.getComments());
        collection.insertOne(doc);
        ObjectId id = (ObjectId)doc.get("_id");
        // Second write stamps modifiedDate on the freshly inserted document.
        collection.updateOne(
            eq("_id", id),
            combine(
                set("name", dataset.getName()),
                set("comments", dataset.getComments()),
                currentDate("modifiedDate"))
        );
        logger.error("[01;34mLast inserted Dataset id: " + id + "[00;00m");
        logger.error("[01;34mCurrent number of Datasets: " + collection.count() + "[00;00m");
        map.put("id", id.toString());
        logger.error("[01;34m" + Response.Status.CREATED.getStatusCode() + " " + Response.Status.CREATED.toString() + "[00;00m");
        response = Response.status(Response.Status.CREATED);
    } else {
        // Collect the names of every missing required field for the caller.
        List<String> list = new ArrayList<String>();
        if (dataset.getData() == null) list.add("data");
        if (dataset.getName() == null) list.add("name");
        if (dataset.getEnv() == null) list.add("env");
        if (dataset.getComments() == null) list.add("comments");
        map.put("code", Response.Status.BAD_REQUEST.getStatusCode());
        map.put("description", Response.Status.BAD_REQUEST.toString() + " - Unable to insert Dataset!");
        map.put("fields", StringUtils.join(list, ", "));
        logger.error("[01;34m" + Response.Status.BAD_REQUEST.toString() + " - Unable to insert Dataset!" + "[00;00m");
        logger.error("[01;34m" + Response.Status.BAD_REQUEST.getStatusCode() + " " + Response.Status.BAD_REQUEST.toString() + "[00;00m");
        response = Response.status(Response.Status.BAD_REQUEST);
    }
    mongoClient.close();
    logger.error("[01;31m" + "response status: " + response.build().getStatusInfo() + "[00;00m");
    return response
        .header(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "*")
        .header(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "origin, content-type, accept, authorization")
        .header(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true")
        .header(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET, POST, PUT, DELETE, OPTIONS, HEAD")
        .header(HttpHeaders.ACCESS_CONTROL_MAX_AGE, "1209600")
        .entity(map).build();
}
/**
 * Lists dataset summaries for the given environment.
 *
 * @param env environment name used to filter the collection
 * @return 200 OK with a list of id/name/env/owner/status/comments/modifiedDate maps
 */
@GET
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public /* List<Map<String, String>> */ Response getDatasets(@QueryParam("env") String env) {
    // Optional summary fields copied verbatim when present on the document.
    String[] summaryFields = {"name", "env", "owner", "status", "comments", "modifiedDate"};
    List<Map<String, String>> summaries = new ArrayList<Map<String, String>>();
    MongoCursor<Document> docs = collection.find(eq("env", env)).iterator();
    while (docs.hasNext()) {
        Document doc = docs.next();
        Map<String, String> summary = new HashMap<String, String>();
        summary.put("id", doc.get("_id").toString());
        for (String field : summaryFields) {
            Object value = doc.get(field);
            if (value != null) {
                summary.put(field, value.toString());
            }
        }
        summaries.add(summary);
        logger.error("[01;34mDataset ID: " + doc.get("_id") + "[00;00m");
    }
    mongoClient.close();
    return getResponse(Response.Status.OK, summaries);
}
/**
 * Fetches a single dataset document by its ObjectId.
 *
 * Returns 400 for a malformed id, 404 when no document matches, otherwise
 * 200 OK with the full document fields.
 *
 * @param id hex string of the Mongo ObjectId
 * @return JAX-RS response as described above
 */
@GET
@Path("/{id}")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public /* List<Map<String, Object>> */ Response getDataset(@PathParam("id") String id) {
    List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
    MongoCursor<Document> cursorDocMap = null;
    // Validate the id before building an ObjectId to avoid an uncaught
    // IllegalArgumentException on malformed input.
    if (ObjectId.isValid(id)) {
        System.out.println("[01;31m" + "Valid hexadecimal representation of ObjectId " + id + "[00;00m");
        cursorDocMap = collection.find(new Document("_id", new ObjectId(id))).iterator();
    } else {
        Map<String, String> msg = new HashMap<String, String>();
        msg.put("message", "Invalid hexadecimal representation of ObjectId " + id + "");
        System.out.println("[01;31m" + "Invalid hexadecimal representation of ObjectId " + id + "[00;00m");
        mongoClient.close();
        return getResponse(Response.Status.BAD_REQUEST, msg);
    }
    // No match → 404 with an explanatory message.
    if (!cursorDocMap.hasNext()) {
        Map<String, String> msg = new HashMap<String, String>();
        msg.put("message", "Dataset with ID " + id + " does not exist!");
        logger.error("[01;34m" + "Dataset with ID " + id + " does not exist!" + "[00;00m");
        mongoClient.close();
        return getResponse(Response.Status.NOT_FOUND, msg);
    }
    // _id lookups match at most one document; only list.get(0) is returned.
    while (cursorDocMap.hasNext()) {
        Map<String, Object> map = new HashMap<String, Object>();
        Document doc = cursorDocMap.next();
        logger.error("[01;34mDataset ID: " + doc.get("_id") + "[00;00m");
        if (doc != null) {
            map.put("id", id);
            map.put("data", doc.get("data"));
            map.put("name", doc.get("name"));
            map.put("env", doc.get("env"));
            map.put("owner", doc.get("owner"));
            map.put("status", doc.get("status"));
            map.put("comments", doc.get("comments"));
            map.put("modifiedDate", doc.get("modifiedDate"));
            list.add(map);
        }
    }
    mongoClient.close();
    return getResponse(Response.Status.OK, list.get(0));
}
/**
 * Deletes the dataset document with the given ObjectId.
 *
 * Returns 400 for a malformed id, 404 when no document matches, otherwise
 * 200 OK with a confirmation message.
 *
 * @param id hex string of the Mongo ObjectId
 * @return JAX-RS response as described above
 */
@DELETE
@Path("/{id}")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response deleteDataset(@PathParam("id") String id) {
    // Bug fix: validate the id first, mirroring getDataset; previously a
    // malformed id made `new ObjectId(id)` throw an uncaught
    // IllegalArgumentException (HTTP 500) instead of returning 400.
    if (!ObjectId.isValid(id)) {
        Map<String, String> msg = new HashMap<String, String>();
        msg.put("message", "Invalid hexadecimal representation of ObjectId " + id + "");
        mongoClient.close();
        return getResponse(Response.Status.BAD_REQUEST, msg);
    }
    MongoCursor<Document> cursorDocMap = collection.find(new Document("_id", new ObjectId(id))).iterator();
    if (!cursorDocMap.hasNext()) {
        Map<String, String> msg = new HashMap<String, String>();
        msg.put("message", "Dataset with ID " + id + " does not exist!");
        logger.error("[01;34m" + "Dataset with ID " + id + " does not exist!" + "[00;00m");
        mongoClient.close();
        return getResponse(Response.Status.NOT_FOUND, msg);
    }
    while (cursorDocMap.hasNext()) {
        Document doc = cursorDocMap.next();
        logger.error("[01;34mDataset ID: " + doc.get("_id") + "[00;00m");
        collection.deleteOne(doc);
    }
    mongoClient.close();
    Map<String, String> msg = new HashMap<String, String>();
    msg.put("message", "Successfully deleted dataset with ID: " + id);
    return getResponse(Response.Status.OK, msg);
}
/**
 * Removes every dataset document from the collection.
 *
 * @return 200 OK with a confirmation message
 */
@DELETE
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response deleteAllDatasets() {
    // An empty filter document matches (and deletes) every document.
    collection.deleteMany(new Document());
    mongoClient.close();
    Map<String, String> confirmation = new HashMap<String, String>();
    confirmation.put("message", "Successfully deleted all datasets");
    return getResponse(Response.Status.OK, confirmation);
}
@PUT
@Path("/{id}")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response updateDataset(@PathParam("id") String id, Dataset dataset) {
Map<Integer, Map<String, Object>> original_values_map = new HashMap<Integer, Map<String, Object>>();
Map<Integer, Map<String, Object>> toupdate_values_map = new HashMap<Integer, Map<String, Object>>();
List<Object> list = null;
List<Bson> firstLevelSets = new ArrayList<Bson>();
int changes = 0;
// retrive the corresponding dataset with the given id
MongoCursor<Document> cursorDocMap = collection.find(new Document("_id", new ObjectId(id))).iterator();
while (cursorDocMap.hasNext()) {
Document doc = cursorDocMap.next();
list = castList(doc.get("data"), Object.class);
for (Object obj : list) {
Map<?, ?> mObj = (Map<?, ?>)obj;
Map<String, Object> tmp = new HashMap<String, Object>();
Iterator<?> it = mObj.keySet().iterator();
while (it.hasNext()) {
String key = (String)it.next();
tmp.put(key, mObj.get(key));
}
original_values_map.put((Integer)tmp.get("code"), tmp);
}
logger.error("[01;31mUpdate: " + "casting property 'data' seems to have passed!" + "[00;00m");
if (!dataset.getName ().equals(doc.get("name") )) {
firstLevelSets.add(set("name", dataset.getName()));
++changes;
}
if (!dataset.getEnv ().equals(doc.get("env") )) {
firstLevelSets.add(set("env", dataset.getEnv()));
++changes;
}
if (!dataset.getOwner ().equals(doc.get("owner") )) {
firstLevelSets.add(set("owner", dataset.getOwner()));
++changes;
}
if (!dataset.getStatus ().equals(doc.get("status") )) {
firstLevelSets.add(set("status", dataset.getStatus()));
++changes;
}
if (!dataset.getComments ().equals(doc.get("comments"))) {
firstLevelSets.add(set("comments", dataset.getComments()));
++changes;
}
}
List<Map<String, Object>> updates = dataset.getData();
for (Map<String, Object> map : updates) {
toupdate_values_map.put((Integer)map.get("code"), map);
logger.error("[01;34mDataset: " + toupdate_values_map.get(map.get("code")) + "[00;00m");
logger.error("[01;31mname: " + toupdate_values_map.get(map.get("code")).get("name") + "[00;00m");
}
Map<String, String> updateDatePair = new HashMap<String, String>();
updateDatePair.put("cfgCode", "cfgCodeUpdateDate");
updateDatePair.put("comments", "");
updateDatePair.put("containsAddedFat", "containsAddedFatUpdateDate");
updateDatePair.put("containsAddedSodium", "containsAddedSodiumUpdateDate");
updateDatePair.put("containsAddedSugar", "containsAddedSugarUpdateDate");
updateDatePair.put("containsAddedTransfat", "containsAddedTransfatUpdateDate");
updateDatePair.put("containsCaffeine", "containsCaffeineUpdateDate");
updateDatePair.put("containsFreeSugars", "containsFreeSugarsUpdateDate");
updateDatePair.put("containsSugarSubstitutes", "containsSugarSubstitutesUpdateDate");
updateDatePair.put("foodGuideServingG", "foodGuideUpdateDate");
updateDatePair.put("foodGuideServingMeasure", "foodGuideUpdateDate");
updateDatePair.put("replacementCode", "");
updateDatePair.put("rolledUp", "rolledUpUpdateDate");
updateDatePair.put("satfatAmountPer100g", "satfatImputationDate");
updateDatePair.put("satfatImputationReference", "satfatImputationDate");
updateDatePair.put("sodiumAmountPer100g", "sodiumImputationDate");
updateDatePair.put("sodiumImputationReference", "sodiumImputationDate");
updateDatePair.put("sugarAmountPer100g", "sugarImputationDate");
updateDatePair.put("sugarImputationReference", "sugarImputationDate");
updateDatePair.put("tier4ServingG", "tier4ServingUpdateDate");
updateDatePair.put("tier4ServingMeasure", "tier4ServingUpdateDate");
updateDatePair.put("totalfatAmountPer100g", "totalfatImputationDate");
updateDatePair.put("totalfatImputationReference", "totalfatImputationDate");
updateDatePair.put("transfatAmountPer100g", "transfatImputationDate");
updateDatePair.put("transfatImputationReference", "transfatImputationDate");
for (Map<String, Object> map : updates) {
List<Bson> sets = new ArrayList<Bson>();
logger.error("[01;31msize: " + sets.size() + "[00;00m");
if (toupdate_values_map.get(map.get("code")).get("name") != null && !toupdate_values_map .get (map .get ("code")) .get ("name") .equals (original_values_map .get (map .get ("code")) .get ("name"))) {
sets.add(set("data.$.name", map.get("name")));
++changes;
logger.error("[01;31mvalue changed: " + map.get("name") + "[00;00m");
}
if (toupdate_values_map .get (map .get ("code")) .get ("cnfGroupCode") != null && !toupdate_values_map .get (map .get ("code")) .get ("cnfGroupCode") .equals (original_values_map .get (map .get ("code")) .get ("cnfGroupCode"))) {
sets.add(set("data.$.cnfGroupCode", map.get("cnfGroupCode")));
++changes;
logger.error("[01;31mvalue changed: " + map.get("cnfGroupCode") + "[00;00m");
}
for (String key : updateDatePair.keySet()) {
changes = updateIfModified(key, updateDatePair.get(key), sets, changes, original_values_map, toupdate_values_map, map);
}
if (toupdate_values_map .get (map .get ("code")) .get ("cfgCodeUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("cfgCodeUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("cfgCodeUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("energyKcal") != null && !toupdate_values_map .get (map .get ("code")) .get ("energyKcal") .equals (original_values_map .get (map .get ("code")) .get ("energyKcal"))) {
sets.add(set("data.$.energyKcal", map.get("energyKcal")));
++changes;
logger.error("[01;31mvalue changed: " + map.get("energyKcal") + "[00;00m");
}
if (toupdate_values_map .get (map .get ("code")) .get ("sodiumImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("sodiumImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("sodiumImputationDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("sugarImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("sugarImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("sugarImputationDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("transfatImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("transfatImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("transfatImputationDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("satfatImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("satfatImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("satfatImputationDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("totalfatImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("totalfatImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("totalfatImputationDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("containsAddedSodiumUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsAddedSodiumUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsAddedSodiumUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("containsAddedSugarUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsAddedSugarUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsAddedSugarUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("containsFreeSugarsUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsFreeSugarsUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsFreeSugarsUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("containsAddedFatUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsAddedFatUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsAddedFatUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("containsAddedTransfatUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsAddedTransfatUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsAddedTransfatUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("containsCaffeineUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsCaffeineUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsCaffeineUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("containsSugarSubstitutesUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsSugarSubstitutesUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsSugarSubstitutesUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("referenceAmountG") != null && !toupdate_values_map .get (map .get ("code")) .get ("referenceAmountG") .equals (original_values_map .get (map .get ("code")) .get ("referenceAmountG"))) {
sets.add(set("data.$.referenceAmountG", map.get("referenceAmountG")));
sets.add(currentDate("data.$.referenceAmountUpdateDate"));
++changes;
logger.error("[01;31mvalue changed: " + map.get("referenceAmountG") + "[00;00m");
}
if (toupdate_values_map .get (map .get ("code")) .get ("referenceAmountMeasure") != null && !toupdate_values_map .get (map .get ("code")) .get ("referenceAmountMeasure") .equals (original_values_map .get (map .get ("code")) .get ("referenceAmountMeasure"))) {
sets.add(set("data.$.referenceAmountMeasure", map.get("referenceAmountMeasure")));
++changes;
logger.error("[01;31mvalue changed: " + map.get("referenceAmountMeasure") + "[00;00m");
}
if (toupdate_values_map .get (map .get ("code")) .get ("referenceAmountUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("referenceAmountUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("referenceAmountUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("foodGuideUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("foodGuideUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("foodGuideUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("tier4ServingUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("tier4ServingUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("tier4ServingUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("rolledUpUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("rolledUpUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("rolledUpUpdateDate"))) {
}
if (toupdate_values_map .get (map .get ("code")) .get ("overrideSmallRaAdjustment") != null && !toupdate_values_map .get (map .get ("code")) .get ("overrideSmallRaAdjustment") .equals (original_values_map .get (map .get ("code")) .get ("overrideSmallRaAdjustment"))) {
sets.add(set("data.$.overrideSmallRaAdjustment", map.get("overrideSmallRaAdjustment")));
++changes;
logger.error("[01;31mvalue changed: " + map.get("overrideSmallRaAdjustment") + "[00;00m");
}
if (toupdate_values_map .get (map .get ("code")) .get ("adjustedReferenceAmount") != null && !toupdate_values_map .get (map .get ("code")) .get ("adjustedReferenceAmount") .equals (original_values_map .get (map .get ("code")) .get ("adjustedReferenceAmount"))) {
sets.add(set("data.$.adjustedReferenceAmount", map.get("adjustedReferenceAmount")));
++changes;
logger.error("[01;31mvalue changed: " + map.get("adjustedReferenceAmount") + "[00;00m");
}
if (toupdate_values_map .get (map .get ("code")) .get ("commitDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("commitDate") .equals (original_values_map .get (map .get ("code")) .get ("commitDate"))) {
}
logger.error("[01;31msize: " + sets.size() + "[00;00m");
logger.error("[01;35mcode: " + map.get("code") + "[00;00m");
if (sets.size() > 0) {
collection.updateOne(and(eq("_id", new ObjectId(id)), eq("data.code", map.get("code"))), combine(sets));
}
}
if (changes != 0) {
firstLevelSets.add(currentDate("modifiedDate"));
collection.updateOne(eq("_id", new ObjectId(id)), combine(firstLevelSets));
}
mongoClient.close();
Map<String, String> msg = new HashMap<String, String>();
msg.put("message", "Successfully updated dataset");
return getResponse(Response.Status.OK, msg);
}
/**
 * Re-classifies the dataset stored under {@code id}.
 *
 * <p>Flow: (1) load the dataset document from Mongo; (2) flatten each tracked
 * field from its {@code {value, modified}} wrapper down to the bare value so
 * the classification service receives plain fields; (3) POST the flattened
 * dataset to the external classification endpoint; (4) re-wrap each tracked
 * field of the response as {@code {value, modified=false}} before returning
 * it to the caller.</p>
 *
 * @param id hex string of the dataset's Mongo {@code _id}
 * @return 404 when no such dataset exists, otherwise 200 wrapping the
 *         re-wrapped classification response
 */
@POST
@Path("/{id}/classify")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
@SuppressWarnings("unchecked")
public Response classifyDataset(@PathParam("id") String id) {
	Map<String, Object> map = null;
	MongoCursor<Document> cursorDocMap = collection.find(new Document("_id", new ObjectId(id))).iterator();
	List<Document> dox = new ArrayList<Document>();
	if (!cursorDocMap.hasNext()) {
		Map<String, String> msg = new HashMap<String, String>();
		msg.put("message", "Dataset with ID " + id + " does not exist!");
		logger.error("[01;34m" + "Dataset with ID " + id + " does not exist!" + "[00;00m");
		mongoClient.close();
		return getResponse(Response.Status.NOT_FOUND, msg);
	}
	Map<String, String> updateDatePair = updateDatePairMapping();
	while (cursorDocMap.hasNext()) {
		map = new HashMap<String, Object>();
		// next() never returns null inside a hasNext() loop; the original's
		// null check came AFTER dereferencing doc and was dead code.
		Document doc = cursorDocMap.next();
		logger.error("[01;34mDataset ID: " + doc.get("_id") + "[00;00m");
		List<Object> list = castList(doc.get("data"), Object.class);
		for (Object obj : list) {
			Document item = (Document) obj;
			for (String key : updateDatePair.keySet()) {
				// Unwrap {value, modified} -> value.  Guard added: the original
				// threw NPE when a tracked field was absent from the item.
				Document value = (Document) item.get(key);
				if (value != null) {
					item.put(key, value.get("value"));
				}
			}
			dox.add(item);
		}
		map.put("data", dox);
		map.put("name", doc.get("name"));
		map.put("env", doc.get("env"));
		map.put("owner", doc.get("owner"));
		map.put("status", doc.get("status"));
		map.put("comments", doc.get("comments"));
		map.put("modifiedDate", doc.get("modifiedDate"));
	}
	mongoClient.close();
	Response response = ClientBuilder
		.newClient()
		.target(RequestURI.getUri() + ClassificationProperties.getEndPoint())
		.path("/classify")
		.request()
		.post(Entity.entity(map, MediaType.APPLICATION_JSON));
	logger.error("[01;31m" + "response status: " + response.getStatusInfo() + "[00;00m");
	Map<String, Object> deserialized = (Map<String, Object>)response.readEntity(Object.class);
	// Re-wrap every tracked field as {value, modified=false} for the client.
	List<Object> dataArray = (List<Object>)(deserialized).get("data");
	for (Object obj : dataArray) {
		for (String key : updateDatePair.keySet()) {
			Object value = ((Map<String, Object>)obj).get(key);
			Map<String, Object> metadataObject = new HashMap<String, Object>();
			metadataObject.put("value", value);
			metadataObject.put("modified", false);
			((Map<String, Object>)obj).put(key, metadataObject);
		}
	}
	// Guard added: the original indexed dataArray.get(0) unconditionally and
	// failed on an empty classification result.
	if (!dataArray.isEmpty()) {
		logger.error("[01;31m" + "response status: " + ((Map<String, Object>)dataArray.get(0)).get("sodiumAmountPer100g") + "[00;00m");
	}
	return getResponse(Response.Status.OK, deserialized);
}

/**
 * Maps each editable field to its companion update/imputation date field.
 * An empty string means the field has no companion date column.
 */
private static Map<String, String> updateDatePairMapping() {
	Map<String, String> pairs = new HashMap<String, String>();
	pairs.put("cfgCode", "cfgCodeUpdateDate");
	pairs.put("comments", "");
	pairs.put("containsAddedFat", "containsAddedFatUpdateDate");
	pairs.put("containsAddedSodium", "containsAddedSodiumUpdateDate");
	pairs.put("containsAddedSugar", "containsAddedSugarUpdateDate");
	pairs.put("containsAddedTransfat", "containsAddedTransfatUpdateDate");
	pairs.put("containsCaffeine", "containsCaffeineUpdateDate");
	pairs.put("containsFreeSugars", "containsFreeSugarsUpdateDate");
	pairs.put("containsSugarSubstitutes", "containsSugarSubstitutesUpdateDate");
	pairs.put("foodGuideServingG", "foodGuideUpdateDate");
	pairs.put("foodGuideServingMeasure", "foodGuideUpdateDate");
	pairs.put("replacementCode", "");
	pairs.put("rolledUp", "rolledUpUpdateDate");
	pairs.put("satfatAmountPer100g", "satfatImputationDate");
	pairs.put("satfatImputationReference", "satfatImputationDate");
	pairs.put("sodiumAmountPer100g", "sodiumImputationDate");
	pairs.put("sodiumImputationReference", "sodiumImputationDate");
	pairs.put("sugarAmountPer100g", "sugarImputationDate");
	pairs.put("sugarImputationReference", "sugarImputationDate");
	pairs.put("tier4ServingG", "tier4ServingUpdateDate");
	pairs.put("tier4ServingMeasure", "tier4ServingUpdateDate");
	pairs.put("totalfatAmountPer100g", "totalfatImputationDate");
	pairs.put("totalfatImputationReference", "totalfatImputationDate");
	pairs.put("transfatAmountPer100g", "transfatImputationDate");
	pairs.put("transfatImputationReference", "transfatImputationDate");
	return pairs;
}
/**
 * Proxies the posted dataset to the classification service's {@code /flags}
 * endpoint and returns the service's response unchanged.
 */
@POST
@Path("/{id}/flags")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response flagsDataset(@PathParam("id") String id, Dataset dataset) {
	String serviceRoot = RequestURI.getUri() + ClassificationProperties.getEndPoint();
	Entity<Dataset> payload = Entity.entity(dataset, MediaType.APPLICATION_JSON);
	return ClientBuilder
			.newClient()
			.target(serviceRoot)
			.path("/flags")
			.request()
			.post(payload);
}
/**
 * Proxies the posted dataset to the classification service's {@code /init}
 * endpoint and returns the service's response unchanged.
 */
@POST
@Path("/{id}/init")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response initDataset(@PathParam("id") String id, Dataset dataset) {
	String serviceRoot = RequestURI.getUri() + ClassificationProperties.getEndPoint();
	Entity<Dataset> payload = Entity.entity(dataset, MediaType.APPLICATION_JSON);
	return ClientBuilder
			.newClient()
			.target(serviceRoot)
			.path("/init")
			.request()
			.post(payload);
}
/**
 * Proxies the posted dataset to the classification service's
 * {@code /adjustment} endpoint and returns the service's response unchanged.
 */
@POST
@Path("/{id}/adjustment")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response adjustmentDataset(@PathParam("id") String id, Dataset dataset) {
	String serviceRoot = RequestURI.getUri() + ClassificationProperties.getEndPoint();
	Entity<Dataset> payload = Entity.entity(dataset, MediaType.APPLICATION_JSON);
	return ClientBuilder
			.newClient()
			.target(serviceRoot)
			.path("/adjustment")
			.request()
			.post(payload);
}
/**
 * Placeholder for the dataset commit endpoint.
 *
 * <p>NOTE(review): currently a no-op stub — it consumes the POST and returns
 * nothing (HTTP 204). Confirm whether the commit logic is implemented
 * elsewhere or is still outstanding work.</p>
 */
@POST
@Path("/{id}/commit")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public void commitDataset() {
}
/**
 * Connectivity probe.  Runs a small reference query ({@code
 * connectivity_test.sql}) and returns the Canada Food Group id/description
 * pairs it yields.  When the database is unreachable the payload degrades to
 * a map of HTTP status codes and reason phrases instead of failing.
 *
 * <p>Also prints a two-pass, color-coded console table of the JAX-RS status
 * codes — server-side debug output only; it does not affect the response.</p>
 *
 * @return HTTP 200 with either food-group rows or the status-code fallback
 */
@GET
@Path("/status")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response getStatusCodes() {
	Map<Integer, String> list = new HashMap<Integer, String>();
	String sql = ContentHandler.read("connectivity_test.sql", getClass());
	try {
		// try-with-resources: the original leaked the statement/result set
		// whenever executeQuery() or rs.next() threw.
		try (PreparedStatement stmt = conn.prepareStatement(sql);
				ResultSet rs = stmt.executeQuery()) {
			while (rs.next()) {
				list.put(rs.getInt("canada_food_group_id"), rs.getString("canada_food_group_desc_e"));
			}
		}
		conn.close();
	} catch (SQLException e) {
		// TODO: proper response to handle exceptions
		logger.error("[01;03;31m" + e.getMessage() + "[00;00;00m");
		for (Response.Status status : Response.Status.values()) {
			// Integer.valueOf replaces the deprecated new Integer(...) ctor.
			list.put(Integer.valueOf(status.getStatusCode()), status.getReasonPhrase());
		}
	}
	// ---- console-only debug table of JAX-RS status codes from here on ----
	Map<Integer, String> map = new HashMap<Integer, String>();
	for (Response.Status obj : Response.Status.values()) {
		map.put(obj.getStatusCode(), obj.name());
	}
	int len = 0; // width of the longest status name, for column alignment
	for (Integer key : map.keySet()) {
		String value = map.get(key);
		if (value.length() > len) {
			len = value.length();
		}
	}
	// StringBuilder: locals need no synchronization (was StringBuffer).
	String format = new StringBuilder().append("%d %-").append(len).append("s").toString();
	String tamrof = new StringBuilder().append("%-").append(len + 4).append("s").toString();
	// arr[series][slot]: codes bucketed by hundreds digit (1xx..5xx).
	// NOTE(review): the bucketing resets its slot index whenever the series
	// changes, so it relies on HashMap iteration grouping codes by series —
	// debug output may drop entries if that assumption fails.
	Integer[][] arr = new Integer[6][18];
	Integer series = 0;
	int i = 0;
	for (Integer key : map.keySet()) {
		if (key / 100 != series) {
			series = key / 100;
			i = 0;
		}
		arr[series][i++] = key;
	}
	// Rows 0 and 1 are blanked: only the 2xx-5xx columns get printed.
	for (int j = 0; j < 18; ++j) {
		arr[0][j] = null;
		arr[1][j] = null;
	}
	// First pass: code + enum name, alternating cyan/magenta by row.
	for (int l = 0; l < 18; ++l) {
		for (int m = 2; m < 6; ++m) {
			Integer key = arr[m][l];
			if (key != null) {
				System.out.printf("[01;%dm" + format + "[00;00m", l % 2 == 0 ? 36 : 35, key, Response.Status.fromStatusCode(key).name());
			} else {
				System.out.printf(tamrof, "");
			}
		}
		System.out.println();
	}
	System.out.println();
	System.out.println();
	// Second pass: code + toString(), alternating blue/red by row.
	for (int l = 0; l < 18; ++l) {
		for (int m = 2; m < 6; ++m) {
			Integer key = arr[m][l];
			if (key != null) {
				System.out.printf("[01;%dm" + format + "[00;00m", l % 2 == 0 ? 34 : 31, key, Response.Status.fromStatusCode(key));
			} else {
				System.out.printf(tamrof, "");
			}
		}
		System.out.println();
	}
	mongoClient.close();
	// return getResponse(Response.Status.OK, Response.Status.values());
	return getResponse(Response.Status.OK, list);
}
/**
 * Builds and runs the ad-hoc food/recipe search described by {@code search}
 * and returns the matching rows as {@link CanadaFoodGuideDataset} items in a
 * 200 response (empty list when {@code search} is null).
 *
 * <p>The method works in two mirrored passes: the first appends one SQL
 * condition (with {@code ?} placeholders) per active filter; the second binds
 * the placeholder values in exactly the same order.  Any edit here MUST keep
 * the two passes in sync — a mismatch fails at execution time.</p>
 *
 * @param search the filter criteria posted by the client; may be null
 * @return HTTP 200 wrapping the (possibly empty) result list
 */
private /* List<CanadaFoodGuideDataset> */ Response doSearchCriteria(CfgFilter search) {
	List<CanadaFoodGuideDataset> list = new ArrayList<CanadaFoodGuideDataset>();
	if (search != null) {
		StringBuffer sb = new StringBuffer(search.getSql());
		logger.error("[01;30m" + search.getDataSource() + "[00;00m");
		// Always-true seed clause so every later filter can start with " AND".
		sb.append(" WHERE length('this where-clause is an artifact') = 32").append("\n");
		if (search.getDataSource() != null && search.getDataSource().matches("food|recipe")) {
			sb.append(" AND type = ?").append("\n");
		}
		logger.error("[01;30m" + search.getFoodRecipeName() + "[00;00m");
		if (search.getFoodRecipeName() != null && !search.getFoodRecipeName().isEmpty()) {
			sb.append(" AND LOWER(name) LIKE ?").append("\n");
		}
		logger.error("[01;30m" + search.getFoodRecipeCode() + "[00;00m");
		if (search.getFoodRecipeCode() != null && !search.getFoodRecipeCode().isEmpty()) {
			// FIX: parenthesized — the bare "AND code = ? OR ... LIKE ?" let the
			// OR escape the whole AND chain and match unrelated rows.
			sb.append(" AND (code = ? OR CAST(code AS text) LIKE ?)").append("\n");
		}
		logger.error("[01;30m" + search.getCnfCode() + "[00;00m");
		if (search.getCnfCode() != null && !search.getCnfCode().isEmpty()) {
			sb.append(" AND cnf_group_code = ?").append("\n");
		}
		logger.error("[01;30m" + search.getSubgroupCode() + "[00;00m");
		if (search.getSubgroupCode() != null && !search.getSubgroupCode().isEmpty()) {
			sb.append(" AND CAST(cfg_code AS text) LIKE ?").append("\n");
		}
		// CFG tier filter: each digit of the tier code (e.g. 124) becomes one
		// placeholder compared against the 4th digit of cfg_code.
		if (search.getCfgTier() != null && !search.getCfgTier().equals(CfgTier.ALL.getCode())) {
			switch (search.getCfgTier()) {
				case 1:
				case 2:
				case 3:
				case 4:
					logger.error("[01;31mCalling all codes with Tier " + search.getCfgTier() + "[00;00m");
					sb.append(" AND LENGTH(CAST(cfg_code AS text)) = 4").append("\n");
					sb.append(" AND CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
					break;
				case 12:
				case 13:
				case 14:
				case 23:
				case 24:
				case 34:
					logger.error("[01;31mCalling all codes with Tier " + search.getCfgTier() + "[00;00m");
					sb.append(" AND LENGTH(CAST(cfg_code AS text)) = 4").append("\n");
					// FIX: the OR alternatives are now grouped in parentheses —
					// previously the dangling OR escaped the AND chain.
					sb.append(" AND (CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
					sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?)").append("\n");
					break;
				case 123:
				case 124:
				case 134:
				case 234:
					logger.error("[01;31mCalling all codes with Tier " + search.getCfgTier() + "[00;00m");
					sb.append(" AND LENGTH(CAST(cfg_code AS text)) = 4").append("\n");
					sb.append(" AND (CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
					sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
					sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?)").append("\n");
					break;
				case 1234:
					logger.error("[01;31mCalling all codes with Tier " + search.getCfgTier() + "[00;00m");
					sb.append(" AND LENGTH(CAST(cfg_code AS text)) = 4").append("\n");
					sb.append(" AND (CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
					sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
					sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
					sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?)").append("\n");
					break;
				case 9:
					logger.error("[01;31mCalling all codes with missing Tier![00;00m");
					sb.append(" AND LENGTH(CAST(cfg_code AS text)) < 4").append("\n");
					break;
			}
		}
		if (search.getRecipe() != null && !search.getRecipe().equals(RecipeRolled.IGNORE.getCode())) {
			logger.error("[01;32m" + search.getRecipe() + "[00;00m");
			switch (search.getRecipe()) {
				case 1:
				case 2:
					sb.append(" AND rolled_up = ?").append("\n");
					break;
				case 3:
					sb.append(" AND (rolled_up = 1 OR rolled_up = 2)").append("\n");
					break;
			}
		}
		// "contains added X" filters arrive as "name=bool" strings; a bare "0"
		// means the whole group is ignored.
		boolean notIgnore = false;
		if (search.getContainsAdded() != null) {
			String[] arr = new String[search.getContainsAdded().size()];
			arr = search.getContainsAdded().toArray(arr);
			for (String i : arr) {
				logger.error("[01;32m" + i + "[00;00m");
				if (!i.equals("0")) {
					notIgnore = true;
				}
			}
		}
		Map<String, String> map = null;
		if (search.getContainsAdded() != null && notIgnore) {
			map = new HashMap<String, String>();
			logger.error("[01;32m" + search.getContainsAdded() + "[00;00m");
			String[] arr = new String[search.getContainsAdded().size()];
			arr = search.getContainsAdded().toArray(arr);
			for (String keyValue : arr) {
				StringTokenizer tokenizer = new StringTokenizer(keyValue, "=");
				map.put(tokenizer.nextToken(), tokenizer.nextToken());
				logger.error("[01;32m" + keyValue + "[00;00m");
			}
			logger.error("\n[01;32m" + new GsonBuilder().setDateFormat("yyyy-MM-dd").setPrettyPrinting().create().toJson(map) + "[00;00m");
			Set<String> keys = map.keySet();
			for (String key : keys) {
				switch (ContainsAdded.valueOf(key)) {
					case sodium:
						sb.append(" AND contains_added_sodium = ? ").append("\n");
						break;
					case sugar:
						sb.append(" AND contains_added_sugar = ? ").append("\n");
						break;
					case fat:
						sb.append(" AND contains_added_fat = ? ").append("\n");
						break;
					case transfat:
						sb.append(" AND contains_added_transfat = ? ").append("\n");
						break;
					case caffeine:
						sb.append(" AND contains_caffeine = ? ").append("\n");
						break;
					case freeSugars:
						sb.append(" AND contains_free_sugars = ? ").append("\n");
						break;
					case sugarSubstitute:
						sb.append(" AND contains_sugar_substitutes = ? ").append("\n");
						break;
				}
			}
		}
		// "missing value" filters take no placeholders — pure IS NULL tests.
		if (search.getMissingValues() != null) {
			logger.error("[01;32m" + search.getMissingValues() + "[00;00m");
			logger.error("\n[01;32m" + new GsonBuilder().setDateFormat("yyyy-MM-dd").setPrettyPrinting().create().toJson(search.getMissingValues()) + "[00;00m");
			for (String name : search.getMissingValues()) {
				switch (Missing.valueOf(name)) {
					case refAmount:
						sb.append(" AND reference_amount_g IS NULL").append("\n");
						break;
					case cfgServing:
						sb.append(" AND food_guide_serving_g IS NULL").append("\n");
						break;
					case tier4Serving:
						sb.append(" AND tier_4_serving_g IS NULL").append("\n");
						break;
					case energy:
						sb.append(" AND energy_kcal IS NULL").append("\n");
						break;
					case cnfCode:
						sb.append(" AND cnf_group_code IS NULL").append("\n");
						break;
					case rollUp:
						sb.append(" AND rolled_up IS NULL").append("\n");
						break;
					case sodiumPer100g:
						sb.append(" AND sodium_amount_per_100g IS NULL").append("\n");
						break;
					case sugarPer100g:
						sb.append(" AND sugar_amount_per_100g IS NULL").append("\n");
						break;
					case fatPer100g:
						sb.append(" AND totalfat_amount_per_100g IS NULL").append("\n");
						break;
					case transfatPer100g:
						sb.append(" AND transfat_amount_per_100g IS NULL").append("\n");
						break;
					case satFatPer100g:
						sb.append(" AND satfat_amount_per_100g IS NULL").append("\n");
						break;
					case addedSodium:
						sb.append(" AND contains_added_sodium IS NULL").append("\n");
						break;
					case addedSugar:
						sb.append(" AND contains_added_sugar IS NULL").append("\n");
						break;
					case addedFat:
						sb.append(" AND contains_added_fat IS NULL").append("\n");
						break;
					case addedTransfat:
						sb.append(" AND contains_added_transfat IS NULL").append("\n");
						break;
					case caffeine:
						sb.append(" AND contains_caffeine IS NULL").append("\n");
						break;
					case freeSugars:
						sb.append(" AND contains_free_sugars IS NULL").append("\n");
						break;
					case sugarSubstitute:
						sb.append(" AND contains_sugar_substitutes IS NULL").append("\n");
						break;
				}
			}
		}
		// Last-updated range filters: two placeholders (from/to) per column.
		// energy and cnfCode deliberately have no date column.
		if (search.getLastUpdateDateFrom() != null && search.getLastUpdateDateFrom().matches("\\d{4}-\\d{2}-\\d{2}") && search.getLastUpdateDateTo() != null && search.getLastUpdateDateTo().matches("\\d{4}-\\d{2}-\\d{2}")) {
			if (search.getLastUpdatedFilter() != null) {
				logger.error("[01;32m" + search.getLastUpdatedFilter() + "[00;00m");
				for (String name : search.getLastUpdatedFilter()) {
					switch (Missing.valueOf(name)) {
						case refAmount:
							sb.append(" AND reference_amount_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case cfgServing:
							sb.append(" AND food_guide_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case tier4Serving:
							sb.append(" AND tier_4_serving_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case energy:
						case cnfCode:
							break;
						case rollUp:
							sb.append(" AND rolled_up_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case sodiumPer100g:
							sb.append(" AND sodium_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case sugarPer100g:
							sb.append(" AND sugar_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case fatPer100g:
							sb.append(" AND totalfat_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case transfatPer100g:
							sb.append(" AND transfat_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case satFatPer100g:
							sb.append(" AND satfat_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case addedSodium:
							sb.append(" AND contains_added_sodium_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case addedSugar:
							sb.append(" AND contains_added_sugar_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case addedFat:
							sb.append(" AND contains_added_fat_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case addedTransfat:
							sb.append(" AND contains_added_transfat_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case caffeine:
							sb.append(" AND contains_caffeine_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case freeSugars:
							sb.append(" AND contains_free_sugars_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
						case sugarSubstitute:
							sb.append(" AND contains_sugar_substitutes_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
							break;
					}
				}
			}
		}
		if (search.getComments() != null && !search.getComments().isEmpty()) {
			sb.append(" AND LOWER(comments) LIKE ?").append("\n");
		}
		if (search.getCommitDateFrom() != null && search.getCommitDateFrom().matches("\\d{4}-\\d{2}-\\d{2}") && search.getCommitDateTo() != null && search.getCommitDateTo().matches("\\d{4}-\\d{2}-\\d{2}")) {
			sb.append(" AND commit_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
		}
		search.setSql(sb.toString());
		try {
			meta = conn.getMetaData();
			logger.error("[01;34mJDBC driver version is " + meta.getDriverVersion() + "[00;00m"); // Retrieve driver information
			// try-with-resources closes the statement/result set even when a
			// bind or the query throws (original leaked them on failure).
			try (PreparedStatement stmt = conn.prepareStatement(search.getSql())) {
				int i = 0; // running count of bound placeholders — must mirror the SQL pass above
				if (search.getDataSource() != null && search.getDataSource().matches("food|recipe")) {
					stmt.setInt(++i, search.getDataSource().equals("food") ? 1 : 2);
				}
				if (search.getFoodRecipeName() != null && !search.getFoodRecipeName().isEmpty()) {
					stmt.setString(++i, ("%" + search.getFoodRecipeName() + "%").toLowerCase());
				}
				if (search.getFoodRecipeCode() != null && !search.getFoodRecipeCode().isEmpty()) {
					// NOTE(review): assumes the entered code is numeric — a
					// non-numeric value throws NumberFormatException here.
					stmt.setInt(++i, Integer.parseInt(search.getFoodRecipeCode()));
					stmt.setString(++i, search.getFoodRecipeCode() + "%");
				}
				if (search.getCnfCode() != null && !search.getCnfCode().isEmpty()) {
					stmt.setInt(++i, Integer.parseInt(search.getCnfCode()));
				}
				if (search.getSubgroupCode() != null && !search.getSubgroupCode().isEmpty()) {
					stmt.setString(++i, search.getSubgroupCode() + "%");
				}
				if (search.getCfgTier() != null && !search.getCfgTier().equals(CfgTier.ALL.getCode())) {
					switch (search.getCfgTier()) {
						case 1:
						case 2:
						case 3:
						case 4:
							stmt.setInt(++i, search.getCfgTier());
							break;
						case 12: case 13: case 14: case 23: case 24: case 34:
						case 123: case 124: case 134: case 234:
						case 1234:
							// FIX: these tiers append one placeholder per digit in
							// the SQL pass but bound none of them before, so the
							// query always failed at execution time.  Bind each
							// digit of the tier code in order.
							for (char digit : String.valueOf(search.getCfgTier()).toCharArray()) {
								stmt.setInt(++i, digit - '0');
							}
							break;
						case 9:
							break; // "missing tier" appends no placeholder
					}
				}
				if (search.getRecipe() != null && !search.getRecipe().equals(RecipeRolled.IGNORE.getCode())) {
					switch (search.getRecipe()) {
						case 1:
						case 2:
							stmt.setInt(++i, search.getRecipe());
							break;
					}
				}
				if (search.getContainsAdded() != null && notIgnore) {
					Set<String> keys = map.keySet();
					for (String key : keys) {
						switch (ContainsAdded.valueOf(key)) {
							case sodium:
							case sugar:
							case fat:
							case transfat:
							case caffeine:
							case freeSugars:
							case sugarSubstitute:
								stmt.setInt(++i, map.get(key).equals("true") ? 1 : 2);
								break;
						}
					}
				}
				if (search.getLastUpdateDateFrom() != null && search.getLastUpdateDateFrom().matches("\\d{4}-\\d{2}-\\d{2}") && search.getLastUpdateDateTo() != null && search.getLastUpdateDateTo().matches("\\d{4}-\\d{2}-\\d{2}")) {
					if (search.getLastUpdatedFilter() != null) {
						logger.error("[01;32m" + search.getLastUpdatedFilter() + "[00;00m");
						for (String name : search.getLastUpdatedFilter()) {
							switch (Missing.valueOf(name)) {
								case refAmount:
								case cfgServing:
								case tier4Serving:
									stmt.setString(++i, search.getLastUpdateDateFrom());
									stmt.setString(++i, search.getLastUpdateDateTo());
									break;
								case energy:
								case cnfCode:
									break;
								case rollUp:
								case sodiumPer100g:
								case sugarPer100g:
								case fatPer100g:
								case transfatPer100g:
								case satFatPer100g:
								case addedSodium:
								case addedSugar:
								case addedFat:
								case addedTransfat:
								case caffeine:
								case freeSugars:
								case sugarSubstitute:
									stmt.setString(++i, search.getLastUpdateDateFrom());
									stmt.setString(++i, search.getLastUpdateDateTo());
									break;
							}
						}
					}
				}
				if (search.getComments() != null && !search.getComments().isEmpty()) {
					stmt.setString(++i, ("%" + search.getComments() + "%").toLowerCase());
				}
				if (search.getCommitDateFrom() != null && search.getCommitDateFrom().matches("\\d{4}-\\d{2}-\\d{2}") && search.getCommitDateTo() != null && search.getCommitDateTo().matches("\\d{4}-\\d{2}-\\d{2}")) {
					stmt.setString(++i, search.getCommitDateFrom());
					stmt.setString(++i, search.getCommitDateTo());
				}
				logger.error("[01;34mSQL query to follow:\n" + stmt.toString() + "[00;00m");
				// Map each row; Pseudo* wrappers mark editable columns and
				// distinguish "absent" from a real value.
				try (ResultSet rs = stmt.executeQuery()) {
					while (rs.next()) {
						CanadaFoodGuideDataset foodItem = new CanadaFoodGuideDataset();
						foodItem.setType(rs.getInt("type") == 1 ? "food" : "recipe");
						foodItem.setCode(rs.getInt("code"));
						foodItem.setName(rs.getString("name"));
						if (rs.getString("cnf_group_code") != null) {
							foodItem.setCnfGroupCode(rs.getInt("cnf_group_code"));
						}
						if (rs.getString("cfg_code") != null) {
							foodItem.setCfgCode(new PseudoInteger(rs.getInt("cfg_code"))); // editable
						} else {
							foodItem.setCfgCode(new PseudoInteger());
						}
						// NOTE(review): commitDate is set here from cfg_code_update_date
						// and then overwritten from commit_date below — confirm whether
						// this first assignment should target a cfgCodeUpdateDate field.
						foodItem.setCommitDate(rs.getDate("cfg_code_update_date"));
						if (rs.getString("energy_kcal") != null) {
							foodItem.setEnergyKcal(rs.getDouble("energy_kcal"));
						}
						if (rs.getString("sodium_amount_per_100g") != null) {
							foodItem.setSodiumAmountPer100g(new PseudoDouble(rs.getDouble("sodium_amount_per_100g"))); // editable
						} else {
							foodItem.setSodiumAmountPer100g(new PseudoDouble());
						}
						foodItem.setSodiumImputationReference(new PseudoString(rs.getString("sodium_imputation_reference")));
						foodItem.setSodiumImputationDate(rs.getDate("sodium_imputation_date"));
						if (rs.getString("sugar_amount_per_100g") != null) {
							foodItem.setSugarAmountPer100g(new PseudoDouble(rs.getDouble("sugar_amount_per_100g"))); // editable
						} else {
							foodItem.setSugarAmountPer100g(new PseudoDouble());
						}
						foodItem.setSugarImputationReference(new PseudoString(rs.getString("sugar_imputation_reference")));
						foodItem.setSugarImputationDate(rs.getDate("sugar_imputation_date"));
						if (rs.getString("transfat_amount_per_100g") != null) {
							foodItem.setTransfatAmountPer100g(new PseudoDouble(rs.getDouble("transfat_amount_per_100g"))); // editable
						} else {
							foodItem.setTransfatAmountPer100g(new PseudoDouble());
						}
						foodItem.setTransfatImputationReference(new PseudoString(rs.getString("transfat_imputation_reference")));
						foodItem.setTransfatImputationDate(rs.getDate("transfat_imputation_date"));
						if (rs.getString("satfat_amount_per_100g") != null) {
							foodItem.setSatfatAmountPer100g(new PseudoDouble(rs.getDouble("satfat_amount_per_100g"))); // editable
						} else {
							foodItem.setSatfatAmountPer100g(new PseudoDouble());
						}
						foodItem.setSatfatImputationReference(new PseudoString(rs.getString("satfat_imputation_reference")));
						foodItem.setSatfatImputationDate(rs.getDate("satfat_imputation_date"));
						if (rs.getString("totalfat_amount_per_100g") != null) {
							foodItem.setTotalfatAmountPer100g(new PseudoDouble(rs.getDouble("totalfat_amount_per_100g"))); // editable
						} else {
							foodItem.setTotalfatAmountPer100g(new PseudoDouble());
						}
						foodItem.setTotalfatImputationReference(new PseudoString(rs.getString("totalfat_imputation_reference")));
						foodItem.setTotalfatImputationDate(rs.getDate("totalfat_imputation_date"));
						if (rs.getString("contains_added_sodium") != null) {
							foodItem.setContainsAddedSodium(new PseudoBoolean(rs.getBoolean("contains_added_sodium"))); // editable
						} else {
							foodItem.setContainsAddedSodium(new PseudoBoolean());
						}
						foodItem.setContainsAddedSodiumUpdateDate(rs.getDate("contains_added_sodium_update_date"));
						if (rs.getString("contains_added_sugar") != null) {
							foodItem.setContainsAddedSugar(new PseudoBoolean(rs.getBoolean("contains_added_sugar"))); // editable
						} else {
							foodItem.setContainsAddedSugar(new PseudoBoolean());
						}
						foodItem.setContainsAddedSugarUpdateDate(rs.getDate("contains_added_sugar_update_date"));
						if (rs.getString("contains_free_sugars") != null) {
							foodItem.setContainsFreeSugars(new PseudoBoolean(rs.getBoolean("contains_free_sugars"))); // editable
						} else {
							foodItem.setContainsFreeSugars(new PseudoBoolean());
						}
						foodItem.setContainsFreeSugarsUpdateDate(rs.getDate("contains_free_sugars_update_date"));
						if (rs.getString("contains_added_fat") != null) {
							foodItem.setContainsAddedFat(new PseudoBoolean(rs.getBoolean("contains_added_fat"))); // editable
						} else {
							foodItem.setContainsAddedFat(new PseudoBoolean());
						}
						foodItem.setContainsAddedFatUpdateDate(rs.getDate("contains_added_fat_update_date"));
						if (rs.getString("contains_added_transfat") != null) {
							foodItem.setContainsAddedTransfat(new PseudoBoolean(rs.getBoolean("contains_added_transfat"))); // editable
						} else {
							foodItem.setContainsAddedTransfat(new PseudoBoolean());
						}
						foodItem.setContainsAddedTransfatUpdateDate(rs.getDate("contains_added_transfat_update_date"));
						if (rs.getString("contains_caffeine") != null) {
							foodItem.setContainsCaffeine(new PseudoBoolean(rs.getBoolean("contains_caffeine"))); // editable
						} else {
							foodItem.setContainsCaffeine(new PseudoBoolean());
						}
						foodItem.setContainsCaffeineUpdateDate(rs.getDate("contains_caffeine_update_date"));
						if (rs.getString("contains_sugar_substitutes") != null) {
							foodItem.setContainsSugarSubstitutes(new PseudoBoolean(rs.getBoolean("contains_sugar_substitutes"))); // editable
						} else {
							foodItem.setContainsSugarSubstitutes(new PseudoBoolean());
						}
						foodItem.setContainsSugarSubstitutesUpdateDate(rs.getDate("contains_sugar_substitutes_update_date"));
						if (rs.getString("reference_amount_g") != null) {
							foodItem.setReferenceAmountG(rs.getDouble("reference_amount_g")); // editable
						}
						foodItem.setReferenceAmountMeasure(rs.getString("reference_amount_measure"));
						foodItem.setReferenceAmountUpdateDate(rs.getDate("reference_amount_update_date"));
						if (rs.getString("food_guide_serving_g") != null) {
							foodItem.setFoodGuideServingG(new PseudoDouble(rs.getDouble("food_guide_serving_g"))); // editable
						} else {
							foodItem.setFoodGuideServingG(new PseudoDouble());
						}
						foodItem.setFoodGuideServingMeasure(new PseudoString(rs.getString("food_guide_serving_measure")));
						foodItem.setFoodGuideUpdateDate(rs.getDate("food_guide_update_date"));
						if (rs.getString("tier_4_serving_g") != null) {
							foodItem.setTier4ServingG(new PseudoDouble(rs.getDouble("tier_4_serving_g"))); // editable
						} else {
							foodItem.setTier4ServingG(new PseudoDouble());
						}
						foodItem.setTier4ServingMeasure(new PseudoString(rs.getString("tier_4_serving_measure")));
						foodItem.setTier4ServingUpdateDate(rs.getDate("tier_4_serving_update_date"));
						if (rs.getString("rolled_up") != null) {
							foodItem.setRolledUp(new PseudoBoolean(rs.getBoolean("rolled_up"))); // editable
						} else {
							foodItem.setRolledUp(new PseudoBoolean());
						}
						foodItem.setRolledUpUpdateDate(rs.getDate("rolled_up_update_date"));
						if (rs.getString("apply_small_ra_adjustment") != null) {
							foodItem.setOverrideSmallRaAdjustment(rs.getBoolean("apply_small_ra_adjustment"));
						}
						if (rs.getString("replacement_code") != null) {
							foodItem.setReplacementCode(new PseudoInteger(rs.getInt("replacement_code"))); // editable
						} else {
							foodItem.setReplacementCode(new PseudoInteger());
						}
						foodItem.setCommitDate(rs.getDate("commit_date"));
						foodItem.setComments(new PseudoString(rs.getString("comments"))); // editable
						list.add(foodItem);
					}
				}
			}
			conn.close();
		} catch (SQLException e) {
			// TODO: proper response to handle exceptions
			// NOTE(review): the connection is left open on this path, and the
			// caller still receives HTTP 200 with a partial/empty list.
			logger.error("[01;03;31m" + e.getMessage() + "[00;00;00m", e); // was e.printStackTrace()
		}
	}
	return getResponse(Response.Status.OK, list);
}
/**
 * Wraps {@code obj} in a JAX-RS response carrying the given status and the
 * permissive CORS headers shared by every endpoint of this resource.
 *
 * @param status HTTP status to report
 * @param obj    entity serialized into the response body
 * @return fully built {@link Response}
 */
private Response getResponse(Response.Status status, Object obj) {
    Response.ResponseBuilder builder = Response.status(status);
    builder.header(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
    builder.header(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "origin, content-type, accept, authorization");
    builder.header(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    builder.header(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET, POST, PUT, DELETE, OPTIONS, HEAD");
    builder.header(HttpHeaders.ACCESS_CONTROL_MAX_AGE, "1209600");
    return builder.entity(obj).build();
}
/**
 * Narrows an untyped {@code Object} into a typed {@code List<T>} by casting
 * each element through {@code clazz}.
 *
 * @param obj   candidate list (typically a value pulled out of a BSON document)
 * @param clazz element type every entry is cast to
 * @return a new list of cast elements, or {@code null} when {@code obj} is
 *         not a {@link List} (callers rely on this null contract)
 * @throws ClassCastException if an element is not an instance of {@code clazz}
 */
private static <T> List<T> castList(Object obj, Class<T> clazz) {
    if (!(obj instanceof List<?>)) {
        return null;
    }
    List<T> result = new ArrayList<T>();
    for (Object element : (List<?>)obj) {
        result.add(clazz.cast(element));
    }
    return result;
}
/**
 * Adds a {@code $set} for {@code data.$.<key>.value} when the submitted value
 * differs from the stored one, plus a {@code $currentDate} on the paired
 * update-date field when {@code value} names one.
 *
 * @param key                 field name being compared/patched
 * @param value               paired update-date field name ("" when none)
 * @param sets                accumulator of Mongo update operators (mutated)
 * @param changes             running count of modified fields
 * @param original_values_map code -> stored field map
 * @param toupdate_values_map code -> submitted field map
 * @param map                 the submitted record currently being processed
 * @return the (possibly incremented) change count
 */
@SuppressWarnings("unchecked")
private int updateIfModified(String key, String value, List<Bson> sets, int changes, Map<Integer, Map<String, Object>> original_values_map, Map<Integer, Map<String, Object>> toupdate_values_map, Map<String, Object> map) {
    Object code = map.get("code");
    Object submitted = ((Map<String, Object>)toupdate_values_map.get(code).get(key)).get("value");
    logger.error("[01;34m" + key + ": " + submitted + "[00;00m");
    if (submitted != null) {
        // Stored value is only looked up once we know there is a submitted
        // value, mirroring the original short-circuit evaluation order.
        Object stored = ((Map<String, Object>)original_values_map.get(code).get(key)).get("value");
        if (!submitted.equals(stored)) {
            sets.add(set("data.$." + key + ".value", ((Map<String, Object>)map.get(key)).get("value")));
            if (!value.isEmpty()) {
                sets.add(currentDate("data.$." + value));
            }
            ++changes;
            logger.error("[01;31mvalue changed: " + ((Map<String, Object>)map.get(key)).get("value") + "[00;00m");
        }
    }
    return changes;
}
}
|
src/main/java/ca/gc/ip346/classification/resource/FoodsResource.java
|
package ca.gc.ip346.classification.resource;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Updates.combine;
import static com.mongodb.client.model.Updates.currentDate;
import static com.mongodb.client.model.Updates.set;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import javax.ws.rs.BeanParam;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.types.ObjectId;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.jaxrs.annotation.JacksonFeatures;
import com.google.common.net.HttpHeaders;
import com.google.gson.GsonBuilder;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
// import ca.gc.ip346.classification.model.Added;
import ca.gc.ip346.classification.model.CanadaFoodGuideDataset;
import ca.gc.ip346.classification.model.CfgFilter;
import ca.gc.ip346.classification.model.CfgTier;
import ca.gc.ip346.classification.model.ContainsAdded;
import ca.gc.ip346.classification.model.Dataset;
import ca.gc.ip346.classification.model.Missing;
import ca.gc.ip346.classification.model.PseudoBoolean;
import ca.gc.ip346.classification.model.PseudoDouble;
import ca.gc.ip346.classification.model.PseudoInteger;
import ca.gc.ip346.classification.model.PseudoString;
import ca.gc.ip346.classification.model.RecipeRolled;
import ca.gc.ip346.util.ClassificationProperties;
import ca.gc.ip346.util.DBConnection;
import ca.gc.ip346.util.MongoClientFactory;
import ca.gc.ip346.util.RequestURI;
@Path("/datasets")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
public class FoodsResource {
private static final Logger logger = LogManager.getLogger(FoodsResource.class);
private Connection conn = null;
private DatabaseMetaData meta = null;
private MongoClient mongoClient = null;
private MongoCollection<Document> collection = null;
public FoodsResource() {
mongoClient = MongoClientFactory.getMongoClient();
collection = mongoClient.getDatabase(MongoClientFactory.getDatabase()).getCollection(MongoClientFactory.getCollection());
try {
conn = DBConnection.getConnections();
} catch(Exception e) {
// TODO: proper response to handle exceptions
e.printStackTrace();
}
}
/**
 * GET /datasets/search — runs the Canada Food Guide search against the
 * relational store. Mongo is not needed here, so its client is released
 * before delegating to {@link #doSearchCriteria}.
 *
 * @param search bean-mapped query parameters; receives the base SQL text
 * @return response produced by {@code doSearchCriteria}
 */
@GET
@Path("/search")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response getFoodList(@BeanParam CfgFilter search) {
    // Load the base query from the classpath and hand it to the filter bean.
    search.setSql(ContentHandler.read("canada_food_guide_dataset.sql", getClass()));
    mongoClient.close();
    return doSearchCriteria(search);
}
/**
 * POST /datasets — persists a new dataset document in MongoDB.
 *
 * <p>Validation requires {@code data}, {@code name}, {@code env} and
 * {@code comments}. Fix: {@code env} was reported in the "missing fields"
 * list of the error response but never actually checked, so datasets with a
 * null env slipped through; it is now part of the guard. The hand-rolled
 * CORS header chain is also replaced with the shared {@link #getResponse}
 * helper, which emits the identical headers.
 *
 * @param dataset payload to store
 * @return 201 with {@code {"id": ...}} on success, 400 with a description of
 *         the missing fields otherwise
 */
@POST
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
public Response saveDataset(Dataset dataset) {
    Map<String, Object> map = new HashMap<String, Object>();
    Response.Status status;
    if (dataset.getData() != null && dataset.getName() != null && dataset.getEnv() != null && dataset.getComments() != null) {
        Document doc = new Document()
                .append("data", dataset.getData())
                .append("name", dataset.getName())
                .append("env", dataset.getEnv())
                .append("owner", dataset.getOwner())
                .append("status", dataset.getStatus())
                .append("comments", dataset.getComments());
        collection.insertOne(doc);
        ObjectId id = (ObjectId)doc.get("_id");
        // Second round-trip stamps modifiedDate via $currentDate (name and
        // comments were already inserted above; re-setting them is harmless).
        collection.updateOne(
                eq("_id", id),
                combine(
                        set("name", dataset.getName()),
                        set("comments", dataset.getComments()),
                        currentDate("modifiedDate"))
        );
        logger.error("[01;34mLast inserted Dataset id: " + id + "[00;00m");
        logger.error("[01;34mCurrent number of Datasets: " + collection.count() + "[00;00m");
        map.put("id", id.toString());
        status = Response.Status.CREATED;
    } else {
        // Report exactly which required fields were absent.
        List<String> missing = new ArrayList<String>();
        if (dataset.getData() == null) missing.add("data");
        if (dataset.getName() == null) missing.add("name");
        if (dataset.getEnv() == null) missing.add("env");
        if (dataset.getComments() == null) missing.add("comments");
        map.put("code", Response.Status.BAD_REQUEST.getStatusCode());
        map.put("description", Response.Status.BAD_REQUEST.toString() + " - Unable to insert Dataset!");
        map.put("fields", StringUtils.join(missing, ", "));
        logger.error("[01;34m" + Response.Status.BAD_REQUEST.toString() + " - Unable to insert Dataset!" + "[00;00m");
        status = Response.Status.BAD_REQUEST;
    }
    mongoClient.close();
    logger.error("[01;31m" + "response status: " + status + "[00;00m");
    // Same CORS headers as every other endpoint, via the shared helper.
    return getResponse(status, map);
}
/**
 * GET /datasets?env=... — lists dataset summaries for the given environment.
 * The heavy {@code data} array is deliberately omitted from each summary.
 *
 * @param env environment tag used to filter the collection
 * @return 200 with a list of id/name/env/owner/status/comments/modifiedDate maps
 */
@GET
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response getDatasets(@QueryParam("env") String env) {
    List<Map<String, String>> summaries = new ArrayList<Map<String, String>>();
    MongoCursor<Document> cursor = collection.find(eq("env", env)).iterator();
    String[] fields = {"name", "env", "owner", "status", "comments", "modifiedDate"};
    while (cursor.hasNext()) {
        Document doc = cursor.next();
        Map<String, String> summary = new HashMap<String, String>();
        summary.put("id", doc.get("_id").toString());
        // Copy only the scalar fields that are actually present.
        for (String field : fields) {
            Object raw = doc.get(field);
            if (raw != null) {
                summary.put(field, raw.toString());
            }
        }
        summaries.add(summary);
        logger.error("[01;34mDataset ID: " + doc.get("_id") + "[00;00m");
    }
    mongoClient.close();
    return getResponse(Response.Status.OK, summaries);
}
/**
 * GET /datasets/{id} — fetches one dataset document in full.
 *
 * @param id hex string of the Mongo ObjectId
 * @return 400 for a malformed id, 404 when no document matches,
 *         otherwise 200 with the (single) matching document
 */
@GET
@Path("/{id}")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response getDataset(@PathParam("id") String id) {
    // Guard: reject ids that cannot be parsed into an ObjectId.
    if (!ObjectId.isValid(id)) {
        Map<String, String> msg = new HashMap<String, String>();
        msg.put("message", "Invalid hexadecimal representation of ObjectId " + id + "");
        System.out.println("[01;31m" + "Invalid hexadecimal representation of ObjectId " + id + "[00;00m");
        mongoClient.close();
        return getResponse(Response.Status.BAD_REQUEST, msg);
    }
    System.out.println("[01;31m" + "Valid hexadecimal representation of ObjectId " + id + "[00;00m");
    MongoCursor<Document> cursor = collection.find(new Document("_id", new ObjectId(id))).iterator();
    if (!cursor.hasNext()) {
        Map<String, String> msg = new HashMap<String, String>();
        msg.put("message", "Dataset with ID " + id + " does not exist!");
        logger.error("[01;34m" + "Dataset with ID " + id + " does not exist!" + "[00;00m");
        mongoClient.close();
        return getResponse(Response.Status.NOT_FOUND, msg);
    }
    List<Map<String, Object>> results = new ArrayList<Map<String, Object>>();
    while (cursor.hasNext()) {
        Document doc = cursor.next();
        logger.error("[01;34mDataset ID: " + doc.get("_id") + "[00;00m");
        Map<String, Object> entry = new HashMap<String, Object>();
        entry.put("id", id);
        entry.put("data", doc.get("data"));
        entry.put("name", doc.get("name"));
        entry.put("env", doc.get("env"));
        entry.put("owner", doc.get("owner"));
        entry.put("status", doc.get("status"));
        entry.put("comments", doc.get("comments"));
        entry.put("modifiedDate", doc.get("modifiedDate"));
        results.add(entry);
    }
    mongoClient.close();
    // _id lookups match at most one document; return it alone.
    return getResponse(Response.Status.OK, results.get(0));
}
/**
 * DELETE /datasets/{id} — removes the matching dataset document.
 *
 * <p>Fix: unlike {@link #getDataset}, this method passed {@code id} straight
 * into {@code new ObjectId(id)}; a malformed id threw
 * {@code IllegalArgumentException} and surfaced as an HTTP 500. It now
 * validates first and answers 400, matching the GET endpoint's behavior.
 *
 * @param id hex string of the Mongo ObjectId
 * @return 400 for a malformed id, 404 when nothing matches, 200 on deletion
 */
@DELETE
@Path("/{id}")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response deleteDataset(@PathParam("id") String id) {
    if (!ObjectId.isValid(id)) {
        Map<String, String> msg = new HashMap<String, String>();
        msg.put("message", "Invalid hexadecimal representation of ObjectId " + id + "");
        logger.error("[01;31m" + "Invalid hexadecimal representation of ObjectId " + id + "[00;00m");
        mongoClient.close();
        return getResponse(Response.Status.BAD_REQUEST, msg);
    }
    MongoCursor<Document> cursorDocMap = collection.find(new Document("_id", new ObjectId(id))).iterator();
    if (!cursorDocMap.hasNext()) {
        Map<String, String> msg = new HashMap<String, String>();
        msg.put("message", "Dataset with ID " + id + " does not exist!");
        logger.error("[01;34m" + "Dataset with ID " + id + " does not exist!" + "[00;00m");
        mongoClient.close();
        return getResponse(Response.Status.NOT_FOUND, msg);
    }
    while (cursorDocMap.hasNext()) {
        Document doc = cursorDocMap.next();
        logger.error("[01;34mDataset ID: " + doc.get("_id") + "[00;00m");
        collection.deleteOne(doc);
    }
    mongoClient.close();
    Map<String, String> msg = new HashMap<String, String>();
    msg.put("message", "Successfully deleted dataset with ID: " + id);
    return getResponse(Response.Status.OK, msg);
}
/**
 * DELETE /datasets — wipes every document in the collection.
 * Destructive and unconditional; intended for maintenance use.
 *
 * @return 200 with a confirmation message
 */
@DELETE
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response deleteAllDatasets() {
    // An empty filter document matches everything.
    collection.deleteMany(new Document());
    mongoClient.close();
    Map<String, String> body = new HashMap<String, String>();
    body.put("message", "Successfully deleted all datasets");
    return getResponse(Response.Status.OK, body);
}
/**
 * PUT /datasets/{id} — diffs the submitted {@code dataset} against the stored
 * document and issues MongoDB {@code $set}/{@code $currentDate} updates only
 * for fields whose values actually changed.
 *
 * <p>Flow: (1) load the stored document and index its per-food records by
 * {@code code}; (2) queue top-level field changes (name/env/owner/status/
 * comments); (3) index the submitted records by {@code code}; (4) per record,
 * compare field by field, accumulating positional {@code data.$.*} updates;
 * (5) stamp {@code modifiedDate} once if anything changed.
 *
 * @param id      hex ObjectId of the dataset to patch
 * @param dataset client-submitted replacement values
 * @return 200 with a confirmation message (returned even when no fields
 *         changed, and — NOTE(review) — even when the id matches nothing;
 *         confirm whether a 404 was intended for that case)
 */
@PUT
@Path("/{id}")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response updateDataset(@PathParam("id") String id, Dataset dataset) {
    // food code -> stored field map (what is currently in MongoDB)
    Map<Integer, Map<String, Object>> original_values_map = new HashMap<Integer, Map<String, Object>>();
    // food code -> submitted field map (what the client sent)
    Map<Integer, Map<String, Object>> toupdate_values_map = new HashMap<Integer, Map<String, Object>>();
    List<Object> list = null;
    // top-level (non-"data") $set operators, applied in one updateOne at the end
    List<Bson> firstLevelSets = new ArrayList<Bson>();
    int changes = 0; // running count of modified fields across the whole dataset
    // retrieve the corresponding dataset with the given id
    MongoCursor<Document> cursorDocMap = collection.find(new Document("_id", new ObjectId(id))).iterator();
    while (cursorDocMap.hasNext()) {
    Document doc = cursorDocMap.next();
    list = castList(doc.get("data"), Object.class);
    // Index every stored per-food record by its "code" for O(1) lookup below.
    for (Object obj : list) {
    Map<?, ?> mObj = (Map<?, ?>)obj;
    Map<String, Object> tmp = new HashMap<String, Object>();
    Iterator<?> it = mObj.keySet().iterator();
    while (it.hasNext()) {
    String key = (String)it.next();
    tmp.put(key, mObj.get(key));
    }
    original_values_map.put((Integer)tmp.get("code"), tmp);
    }
    logger.error("[01;31mUpdate: " + "casting property 'data' seems to have passed!" + "[00;00m");
    // Top-level scalar fields: queue a $set only when the value differs.
    if (!dataset.getName ().equals(doc.get("name") )) {
    firstLevelSets.add(set("name", dataset.getName()));
    ++changes;
    }
    if (!dataset.getEnv ().equals(doc.get("env") )) {
    firstLevelSets.add(set("env", dataset.getEnv()));
    ++changes;
    }
    if (!dataset.getOwner ().equals(doc.get("owner") )) {
    firstLevelSets.add(set("owner", dataset.getOwner()));
    ++changes;
    }
    if (!dataset.getStatus ().equals(doc.get("status") )) {
    firstLevelSets.add(set("status", dataset.getStatus()));
    ++changes;
    }
    if (!dataset.getComments ().equals(doc.get("comments"))) {
    firstLevelSets.add(set("comments", dataset.getComments()));
    ++changes;
    }
    }
    // Index the submitted per-food records by "code" as well.
    List<Map<String, Object>> updates = dataset.getData();
    for (Map<String, Object> map : updates) {
    toupdate_values_map.put((Integer)map.get("code"), map);
    logger.error("[01;34mDataset: " + toupdate_values_map.get(map.get("code")) + "[00;00m");
    logger.error("[01;31mname: " + toupdate_values_map.get(map.get("code")).get("name") + "[00;00m");
    }
    // Field -> companion update-date field; "" means the field has no
    // companion date to stamp. Consumed by updateIfModified below.
    Map<String, String> updateDatePair = new HashMap<String, String>();
    updateDatePair.put("cfgCode", "cfgCodeUpdateDate");
    updateDatePair.put("comments", "");
    updateDatePair.put("containsAddedFat", "containsAddedFatUpdateDate");
    updateDatePair.put("containsAddedSodium", "containsAddedSodiumUpdateDate");
    updateDatePair.put("containsAddedSugar", "containsAddedSugarUpdateDate");
    updateDatePair.put("containsAddedTransfat", "containsAddedTransfatUpdateDate");
    updateDatePair.put("containsCaffeine", "containsCaffeineUpdateDate");
    updateDatePair.put("containsFreeSugars", "containsFreeSugarsUpdateDate");
    updateDatePair.put("containsSugarSubstitutes", "containsSugarSubstitutesUpdateDate");
    updateDatePair.put("foodGuideServingG", "foodGuideUpdateDate");
    updateDatePair.put("foodGuideServingMeasure", "foodGuideUpdateDate");
    updateDatePair.put("replacementCode", "");
    updateDatePair.put("rolledUp", "rolledUpUpdateDate");
    updateDatePair.put("satfatAmountPer100g", "satfatImputationDate");
    updateDatePair.put("satfatImputationReference", "satfatImputationDate");
    updateDatePair.put("sodiumAmountPer100g", "sodiumImputationDate");
    updateDatePair.put("sodiumImputationReference", "sodiumImputationDate");
    updateDatePair.put("sugarAmountPer100g", "sugarImputationDate");
    updateDatePair.put("sugarImputationReference", "sugarImputationDate");
    updateDatePair.put("tier4ServingG", "tier4ServingUpdateDate");
    updateDatePair.put("tier4ServingMeasure", "tier4ServingUpdateDate");
    updateDatePair.put("totalfatAmountPer100g", "totalfatImputationDate");
    updateDatePair.put("totalfatImputationReference", "totalfatImputationDate");
    updateDatePair.put("transfatAmountPer100g", "transfatImputationDate");
    updateDatePair.put("transfatImputationReference", "transfatImputationDate");
    // Per submitted record: build a list of positional updates, then apply
    // them in one updateOne keyed on (_id, data.code).
    for (Map<String, Object> map : updates) {
    List<Bson> sets = new ArrayList<Bson>();
    logger.error("[01;31msize: " + sets.size() + "[00;00m");
    // "name" and "cnfGroupCode" are plain values (no {value: ...} wrapper),
    // so they are compared directly rather than through updateIfModified.
    if (toupdate_values_map.get(map.get("code")).get("name") != null && !toupdate_values_map .get (map .get ("code")) .get ("name") .equals (original_values_map .get (map .get ("code")) .get ("name"))) {
    sets.add(set("data.$.name", map.get("name")));
    ++changes;
    logger.error("[01;31mvalue changed: " + map.get("name") + "[00;00m");
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("cnfGroupCode") != null && !toupdate_values_map .get (map .get ("code")) .get ("cnfGroupCode") .equals (original_values_map .get (map .get ("code")) .get ("cnfGroupCode"))) {
    sets.add(set("data.$.cnfGroupCode", map.get("cnfGroupCode")));
    ++changes;
    logger.error("[01;31mvalue changed: " + map.get("cnfGroupCode") + "[00;00m");
    }
    // Wrapped {value: ...} fields all go through the shared helper.
    for (String key : updateDatePair.keySet()) {
    changes = updateIfModified(key, updateDatePair.get(key), sets, changes, original_values_map, toupdate_values_map, map);
    }
    // The empty-bodied branches below are deliberate no-ops: these
    // date/derived fields are server-managed and must not be overwritten
    // from client input even when the client submits different values.
    if (toupdate_values_map .get (map .get ("code")) .get ("cfgCodeUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("cfgCodeUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("cfgCodeUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("energyKcal") != null && !toupdate_values_map .get (map .get ("code")) .get ("energyKcal") .equals (original_values_map .get (map .get ("code")) .get ("energyKcal"))) {
    sets.add(set("data.$.energyKcal", map.get("energyKcal")));
    ++changes;
    logger.error("[01;31mvalue changed: " + map.get("energyKcal") + "[00;00m");
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("sodiumImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("sodiumImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("sodiumImputationDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("sugarImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("sugarImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("sugarImputationDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("transfatImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("transfatImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("transfatImputationDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("satfatImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("satfatImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("satfatImputationDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("totalfatImputationDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("totalfatImputationDate") .equals (original_values_map .get (map .get ("code")) .get ("totalfatImputationDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("containsAddedSodiumUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsAddedSodiumUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsAddedSodiumUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("containsAddedSugarUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsAddedSugarUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsAddedSugarUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("containsFreeSugarsUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsFreeSugarsUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsFreeSugarsUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("containsAddedFatUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsAddedFatUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsAddedFatUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("containsAddedTransfatUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsAddedTransfatUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsAddedTransfatUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("containsCaffeineUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsCaffeineUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsCaffeineUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("containsSugarSubstitutesUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("containsSugarSubstitutesUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("containsSugarSubstitutesUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("referenceAmountG") != null && !toupdate_values_map .get (map .get ("code")) .get ("referenceAmountG") .equals (original_values_map .get (map .get ("code")) .get ("referenceAmountG"))) {
    sets.add(set("data.$.referenceAmountG", map.get("referenceAmountG")));
    sets.add(currentDate("data.$.referenceAmountUpdateDate"));
    ++changes;
    logger.error("[01;31mvalue changed: " + map.get("referenceAmountG") + "[00;00m");
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("referenceAmountMeasure") != null && !toupdate_values_map .get (map .get ("code")) .get ("referenceAmountMeasure") .equals (original_values_map .get (map .get ("code")) .get ("referenceAmountMeasure"))) {
    sets.add(set("data.$.referenceAmountMeasure", map.get("referenceAmountMeasure")));
    ++changes;
    logger.error("[01;31mvalue changed: " + map.get("referenceAmountMeasure") + "[00;00m");
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("referenceAmountUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("referenceAmountUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("referenceAmountUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("foodGuideUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("foodGuideUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("foodGuideUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("tier4ServingUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("tier4ServingUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("tier4ServingUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("rolledUpUpdateDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("rolledUpUpdateDate") .equals (original_values_map .get (map .get ("code")) .get ("rolledUpUpdateDate"))) {
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("overrideSmallRaAdjustment") != null && !toupdate_values_map .get (map .get ("code")) .get ("overrideSmallRaAdjustment") .equals (original_values_map .get (map .get ("code")) .get ("overrideSmallRaAdjustment"))) {
    sets.add(set("data.$.overrideSmallRaAdjustment", map.get("overrideSmallRaAdjustment")));
    ++changes;
    logger.error("[01;31mvalue changed: " + map.get("overrideSmallRaAdjustment") + "[00;00m");
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("adjustedReferenceAmount") != null && !toupdate_values_map .get (map .get ("code")) .get ("adjustedReferenceAmount") .equals (original_values_map .get (map .get ("code")) .get ("adjustedReferenceAmount"))) {
    sets.add(set("data.$.adjustedReferenceAmount", map.get("adjustedReferenceAmount")));
    ++changes;
    logger.error("[01;31mvalue changed: " + map.get("adjustedReferenceAmount") + "[00;00m");
    }
    if (toupdate_values_map .get (map .get ("code")) .get ("commitDate") != null && !toupdate_values_map .get (map .get ("code")) .get ("commitDate") .equals (original_values_map .get (map .get ("code")) .get ("commitDate"))) {
    }
    logger.error("[01;31msize: " + sets.size() + "[00;00m");
    logger.error("[01;35mcode: " + map.get("code") + "[00;00m");
    // Apply this record's accumulated updates with the positional operator.
    if (sets.size() > 0) {
    collection.updateOne(and(eq("_id", new ObjectId(id)), eq("data.code", map.get("code"))), combine(sets));
    }
    }
    // Stamp modifiedDate exactly once if anything (top-level or per-food) changed.
    if (changes != 0) {
    firstLevelSets.add(currentDate("modifiedDate"));
    collection.updateOne(eq("_id", new ObjectId(id)), combine(firstLevelSets))
;
    }
    mongoClient.close();
    Map<String, String> msg = new HashMap<String, String>();
    msg.put("message", "Successfully updated dataset");
    return getResponse(Response.Status.OK, msg);
    }
/**
 * POST /datasets/{id}/classify — loads the stored dataset, unwraps every
 * {@code {value: ...}} field of each per-food record down to its bare value,
 * then forwards the flattened dataset to the external classification
 * service's {@code /classify} endpoint and relays its response.
 *
 * @param id hex ObjectId of the dataset to classify
 * @return 404 when the id matches nothing; otherwise the classification
 *         service's response, passed through unchanged
 */
@POST
@Path("/{id}/classify")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response classifyDataset(@PathParam("id") String id) {
    Map<String, Object> map = null; // payload forwarded to the classifier
    MongoCursor<Document> cursorDocMap = collection.find(new Document("_id", new ObjectId(id))).iterator();
    List<Object> list = null;
    List<Document> dox = new ArrayList<Document>(); // flattened per-food records
    if (!cursorDocMap.hasNext()) {
    Map<String, String> msg = new HashMap<String, String>();
    msg.put("message", "Dataset with ID " + id + " does not exist!");
    logger.error("[01;34m" + "Dataset with ID " + id + " does not exist!" + "[00;00m");
    mongoClient.close();
    return getResponse(Response.Status.NOT_FOUND, msg);
    }
    // Only the KEYS of this map are used below: they name the fields stored
    // as {value: ...} wrappers that must be unwrapped before forwarding.
    // (Same field list as in updateDataset; the date values are unused here.)
    Map<String, String> updateDatePair = new HashMap<String, String>();
    updateDatePair.put("cfgCode", "cfgCodeUpdateDate");
    updateDatePair.put("comments", "");
    updateDatePair.put("containsAddedFat", "containsAddedFatUpdateDate");
    updateDatePair.put("containsAddedSodium", "containsAddedSodiumUpdateDate");
    updateDatePair.put("containsAddedSugar", "containsAddedSugarUpdateDate");
    updateDatePair.put("containsAddedTransfat", "containsAddedTransfatUpdateDate");
    updateDatePair.put("containsCaffeine", "containsCaffeineUpdateDate");
    updateDatePair.put("containsFreeSugars", "containsFreeSugarsUpdateDate");
    updateDatePair.put("containsSugarSubstitutes", "containsSugarSubstitutesUpdateDate");
    updateDatePair.put("foodGuideServingG", "foodGuideUpdateDate");
    updateDatePair.put("foodGuideServingMeasure", "foodGuideUpdateDate");
    updateDatePair.put("replacementCode", "");
    updateDatePair.put("rolledUp", "rolledUpUpdateDate");
    updateDatePair.put("satfatAmountPer100g", "satfatImputationDate");
    updateDatePair.put("satfatImputationReference", "satfatImputationDate");
    updateDatePair.put("sodiumAmountPer100g", "sodiumImputationDate");
    updateDatePair.put("sodiumImputationReference", "sodiumImputationDate");
    updateDatePair.put("sugarAmountPer100g", "sugarImputationDate");
    updateDatePair.put("sugarImputationReference", "sugarImputationDate");
    updateDatePair.put("tier4ServingG", "tier4ServingUpdateDate");
    updateDatePair.put("tier4ServingMeasure", "tier4ServingUpdateDate");
    updateDatePair.put("totalfatAmountPer100g", "totalfatImputationDate");
    updateDatePair.put("totalfatImputationReference", "totalfatImputationDate");
    updateDatePair.put("transfatAmountPer100g", "transfatImputationDate");
    updateDatePair.put("transfatImputationReference", "transfatImputationDate");
    while (cursorDocMap.hasNext()) {
    map = new HashMap<String, Object>();
    Document doc = cursorDocMap.next();
    logger.error("[01;34mDataset ID: " + doc.get("_id") + "[00;00m");
    if (doc != null) {
    list = castList(doc.get("data"), Object.class);
    for (Object obj : list) {
    // Replace each wrapped field with its inner "value", in place.
    // NOTE(review): assumes every listed key is present on every
    // record — a missing key would NPE on value.get("value"); confirm.
    for (String key : updateDatePair.keySet()) {
    Document value = (Document)((Document)obj).get(key + "");
    ((Document)obj).put(key, value.get("value"));
    }
    dox.add((Document)obj);
    }
    map.put("data", dox);
    map.put("name", doc.get("name"));
    map.put("env", doc.get("env"));
    map.put("owner", doc.get("owner"));
    map.put("status", doc.get("status"));
    map.put("comments", doc.get("comments"));
    map.put("modifiedDate", doc.get("modifiedDate"));
    }
    }
    mongoClient.close();
    // Forward the flattened dataset to the external classification service.
    Response response = ClientBuilder
    .newClient()
    .target(RequestURI.getUri() + ClassificationProperties.getEndPoint())
    .path("/classify")
    .request()
    .post(Entity.entity(map, MediaType.APPLICATION_JSON));
    logger.error("[01;31m" + "response status: " + response.getStatusInfo() + "[00;00m");
    return response;
    }
/**
 * POST /datasets/{id}/flags — proxies the submitted dataset to the external
 * classification service's {@code /flags} endpoint and relays its response.
 * The {@code id} path parameter is accepted but not used by the forward call.
 *
 * @param id      dataset id from the path (currently unused downstream)
 * @param dataset payload forwarded verbatim as JSON
 * @return the downstream service's response
 */
@POST
@Path("/{id}/flags")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response flagsDataset(@PathParam("id") String id, Dataset dataset) {
    String serviceRoot = RequestURI.getUri() + ClassificationProperties.getEndPoint();
    return ClientBuilder
            .newClient()
            .target(serviceRoot)
            .path("/flags")
            .request()
            .post(Entity.entity(dataset, MediaType.APPLICATION_JSON));
}
/**
 * POST /datasets/{id}/init — proxies the submitted dataset to the external
 * classification service's {@code /init} endpoint and relays its response.
 * The {@code id} path parameter is accepted but not used by the forward call.
 *
 * @param id      dataset id from the path (currently unused downstream)
 * @param dataset payload forwarded verbatim as JSON
 * @return the downstream service's response
 */
@POST
@Path("/{id}/init")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response initDataset(@PathParam("id") String id, Dataset dataset) {
    String serviceRoot = RequestURI.getUri() + ClassificationProperties.getEndPoint();
    return ClientBuilder
            .newClient()
            .target(serviceRoot)
            .path("/init")
            .request()
            .post(Entity.entity(dataset, MediaType.APPLICATION_JSON));
}
/**
 * POST /datasets/{id}/adjustment — proxies the submitted dataset to the
 * external classification service's {@code /adjustment} endpoint and relays
 * its response. The {@code id} path parameter is accepted but not used by
 * the forward call.
 *
 * @param id      dataset id from the path (currently unused downstream)
 * @param dataset payload forwarded verbatim as JSON
 * @return the downstream service's response
 */
@POST
@Path("/{id}/adjustment")
@Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response adjustmentDataset(@PathParam("id") String id, Dataset dataset) {
    String serviceRoot = RequestURI.getUri() + ClassificationProperties.getEndPoint();
    return ClientBuilder
            .newClient()
            .target(serviceRoot)
            .path("/adjustment")
            .request()
            .post(Entity.entity(dataset, MediaType.APPLICATION_JSON));
}
/**
 * POST /datasets/{id}/commit — unimplemented stub.
 * The endpoint is routed but does nothing and returns no entity.
 * NOTE(review): presumably intended to persist a dataset's commitDate or
 * push it to the relational store — confirm intent before implementing.
 */
@POST
@Path("/{id}/commit")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public void commitDataset() {
}
/**
 * GET /datasets/status — connectivity probe. Runs a trivial query against
 * the relational store and returns its rows; when the query fails, falls
 * back to returning the JAX-RS status-code catalogue instead. Also dumps a
 * formatted table of HTTP status codes to stdout as a diagnostic aid.
 *
 * <p>Fixes: {@code conn} is now closed on the failure path too (it was
 * leaked when the query threw); deprecated {@code new Integer(...)} boxing
 * replaced with {@code Integer.valueOf}; single-threaded local
 * {@code StringBuffer} builders replaced with plain concatenation.
 *
 * @return 200 with either the DB rows or the status-code fallback map
 */
@GET
@Path("/status")
@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
@JacksonFeatures(serializationEnable = {SerializationFeature.INDENT_OUTPUT})
public Response getStatusCodes() {
    Map<Integer, String> list = new HashMap<Integer, String>();
    String sql = ContentHandler.read("connectivity_test.sql", getClass());
    try (PreparedStatement stmt = conn.prepareStatement(sql);
            ResultSet rs = stmt.executeQuery()) {
        while (rs.next()) {
            list.put(rs.getInt("canada_food_group_id"), rs.getString("canada_food_group_desc_e"));
        }
    } catch (SQLException e) {
        logger.error("[01;03;31m" + e.getMessage() + "[00;00;00m");
        // DB unreachable: answer with the HTTP status catalogue instead.
        for (Response.Status status : Response.Status.values()) {
            list.put(Integer.valueOf(status.getStatusCode()), status.getReasonPhrase());
        }
    } finally {
        // Close the connection on BOTH paths (it was leaked on failure before).
        try {
            conn.close();
        } catch (SQLException ignored) {
            // already closed or unusable; nothing further to do
        }
    }
    // status code -> enum constant name, used for the diagnostic dump below.
    Map<Integer, String> map = new HashMap<Integer, String>();
    for (Response.Status obj : Response.Status.values()) {
        map.put(obj.getStatusCode(), obj.name());
    }
    // Width of the longest enum name, for column alignment.
    int len = 0;
    for (String value : map.values()) {
        if (value.length() > len) {
            len = value.length();
        }
    }
    String format = "%d %-" + len + "s";
    String tamrof = "%-" + (len + 4) + "s";
    // Bucket codes by hundred-series: arr[1xx..5xx][position].
    // NOTE(review): assumes no more than 18 codes per series and relies on
    // HashMap iteration keeping each series contiguous — confirm if the
    // status catalogue ever grows.
    Integer[][] arr = new Integer[6][18];
    int series = 0;
    int i = 0;
    for (Integer key : map.keySet()) {
        if (key / 100 != series) {
            series = key / 100;
            i = 0;
        }
        arr[series][i++] = key;
    }
    // Series 0 and 1 are not printed; blank them out explicitly.
    for (int j = 0; j < 18; ++j) {
        arr[0][j] = null;
        arr[1][j] = null;
    }
    // First table: code + enum constant name (cyan/magenta alternating rows).
    for (int l = 0; l < 18; ++l) {
        for (int m = 2; m < 6; ++m) {
            Integer key = arr[m][l];
            if (key != null) {
                System.out.printf("[01;%dm" + format + "[00;00m", l % 2 == 0 ? 36 : 35, key, Response.Status.fromStatusCode(key).name());
            } else {
                System.out.printf(tamrof, "");
            }
        }
        System.out.println();
    }
    System.out.println();
    System.out.println();
    // Second table: code + enum toString (blue/red alternating rows).
    for (int l = 0; l < 18; ++l) {
        for (int m = 2; m < 6; ++m) {
            Integer key = arr[m][l];
            if (key != null) {
                System.out.printf("[01;%dm" + format + "[00;00m", l % 2 == 0 ? 34 : 31, key, Response.Status.fromStatusCode(key));
            } else {
                System.out.printf(tamrof, "");
            }
        }
        System.out.println();
    }
    mongoClient.close();
    // return getResponse(Response.Status.OK, Response.Status.values());
    return getResponse(Response.Status.OK, list);
}
/**
 * Builds and runs the dataset search query described by {@code search}.
 * Phase 1 appends one SQL fragment per populated filter to search.getSql();
 * phase 2 binds the placeholders IN THE SAME ORDER the fragments were appended
 * (the two sections must stay in lock-step or the binds shift);
 * phase 3 maps each result row onto a CanadaFoodGuideDataset.
 * Returns an HTTP 200 response wrapping the (possibly empty) result list.
 * NOTE(review): on SQLException the stack trace is printed and an empty/partial
 * list is still returned with status OK — confirm that is acceptable.
 */
private /* List<CanadaFoodGuideDataset> */ Response doSearchCriteria(CfgFilter search) {
List<CanadaFoodGuideDataset> list = new ArrayList<CanadaFoodGuideDataset>();
if (search != null) {
// --- Phase 1: assemble the WHERE clause, one fragment per active filter. ---
StringBuffer sb = new StringBuffer(search.getSql());
logger.error("[01;30m" + search.getDataSource() + "[00;00m");
// Always-true seed predicate so every later fragment can start with " AND".
sb.append(" WHERE length('this where-clause is an artifact') = 32").append("\n");
if (search.getDataSource() != null && search.getDataSource().matches("food|recipe")) {
sb.append(" AND type = ?").append("\n");
}
logger.error("[01;30m" + search.getFoodRecipeName() + "[00;00m");
if (search.getFoodRecipeName() != null && !search.getFoodRecipeName().isEmpty()) {
sb.append(" AND LOWER(name) LIKE ?").append("\n");
}
logger.error("[01;30m" + search.getFoodRecipeCode() + "[00;00m");
if (search.getFoodRecipeCode() != null && !search.getFoodRecipeCode().isEmpty()) {
sb.append(" AND code = ? OR CAST(code AS text) LIKE ?").append("\n");
}
logger.error("[01;30m" + search.getCnfCode() + "[00;00m");
if (search.getCnfCode() != null && !search.getCnfCode().isEmpty()) {
sb.append(" AND cnf_group_code = ?").append("\n");
}
logger.error("[01;30m" + search.getSubgroupCode() + "[00;00m");
if (search.getSubgroupCode() != null && !search.getSubgroupCode().isEmpty()) {
sb.append(" AND CAST(cfg_code AS text) LIKE ?").append("\n");
}
// Tier filter: the CfgTier code encodes which tier digits (4th digit of cfg_code)
// to match; multi-digit codes (12, 123, 1234, ...) OR several placeholders together.
if (search.getCfgTier() != null && !search.getCfgTier().equals(CfgTier.ALL.getCode())) {
switch (search.getCfgTier()) {
case 1:
case 2:
case 3:
case 4:
logger.error("[01;31mCalling all codes with Tier " + search.getCfgTier() + "[00;00m");
sb.append(" AND LENGTH(CAST(cfg_code AS text)) = 4").append("\n");
sb.append(" AND CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
break;
case 12:
case 13:
case 14:
case 23:
case 24:
case 34:
logger.error("[01;31mCalling all codes with Tier " + search.getCfgTier() + "[00;00m");
sb.append(" AND LENGTH(CAST(cfg_code AS text)) = 4").append("\n");
sb.append(" AND CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
break;
case 123:
case 124:
case 134:
case 234:
logger.error("[01;31mCalling all codes with Tier " + search.getCfgTier() + "[00;00m");
sb.append(" AND LENGTH(CAST(cfg_code AS text)) = 4").append("\n");
sb.append(" AND CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
break;
case 1234:
logger.error("[01;31mCalling all codes with Tier " + search.getCfgTier() + "[00;00m");
sb.append(" AND LENGTH(CAST(cfg_code AS text)) = 4").append("\n");
sb.append(" AND CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
sb.append(" OR CAST(SUBSTR(CAST(cfg_code AS text), 4, 1) AS integer) = ?").append("\n");
break;
case 9:
logger.error("[01;31mCalling all codes with missing Tier![00;00m");
sb.append(" AND LENGTH(CAST(cfg_code AS text)) < 4").append("\n");
break;
}
}
if (search.getRecipe() != null && !search.getRecipe().equals(RecipeRolled.IGNORE.getCode())) {
logger.error("[01;32m" + search.getRecipe() + "[00;00m");
switch (search.getRecipe()) {
case 1:
case 2:
sb.append(" AND rolled_up = ?").append("\n");
break;
case 3:
sb.append(" AND (rolled_up = 1 OR rolled_up = 2)").append("\n");
break;
}
}
// "Contains added X" filters arrive as "key=value" strings; "0" means ignore-all.
boolean notIgnore = false;
if (search.getContainsAdded() != null) {
String[] arr = new String[search.getContainsAdded().size()];
arr = search.getContainsAdded().toArray(arr);
for (String i : arr) {
logger.error("[01;32m" + i + "[00;00m");
if (!i.equals("0")) {
notIgnore = true;
}
}
}
// Parsed key=value pairs; also reused in phase 2 to bind the same keys in the same order.
Map<String, String> map = null;
if (search.getContainsAdded() != null && notIgnore) {
map = new HashMap<String, String>();
logger.error("[01;32m" + search.getContainsAdded() + "[00;00m");
String[] arr = new String[search.getContainsAdded().size()];
arr = search.getContainsAdded().toArray(arr);
for (String keyValue : arr) {
StringTokenizer tokenizer = new StringTokenizer(keyValue, "=");
map.put(tokenizer.nextToken(), tokenizer.nextToken());
logger.error("[01;32m" + keyValue + "[00;00m");
}
logger.error("\n[01;32m" + new GsonBuilder().setDateFormat("yyyy-MM-dd").setPrettyPrinting().create().toJson(map) + "[00;00m");
Set<String> keys = map.keySet();
for (String key : keys) {
switch (ContainsAdded.valueOf(key)) {
case sodium:
sb.append(" AND contains_added_sodium = ? ").append("\n");
break;
case sugar:
sb.append(" AND contains_added_sugar = ? ").append("\n");
break;
case fat:
sb.append(" AND contains_added_fat = ? ").append("\n");
break;
case transfat:
sb.append(" AND contains_added_transfat = ? ").append("\n");
break;
case caffeine:
sb.append(" AND contains_caffeine = ? ").append("\n");
break;
case freeSugars:
sb.append(" AND contains_free_sugars = ? ").append("\n");
break;
case sugarSubstitute:
sb.append(" AND contains_sugar_substitutes = ? ").append("\n");
break;
}
}
}
// Missing-value filters: plain IS NULL predicates, no placeholders to bind later.
if (search.getMissingValues() != null) {
logger.error("[01;32m" + search.getMissingValues() + "[00;00m");
logger.error("\n[01;32m" + new GsonBuilder().setDateFormat("yyyy-MM-dd").setPrettyPrinting().create().toJson(search.getMissingValues()) + "[00;00m");
for (String name : search.getMissingValues()) {
switch (Missing.valueOf(name)) {
case refAmount:
sb.append(" AND reference_amount_g IS NULL").append("\n");
break;
case cfgServing:
sb.append(" AND food_guide_serving_g IS NULL").append("\n");
break;
case tier4Serving:
sb.append(" AND tier_4_serving_g IS NULL").append("\n");
break;
case energy:
sb.append(" AND energy_kcal IS NULL").append("\n");
break;
case cnfCode:
sb.append(" AND cnf_group_code IS NULL").append("\n");
break;
case rollUp:
sb.append(" AND rolled_up IS NULL").append("\n");
break;
case sodiumPer100g:
sb.append(" AND sodium_amount_per_100g IS NULL").append("\n");
break;
case sugarPer100g:
sb.append(" AND sugar_amount_per_100g IS NULL").append("\n");
break;
case fatPer100g:
sb.append(" AND totalfat_amount_per_100g IS NULL").append("\n");
break;
case transfatPer100g:
sb.append(" AND transfat_amount_per_100g IS NULL").append("\n");
break;
case satFatPer100g:
sb.append(" AND satfat_amount_per_100g IS NULL").append("\n");
break;
case addedSodium:
sb.append(" AND contains_added_sodium IS NULL").append("\n");
break;
case addedSugar:
sb.append(" AND contains_added_sugar IS NULL").append("\n");
break;
case addedFat:
sb.append(" AND contains_added_fat IS NULL").append("\n");
break;
case addedTransfat:
sb.append(" AND contains_added_transfat IS NULL").append("\n");
break;
case caffeine:
sb.append(" AND contains_caffeine IS NULL").append("\n");
break;
case freeSugars:
sb.append(" AND contains_free_sugars IS NULL").append("\n");
break;
case sugarSubstitute:
sb.append(" AND contains_sugar_substitutes IS NULL").append("\n");
break;
}
}
}
// Last-update date-range filters: two placeholders (from/to) per selected field.
if (search.getLastUpdateDateFrom() != null && search.getLastUpdateDateFrom().matches("\\d{4}-\\d{2}-\\d{2}") && search.getLastUpdateDateTo() != null && search.getLastUpdateDateTo().matches("\\d{4}-\\d{2}-\\d{2}")) {
if (search.getLastUpdatedFilter() != null) {
logger.error("[01;32m" + search.getLastUpdatedFilter() + "[00;00m");
for (String name : search.getLastUpdatedFilter()) {
switch (Missing.valueOf(name)) {
case refAmount:
sb.append(" AND reference_amount_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case cfgServing:
sb.append(" AND food_guide_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case tier4Serving:
sb.append(" AND tier_4_serving_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case energy:
case cnfCode:
break;
case rollUp:
sb.append(" AND rolled_up_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case sodiumPer100g:
sb.append(" AND sodium_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case sugarPer100g:
sb.append(" AND sugar_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case fatPer100g:
sb.append(" AND totalfat_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case transfatPer100g:
sb.append(" AND transfat_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case satFatPer100g:
sb.append(" AND satfat_imputation_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case addedSodium:
sb.append(" AND contains_added_sodium_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case addedSugar:
sb.append(" AND contains_added_sugar_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case addedFat:
sb.append(" AND contains_added_fat_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case addedTransfat:
sb.append(" AND contains_added_transfat_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case caffeine:
sb.append(" AND contains_caffeine_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case freeSugars:
sb.append(" AND contains_free_sugars_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
case sugarSubstitute:
sb.append(" AND contains_sugar_substitutes_update_date BETWEEN CAST(? AS date) AND CAST(? AS date)").append("\n");
break;
}
}
}
}
if (search.getComments() != null && !search.getComments().isEmpty()) {
sb.append(" AND LOWER(comments) LIKE ?").append("\n");
}
if (search.getCommitDateFrom() != null && search.getCommitDateFrom().matches("\\d{4}-\\d{2}-\\d{2}") && search.getCommitDateTo() != null && search.getCommitDateTo().matches("\\d{4}-\\d{2}-\\d{2}")) {
sb.append(" AND commit_date BETWEEN CAST(? AS date) AND CAST(? AS date)") .append("\n");
}
search.setSql(sb.toString());
// --- Phase 2: prepare the statement and bind placeholders in append order. ---
try {
meta = conn.getMetaData(); // Create Oracle DatabaseMetaData object
logger.error("[01;34mJDBC driver version is " + meta.getDriverVersion() + "[00;00m"); // Retrieve driver information
PreparedStatement stmt = conn.prepareStatement(search.getSql()); // Create PreparedStatement
int i = 0; // keeps count of the number of placeholders
if (search != null) {
if (search.getDataSource() != null && search.getDataSource().matches("food|recipe")) {
stmt.setInt(++i, search.getDataSource().equals("food") ? 1 : 2);
}
if (search.getFoodRecipeName() != null && !search.getFoodRecipeName().isEmpty()) {
stmt.setString(++i, new String("%" + search.getFoodRecipeName() + "%").toLowerCase());
}
if (search.getFoodRecipeCode() != null && !search.getFoodRecipeCode().isEmpty()) {
stmt.setInt(++i, Integer.parseInt(search.getFoodRecipeCode()));
stmt.setString(++i, new String("" + search.getFoodRecipeCode() + "%"));
}
if (search.getCnfCode() != null && !search.getCnfCode().isEmpty()) {
stmt.setInt(++i, Integer.parseInt(search.getCnfCode()));
}
if (search.getSubgroupCode() != null && !search.getSubgroupCode().isEmpty()) {
stmt.setString(++i, new String("" + search.getSubgroupCode() + "%"));
}
// NOTE(review): phase 1 appends 2-4 tier placeholders for multi-digit tier codes
// (12, 123, 1234, ...) but only the single-digit cases are bound here — the
// remaining placeholders appear to go unbound; confirm against the tier feature.
if (search.getCfgTier() != null && !search.getCfgTier().equals(CfgTier.ALL.getCode())) {
switch (search.getCfgTier()) {
case 1:
case 2:
case 3:
case 4:
stmt.setInt(++i, search.getCfgTier());
break;
}
}
if (search.getRecipe() != null && !search.getRecipe().equals(RecipeRolled.IGNORE.getCode())) {
switch (search.getRecipe()) {
case 1:
case 2:
stmt.setInt(++i, search.getRecipe());
break;
}
}
// Binds rely on map.keySet() iterating in the same order as in phase 1
// (same HashMap instance, unmodified in between).
if (search.getContainsAdded() != null && notIgnore) {
Set<String> keys = map.keySet();
for (String key : keys) {
switch (ContainsAdded.valueOf(key)) {
case sodium:
case sugar:
case fat:
case transfat:
case caffeine:
case freeSugars:
case sugarSubstitute:
stmt.setInt(++i, map.get(key).equals("true") ? 1 : 2);
break;
}
}
}
if (search.getLastUpdateDateFrom() != null && search.getLastUpdateDateFrom().matches("\\d{4}-\\d{2}-\\d{2}") && search.getLastUpdateDateTo() != null && search.getLastUpdateDateTo().matches("\\d{4}-\\d{2}-\\d{2}")) {
if (search.getLastUpdatedFilter() != null) {
logger.error("[01;32m" + search.getLastUpdatedFilter() + "[00;00m");
for (String name : search.getLastUpdatedFilter()) {
switch (Missing.valueOf(name)) {
case refAmount:
case cfgServing:
case tier4Serving:
stmt.setString(++i, search.getLastUpdateDateFrom());
stmt.setString(++i, search.getLastUpdateDateTo());
break;
case energy:
case cnfCode:
break;
case rollUp:
case sodiumPer100g:
case sugarPer100g:
case fatPer100g:
case transfatPer100g:
case satFatPer100g:
case addedSodium:
case addedSugar:
case addedFat:
case addedTransfat:
case caffeine:
case freeSugars:
case sugarSubstitute:
stmt.setString(++i, search.getLastUpdateDateFrom());
stmt.setString(++i, search.getLastUpdateDateTo());
break;
}
}
}
}
if (search.getComments() != null && !search.getComments().isEmpty()) {
stmt.setString(++i, new String("%" + search.getComments() + "%").toLowerCase());
}
if (search.getCommitDateFrom() != null && search.getCommitDateFrom().matches("\\d{4}-\\d{2}-\\d{2}") && search.getCommitDateTo() != null && search.getCommitDateTo().matches("\\d{4}-\\d{2}-\\d{2}")) {
stmt.setString(++i, search.getCommitDateFrom());
stmt.setString(++i, search.getCommitDateTo());
}
}
logger.error("[01;34mSQL query to follow:\n" + stmt.toString() + "[00;00m");
// --- Phase 3: map each row to a CanadaFoodGuideDataset. Nullable columns are
// probed with rs.getString(col) != null before reading the typed value, because
// rs.getInt/getDouble would silently return 0 for SQL NULL. ---
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
CanadaFoodGuideDataset foodItem = new CanadaFoodGuideDataset();
foodItem.setType(rs.getInt ("type") == 1 ? "food" : "recipe" );
foodItem.setCode(rs.getInt ("code") );
foodItem.setName(rs.getString ("name") );
if (rs.getString("cnf_group_code") != null) {
foodItem.setCnfGroupCode(rs.getInt ("cnf_group_code") );
}
if (rs.getString("cfg_code") != null) {
foodItem.setCfgCode(new PseudoInteger(rs.getInt ("cfg_code")) ); // editable
} else {
foodItem.setCfgCode(new PseudoInteger() );
}
// NOTE(review): commit date is set twice — here from cfg_code_update_date and
// again from commit_date near the end; the second write wins. Confirm intended.
foodItem.setCommitDate(rs.getDate ("cfg_code_update_date") );
if (rs.getString("energy_kcal") != null) {
foodItem.setEnergyKcal(rs.getDouble ("energy_kcal") );
}
if (rs.getString("sodium_amount_per_100g") != null) {
foodItem.setSodiumAmountPer100g(new PseudoDouble(rs.getDouble ("sodium_amount_per_100g")) ); // editable
} else {
foodItem.setSodiumAmountPer100g(new PseudoDouble() );
}
foodItem.setSodiumImputationReference(new PseudoString(rs.getString ("sodium_imputation_reference")) );
foodItem.setSodiumImputationDate(rs.getDate ("sodium_imputation_date") );
if (rs.getString("sugar_amount_per_100g") != null) {
foodItem.setSugarAmountPer100g(new PseudoDouble(rs.getDouble ("sugar_amount_per_100g")) ); // editable
} else {
foodItem.setSugarAmountPer100g(new PseudoDouble() );
}
foodItem.setSugarImputationReference(new PseudoString(rs.getString ("sugar_imputation_reference")) );
foodItem.setSugarImputationDate(rs.getDate ("sugar_imputation_date") );
if (rs.getString("transfat_amount_per_100g") != null) {
foodItem.setTransfatAmountPer100g(new PseudoDouble(rs.getDouble ("transfat_amount_per_100g")) ); // editable
} else {
foodItem.setTransfatAmountPer100g(new PseudoDouble() );
}
foodItem.setTransfatImputationReference(new PseudoString(rs.getString ("transfat_imputation_reference")) );
foodItem.setTransfatImputationDate(rs.getDate ("transfat_imputation_date") );
if (rs.getString("satfat_amount_per_100g") != null) {
foodItem.setSatfatAmountPer100g(new PseudoDouble(rs.getDouble ("satfat_amount_per_100g")) ); // editable
} else {
foodItem.setSatfatAmountPer100g(new PseudoDouble() );
}
foodItem.setSatfatImputationReference(new PseudoString(rs.getString ("satfat_imputation_reference")) );
foodItem.setSatfatImputationDate(rs.getDate ("satfat_imputation_date") );
if (rs.getString("totalfat_amount_per_100g") != null) {
foodItem.setTotalfatAmountPer100g(new PseudoDouble(rs.getDouble ("totalfat_amount_per_100g")) ); // editable
} else {
foodItem.setTotalfatAmountPer100g(new PseudoDouble() );
}
foodItem.setTotalfatImputationReference(new PseudoString(rs.getString ("totalfat_imputation_reference")) );
foodItem.setTotalfatImputationDate(rs.getDate ("totalfat_imputation_date") );
if (rs.getString("contains_added_sodium") != null) {
foodItem.setContainsAddedSodium(new PseudoBoolean(rs.getBoolean ("contains_added_sodium")) ); // editable
} else {
foodItem.setContainsAddedSodium(new PseudoBoolean() );
}
foodItem.setContainsAddedSodiumUpdateDate(rs.getDate ("contains_added_sodium_update_date") );
if (rs.getString("contains_added_sugar") != null) {
foodItem.setContainsAddedSugar(new PseudoBoolean(rs.getBoolean ("contains_added_sugar")) ); // editable
} else {
foodItem.setContainsAddedSugar(new PseudoBoolean() );
}
foodItem.setContainsAddedSugarUpdateDate(rs.getDate ("contains_added_sugar_update_date") );
if (rs.getString("contains_free_sugars") != null) {
foodItem.setContainsFreeSugars(new PseudoBoolean(rs.getBoolean ("contains_free_sugars")) ); // editable
} else {
foodItem.setContainsFreeSugars(new PseudoBoolean() );
}
foodItem.setContainsFreeSugarsUpdateDate(rs.getDate ("contains_free_sugars_update_date") );
if (rs.getString("contains_added_fat") != null) {
foodItem.setContainsAddedFat(new PseudoBoolean(rs.getBoolean ("contains_added_fat")) ); // editable
} else {
foodItem.setContainsAddedFat(new PseudoBoolean() );
}
foodItem.setContainsAddedFatUpdateDate(rs.getDate ("contains_added_fat_update_date") );
if (rs.getString("contains_added_transfat") != null) {
foodItem.setContainsAddedTransfat(new PseudoBoolean(rs.getBoolean ("contains_added_transfat")) ); // editable
} else {
foodItem.setContainsAddedTransfat(new PseudoBoolean() );
}
foodItem.setContainsAddedTransfatUpdateDate(rs.getDate ("contains_added_transfat_update_date") );
if (rs.getString("contains_caffeine") != null) {
foodItem.setContainsCaffeine(new PseudoBoolean(rs.getBoolean ("contains_caffeine")) ); // editable
} else {
foodItem.setContainsCaffeine(new PseudoBoolean() );
}
foodItem.setContainsCaffeineUpdateDate(rs.getDate ("contains_caffeine_update_date") );
if (rs.getString("contains_sugar_substitutes") != null) {
foodItem.setContainsSugarSubstitutes(new PseudoBoolean(rs.getBoolean ("contains_sugar_substitutes")) ); // editable
} else {
foodItem.setContainsSugarSubstitutes(new PseudoBoolean() );
}
foodItem.setContainsSugarSubstitutesUpdateDate(rs.getDate ("contains_sugar_substitutes_update_date") );
if (rs.getString("reference_amount_g") != null) {
foodItem.setReferenceAmountG(rs.getDouble ("reference_amount_g") ); // editable
}
foodItem.setReferenceAmountMeasure(rs.getString ("reference_amount_measure") );
foodItem.setReferenceAmountUpdateDate(rs.getDate ("reference_amount_update_date") );
if (rs.getString("food_guide_serving_g") != null) {
foodItem.setFoodGuideServingG(new PseudoDouble(rs.getDouble ("food_guide_serving_g")) ); // editable
} else {
foodItem.setFoodGuideServingG(new PseudoDouble() );
}
foodItem.setFoodGuideServingMeasure(new PseudoString(rs.getString ("food_guide_serving_measure")) );
foodItem.setFoodGuideUpdateDate(rs.getDate ("food_guide_update_date") );
if (rs.getString("tier_4_serving_g") != null) {
foodItem.setTier4ServingG(new PseudoDouble(rs.getDouble ("tier_4_serving_g")) ); // editable
} else {
foodItem.setTier4ServingG(new PseudoDouble() );
}
foodItem.setTier4ServingMeasure(new PseudoString(rs.getString ("tier_4_serving_measure")) );
foodItem.setTier4ServingUpdateDate(rs.getDate ("tier_4_serving_update_date") );
if (rs.getString("rolled_up") != null) {
foodItem.setRolledUp(new PseudoBoolean(rs.getBoolean ("rolled_up")) ); // editable
} else {
foodItem.setRolledUp(new PseudoBoolean() );
}
foodItem.setRolledUpUpdateDate(rs.getDate ("rolled_up_update_date") );
if (rs.getString("apply_small_ra_adjustment") != null) {
foodItem.setOverrideSmallRaAdjustment(rs.getBoolean ("apply_small_ra_adjustment") );
}
if (rs.getString("replacement_code") != null) {
foodItem.setReplacementCode(new PseudoInteger(rs.getInt ("replacement_code")) ); // editable
} else {
foodItem.setReplacementCode(new PseudoInteger() );
}
foodItem.setCommitDate(rs.getDate ("commit_date") );
foodItem.setComments(new PseudoString(rs.getString ("comments")) ); // editable
list.add(foodItem);
}
conn.close();
} catch(SQLException e) {
// TODO: proper response to handle exceptions
e.printStackTrace();
}
}
return getResponse(Response.Status.OK, list);
}
// Wraps the given entity in a response carrying permissive CORS headers.
private Response getResponse(Response.Status status, Object obj) {
    Response.ResponseBuilder builder = Response.status(status);
    builder.header(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
    builder.header(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "origin, content-type, accept, authorization");
    builder.header(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    builder.header(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET, POST, PUT, DELETE, OPTIONS, HEAD");
    builder.header(HttpHeaders.ACCESS_CONTROL_MAX_AGE, "1209600");
    builder.entity(obj);
    return builder.build();
}
// Narrows an untyped object to a List<T>, casting each element via clazz.
// Returns a fresh mutable list, or null when obj is not a List at all
// (a ClassCastException propagates if an element has the wrong type).
private static <T> List<T> castList(Object obj, Class<T> clazz) {
    if (!(obj instanceof List<?>)) {
        return null;
    }
    List<T> typed = new ArrayList<T>();
    for (Object element : (List<?>) obj) {
        typed.add(clazz.cast(element));
    }
    return typed;
}
@SuppressWarnings("unchecked")
// Adds a Mongo "$set" for data.$.<key>.value when the submitted value differs from the
// stored original, plus a currentDate touch on data.$.<value> when a timestamp field
// name is supplied. Returns the (possibly incremented) running change count.
//
// Params (as used by the code below):
//   key                 - metadata field name inside each row's document
//   value               - companion timestamp field name; "" means no timestamp update
//   sets                - accumulator of Bson update operators, appended to in place
//   changes             - running count of modified fields so far
//   original_values_map - row code -> stored field map (each field a {"value": ...} map)
//   toupdate_values_map - row code -> submitted field map, same shape
//   map                 - the submitted row; map.get("code") keys into both maps above
// NOTE(review): a null submitted value never triggers an update, so a field cannot be
// cleared through this path — confirm that is intended.
private int updateIfModified(String key, String value, List<Bson> sets, int changes, Map<Integer, Map<String, Object>> original_values_map, Map<Integer, Map<String, Object>> toupdate_values_map, Map<String, Object> map) {
logger.error("[01;34m" + key + ": " + ((Map<String, Object>)toupdate_values_map.get(map.get("code")).get(key)).get("value") + "[00;00m");
if (((Map<String, Object>)toupdate_values_map.get(map.get("code")).get(key)).get("value") != null && !((Map<String, Object>)toupdate_values_map.get(map.get("code")).get(key)).get("value").equals(((Map<String, Object>)original_values_map.get(map.get("code")).get(key)).get("value"))) {
// Value changed: write the new value into the positional ($) array element...
sets.add(set("data.$." + key + ".value", ((Map<String, Object>)map.get(key)).get("value")));
if (!value.isEmpty()) {
// ...and stamp the companion update-date field with the server's current date.
sets.add(currentDate("data.$." + value));
}
++changes;
logger.error("[01;31mvalue changed: " + ((Map<String, Object>)map.get(key)).get("value") + "[00;00m");
}
return changes;
}
// Re-serializes a metadata entry: overwrites obj[key] with the flattened value read
// from obj[key + "value"].
// NOTE(review): the lookup key is the literal concatenation key + "value" with no "."
// separator — presumably the flattened field name produced elsewhere; confirm it
// should not be key + ".value" or a nested ((Document) obj.get(key)).get("value").
private void convertMetaDataObjectToSingleValue(Document obj, String key) {
obj.put(key, obj.get(key + "value"));
// convertMetaDataObjectToSingleValue((Document)obj, key);
}
}
|
Changed REST service that classifies an existing dataset - POST datasets/id/classify
- integrated boolean
- integrated metadata
- implemented de-serialize metadata object
- implemented re-serialize metadata object
|
src/main/java/ca/gc/ip346/classification/resource/FoodsResource.java
|
Changed REST service that classifies an existing dataset - POST datasets/id/classify - integrated boolean - integrated metadata - implemented de-serialize metadata object - implemented re-serialize metadata object
|
|
Java
|
mit
|
17a71299b7c8fb71775dbf73d9008cb5630fa03b
| 0
|
joshuapinter/react-native-unified-contacts,joshuapinter/react-native-unified-contacts,joshuapinter/react-native-unified-contacts,joshuapinter/react-native-unified-contacts
|
package com.joshuapinter;
import android.app.Activity;
import android.content.ContentResolver;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.net.Uri;
import android.provider.ContactsContract;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.widget.Toast;
import com.facebook.react.bridge.ActivityEventListener;
import com.facebook.react.bridge.BaseActivityEventListener;
import com.facebook.react.bridge.Callback;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import java.util.HashMap;
import java.util.Map;
/**
 * React Native bridge module exposing contact selection and (experimental) SMS reading.
 * Registered under the JS name "RNUnifiedContacts".
 */
public class RNUnifiedContactsModule extends ReactContextBaseJavaModule {

    // Request code used for the contact-picker activity so onActivityResult can
    // distinguish our result from other activities' results.
    private static final int REQUEST_CODE_SELECT_CONTACT  = 1;
    private static final int REQUEST_CODE_ASK_PERMISSIONS = 123;

    private Promise  mSelectContactPromise; // reserved for a future Promise-based API (currently unused)
    private Callback mSuccessCallback;
    private Callback mErrorCallback;

    public RNUnifiedContactsModule(ReactApplicationContext reactContext) {
        super(reactContext);
        reactContext.addActivityEventListener(mActivityEventListener);
    }

    private final ActivityEventListener mActivityEventListener = new BaseActivityEventListener() {
        @Override
        public void onActivityResult(Activity activity, int requestCode, int resultCode, Intent data) {
            // Ignore results that are not from our contact picker.
            if (requestCode != REQUEST_CODE_SELECT_CONTACT) {
                return;
            }
            // A cancelled pick previously fell through and crashed on data.getData().
            if (resultCode != Activity.RESULT_OK || data == null || data.getData() == null) {
                if (mErrorCallback != null) {
                    mErrorCallback.invoke("Contact selection was cancelled.");
                }
                return;
            }
            Uri contactUri = data.getData();
            ContentResolver contentResolver = activity.getContentResolver();
            Cursor contactCursor = contentResolver.query(contactUri, null, null, null, null);
            if (contactCursor == null) {
                if (mErrorCallback != null) {
                    mErrorCallback.invoke("Unable to read the selected contact.");
                }
                return;
            }
            try {
                if (contactCursor.moveToFirst()) {
                    int columnDisplayName = contactCursor.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME);
                    String contactDisplayName = contactCursor.getString(columnDisplayName);
                    Log.w("Test1", contactDisplayName);
                    mSuccessCallback.invoke(contactDisplayName);
                }
            } finally {
                // Cursors wrap native resources; the original leaked this one.
                contactCursor.close();
            }
        }
    };

    @Override
    public String getName() {
        // JS-side module name: NativeModules.RNUnifiedContacts
        return "RNUnifiedContacts";
    }

    @Override
    public Map<String, Object> getConstants() {
        // No constants exported yet; label maps (phoneNumberLabel, emailAddressLabel)
        // from the iOS module are still to be ported.
        final Map<String, Object> constants = new HashMap<>();
        return constants;
    }

    /** Debug helper: shows a toast with the given message. */
    @ReactMethod
    public void test1(String message, int duration) {
        Toast.makeText(getReactApplicationContext(), message, duration).show();
    }

    /** Debug helper: identical to test1 (kept for JS-side compatibility). */
    @ReactMethod
    public void test2(String message, int duration) {
        Toast.makeText(getReactApplicationContext(), message, duration).show();
    }

    /**
     * Launches the system contact picker. The picked contact's display name is
     * delivered through successCallback; cancellation/failure through errorCallback.
     */
    @ReactMethod
    public void selectContact(Callback errorCallback, Callback successCallback) {
        mErrorCallback   = errorCallback;
        mSuccessCallback = successCallback;

        Intent intent = new Intent(Intent.ACTION_PICK);
        intent.setType(ContactsContract.Contacts.CONTENT_TYPE);

        Activity currentActivity = getCurrentActivity();
        if (currentActivity == null) {
            // The module can outlive its activity; previously this NPE'd.
            errorCallback.invoke("No current activity available.");
            return;
        }
        if (intent.resolveActivity(currentActivity.getPackageManager()) != null) {
            currentActivity.startActivityForResult(intent, REQUEST_CODE_SELECT_CONTACT);
        }
    }

    /**
     * Experimental: logs all sent SMS messages. Requests READ_SMS if not yet granted;
     * note the permission request is asynchronous, so the first call may still log
     * nothing even when the user grants it — TODO confirm desired UX.
     */
    @ReactMethod
    public void getMessages() {
        Activity currentActivity = getCurrentActivity();
        if (ContextCompat.checkSelfPermission(currentActivity.getBaseContext(), "android.permission.READ_SMS") != PackageManager.PERMISSION_GRANTED) {
            Log.w("Test3", "request permission");
            ActivityCompat.requestPermissions(currentActivity, new String[]{"android.permission.READ_SMS"}, REQUEST_CODE_ASK_PERMISSIONS);
        }
        Cursor cursor = currentActivity.getContentResolver().query(Uri.parse("content://sms/sent"), null, null, null, null);
        if (cursor == null) {
            Log.w("Test2", "Messages empty.");
            return;
        }
        try {
            if (cursor.moveToFirst()) { // must check the result to prevent exception
                do {
                    StringBuilder msgData = new StringBuilder();
                    for (int idx = 0; idx < cursor.getColumnCount(); idx++) {
                        msgData.append(" ").append(cursor.getColumnName(idx)).append(":").append(cursor.getString(idx));
                    }
                    Log.w("Test1", msgData.toString());
                } while (cursor.moveToNext());
            } else {
                Log.w("Test2", "Messages empty."); // empty box, no SMS
            }
        } finally {
            // Close the SMS cursor; the original leaked it.
            cursor.close();
        }
    }
}
|
android/src/main/java/com/joshuapinter/RNUnifiedContactsModule.java
|
package com.joshuapinter;
import android.app.Activity;
import android.content.ContentResolver;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.net.Uri;
import android.provider.ContactsContract;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.widget.Toast;
import com.facebook.react.bridge.ActivityEventListener;
import com.facebook.react.bridge.BaseActivityEventListener;
import com.facebook.react.bridge.Callback;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import java.util.HashMap;
import java.util.Map;
public class RNUnifiedContactsModule extends ReactContextBaseJavaModule {
private Promise mSelectContactPromise;
private Callback mSuccessCallback;
private Callback mErrorCallback;
public RNUnifiedContactsModule(ReactApplicationContext reactContext) {
super(reactContext);
reactContext.addActivityEventListener(mActivityEventListener);
}
private final ActivityEventListener mActivityEventListener = new BaseActivityEventListener() {
@Override
public void onActivityResult(Activity activity, int requestCode, int resultCode, Intent data) {
// Get the URI and query the content provider for the phone number
Uri contactUri = data.getData();
ContentResolver contentResolver = activity.getContentResolver();
Cursor contactCursor = contentResolver.query(contactUri, null, null, null, null);
contactCursor.moveToFirst();
int columnId = contactCursor.getColumnIndex(ContactsContract.Contacts._ID);
String contactId = contactCursor.getString(columnId);
int columnDisplayName = contactCursor.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME);
String contactDisplayName = contactCursor.getString(columnDisplayName);
Log.w("Test1", contactDisplayName);
mSuccessCallback.invoke(contactDisplayName);
// String[] projection = new String[]{ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME};
// Cursor cursor = activity.getContentResolver().query(contactUri, projection,
// null, null, null);
// // If the cursor returned is valid, get the phone number
// if (cursor != null && cursor.moveToFirst()) {
// int nameIndex = cursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredName.GIVEN_NAME);
// String name = cursor.getString(nameIndex);
//
// Log.w("Test1", "name:");
// Log.w("Test1", name);
// // Do something with the phone number
// mSuccessCallback.invoke(name);
// }
//
// Log.w("Test1", "onActivityResult after");
// mSelectContactPromise.resolve("Resolve1");
}
};
// // column index of the phone number
// Cursor pCur = cr.query(ContactsContract.CommonDataKinds.Phone.CONTENT_URI,null,
// ContactsContract.CommonDataKinds.Phone.CONTACT_ID +" = ?",
// new String[]{id}, null);
// while (pCur.moveToNext()) {
// String phone = pCur.getString(
// pCur.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER));
// txtTelefono.setText(phone); //print data
// }
// pCur.close();
// // column index of the email
// Cursor emailCur = cr.query(
// ContactsContract.CommonDataKinds.Email.CONTENT_URI,
// null,
// ContactsContract.CommonDataKinds.Email.CONTACT_ID + " = ?",
// new String[]{id}, null);
// while (emailCur.moveToNext()) {
// // This would allow you get several email addresses
// // if the email addresses were stored in an array
// String email = emailCur.getString(
// emailCur.getColumnIndex(ContactsContract.CommonDataKinds.Email.ADDRESS));
// txtMailContacto.setText(email); //print data
// }
// emailCur.close();
// } catch (Exception e) {
// e.printStackTrace();
// }
// }
@Override
public String getName() {
return "RNUnifiedContacts";
}
@Override
public Map<String, Object> getConstants() {
final Map<String, Object> constants = new HashMap<>();
// constants.put(DURATION_SHORT_KEY, Toast.LENGTH_SHORT);
// constants.put(DURATION_LONG_KEY, Toast.LENGTH_LONG);
return constants;
// @"phoneNumberLabel": @{
// @"HOME" : @"home",
// @"WORK" : @"work",
// @"MOBILE" : @"mobile",
// @"IPHONE" : @"iPhone",
// @"MAIN" : @"main",
// @"HOME_FAX" : @"home fax",
// @"WORK_FAX" : @"work fax",
// @"PAGER" : @"pager",
// @"OTHER" : @"other",
// },
// @"emailAddressLabel": @{
// @"HOME" : @"home",
// @"WORK" : @"work",
// @"ICLOUD" : @"iCloud",
// @"OTHER" : @"other",
// },
}
@ReactMethod
public void test1(String message, int duration) {
Toast.makeText(getReactApplicationContext(), message, duration).show();
}
@ReactMethod
public void test2(String message, int duration) {
Toast.makeText(getReactApplicationContext(), message, duration).show();
}
@ReactMethod
public void selectContact(Callback errorCallback, Callback successCallback) {
mErrorCallback = errorCallback;
mSuccessCallback = successCallback;
Intent intent = new Intent(Intent.ACTION_PICK);
intent.setType(ContactsContract.Contacts.CONTENT_TYPE);
Activity currentActivity = getCurrentActivity();
if (intent.resolveActivity(currentActivity.getPackageManager()) != null) {
currentActivity.startActivityForResult(intent, 1);
}
}
@ReactMethod
public void getMessages() {
Activity currentActivity = getCurrentActivity();
if (ContextCompat.checkSelfPermission(currentActivity.getBaseContext(), "android.permission.READ_SMS") != PackageManager.PERMISSION_GRANTED) {
Log.w("Test3", "request permission");
final int REQUEST_CODE_ASK_PERMISSIONS = 123;
ActivityCompat.requestPermissions(currentActivity, new String[]{"android.permission.READ_SMS"}, REQUEST_CODE_ASK_PERMISSIONS);
}
// ActivityCompat.requestPermissions(currentActivity,
// new String[]{Manifest.permission.READ_SMS},
// 1);
Cursor cursor = currentActivity.getContentResolver().query(Uri.parse("content://sms/sent"), null, null, null, null);
if (cursor.moveToFirst()) { // must check the result to prevent exception
do {
String msgData = "";
for (int idx = 0; idx < cursor.getColumnCount(); idx++) {
msgData += " " + cursor.getColumnName(idx) + ":" + cursor.getString(idx);
}
Log.w("Test1", msgData);
} while (cursor.moveToNext());
} else {
Log.w("Test2", "Messages empty.");
// empty box, no SMS
}
}
}
|
Removed public from Unified Contacts module.
|
android/src/main/java/com/joshuapinter/RNUnifiedContactsModule.java
|
Removed public from Unified Contacts module.
|
|
Java
|
mit
|
50cc7de7f99c1d22dc449ff00550fc3ad1a96bb3
| 0
|
bhandfast/NightWatch,mgranberry/NightWatch
|
package com.dexdrip.stephenblack.nightwatch;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.widget.DrawerLayout;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.widget.TextView;
import java.text.DecimalFormat;
import java.util.Date;
import lecho.lib.hellocharts.ViewportChangeListener;
import lecho.lib.hellocharts.gesture.ZoomType;
import lecho.lib.hellocharts.model.Viewport;
import lecho.lib.hellocharts.view.LineChartView;
import lecho.lib.hellocharts.view.PreviewLineChartView;
public class Home extends Activity {
private String menu_name = "DexDrip";
private LineChartView chart;
private PreviewLineChartView previewChart;
Viewport tempViewport = new Viewport();
Viewport holdViewport = new Viewport();
public float left;
public float right;
public float top;
public float bottom;
public boolean updateStuff;
public boolean updatingPreviewViewport = false;
public boolean updatingChartViewport = false;
public BgGraphBuilder bgGraphBuilder;
BroadcastReceiver _broadcastReceiver;
@Override
protected void onCreate(Bundle savedInstanceState) {
startService(new Intent(getApplicationContext(), DataCollectionService.class));
super.onCreate(savedInstanceState);
PreferenceManager.setDefaultValues(this, R.xml.pref_general, false);
PreferenceManager.setDefaultValues(this, R.xml.pref_bg_notification, false);
setContentView(R.layout.activity_home);
}
@Override
protected void onResume(){
super.onResume();
bgGraphBuilder = new BgGraphBuilder(this);
displayCurrentInfo();
_broadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context ctx, Intent intent) {
if (intent.getAction().compareTo(Intent.ACTION_TIME_TICK) == 0) {
displayCurrentInfo();
}
}
};
registerReceiver(_broadcastReceiver, new IntentFilter(Intent.ACTION_TIME_TICK));
holdViewport.set(0, 0, 0, 0);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.menu_home, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle item selection
switch (item.getItemId()) {
case R.id.action_settings:
startActivity(new Intent(getApplicationContext(), SettingsActivity.class));
return true;
default:
return true;
}
}
public void setupCharts() {
updateStuff = false;
chart = (LineChartView) findViewById(R.id.chart);
chart.setZoomType(ZoomType.HORIZONTAL);
previewChart = (PreviewLineChartView) findViewById(R.id.chart_preview);
previewChart.setZoomType(ZoomType.HORIZONTAL);
chart.setLineChartData(bgGraphBuilder.lineData());
previewChart.setLineChartData(bgGraphBuilder.previewLineData());
updateStuff = true;
previewChart.setViewportCalculationEnabled(true);
chart.setViewportCalculationEnabled(true);
previewChart.setViewportChangeListener(new ViewportListener());
chart.setViewportChangeListener(new ChartViewPortListener());
setViewport();
}
private class ChartViewPortListener implements ViewportChangeListener {
@Override
public void onViewportChanged(Viewport newViewport) {
if (!updatingPreviewViewport) {
updatingChartViewport = true;
previewChart.setZoomType(ZoomType.HORIZONTAL);
previewChart.setCurrentViewport(newViewport, false);
updatingChartViewport = false;
}
}
}
private class ViewportListener implements ViewportChangeListener {
@Override
public void onViewportChanged(Viewport newViewport) {
if (!updatingChartViewport) {
updatingPreviewViewport = true;
chart.setZoomType(ZoomType.HORIZONTAL);
chart.setCurrentViewport(newViewport, false);
tempViewport = newViewport;
updatingPreviewViewport = false;
}
if (updateStuff == true) {
holdViewport.set(newViewport.left, newViewport.top, newViewport.right, newViewport.bottom);
}
}
}
public void setViewport() {
if (tempViewport.left == 0.0 || holdViewport.left == 0.0 || holdViewport.right >= (new Date().getTime())) {
previewChart.setCurrentViewport(bgGraphBuilder.advanceViewport(chart, previewChart), false);
} else {
previewChart.setCurrentViewport(holdViewport, false);
}
}
@Override
public void onPause() {
super.onPause();
if (_broadcastReceiver != null)
unregisterReceiver(_broadcastReceiver);
}
public void displayCurrentInfo() {
final TextView currentBgValueText = (TextView)findViewById(R.id.currentBgValueRealTime);
final TextView notificationText = (TextView)findViewById(R.id.notices);
if ((currentBgValueText.getPaintFlags() & Paint.STRIKE_THRU_TEXT_FLAG) > 0) {
currentBgValueText.setPaintFlags(currentBgValueText.getPaintFlags() & (~Paint.STRIKE_THRU_TEXT_FLAG));
}
Bg lastBgreading = Bg.last();
if (lastBgreading != null) {
notificationText.setText(lastBgreading.readingAge());
currentBgValueText.setText(bgGraphBuilder.unitized_string(lastBgreading.sgv_double()) + " " + lastBgreading.slopeArrow());
if ((new Date().getTime()) - (60000 * 16) - lastBgreading.datetime > 0) {
notificationText.setTextColor(Color.parseColor("#C30909"));
currentBgValueText.setPaintFlags(currentBgValueText.getPaintFlags() | Paint.STRIKE_THRU_TEXT_FLAG);
} else {
notificationText.setTextColor(Color.WHITE);
}
double estimate = lastBgreading.sgv_double();
if(bgGraphBuilder.unitized(estimate) <= bgGraphBuilder.lowMark) {
currentBgValueText.setTextColor(Color.parseColor("#C30909"));
} else if(bgGraphBuilder.unitized(estimate) >= bgGraphBuilder.highMark) {
currentBgValueText.setTextColor(Color.parseColor("#FFBB33"));
} else {
currentBgValueText.setTextColor(Color.WHITE);
}
}
setupCharts();
}
}
|
mobile/src/main/java/com/dexdrip/stephenblack/nightwatch/Home.java
|
package com.dexdrip.stephenblack.nightwatch;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Color;
import android.graphics.Paint;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.widget.DrawerLayout;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.widget.TextView;
import java.text.DecimalFormat;
import java.util.Date;
import lecho.lib.hellocharts.ViewportChangeListener;
import lecho.lib.hellocharts.gesture.ZoomType;
import lecho.lib.hellocharts.model.Viewport;
import lecho.lib.hellocharts.view.LineChartView;
import lecho.lib.hellocharts.view.PreviewLineChartView;
public class Home extends Activity {
private String menu_name = "DexDrip";
private LineChartView chart;
private PreviewLineChartView previewChart;
Viewport tempViewport = new Viewport();
Viewport holdViewport = new Viewport();
public float left;
public float right;
public float top;
public float bottom;
public boolean updateStuff;
public boolean updatingPreviewViewport = false;
public boolean updatingChartViewport = false;
public BgGraphBuilder bgGraphBuilder;
BroadcastReceiver _broadcastReceiver;
@Override
protected void onCreate(Bundle savedInstanceState) {
startService(new Intent(getApplicationContext(), DataCollectionService.class));
super.onCreate(savedInstanceState);
PreferenceManager.setDefaultValues(this, R.xml.pref_general, false);
PreferenceManager.setDefaultValues(this, R.xml.pref_bg_notification, false);
setContentView(R.layout.activity_home);
}
@Override
protected void onResume(){
super.onResume();
_broadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context ctx, Intent intent) {
if (intent.getAction().compareTo(Intent.ACTION_TIME_TICK) == 0) {
updateCurrentBgInfo();
}
}
};
registerReceiver(_broadcastReceiver, new IntentFilter(Intent.ACTION_TIME_TICK));
holdViewport.set(0, 0, 0, 0);
setupCharts();
updateCurrentBgInfo();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.menu_home, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle item selection
switch (item.getItemId()) {
case R.id.action_settings:
startActivity(new Intent(getApplicationContext(), SettingsActivity.class));
return true;
default:
return true;
}
}
public void setupCharts() {
bgGraphBuilder = new BgGraphBuilder(this);
updateStuff = false;
chart = (LineChartView) findViewById(R.id.chart);
chart.setZoomType(ZoomType.HORIZONTAL);
previewChart = (PreviewLineChartView) findViewById(R.id.chart_preview);
previewChart.setZoomType(ZoomType.HORIZONTAL);
chart.setLineChartData(bgGraphBuilder.lineData());
previewChart.setLineChartData(bgGraphBuilder.previewLineData());
updateStuff = true;
previewChart.setViewportCalculationEnabled(true);
chart.setViewportCalculationEnabled(true);
previewChart.setViewportChangeListener(new ViewportListener());
chart.setViewportChangeListener(new ChartViewPortListener());
setViewport();
}
private class ChartViewPortListener implements ViewportChangeListener {
@Override
public void onViewportChanged(Viewport newViewport) {
if (!updatingPreviewViewport) {
updatingChartViewport = true;
previewChart.setZoomType(ZoomType.HORIZONTAL);
previewChart.setCurrentViewport(newViewport, false);
updatingChartViewport = false;
}
}
}
private class ViewportListener implements ViewportChangeListener {
@Override
public void onViewportChanged(Viewport newViewport) {
if (!updatingChartViewport) {
updatingPreviewViewport = true;
chart.setZoomType(ZoomType.HORIZONTAL);
chart.setCurrentViewport(newViewport, false);
tempViewport = newViewport;
updatingPreviewViewport = false;
}
if (updateStuff == true) {
holdViewport.set(newViewport.left, newViewport.top, newViewport.right, newViewport.bottom);
}
}
}
public void setViewport() {
if (tempViewport.left == 0.0 || holdViewport.left == 0.0 || holdViewport.right >= (new Date().getTime())) {
previewChart.setCurrentViewport(bgGraphBuilder.advanceViewport(chart, previewChart), false);
} else {
previewChart.setCurrentViewport(holdViewport, false);
}
}
@Override
public void onPause() {
super.onPause();
if (_broadcastReceiver != null)
unregisterReceiver(_broadcastReceiver);
}
public void updateCurrentBgInfo() {
final TextView currentBgValueText = (TextView) findViewById(R.id.currentBgValueRealTime);
final TextView notificationText = (TextView)findViewById(R.id.notices);
notificationText.setText("");
displayCurrentInfo();
}
public void displayCurrentInfo() {
DecimalFormat df = new DecimalFormat("#");
df.setMaximumFractionDigits(0);
final TextView currentBgValueText = (TextView)findViewById(R.id.currentBgValueRealTime);
final TextView notificationText = (TextView)findViewById(R.id.notices);
if ((currentBgValueText.getPaintFlags() & Paint.STRIKE_THRU_TEXT_FLAG) > 0) {
currentBgValueText.setPaintFlags(currentBgValueText.getPaintFlags() & (~Paint.STRIKE_THRU_TEXT_FLAG));
}
Bg lastBgreading = Bg.last();
if (lastBgreading != null) {
notificationText.setText(lastBgreading.readingAge());
currentBgValueText.setText(bgGraphBuilder.unitized_string(lastBgreading.sgv_double()) + " " + lastBgreading.slopeArrow());
if ((new Date().getTime()) - (60000 * 16) - lastBgreading.datetime > 0) {
notificationText.setTextColor(Color.parseColor("#C30909"));
currentBgValueText.setPaintFlags(currentBgValueText.getPaintFlags() | Paint.STRIKE_THRU_TEXT_FLAG);
} else {
notificationText.setTextColor(Color.WHITE);
}
double estimate = lastBgreading.sgv_double();
if(bgGraphBuilder.unitized(estimate) <= bgGraphBuilder.lowMark) {
currentBgValueText.setTextColor(Color.parseColor("#C30909"));
} else if(bgGraphBuilder.unitized(estimate) >= bgGraphBuilder.highMark) {
currentBgValueText.setTextColor(Color.parseColor("#FFBB33"));
} else {
currentBgValueText.setTextColor(Color.WHITE);
}
}
setupCharts();
}
}
|
small cleanup with some reprioritizing of element loading
|
mobile/src/main/java/com/dexdrip/stephenblack/nightwatch/Home.java
|
small cleanup with some reprioritizing of element loading
|
|
Java
|
mit
|
61ad29bdf1493a98bf19e8b2892401aa53493bf6
| 0
|
lidox/big-data-fun
|
package reactiontest.offline;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.codehaus.jettison.json.JSONArray;
/**
* Offline data analysis via Apache Flink. The data is comming from
* www.humanbenchmark.com/tests/reactiontime
*/
public class BatchFunctions {
private ExecutionEnvironment env = null;
public BatchFunctions() {
env = ExecutionEnvironment.getExecutionEnvironment();
}
public DataSet<Tuple2<Double, Integer>> loadDataSetOfAllTime(){
DataSet<String> inputData = env.readTextFile(getFilePath("human-benchmark-alltime.json"));
return inputData.flatMap(new Tokenizer());
}
public DataSet<Tuple2<Double, Integer>> loadDataSetOfOctober2016(){
DataSet<String> inputData = env.readTextFile(getFilePath("human-benchmark-october.json"));
return inputData.flatMap(new Tokenizer());
}
public void printInputData(DataSet<Tuple2<Double, Integer>> data){
if(data != null)
try {
data.print();
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* Get reaction test count by sum function
* @return the count of all reaction tests
*/
public int getReactionTestCountBySum(DataSet<Tuple2<Double, Integer>> data){
int count = 0;
try {
DataSet<Tuple2<Double, Integer>> sumData = data.sum(1);
// I use the collected data for streams later, so i need to collect instead
// of printing it directly
count = sumData.collect().get(0).f1;
} catch (Exception e) {
e.printStackTrace();
}
return count;
}
/**
* Get reaction test count by reduce function
* @return the count of all reaction tests
*/
public int getReactionTestCountByReduce(DataSet<Tuple2<Double, Integer>> data){
int count = 0;
try {
DataSet<Tuple2<Double, Integer>> sumData = data.reduce(new ReduceFunction<Tuple2<Double,Integer>>() {
private static final long serialVersionUID = -5937101140633725165L;
@Override
public Tuple2<Double, Integer> reduce(Tuple2<Double, Integer> recent,Tuple2<Double, Integer> current) {
Tuple2<Double, Integer> ret = new Tuple2<Double, Integer>(0.,0);
try {
ret = new Tuple2<Double, Integer>(current.f0, recent.f1 + current.f1);
} catch (Exception e) {
e.printStackTrace();
}
return ret;
}
});
// I use the collected data for streams later, so i need to collect instead
// of printing it directly. And in this case, i know, that the DataSet contains only a single tuple.
// Thus I won't have problems calculating over big data
count = sumData.collect().get(0).f1;
} catch (Exception e) {
e.printStackTrace();
}
return count;
}
/**
* Get the file path from resource folder by filename
* @param fileName the name of the file
* @return the absolute file path
*/
public String getFilePath(String fileName) {
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource(fileName).getFile());
String fileToRead = file.getAbsolutePath();
return fileToRead;
}
public static class Tokenizer implements FlatMapFunction<String, Tuple2<Double, Integer>>{
private static final long serialVersionUID = 8139075182383902551L;
@Override
public void flatMap(String value, Collector<Tuple2<Double, Integer>> out) {
List<Tuple2<Double, Integer>> jsonList = getListByJSON(value);
for(Tuple2<Double, Integer> item: jsonList){
out.collect(item);
}
}
/**
* Converts a JSON string to Tuple. E.g. request:
* { "medicalid":"Markus", "operationissue":"blablub", "age":54, "gender":"Male", "datetime":"2016-11-03 20:59:28.807", "type":"PreOperation", "times":[412,399,324] }
* @param jsonString
* @return
*/
public static List<Tuple2<Double, Integer>> getListByJSON(String jsonString){
List<Tuple2<Double, Integer>> retList = new ArrayList<>();
try {
JSONArray transactionJSON = new JSONArray(jsonString);
for(int i = 0 ; i< transactionJSON.length(); i++){
JSONArray a = (JSONArray) transactionJSON.get(i);
Double reactionTime = Double.parseDouble(a.get(0).toString());
Integer userCount = Integer.parseInt(a.get(1).toString());
retList.add(new Tuple2<Double, Integer>(reactionTime, userCount));
}
} catch (Exception e) {
e.printStackTrace();
}
return retList;
}
}
/**
* Get the reaction time with the minimal user count using the reduce function
* @return the reaction time tuple with the minimal user count
*/
public Tuple2<Double, Integer> getReactionTimeByMinUserCount(DataSet<Tuple2<Double, Integer>> data) {
Tuple2<Double, Integer> ret = new Tuple2<Double, Integer>();
try {
DataSet<Tuple2<Double, Integer>> minData = data.reduce(new ReduceFunction<Tuple2<Double,Integer>>() {
private static final long serialVersionUID = 1439796543225086584L;
@Override
public Tuple2<Double, Integer> reduce(Tuple2<Double, Integer> recent, Tuple2<Double, Integer> current) {
int recentUserCount = recent.f1;
int currentUserCount = current.f1;
if(recentUserCount < currentUserCount){
return recent;
}
else{
return current;
}
}
});
// I use the collected data for streams later, so i need to collect instead
// of printing it directly. And in this case, i know, that the DataSet contains only a single tuple.
// Thus I won't have problems calculating over big data
ret = minData.collect().get(0);
} catch (Exception e) {
e.printStackTrace();
}
return ret;
}
/**
* Get the reaction time with the maximal user count using the reduce function
* @return the reaction time tuple with the maximal user count
*/
public Tuple2<Double, Integer> getReactionTimeByMaxUserCount(DataSet<Tuple2<Double, Integer>> data) {
Tuple2<Double, Integer> ret = new Tuple2<Double, Integer>();
try {
DataSet<Tuple2<Double, Integer>> maxData = data.reduce(new ReduceFunction<Tuple2<Double,Integer>>() {
private static final long serialVersionUID = 1439796543225086584L;
@Override
public Tuple2<Double, Integer> reduce(Tuple2<Double, Integer> recent, Tuple2<Double, Integer> current) {
int recentUserCount = recent.f1;
int currentUserCount = current.f1;
if(recentUserCount > currentUserCount){
return recent;
}
else{
return current;
}
}
});
// I use the collected data for streams later, so i need to collect instead
// of printing it directly. And in this case, i know, that the DataSet contains only a single tuple.
// Thus I won't have problems calculating over big data
ret = maxData.collect().get(0);
} catch (Exception e) {
e.printStackTrace();
}
return ret;
}
/**
* Get the max user count by build in aggregation functions
* @return a tuple of max reaction time and user count for a specific reaction test
*/
public Tuple2<Double, Integer> getMaxUserCountByAggregate(DataSet<Tuple2<Double, Integer>> data) {
Tuple2<Double, Integer> ret = null;
DataSet<Tuple2<Double, Integer>> output = data
.max(0)
.andMax(1);
try {
ret = output.collect().get(0);
} catch (Exception e) {
e.printStackTrace();
}
return ret;
}
/**
* computes the average reaction time of the users
* @return the average reaction time
* @throws Exception
*/
public double getAverageReaction(DataSet<Tuple2<Double, Integer>> data) throws Exception {
DataSet<Tuple2<Double, Integer>> output = data.flatMap(new Averagenizer());
output = output.sum(0).andSum(1);
Tuple2<Double, Integer> finalTuple = output.collect().get(0);
return finalTuple.f0 / finalTuple.f1;
}
/**
*
* Multiply reaction time with user count and return tuple
*
*/
public static class Averagenizer implements FlatMapFunction<Tuple2<Double, Integer>, Tuple2<Double, Integer>>{
private static final long serialVersionUID = 71246547222383551L;
@Override
public void flatMap(Tuple2<Double, Integer> value, Collector<Tuple2<Double, Integer>> out) {
double reactionTime = value.f0;
int userCount = value.f1;
double reactionTimeSum = reactionTime * userCount;
out.collect(new Tuple2<Double, Integer>(reactionTimeSum, userCount));
}
}
/**
*
* Get every reaction time with user count and return tuple
*
*/
public static class Medianizer implements FlatMapFunction<Tuple2<Double, Integer>, Tuple2<Double, Integer>>{
private static final long serialVersionUID = 39387247222383551L;
@Override
public void flatMap(Tuple2<Double, Integer> value, Collector<Tuple2<Double, Integer>> out) {
int userCount = value.f1;
double reactionTime = value.f0;
for(int i = 0; i < userCount; i++){
out.collect(new Tuple2<Double, Integer>(reactionTime, 1));
}
}
}
public double getMedianReactionTime(DataSet<Tuple2<Double, Integer>> data) throws Exception {
DataSet<Tuple2<Double, Integer>> allReactionTests = data.flatMap(new Medianizer());
List<Tuple2<Double, Integer>> reactionTimeList = allReactionTests.collect();
double median = getMedianByList(reactionTimeList);
return median;
}
/**
* Get the median by given list
* @param reactionTimeList the list containing reaction times
* @return the median of the reaction times
*/
private double getMedianByList(List<Tuple2<Double, Integer>> reactionTimeList) {
double median = 0;
int itemCount = reactionTimeList.size();
if (itemCount % 2 == 0)
median = ((double) reactionTimeList.get(itemCount/2).f0 + (double) reactionTimeList.get(itemCount /2 - 1).f0)/2;
else
median = (double) reactionTimeList.get(itemCount/2).f0;
return median;
}
}
|
com.artursworld/src/main/java/reactiontest/offline/BatchFunctions.java
|
package reactiontest.offline;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;
import org.codehaus.jettison.json.JSONArray;
/**
* Offline data analysis via Apache Flink. The data is comming from
* www.humanbenchmark.com/tests/reactiontime
*/
public class BatchFunctions {
private ExecutionEnvironment env = null;
public BatchFunctions() {
env = ExecutionEnvironment.getExecutionEnvironment();
}
public DataSet<Tuple2<Double, Integer>> loadDataSetOfAllTime(){
DataSet<String> inputData = env.readTextFile(getFilePath("human-benchmark-alltime.json"));
return inputData.flatMap(new Tokenizer());
}
public DataSet<Tuple2<Double, Integer>> loadDataSetOfOctober2016(){
DataSet<String> inputData = env.readTextFile(getFilePath("human-benchmark-october.json"));
return inputData.flatMap(new Tokenizer());
}
public void printInputData(DataSet<Tuple2<Double, Integer>> data){
if(data != null)
try {
data.print();
} catch (Exception e) {
e.printStackTrace();
}
}
/**
* Get reaction test count by sum function
* @return the count of all reaction tests
*/
public int getReactionTestCountBySum(DataSet<Tuple2<Double, Integer>> data){
int count = 0;
try {
DataSet<Tuple2<Double, Integer>> sumData = data.sum(1);
count = sumData.collect().get(0).f1;
} catch (Exception e) {
e.printStackTrace();
}
return count;
}
/**
* Get reaction test count by reduce function
* @return the count of all reaction tests
*/
public int getReactionTestCountByReduce(DataSet<Tuple2<Double, Integer>> data){
int count = 0;
try {
DataSet<Tuple2<Double, Integer>> sumData = data.reduce(new ReduceFunction<Tuple2<Double,Integer>>() {
private static final long serialVersionUID = -5937101140633725165L;
@Override
public Tuple2<Double, Integer> reduce(Tuple2<Double, Integer> recent,Tuple2<Double, Integer> current) {
Tuple2<Double, Integer> ret = new Tuple2<Double, Integer>(0.,0);
try {
ret = new Tuple2<Double, Integer>(current.f0, recent.f1 + current.f1);
} catch (Exception e) {
e.printStackTrace();
}
return ret;
}
});
count = sumData.collect().get(0).f1;
} catch (Exception e) {
e.printStackTrace();
}
return count;
}
/**
* Get the file path from resource folder by filename
* @param fileName the name of the file
* @return the absolute file path
*/
public String getFilePath(String fileName) {
ClassLoader classLoader = getClass().getClassLoader();
File file = new File(classLoader.getResource(fileName).getFile());
String fileToRead = file.getAbsolutePath();
return fileToRead;
}
public static class Tokenizer implements FlatMapFunction<String, Tuple2<Double, Integer>>{
private static final long serialVersionUID = 8139075182383902551L;
@Override
public void flatMap(String value, Collector<Tuple2<Double, Integer>> out) {
List<Tuple2<Double, Integer>> jsonList = getListByJSON(value);
for(Tuple2<Double, Integer> item: jsonList){
out.collect(item);
}
}
/**
* Converts a JSON string to Tuple. E.g. request:
* { "medicalid":"Markus", "operationissue":"blablub", "age":54, "gender":"Male", "datetime":"2016-11-03 20:59:28.807", "type":"PreOperation", "times":[412,399,324] }
* @param jsonString
* @return
*/
public static List<Tuple2<Double, Integer>> getListByJSON(String jsonString){
List<Tuple2<Double, Integer>> retList = new ArrayList<>();
try {
JSONArray transactionJSON = new JSONArray(jsonString);
for(int i = 0 ; i< transactionJSON.length(); i++){
JSONArray a = (JSONArray) transactionJSON.get(i);
Double reactionTime = Double.parseDouble(a.get(0).toString());
Integer userCount = Integer.parseInt(a.get(1).toString());
retList.add(new Tuple2<Double, Integer>(reactionTime, userCount));
}
} catch (Exception e) {
e.printStackTrace();
}
return retList;
}
}
/**
* Get the reaction time with the minimal user count using the reduce function
* @return the reaction time tuple with the minimal user count
*/
public Tuple2<Double, Integer> getReactionTimeByMinUserCount(DataSet<Tuple2<Double, Integer>> data) {
Tuple2<Double, Integer> ret = new Tuple2<Double, Integer>();
try {
DataSet<Tuple2<Double, Integer>> minData = data.reduce(new ReduceFunction<Tuple2<Double,Integer>>() {
private static final long serialVersionUID = 1439796543225086584L;
@Override
public Tuple2<Double, Integer> reduce(Tuple2<Double, Integer> recent, Tuple2<Double, Integer> current) {
int recentUserCount = recent.f1;
int currentUserCount = current.f1;
if(recentUserCount < currentUserCount){
return recent;
}
else{
return current;
}
}
});
ret = minData.collect().get(0);
} catch (Exception e) {
e.printStackTrace();
}
return ret;
}
/**
* Get the reaction time with the maximal user count using the reduce function
* @return the reaction time tuple with the maximal user count
*/
public Tuple2<Double, Integer> getReactionTimeByMaxUserCount(DataSet<Tuple2<Double, Integer>> data) {
Tuple2<Double, Integer> ret = new Tuple2<Double, Integer>();
try {
DataSet<Tuple2<Double, Integer>> maxData = data.reduce(new ReduceFunction<Tuple2<Double,Integer>>() {
private static final long serialVersionUID = 1439796543225086584L;
@Override
public Tuple2<Double, Integer> reduce(Tuple2<Double, Integer> recent, Tuple2<Double, Integer> current) {
int recentUserCount = recent.f1;
int currentUserCount = current.f1;
if(recentUserCount > currentUserCount){
return recent;
}
else{
return current;
}
}
});
ret = maxData.collect().get(0);
} catch (Exception e) {
e.printStackTrace();
}
return ret;
}
/**
 * Uses Flink's built-in aggregations to take the field-wise maxima of the data set.
 * NOTE(review): {@code max(0).andMax(1)} computes the maximum of each field
 * independently, so the returned tuple is not necessarily an existing record.
 *
 * @param data data set of (reaction time, user count) tuples
 * @return a tuple of (max reaction time, max user count), or null if the Flink job fails
 */
public Tuple2<Double, Integer> getMaxUserCountByAggregate(DataSet<Tuple2<Double, Integer>> data) {
	Tuple2<Double, Integer> result = null;
	DataSet<Tuple2<Double, Integer>> aggregated = data.max(0).andMax(1);
	try {
		result = aggregated.collect().get(0);
	} catch (Exception e) {
		e.printStackTrace();
	}
	return result;
}
/**
 * Computes the user-count-weighted average reaction time: each tuple is first
 * expanded to (reactionTime * userCount, userCount), then both fields are
 * summed and the quotient is returned.
 *
 * @param data data set of (reaction time, user count) tuples
 * @return the average reaction time over all users
 * @throws Exception if the Flink job fails
 */
public double getAverageReaction(DataSet<Tuple2<Double, Integer>> data) throws Exception {
	DataSet<Tuple2<Double, Integer>> totals = data.flatMap(new Averagenizer()).sum(0).andSum(1);
	Tuple2<Double, Integer> total = totals.collect().get(0);
	return total.f0 / total.f1;
}
/**
 * Weights each reaction time by its user count: emits
 * (reactionTime * userCount, userCount) so that summing the emitted tuples
 * yields the numerator and denominator of the weighted average.
 */
public static class Averagenizer implements FlatMapFunction<Tuple2<Double, Integer>, Tuple2<Double, Integer>> {

	private static final long serialVersionUID = 71246547222383551L;

	@Override
	public void flatMap(Tuple2<Double, Integer> value, Collector<Tuple2<Double, Integer>> out) {
		int count = value.f1;
		out.collect(new Tuple2<Double, Integer>(value.f0 * count, count));
	}
}
/**
 * Expands an aggregated (reaction time, user count) tuple into userCount
 * individual (reaction time, 1) tuples — one per participating user —
 * so that per-user statistics such as the median can be computed.
 */
public static class Medianizer implements FlatMapFunction<Tuple2<Double, Integer>, Tuple2<Double, Integer>> {

	private static final long serialVersionUID = 39387247222383551L;

	@Override
	public void flatMap(Tuple2<Double, Integer> value, Collector<Tuple2<Double, Integer>> out) {
		double reactionTime = value.f0;
		for (int remaining = value.f1; remaining > 0; remaining--) {
			out.collect(new Tuple2<Double, Integer>(reactionTime, 1));
		}
	}
}
/*
public static class Counter implements FlatMapFunction<Tuple2<Double, Integer>, Tuple2<Double, Integer>>{
private static final long serialVersionUID = 956347222383551L;
@Override
public void flatMap(Tuple2<Double, Integer> value, Collector<Tuple2<Double, Integer>> out) {
int globalCounter = 1;
int userCount = value.f1;
double reactionTime = value.f0;
for(int i = 0; i < userCount; i++){
out.collect(new Tuple2<Double, Integer>(reactionTime, globalCounter));
globalCounter++;
}
}
}
*/
/**
 * Computes the median reaction time across all individual users.
 * Each aggregated tuple is first expanded to one entry per user via
 * {@link Medianizer}, then the median is taken over the collected list.
 *
 * @param data data set of (reaction time, user count) tuples
 * @return the median reaction time
 * @throws Exception if the Flink job fails
 */
public double getMedianReactionTime(DataSet<Tuple2<Double, Integer>> data) throws Exception {
	List<Tuple2<Double, Integer>> perUserTimes = data.flatMap(new Medianizer()).collect();
	return getMedianByList(perUserTimes);
}
/**
 * Computes the median reaction time of the given list.
 *
 * The reaction times are copied and sorted first: a median is only defined on
 * ordered data, and Flink's collect() gives no ordering guarantee, so the
 * previous index-based lookup on the raw list could return an arbitrary value.
 * The input list is not modified.
 *
 * @param reactionTimeList list of (reaction time, user count) tuples
 * @return the median reaction time, or 0 for a null or empty list
 */
private double getMedianByList(List<Tuple2<Double, Integer>> reactionTimeList) {
	if (reactionTimeList == null || reactionTimeList.isEmpty()) {
		// Guard: the old code threw IndexOutOfBoundsException on an empty list.
		return 0;
	}
	// Extract and sort only the reaction times (field f0).
	java.util.List<Double> sorted = new java.util.ArrayList<Double>(reactionTimeList.size());
	for (Tuple2<Double, Integer> tuple : reactionTimeList) {
		sorted.add(tuple.f0);
	}
	java.util.Collections.sort(sorted);
	int itemCount = sorted.size();
	if (itemCount % 2 == 0) {
		// Even count: average the two middle values.
		return (sorted.get(itemCount / 2) + sorted.get(itemCount / 2 - 1)) / 2;
	}
	// Odd count: the single middle value.
	return sorted.get(itemCount / 2);
}
}
|
fix #11 by comments
|
com.artursworld/src/main/java/reactiontest/offline/BatchFunctions.java
|
fix #11 by comments
|
|
Java
|
epl-1.0
|
5e2876b57a56eb02f737b580fd3eccd655d9d1f6
| 0
|
Abnormal-Jester/BoardGameSet
|
package tictactoe;
import game.CharBoard;
import game.Coordinate;
import game.PieceType;
/**
* This class creates a 2d array of squares and keeps track of the piece
* locations, win conditions, and text display.
*/
public class TicTacToeBoard extends CharBoard {

	/**
	 * Constructs an empty 3x3 Tic-Tac-Toe board.
	 */
	public TicTacToeBoard() {
		super(3);
	}

	/**
	 * Copy constructor: copies every square of the given board.
	 *
	 * The previous version silently produced an EMPTY board when the source
	 * was not 3x3; it now fails fast so the caller learns about the mismatch.
	 *
	 * @param board
	 *            the board to copy; must be exactly 3x3
	 * @throws IllegalArgumentException
	 *             if the given board is not 3x3
	 */
	public TicTacToeBoard(CharBoard board) {
		super(3);
		char[][] source = board.getBoard();
		// Short-circuit keeps source[0] safe when source.length != 3.
		if (source.length != 3 || source[0].length != 3) {
			throw new IllegalArgumentException(
					"TicTacToeBoard copy constructor requires a 3x3 board, got "
							+ source.length + " rows");
		}
		for (int j = 0; j < getBoard().length; j++) {
			for (int i = 0; i < getBoard()[j].length; i++) {
				getBoard()[j][i] = source[j][i];
			}
		}
	}

	/**
	 * This method verifies that a piece can be placed on this square. For
	 * TicTacToe, this only involves checking if the coordinate is empty.
	 *
	 * @param piece
	 *            the piece that will be checked
	 * @param c
	 *            the coordinates the piece will be checked on
	 * @return if the piece can be placed on the coordinate
	 */
	@Override
	public boolean canPlace(PieceType piece, Coordinate c) {
		return isEmpty(c);
	}

	/**
	 * Checks if there are 3 equal pieces in a row/column/diagonal and returns
	 * that piece's character if so. Otherwise returns ' ' if any square is
	 * still empty (game continues), or '_' for a full board (tie).
	 *
	 * NOTE(review): a line of three blank squares returns ' ', which happens
	 * to coincide with the "game continues" sentinel — confirm callers treat
	 * ' ' only as "keep playing".
	 */
	@Override
	public char gameEnd() {
		char[][] tempBoard = getBoard();
		// check rows
		for (int i = 0; i < tempBoard.length; i++)
			if (tempBoard[i][0] == tempBoard[i][1] && tempBoard[i][1] == tempBoard[i][2])
				return tempBoard[i][0];
		// check columns
		for (int j = 0; j < tempBoard[0].length; j++)
			if (tempBoard[0][j] == tempBoard[1][j] && tempBoard[1][j] == tempBoard[2][j])
				return tempBoard[0][j];
		// check diagonals
		if (tempBoard[0][0] == tempBoard[1][1] && tempBoard[1][1] == tempBoard[2][2])
			return tempBoard[1][1];
		if (tempBoard[2][0] == tempBoard[1][1] && tempBoard[1][1] == tempBoard[0][2])
			return tempBoard[1][1];
		// any empty square: game not over yet
		for (int i = 0; i < tempBoard.length; i++)
			for (int j = 0; j < tempBoard[i].length; j++)
				if (tempBoard[i][j] == ' ')
					return ' ';
		// board full, no winner: tie
		return '_';
	}
}
|
BoardGame/src/tictactoe/TicTacToeBoard.java
|
package tictactoe;
import game.CharBoard;
import game.Coordinate;
import game.PieceType;
/**
* This class creates a 2d array of squares and keeps track of the piece
* locations, win conditions, and text display.
*/
public class TicTacToeBoard extends CharBoard {

	/**
	 * Builds an empty 3x3 Tic-Tac-Toe board.
	 */
	public TicTacToeBoard() {
		super(3);
	}

	/**
	 * Verifies that a piece may be placed at the given coordinate.
	 * For Tic-Tac-Toe this is simply whether the square is empty.
	 *
	 * @param piece
	 *            the piece that will be checked
	 * @param c
	 *            the coordinates the piece will be checked on
	 * @return true if the piece can be placed on the coordinate
	 */
	@Override
	public boolean canPlace(PieceType piece, Coordinate c) {
		return isEmpty(c);
	}

	/**
	 * Returns the winning piece's character when three equal characters line
	 * up in a row, column, or diagonal; otherwise ' ' while any square is
	 * still empty (game continues), or '_' once the board is full (tie).
	 */
	@Override
	public char gameEnd() {
		char[][] board = getBoard();
		// Three in a row?
		for (int row = 0; row < board.length; row++) {
			if (board[row][0] == board[row][1] && board[row][1] == board[row][2]) {
				return board[row][0];
			}
		}
		// Three in a column?
		for (int col = 0; col < board[0].length; col++) {
			if (board[0][col] == board[1][col] && board[1][col] == board[2][col]) {
				return board[0][col];
			}
		}
		// Either diagonal?
		if (board[0][0] == board[1][1] && board[1][1] == board[2][2]) {
			return board[1][1];
		}
		if (board[2][0] == board[1][1] && board[1][1] == board[0][2]) {
			return board[1][1];
		}
		// An empty square means the game is still in progress.
		for (char[] row : board) {
			for (char square : row) {
				if (square == ' ') {
					return ' ';
				}
			}
		}
		// Full board with no winner: tie.
		return '_';
	}
}
|
add copy constructor
The current copy constructor is not robust in any way. Further work must
be done on it.
|
BoardGame/src/tictactoe/TicTacToeBoard.java
|
add copy constructor
|
|
Java
|
mpl-2.0
|
190520d8357a49fcd9ffbd0ea66730020344652c
| 0
|
hhdevelopment/ocelot,hhdevelopment/ocelot,ocelotds/ocelot,hhdevelopment/ocelot,ocelotds/ocelot,ocelotds/ocelot,ocelotds/ocelot,ocelotds/ocelot,hhdevelopment/ocelot
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package org.ocelotds.processors;
import org.ocelotds.annotations.JsCacheResult;
import org.ocelotds.annotations.TransientDataService;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementVisitor;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.ElementFilter;
import javax.tools.Diagnostic;
import org.ocelotds.KeyMaker;
import org.ocelotds.annotations.DataService;
/**
* Visitor of class annoted org.ocelotds.annotations.DataService<br>
* Generate javascript classes
*
* @author hhfrancois
*/
public class DataServiceVisitor implements ElementVisitor<String, Writer> {

	private final ProcessingEnvironment environment;
	/**
	 * Tools for log processor
	 */
	private final Messager messager;
	private final KeyMaker keyMaker;

	public DataServiceVisitor(ProcessingEnvironment environment) {
		this.environment = environment;
		this.messager = environment.getMessager();
		this.keyMaker = new KeyMaker();
	}

	/**
	 * Generates the javascript class for one annotated type: a constructor
	 * holding the data-service name and one prototype entry per considered
	 * public method.
	 *
	 * @param typeElement the annotated java class
	 * @param writer destination of the generated javascript
	 * @return always null (visitor contract)
	 */
	@Override
	public String visitType(TypeElement typeElement, Writer writer) {
		try {
			createClassComment(typeElement, writer);
			String jsclsname = getJsClassname(typeElement);
			writer.append("function ").append(jsclsname).append("() {\n");
			String classname = typeElement.getQualifiedName().toString();
			writer.append("\tthis.ds = \"").append(classname).append("\";\n");
			writer.append("}\n");
			writer.append(jsclsname).append(".prototype = {\n");
			List<ExecutableElement> methodElements = ElementFilter.methodsIn(typeElement.getEnclosedElements());
			Iterator<ExecutableElement> iterator = methodElements.iterator();
			Collection<String> methodProceeds = new ArrayList<>();
			boolean first = true;
			while (iterator.hasNext()) {
				ExecutableElement methodElement = iterator.next();
				if (isConsiderateMethod(methodProceeds, methodElement)) {
					visitMethodElement(first, methodProceeds, classname, jsclsname, methodElement, writer);
					first = false;
				}
			}
			writer.append("\n};\n");
		} catch (IOException ex) {
			// Previously swallowed: a silent failure here leaves a truncated
			// javascript file behind. Surface it through the processor log.
			messager.printMessage(Diagnostic.Kind.ERROR,
					"Cannot generate javascript for " + typeElement.getQualifiedName() + " : " + ex.getMessage());
		}
		return null;
	}

	/**
	 * Writes one javascript prototype method (comment, signature, body).
	 *
	 * @param first true if this is the first generated method (no leading comma)
	 * @param methodProceeds signatures already generated
	 * @param classname fully qualified java class name
	 * @param jsclsname javascript class name
	 * @param methodElement the java method to translate
	 * @param writer destination writer
	 * @throws IOException if writing fails
	 */
	void visitMethodElement(boolean first, Collection<String> methodProceeds, String classname, String jsclsname, ExecutableElement methodElement, Writer writer) throws IOException {
		if (!first) { // previous method exist
			writer.append(",\n");
		}
		String methodName = methodElement.getSimpleName().toString();
		List<String> argumentsType = getArgumentsType(methodElement);
		List<String> arguments = getArguments(methodElement);
		TypeMirror returnType = methodElement.getReturnType();
		createMethodComment(methodElement, arguments, argumentsType, returnType, writer);
		writer.append("\t").append(methodName).append(" : function (");
		if (arguments.size() != argumentsType.size()) {
			// Inconsistent metadata: abort this method with a compiler error.
			messager.printMessage(Diagnostic.Kind.ERROR, (new StringBuilder())
					  .append("Cannot Create service : ").append(jsclsname).append(" cause method ")
					  .append(methodName).append(" arguments inconsistent - argNames : ")
					  .append(arguments.size()).append(" / args : ").append(argumentsType.size()).toString());
			return;
		}
		int i = 0;
		while (i < argumentsType.size()) {
			writer.append((String) arguments.get(i));
			if ((++i) < arguments.size()) {
				writer.append(", ");
			}
		}
		writer.append(") {\n");
		createMethodBody(classname, methodElement, arguments.iterator(), writer);
		writer.append("\t}");
	}

	/**
	 * Compute the name of javascript class: the name given in the DataService
	 * annotation, or the java simple name when none is set.
	 *
	 * @param typeElement the annotated type
	 * @return the javascript class name
	 */
	String getJsClassname(TypeElement typeElement) {
		DataService dsAnno = typeElement.getAnnotation(DataService.class);
		String name = dsAnno.name();
		if (name.isEmpty()) {
			name = typeElement.getSimpleName().toString();
		}
		return name;
	}

	/**
	 * Create comment of the generated class, taken from the class javadoc or,
	 * failing that, from the javadoc of an implemented interface.
	 *
	 * @param typeElement the annotated type
	 * @param writer destination writer
	 */
	void createClassComment(TypeElement typeElement, Writer writer) {
		try {
			String comment = environment.getElementUtils().getDocComment(typeElement);
			if (comment == null) {
				List<? extends TypeMirror> interfaces = typeElement.getInterfaces();
				for (TypeMirror typeMirror : interfaces) {
					TypeElement element = (TypeElement) environment.getTypeUtils().asElement(typeMirror);
					comment = environment.getElementUtils().getDocComment(element);
					if (comment != null) {
						writer.append("/**\n *").append(computeComment(comment)).append("/\n");
					}
				}
			} else {
				writer.append("/**\n *").append(computeComment(comment)).append("/\n");
			}
		} catch (IOException ioe) {
			// Previously swallowed: report so a broken output file is not silent.
			messager.printMessage(Diagnostic.Kind.ERROR,
					"Cannot write class comment for " + typeElement.getQualifiedName() + " : " + ioe.getMessage());
		}
	}

	/**
	 * Transform raw multilines comment in prefixed comment: each line of the
	 * javadoc gets the " *" continuation prefix.
	 *
	 * @param comment the raw javadoc text
	 * @return the prefixed comment
	 */
	String computeComment(String comment) {
		return comment.replaceAll("\n", "\n *");
	}

	/**
	 * Return if the method have to considerate<br>
	 * The method is public,<br>
	 * Not annotated by TransientDataService<br>
	 * Not static and not from Object herited.
	 *
	 * @param methodProceeds signatures already generated (mutated: the current one is added)
	 * @param methodElement the candidate method
	 * @return true when the method must be generated
	 */
	boolean isConsiderateMethod(Collection<String> methodProceeds, ExecutableElement methodElement) {
		int argNum = methodElement.getParameters().size();
		String signature = methodElement.getSimpleName().toString() + "(" + argNum + ")";
		// Check if method ith same signature has been already proceed.
		if (methodProceeds.contains(signature)) {
			return false;
		}
		methodProceeds.add(signature);
		// Herited from Object
		TypeElement objectElement = environment.getElementUtils().getTypeElement(Object.class.getName());
		if (objectElement.getEnclosedElements().contains(methodElement)) {
			return false;
		}
		// Static, not public ?
		if (!methodElement.getModifiers().contains(Modifier.PUBLIC) || methodElement.getModifiers().contains(Modifier.STATIC)) {
			return false;
		}
		// TransientDataService ?
		List<? extends AnnotationMirror> annotationMirrors = methodElement.getAnnotationMirrors();
		for (AnnotationMirror annotationMirror : annotationMirrors) {
			if (annotationMirror.getAnnotationType().toString().equals(TransientDataService.class.getName())) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Get argument types list from method
	 *
	 * @param methodElement the method
	 * @return the list of parameter type names, in declaration order
	 */
	List<String> getArgumentsType(ExecutableElement methodElement) {
		ExecutableType methodType = (ExecutableType) methodElement.asType();
		List<String> argumentsType = new ArrayList<>();
		for (TypeMirror argumentType : methodType.getParameterTypes()) {
			argumentsType.add(argumentType.toString());
		}
		return argumentsType;
	}

	/**
	 * Get argument names list from method
	 *
	 * @param methodElement the method
	 * @return the list of parameter names, in declaration order
	 */
	List<String> getArguments(ExecutableElement methodElement) {
		List<String> arguments = new ArrayList<>();
		for (VariableElement variableElement : methodElement.getParameters()) {
			arguments.add(variableElement.toString());
		}
		return arguments;
	}

	/**
	 * Create javascript comment from Method comment
	 *
	 * @param methodElement the method being translated
	 * @param argumentsName parameter names
	 * @param argumentsType parameter type names (parallel to argumentsName)
	 * @param returnType the java return type
	 * @param writer destination writer
	 */
	void createMethodComment(ExecutableElement methodElement, List<String> argumentsName, List<String> argumentsType, TypeMirror returnType, Writer writer) {
		try {
			String methodComment = environment.getElementUtils().getDocComment(methodElement);
			writer.append("\t/**\n");
			// The javadoc comment
			if (methodComment != null) {
				// Keep only the description part, dropping the @tag section.
				methodComment = methodComment.split("@")[0];
				int lastIndexOf = methodComment.lastIndexOf('\n');
				if (lastIndexOf >= 0) {
					methodComment = methodComment.substring(0, lastIndexOf); // include the \n
				}
				writer.append("\t *").append(methodComment).append("\n");
			}
			// The javadoc argument list
			Iterator<String> typeIterator = argumentsType.iterator();
			for (String argumentName : argumentsName) {
				String type = typeIterator.next();
				writer.append("\t * @param {").append(type).append("} ").append(argumentName).append("\n");
			}
			// Whether the method returns something or not
			if (!returnType.toString().equals("void")) {
				writer.append("\t * @return {").append(returnType.toString()).append("}\n");
			}
			writer.append("\t */\n");
		} catch (IOException ex) {
			// Previously swallowed: report instead of producing a half-written comment.
			messager.printMessage(Diagnostic.Kind.ERROR,
					"Cannot write method comment for " + methodElement.getSimpleName() + " : " + ex.getMessage());
		}
	}

	/**
	 * Create javascript method body: builds the cache key (md5 of
	 * class.method plus the selected argument values) and the promise call.
	 *
	 * @param classname fully qualified java class name
	 * @param methodElement the method being translated
	 * @param arguments iterator over the parameter names
	 * @param writer destination writer
	 */
	void createMethodBody(String classname, ExecutableElement methodElement, Iterator<String> arguments, Writer writer) {
		try {
			String methodName = methodElement.getSimpleName().toString();
			writer.append("\t\tvar op = \"").append(methodName).append("\";\n");
			StringBuilder args = new StringBuilder("");
			StringBuilder paramNames = new StringBuilder("");
			StringBuilder keys = new StringBuilder("");
			if (arguments != null && arguments.hasNext()) {
				JsCacheResult jcr = methodElement.getAnnotation(JsCacheResult.class);
				boolean allArgs = true;
				// if there is a jcr annotation with value diferrent of *, so we dont use all arguments
				if (null != jcr && null != jcr.keys() && (jcr.keys().length == 0 || (jcr.keys().length > 0 && !"*".equals(jcr.keys()[0])))) {
					allArgs = false;
					for (int i = 0; i < jcr.keys().length; i++) {
						String arg = jcr.keys()[i];
						keys.append(arg);
						if (i < jcr.keys().length - 1) {
							keys.append(",");
						}
					}
				}
				while (arguments.hasNext()) {
					String arg = arguments.next();
					if (allArgs) {
						keys.append(arg);
					}
					args.append(arg);
					paramNames.append("\"").append(arg).append("\"");
					if (arguments.hasNext()) {
						args.append(",");
						paramNames.append(",");
						if (allArgs) {
							keys.append(",");
						}
					}
				}
			}
			String md5 = "\"" + keyMaker.getMd5(classname + "." + methodName);
			writer.append("\t\tvar id = " + md5 + "_\" + JSON.stringify([").append(keys.toString()).append("]).md5();\n");
			writer.append("\t\treturn OcelotPromiseFactory.createPromise(this.ds, id, op, [").append(paramNames.toString()).append("], [").append(args.toString()).append("]").append(");\n");
		} catch (IOException ex) {
			// Previously swallowed: report instead of emitting a truncated body.
			messager.printMessage(Diagnostic.Kind.ERROR,
					"Cannot write method body for " + methodElement.getSimpleName() + " : " + ex.getMessage());
		}
	}

	@Override
	public String visit(Element e, Writer p) {
		return null;
	}

	@Override
	public String visit(Element e) {
		return null;
	}

	@Override
	public String visitPackage(PackageElement e, Writer p) {
		return null;
	}

	@Override
	public String visitVariable(VariableElement e, Writer p) {
		return null;
	}

	@Override
	public String visitExecutable(ExecutableElement e, Writer p) {
		return null;
	}

	@Override
	public String visitTypeParameter(TypeParameterElement e, Writer p) {
		return null;
	}

	@Override
	public String visitUnknown(Element e, Writer p) {
		return null;
	}

	/**
	 * Validation management
	 *
	 * @AssertFalse : The value of the field or property must be false.
	 * @AssertTrue : The value of the field or property must be true.
	 * @DecimalMax : @DecimalMax("30.00") : The value of the field or property must be a decimal value lower than or equal to the number in the value element.
	 * @DecimalMin : @DecimalMin("5.00") : The value of the field or property must be a decimal value greater than or equal to the number in the value element.
	 * @Digits : @Digits(integer=6, fraction=2) : The value of the field or property must be a number within a specified range. The integer element specifies the maximum integral digits for the number,
	 * and the fraction element specifies the maximum fractional digits for the number.
	 * @Future : The value of the field or property must be a date in the future.
	 * @Max : @Max(10) : The value of the field or property must be an integer value lower than or equal to the number in the value element.
	 * @Min : @Min(5) : The value of the field or property must be an integer value greater than or equal to the number in the value element.
	 * @NotNull : The value of the field or property must not be null.
	 * @Null : The value of the field or property must be null.
	 * @Past : The value of the field or property must be a date in the past.
	 * @Pattern : @Pattern(regexp="\\(\\d{3}\\)\\d{3}-\\d{4}") : The value of the field or property must match the regular expression defined in the regexp element.
	 * @Size : @Size(min=2, max=240) : The size of the field or property is evaluated and must match the specified boundaries. If the field or property is a String, the size of the string is evaluated.
	 * If the field or property is a Collection, the size of the Collection is evaluated. If the field or property is a Map, the size of the Map is evaluated. If the field or property is an array, the
	 * size of the array is evaluated. Use one of the optional max or min elements to specify the boundaries.
	 */
}
|
ocelot-processor/src/main/java/org/ocelotds/processors/DataServiceVisitor.java
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package org.ocelotds.processors;
import org.ocelotds.annotations.JsCacheResult;
import org.ocelotds.annotations.TransientDataService;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementVisitor;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.ElementFilter;
import javax.tools.Diagnostic;
import org.ocelotds.KeyMaker;
import org.ocelotds.annotations.DataService;
/**
* Visitor of class annoted org.ocelotds.annotations.DataService<br>
* Generate javascript classes
*
* @author hhfrancois
*/
public class DataServiceVisitor implements ElementVisitor<String, Writer> {
private final ProcessingEnvironment environment;
/**
* Tools for log processor
*/
private final Messager messager;
private final KeyMaker keyMaker;
public DataServiceVisitor(ProcessingEnvironment environment) {
this.environment = environment;
this.messager = environment.getMessager();
this.keyMaker = new KeyMaker();
}
@Override
public String visitType(TypeElement typeElement, Writer writer) {
try {
createClassComment(typeElement, writer);
String jsclsname = getJsClassname(typeElement);
writer.append("function ").append(jsclsname).append("() {\n");
String classname = typeElement.getQualifiedName().toString();
writer.append("\tthis.ds = \"").append(classname).append("\";\n");
writer.append("}\n");
writer.append(jsclsname).append(".prototype = {\n");
List<ExecutableElement> methodElements = ElementFilter.methodsIn(typeElement.getEnclosedElements());
Iterator<ExecutableElement> iterator = methodElements.iterator();
Collection<String> methodProceeds = new ArrayList<>();
boolean first = true;
while (iterator.hasNext()) {
ExecutableElement methodElement = iterator.next();
if (isConsiderateMethod(methodProceeds, methodElement)) {
visitMethodElement(first, methodProceeds, classname, jsclsname, methodElement, writer);
first = false;
}
}
writer.append("\n};\n");
} catch (IOException ex) {
}
return null;
}
/**
*
* @param first
* @param methodProceeds
* @param classname
* @param jsclsname
* @param methodElement
* @param writer
* @throws IOException
*/
void visitMethodElement(boolean first, Collection<String> methodProceeds, String classname, String jsclsname, ExecutableElement methodElement, Writer writer) throws IOException {
if (!first) { // previous method exist
writer.append(",\n");
}
String methodName = methodElement.getSimpleName().toString();
List<String> argumentsType = getArgumentsType(methodElement);
List<String> arguments = getArguments(methodElement);
TypeMirror returnType = methodElement.getReturnType();
createMethodComment(methodElement, arguments, argumentsType, returnType, writer);
writer.append("\t").append(methodName).append(" : function (");
if (arguments.size() != argumentsType.size()) {
messager.printMessage(Diagnostic.Kind.ERROR, (new StringBuilder())
.append("Cannot Create service : ").append(jsclsname).append(" cause method ")
.append(methodName).append(" arguments inconsistent - argNames : ")
.append(arguments.size()).append(" / args : ").append(argumentsType.size()).toString());
return;
}
int i = 0;
while (i < argumentsType.size()) {
writer.append((String) arguments.get(i));
if ((++i) < arguments.size()) {
writer.append(", ");
}
}
writer.append(") {\n");
createMethodBody(classname, methodElement, arguments.iterator(), writer);
writer.append("\t}");
}
/**
* Compute the name of javascript class
*
* @param typeElement
* @return
*/
String getJsClassname(TypeElement typeElement) {
DataService dsAnno = typeElement.getAnnotation(DataService.class);
String name = dsAnno.name();
if (name.isEmpty()) {
name = typeElement.getSimpleName().toString();
}
return name;
}
/**
* Create comment of the class
*
* @param typeElement
* @param writer
*/
void createClassComment(TypeElement typeElement, Writer writer) {
try {
String comment = environment.getElementUtils().getDocComment(typeElement);
if (comment == null) {
List<? extends TypeMirror> interfaces = typeElement.getInterfaces();
for (TypeMirror typeMirror : interfaces) {
TypeElement element = (TypeElement) environment.getTypeUtils().asElement(typeMirror);
comment = environment.getElementUtils().getDocComment(element);
if (comment != null) {
writer.append("/**\n *").append(computeComment(comment)).append("/\n");
}
}
} else {
writer.append("/**\n *").append(computeComment(comment)).append("/\n");
}
} catch (IOException ioe) {
}
}
/**
* Transform raw multilines comment in prefixed comment
*
* @param comment
* @return
*/
String computeComment(String comment) {
return comment.replaceAll("\n", "\n *");
}
/**
* Return if the method have to considerate<br>
* The method is public,<br>
* Not annotated by TransientDataService<br>
* Not static and not from Object herited.
*
* @param methodProceeds
* @param methodElement
* @return
*/
boolean isConsiderateMethod(Collection<String> methodProceeds, ExecutableElement methodElement) {
int argNum = methodElement.getParameters().size();
String signature = methodElement.getSimpleName().toString() + "(" + argNum + ")";
// Check if method ith same signature has been already proceed.
if (methodProceeds.contains(signature)) {
return false;
}
methodProceeds.add(signature);
// Herited from Object
TypeElement objectElement = environment.getElementUtils().getTypeElement(Object.class.getName());
if (objectElement.getEnclosedElements().contains(methodElement)) {
return false;
}
// Static, not public ?
if (!methodElement.getModifiers().contains(Modifier.PUBLIC) || methodElement.getModifiers().contains(Modifier.STATIC)) {
return false;
}
// TransientDataService ?
List<? extends AnnotationMirror> annotationMirrors = methodElement.getAnnotationMirrors();
for (AnnotationMirror annotationMirror : annotationMirrors) {
if (annotationMirror.getAnnotationType().toString().equals(TransientDataService.class.getName())) {
return false;
}
}
return true;
}
/**
* Get argument types list from method
*
* @param methodElement
* @return
*/
List<String> getArgumentsType(ExecutableElement methodElement) {
ExecutableType methodType = (ExecutableType) methodElement.asType();
List<String> argumentsType = new ArrayList<>();
for (TypeMirror argumentType : methodType.getParameterTypes()) {
argumentsType.add(argumentType.toString());
}
return argumentsType;
}
/**
* Get argument names list from method
*
* @param methodElement
* @return
*/
List<String> getArguments(ExecutableElement methodElement) {
List<String> arguments = new ArrayList<>();
for (VariableElement variableElement : methodElement.getParameters()) {
arguments.add(variableElement.toString());
}
return arguments;
}
/**
 * Create javascript comment from Method comment.
 *
 * Fix: the description was followed by a stray {@code "\t *\n"} appended on
 * the SAME line as the last comment word, producing a malformed javadoc line;
 * a plain newline is emitted instead. Also reports write failures rather than
 * swallowing them.
 *
 * @param methodElement the method being translated
 * @param argumentsName parameter names
 * @param argumentsType parameter type names (parallel to argumentsName)
 * @param returnType the java return type
 * @param writer destination writer
 */
void createMethodComment(ExecutableElement methodElement, List<String> argumentsName, List<String> argumentsType, TypeMirror returnType, Writer writer) {
	try {
		String methodComment = environment.getElementUtils().getDocComment(methodElement);
		writer.append("\t/**\n");
		// The javadoc comment
		if (methodComment != null) {
			// Keep only the description part, dropping the @tag section.
			methodComment = methodComment.split("@")[0];
			int lastIndexOf = methodComment.lastIndexOf('\n');
			if (lastIndexOf >= 0) {
				methodComment = methodComment.substring(0, lastIndexOf); // include the \n
			}
			writer.append("\t *").append(methodComment).append("\n");
		}
		// The javadoc argument list
		Iterator<String> typeIterator = argumentsType.iterator();
		for (String argumentName : argumentsName) {
			String type = typeIterator.next();
			writer.append("\t * @param {").append(type).append("} ").append(argumentName).append("\n");
		}
		// Whether the method returns something or not
		if (!returnType.toString().equals("void")) {
			writer.append("\t * @return {").append(returnType.toString()).append("}\n");
		}
		writer.append("\t */\n");
	} catch (IOException ex) {
		// Previously swallowed: report instead of producing a half-written comment.
		messager.printMessage(Diagnostic.Kind.ERROR,
				"Cannot write method comment for " + methodElement.getSimpleName() + " : " + ex.getMessage());
	}
}
/**
* Create javascript method body
*
* @param classname
* @param methodElement
* @param arguments
* @param writer
*/
void createMethodBody(String classname, ExecutableElement methodElement, Iterator<String> arguments, Writer writer) {
try {
String methodName = methodElement.getSimpleName().toString();
writer.append("\t\tvar op = \"").append(methodName).append("\";\n");
StringBuilder args = new StringBuilder("");
StringBuilder paramNames = new StringBuilder("");
StringBuilder keys = new StringBuilder("");
if (arguments != null && arguments.hasNext()) {
JsCacheResult jcr = methodElement.getAnnotation(JsCacheResult.class);
boolean allArgs = true;
// if there is a jcr annotation with value diferrent of *, so we dont use all arguments
if (null != jcr && null != jcr.keys() && (jcr.keys().length == 0 || (jcr.keys().length > 0 && !"*".equals(jcr.keys()[0])))) {
allArgs = false;
for (int i = 0; i < jcr.keys().length; i++) {
String arg = jcr.keys()[i];
keys.append(arg);
if (i < jcr.keys().length - 1) {
keys.append(",");
}
}
}
while (arguments.hasNext()) {
String arg = arguments.next();
if (allArgs) {
keys.append(arg);
}
args.append(arg);
paramNames.append("\"").append(arg).append("\"");
if (arguments.hasNext()) {
args.append(",");
paramNames.append(",");
if (allArgs) {
keys.append(",");
}
}
}
}
String md5 = "\"" + keyMaker.getMd5(classname + "." + methodName);
writer.append("\t\tvar id = " + md5 + "_\" + JSON.stringify([").append(keys.toString()).append("]).md5();\n");
writer.append("\t\treturn OcelotPromiseFactory.createPromise(this.ds, id, op, [").append(paramNames.toString()).append("], [").append(args.toString()).append("]").append(");\n");
} catch (IOException ex) {
}
}
// The following ElementVisitor hooks are intentionally no-ops: this visitor is
// only ever applied to the elements handled elsewhere (see createMethodBody
// above), so the unused element kinds simply return null.

/** Unused visitor hook; always returns null. */
@Override
public String visit(Element e, Writer p) {
    return null;
}

/** Unused visitor hook; always returns null. */
@Override
public String visit(Element e) {
    return null;
}

/** Packages are not visited by this generator; always returns null. */
@Override
public String visitPackage(PackageElement e, Writer p) {
    return null;
}

/** Fields/variables are not visited by this generator; always returns null. */
@Override
public String visitVariable(VariableElement e, Writer p) {
    return null;
}

/** Executables are handled via createMethodBody, not this hook; returns null. */
@Override
public String visitExecutable(ExecutableElement e, Writer p) {
    return null;
}

/** Type parameters are not visited by this generator; always returns null. */
@Override
public String visitTypeParameter(TypeParameterElement e, Writer p) {
    return null;
}

/** Fallback for unknown element kinds; always returns null. */
@Override
public String visitUnknown(Element e, Writer p) {
    return null;
}
/**
* Validation management
*
* @AssertFalse : The value of the field or property must be false.
* @AssertTrue : The value of the field or property must be true.
* @DecimalMax : @DecimalMax("30.00") : The value of the field or property must be a decimal value lower than or equal to the number in the value element.
* @DecimalMin : @DecimalMin("5.00") : The value of the field or property must be a decimal value greater than or equal to the number in the value element.
* @Digits : @Digits(integer=6, fraction=2) : The value of the field or property must be a number within a specified range. The integer element specifies the maximum integral digits for the number,
* and the fraction element specifies the maximum fractional digits for the number.
* @Future : The value of the field or property must be a date in the future.
* @Max : @Max(10) : The value of the field or property must be an integer value lower than or equal to the number in the value element.
* @Min : @Min(5) : The value of the field or property must be an integer value greater than or equal to the number in the value element.
* @NotNull : The value of the field or property must not be null.
* @Null : The value of the field or property must be null.
* @Past : The value of the field or property must be a date in the past.
* @Pattern : @Pattern(regexp="\\(\\d{3}\\)\\d{3}-\\d{4}") : The value of the field or property must match the regular expression defined in the regexp element.
* @Size : @Size(min=2, max=240) : The size of the field or property is evaluated and must match the specified boundaries. If the field or property is a String, the size of the string is evaluated.
* If the field or property is a Collection, the size of the Collection is evaluated. If the field or property is a Map, the size of the Map is evaluated. If the field or property is an array, the
* size of the array is evaluated. Use one of the optional max or min elements to specify the boundaries.
*/
}
|
remove unusefull \n
|
ocelot-processor/src/main/java/org/ocelotds/processors/DataServiceVisitor.java
|
remove unusefull \n
|
|
Java
|
agpl-3.0
|
acb171445b97d0effa3117ae85a4237125c9676e
| 0
|
akvo/akvo-flow,akvo/akvo-flow,akvo/akvo-flow,akvo/akvo-flow,akvo/akvo-flow
|
/*
* Copyright (C) 2010-2012 Stichting Akvo (Akvo Foundation)
*
* This file is part of Akvo FLOW.
*
* Akvo FLOW is free software: you can redistribute it and modify it under the terms of
* the GNU Affero General Public License (AGPL) as published by the Free Software Foundation,
* either version 3 of the License or any later version.
*
* Akvo FLOW is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License included below for more details.
*
* The full license text can also be seen at <http://www.gnu.org/licenses/agpl.html>.
*/
package org.waterforpeople.mapping.app.web;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletRequest;
import org.waterforpeople.mapping.app.gwt.client.survey.QuestionDto.QuestionType;
import org.waterforpeople.mapping.app.gwt.client.surveyinstance.QuestionAnswerStoreDto;
import org.waterforpeople.mapping.app.gwt.server.survey.SurveyServiceImpl;
import org.waterforpeople.mapping.app.gwt.server.surveyinstance.SurveyInstanceServiceImpl;
import org.waterforpeople.mapping.app.web.dto.DataProcessorRequest;
import org.waterforpeople.mapping.app.web.dto.RawDataImportRequest;
import org.waterforpeople.mapping.dao.QuestionAnswerStoreDao;
import org.waterforpeople.mapping.dao.SurveyInstanceDAO;
import org.waterforpeople.mapping.domain.QuestionAnswerStore;
import org.waterforpeople.mapping.domain.SurveyInstance;
import com.gallatinsystems.common.Constants;
import com.gallatinsystems.common.util.PropertyUtil;
import com.gallatinsystems.framework.rest.AbstractRestApiServlet;
import com.gallatinsystems.framework.rest.RestRequest;
import com.gallatinsystems.framework.rest.RestResponse;
import com.gallatinsystems.messaging.dao.MessageDao;
import com.gallatinsystems.messaging.domain.Message;
import com.gallatinsystems.survey.dao.QuestionDao;
import com.gallatinsystems.survey.dao.SurveyDAO;
import com.gallatinsystems.survey.dao.SurveyGroupDAO;
import com.gallatinsystems.survey.dao.SurveyUtils;
import com.gallatinsystems.survey.domain.Question;
import com.gallatinsystems.survey.domain.SurveyGroup;
import com.gallatinsystems.survey.domain.Question.Type;
import com.gallatinsystems.survey.domain.Survey;
import com.gallatinsystems.surveyal.app.web.SurveyalRestRequest;
import com.gallatinsystems.surveyal.dao.SurveyedLocaleDao;
import com.gallatinsystems.surveyal.domain.SurveyedLocale;
import com.google.appengine.api.backends.BackendServiceFactory;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.taskqueue.Queue;
import com.google.appengine.api.taskqueue.QueueFactory;
import com.google.appengine.api.taskqueue.TaskOptions;
public class RawDataRestServlet extends AbstractRestApiServlet {
private static final Logger log = Logger.getLogger("RawDataRestServlet");
private static final long serialVersionUID = 2409014651721639814L;
private SurveyInstanceDAO instanceDao;
private SurveyDAO sDao;
private SurveyGroupDAO sgDao;
/**
 * Creates the servlet and its DAO collaborators. The DAOs are instantiated
 * once per servlet instance and reused across all handled requests.
 */
public RawDataRestServlet() {
    instanceDao = new SurveyInstanceDAO();
    sDao = new SurveyDAO();
    sgDao = new SurveyGroupDAO();
}
/**
 * Builds a {@link RawDataImportRequest} and lets it populate its own fields
 * from the incoming HTTP request.
 */
@Override
protected RestRequest convertRequest() throws Exception {
    RestRequest importRequest = new RawDataImportRequest();
    importRequest.populateFromHttpRequest(getRequest());
    return importRequest;
}
/**
 * Dispatches on the action carried by the {@link RawDataImportRequest}:
 * <ul>
 *   <li>{@code SAVE_SURVEY_INSTANCE_ACTION} — import answer values into a new
 *       or existing survey instance (new instances are rejected for
 *       monitoring forms), then queue locale ingestion / summarisation tasks
 *       for new instances;</li>
 *   <li>{@code RESET_SURVEY_INSTANCE_ACTION} — delete previously imported
 *       answers and refresh (or recreate) the instance metadata;</li>
 *   <li>{@code SAVE_FIXED_FIELD_SURVEY_INSTANCE_ACTION} — positional
 *       fixed-field import mapped onto the survey's question list;</li>
 *   <li>{@code UPDATE_SUMMARIES_ACTION} — queue a question-summary rebuild
 *       and rerun access-point mappings;</li>
 *   <li>{@code SAVE_MESSAGE_ACTION} — post a "spreadsheet processed" message
 *       and invalidate cached reports.</li>
 * </ul>
 * Errors are reported via {@code updateMessageBoard} rather than the HTTP
 * response; the method always returns null.
 */
@Override
protected RestResponse handleRequest(RestRequest req) throws Exception {
    SurveyInstanceServiceImpl sisi = new SurveyInstanceServiceImpl();
    RawDataImportRequest importReq = (RawDataImportRequest) req;
    if (RawDataImportRequest.SAVE_SURVEY_INSTANCE_ACTION.equals(importReq
            .getAction())) {
        // --- Import answers into a new or existing survey instance ---
        Survey s = null;
        if (importReq.getSurveyId() != null) {
            s = sDao.getByKey(importReq.getSurveyId());
        }
        if (s == null) {
            // NOTE(review): getSurveyId() may be null on this path; unboxing it
            // into updateMessageBoard's long parameter would NPE — confirm.
            updateMessageBoard(importReq.getSurveyId(), "Survey id [" + importReq.getSurveyId()
                    + "] doesn't exist");
            return null;
        }
        SurveyGroup sg = null;
        if (s.getSurveyGroupId() != null) {
            sg = sgDao.getByKey(s.getSurveyGroupId());
        }
        if (sg == null) {
            updateMessageBoard(importReq.getSurveyId(), "Survey group [" + s.getSurveyGroupId()
                    + "] doesn't exist");
            return null;
        }
        boolean isNewInstance = importReq.getSurveyInstanceId() == null;
        // A "monitoring form" is any form of a monitoring group other than the
        // group's registration (new-locale) form.
        boolean isMonitoringForm = sg.getMonitoringGroup()
                && !sg.getNewLocaleSurveyId().equals(s.getKey().getId());
        SurveyInstance instance = null;
        if (isNewInstance) {
            if (isMonitoringForm) {
                // Creating brand-new data points via import is only supported
                // for registration forms.
                updateMessageBoard(s.getKey().getId(),
                        "Importing new data into a monitoring form is not supported at the moment");
                return null;
            }
            instance = createInstance(importReq);
        } else {
            instance = instanceDao.getByKey(importReq.getSurveyInstanceId());
            if (instance == null) {
                updateMessageBoard(importReq.getSurveyInstanceId(), "Survey instance id ["
                        + importReq.getSurveyInstanceId() + "] doesn't exist");
                return null;
            }
        }
        // Guard against importing rows against the wrong survey.
        if (!instance.getSurveyId().equals(importReq.getSurveyId())) {
            updateMessageBoard(
                    importReq.getSurveyInstanceId(),
                    "Wrong survey selected when importing instance id ["
                            + importReq.getSurveyInstanceId() + "]");
            return null;
        }
        // Convert the questionId -> [value, type] map into DTOs and persist them.
        List<QuestionAnswerStoreDto> dtoList = new ArrayList<QuestionAnswerStoreDto>();
        for (Map.Entry<Long, String[]> item : importReq
                .getQuestionAnswerMap().entrySet()) {
            QuestionAnswerStoreDto qasDto = new QuestionAnswerStoreDto();
            qasDto.setQuestionID(item.getKey().toString());
            qasDto.setSurveyInstanceId(instance.getKey().getId());
            qasDto.setValue(item.getValue()[0]); // index 0 = answer value
            qasDto.setType(item.getValue()[1]);  // index 1 = answer type
            qasDto.setSurveyId(importReq.getSurveyId());
            qasDto.setCollectionDate(importReq.getCollectionDate());
            dtoList.add(qasDto);
        }
        sisi.updateQuestions(dtoList, true, false);
        if (isNewInstance) {
            // create new surveyed locale and launch task to complete processing
            SurveyedLocale locale = new SurveyedLocale();
            locale.setIdentifier(SurveyedLocale.generateBase32Uuid());
            instance.setSurveyedLocaleIdentifier(locale.getIdentifier());
            String privacyLevel = sg.getPrivacyLevel() != null ? sg.getPrivacyLevel()
                    .toString() : SurveyGroup.PrivacyLevel.PRIVATE.toString();
            locale.setLocaleType(privacyLevel);
            locale.setSurveyGroupId(sg.getKey().getId());
            locale.setCreationSurveyId(s.getKey().getId());
            locale = new SurveyedLocaleDao().save(locale);
            instance.setSurveyedLocaleId(locale.getKey().getId());
            // Queue asynchronous ingestion of the new instance.
            Queue defaultQueue = QueueFactory.getDefaultQueue();
            TaskOptions processSurveyedLocaleOptions = TaskOptions.Builder
                    .withUrl("/app_worker/surveyalservlet")
                    .param(SurveyalRestRequest.ACTION_PARAM,
                            SurveyalRestRequest.INGEST_INSTANCE_ACTION)
                    .param(SurveyalRestRequest.SURVEY_INSTANCE_PARAM,
                            Long.toString(instance.getKey().getId()))
                    .countdownMillis(Constants.TASK_DELAY);
            defaultQueue.add(processSurveyedLocaleOptions);
            // data summarisation: only triggered when a GEO answer exists
            List<QuestionAnswerStore> qasList = instanceDao.listQuestionAnswerStoreByType(
                    new Long(importReq.getSurveyInstanceId()), "GEO");
            if (qasList != null && qasList.size() > 0) {
                Queue summQueue = QueueFactory.getQueue("dataSummarization");
                summQueue.add(TaskOptions.Builder
                        .withUrl("/app_worker/dataprocessor")
                        .param(
                                DataProcessorRequest.ACTION_PARAM,
                                DataProcessorRequest.SURVEY_INSTANCE_SUMMARIZER)
                        .param("surveyInstanceId", importReq.getSurveyInstanceId() + "")
                        .param("qasId", qasList.get(0).getKey().getId() + "")
                        .param("delta", 1 + ""));
            }
        }
    } else if (RawDataImportRequest.RESET_SURVEY_INSTANCE_ACTION
            .equals(importReq.getAction())) {
        // --- Reset: wipe previously imported answers, refresh instance metadata ---
        SurveyInstance instance = instanceDao.getByKey(importReq
                .getSurveyInstanceId());
        List<QuestionAnswerStore> oldAnswers = instanceDao
                .listQuestionAnswerStore(importReq.getSurveyInstanceId(),
                        null);
        if (oldAnswers != null && oldAnswers.size() > 0) {
            instanceDao.delete(oldAnswers);
            if (instance != null) {
                instance.setLastUpdateDateTime(new Date());
                // Spreadsheet cells may carry the literal string "null" — ignore it.
                if (importReq.getSubmitter() != null
                        && importReq.getSubmitter().trim().length() > 0
                        && !"null".equalsIgnoreCase(importReq
                                .getSubmitter().trim())) {
                    instance.setSubmitterName(importReq.getSubmitter());
                }
                instance.setSurveyId(importReq.getSurveyId());
                if (importReq.getSurveyDuration() != null) {
                    instance.setSurveyalTime(importReq.getSurveyDuration());
                }
                instanceDao.save(instance);
            }
        } else {
            if (instance == null) {
                // Recreate the instance shell under the id supplied by the caller.
                instance = new SurveyInstance();
                instance.setKey(KeyFactory.createKey(
                        SurveyInstance.class.getSimpleName(),
                        importReq.getSurveyInstanceId()));
                instance.setSurveyId(importReq.getSurveyId());
                instance.setCollectionDate(importReq.getCollectionDate());
                instance.setSubmitterName(importReq.getSubmitter());
                instance.setUserID(1L); // imports are not tied to a real user
                instance.setUuid(UUID.randomUUID().toString());
                if (importReq.getSurveyDuration() != null) {
                    instance.setSurveyalTime(importReq.getSurveyDuration());
                }
                instanceDao.save(instance);
            } else {
                instance.setLastUpdateDateTime(new Date());
                if (importReq.getSubmitter() != null
                        && importReq.getSubmitter().trim().length() > 0
                        && !"null".equalsIgnoreCase(importReq
                                .getSubmitter().trim())) {
                    instance.setSubmitterName(importReq.getSubmitter());
                }
                instance.setSurveyId(importReq.getSurveyId());
                if (importReq.getSurveyDuration() != null) {
                    instance.setSurveyalTime(importReq.getSurveyDuration());
                }
                instanceDao.save(instance);
            }
        }
    } else if (RawDataImportRequest.SAVE_FIXED_FIELD_SURVEY_INSTANCE_ACTION
            .equals(importReq.getAction())) {
        // --- Fixed-field import: positional values mapped onto the survey's questions ---
        if (importReq.getFixedFieldValues() != null
                && importReq.getFixedFieldValues().size() > 0) {
            // this method assumes we're always creating a new instance
            SurveyInstance inst = createInstance(importReq);
            QuestionDao questionDao = new QuestionDao();
            List<Question> questionList = questionDao
                    .listQuestionsBySurvey(importReq.getSurveyId());
            if (questionList != null
                    && questionList.size() >= importReq
                            .getFixedFieldValues().size()) {
                List<QuestionAnswerStore> answers = new ArrayList<QuestionAnswerStore>();
                // value i maps onto question i; blanks are skipped
                for (int i = 0; i < importReq.getFixedFieldValues().size(); i++) {
                    String val = importReq.getFixedFieldValues().get(i);
                    if (val != null && val.trim().length() > 0) {
                        QuestionAnswerStore ans = new QuestionAnswerStore();
                        ans.setQuestionID(questionList.get(i).getKey()
                                .getId()
                                + "");
                        ans.setValue(val);
                        Type type = questionList.get(i).getType();
                        if (Type.GEO == type) {
                            ans.setType(QuestionType.GEO.toString());
                        } else if (Type.PHOTO == type) {
                            ans.setType("IMAGE");
                        } else {
                            ans.setType("VALUE");
                        }
                        ans.setSurveyId(importReq.getSurveyId());
                        ans.setSurveyInstanceId(importReq
                                .getSurveyInstanceId());
                        ans.setCollectionDate(importReq.getCollectionDate());
                        answers.add(ans);
                    }
                }
                if (answers.size() > 0) {
                    QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
                    qasDao.save(answers);
                    sisi.sendProcessingMessages(inst);
                }
            } else {
                log("No questions found for the survey id "
                        + importReq.getSurveyId());
            }
            // todo: send processing message
        }
    } else if (RawDataImportRequest.UPDATE_SUMMARIES_ACTION
            .equalsIgnoreCase(importReq.getAction())) {
        // first rebuild the summaries
        log.log(Level.INFO, "Rebuilding summaries for surveyId "
                + importReq.getSurveyId().toString());
        TaskOptions options = TaskOptions.Builder.withUrl(
                "/app_worker/dataprocessor").param(
                DataProcessorRequest.ACTION_PARAM,
                DataProcessorRequest.REBUILD_QUESTION_SUMMARY_ACTION);
        String backendPub = PropertyUtil.getProperty("backendpublish");
        if (backendPub != null && "true".equals(backendPub)) {
            // change the host so the queue invokes the backend
            options = options
                    .header("Host",
                            BackendServiceFactory.getBackendService()
                                    .getBackendAddress("dataprocessor"));
        }
        Long surveyId = importReq.getSurveyId();
        if (surveyId != null && surveyId > 0) {
            options.param(DataProcessorRequest.SURVEY_ID_PARAM,
                    surveyId.toString());
        }
        com.google.appengine.api.taskqueue.Queue queue = com.google.appengine.api.taskqueue.QueueFactory
                .getDefaultQueue();
        queue.add(options);
        // now remap to access point
        if (surveyId != null) {
            SurveyServiceImpl ssi = new SurveyServiceImpl();
            ssi.rerunAPMappings(surveyId);
        }
    } else if (RawDataImportRequest.SAVE_MESSAGE_ACTION
            .equalsIgnoreCase(importReq.getAction())) {
        // --- Post a completion message and invalidate cached reports ---
        List<Long> ids = new ArrayList<Long>();
        ids.add(importReq.getSurveyId());
        SurveyUtils.notifyReportService(ids, "invalidate");
        MessageDao mdao = new MessageDao();
        Message msg = new Message();
        SurveyDAO sdao = new SurveyDAO();
        Survey s = sdao.getById(importReq.getSurveyId());
        msg.setShortMessage("Spreadsheet processed");
        msg.setObjectId(importReq.getSurveyId());
        msg.setObjectTitle(s.getPath() + "/" + s.getName());
        msg.setActionAbout("spreadsheetProcessed");
        mdao.save(msg);
    }
    return null;
}
/**
* constructs and persists a new surveyInstance using the data from the import request
*
* @param importReq
* @return
*/
/**
 * Constructs and persists a new SurveyInstance using the data from the
 * import request. The generated instance id (and, when absent, the
 * collection date) is back-filled onto the request so subsequent
 * QuestionAnswerStore processing can use it.
 *
 * @param importReq the raw-data import request being processed
 * @return the persisted SurveyInstance with its datastore key populated
 */
private SurveyInstance createInstance(RawDataImportRequest importReq) {
    SurveyInstance inst = new SurveyInstance();
    inst.setUserID(1L); // imports are not tied to a real user account
    inst.setSurveyId(importReq.getSurveyId());
    // Default to "now" when the import did not carry a collection date.
    inst.setCollectionDate(importReq.getCollectionDate() != null ? importReq
            .getCollectionDate() : new Date());
    inst.setApproximateLocationFlag("False");
    inst.setDeviceIdentifier("IMPORTER");
    // Fix: the UUID was previously generated and assigned twice; the first
    // value was always discarded. Assign it exactly once.
    inst.setUuid(UUID.randomUUID().toString());
    inst.setSurveyedLocaleId(importReq.getSurveyedLocaleId());
    inst.setSubmitterName(importReq.getSubmitter());
    inst.setSurveyalTime(importReq.getSurveyDuration());
    // set the key so the subsequent logic can populate it in the
    // QuestionAnswerStore objects
    SurveyInstanceDAO instDao = new SurveyInstanceDAO();
    inst = instDao.save(inst);
    importReq.setSurveyInstanceId(inst.getKey().getId());
    if (importReq.getCollectionDate() == null) {
        importReq.setCollectionDate(inst.getCollectionDate());
    }
    return inst;
}
/**
 * Persists a short status message on the message board so users can see
 * the outcome of an import step.
 *
 * @param objectId     id of the entity the message is about
 * @param shortMessage human-readable status text
 */
private void updateMessageBoard(long objectId, String shortMessage) {
    Message note = new Message();
    note.setObjectId(objectId);
    note.setActionAbout("importData");
    note.setShortMessage(shortMessage);
    new MessageDao().save(note);
}
/**
 * Intentionally empty: this servlet reports progress and errors via the
 * message board (see updateMessageBoard) rather than the HTTP response body.
 */
@Override
protected void writeOkResponse(RestResponse resp) throws Exception {
    // no-op
}
}
|
GAE/src/org/waterforpeople/mapping/app/web/RawDataRestServlet.java
|
/*
* Copyright (C) 2010-2012 Stichting Akvo (Akvo Foundation)
*
* This file is part of Akvo FLOW.
*
* Akvo FLOW is free software: you can redistribute it and modify it under the terms of
* the GNU Affero General Public License (AGPL) as published by the Free Software Foundation,
* either version 3 of the License or any later version.
*
* Akvo FLOW is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License included below for more details.
*
* The full license text can also be seen at <http://www.gnu.org/licenses/agpl.html>.
*/
package org.waterforpeople.mapping.app.web;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletRequest;
import org.waterforpeople.mapping.app.gwt.client.survey.QuestionDto.QuestionType;
import org.waterforpeople.mapping.app.gwt.client.surveyinstance.QuestionAnswerStoreDto;
import org.waterforpeople.mapping.app.gwt.server.survey.SurveyServiceImpl;
import org.waterforpeople.mapping.app.gwt.server.surveyinstance.SurveyInstanceServiceImpl;
import org.waterforpeople.mapping.app.web.dto.DataProcessorRequest;
import org.waterforpeople.mapping.app.web.dto.RawDataImportRequest;
import org.waterforpeople.mapping.dao.QuestionAnswerStoreDao;
import org.waterforpeople.mapping.dao.SurveyInstanceDAO;
import org.waterforpeople.mapping.domain.QuestionAnswerStore;
import org.waterforpeople.mapping.domain.SurveyInstance;
import com.gallatinsystems.common.Constants;
import com.gallatinsystems.common.util.PropertyUtil;
import com.gallatinsystems.framework.rest.AbstractRestApiServlet;
import com.gallatinsystems.framework.rest.RestRequest;
import com.gallatinsystems.framework.rest.RestResponse;
import com.gallatinsystems.messaging.dao.MessageDao;
import com.gallatinsystems.messaging.domain.Message;
import com.gallatinsystems.survey.dao.QuestionDao;
import com.gallatinsystems.survey.dao.SurveyDAO;
import com.gallatinsystems.survey.dao.SurveyGroupDAO;
import com.gallatinsystems.survey.dao.SurveyUtils;
import com.gallatinsystems.survey.domain.Question;
import com.gallatinsystems.survey.domain.SurveyGroup;
import com.gallatinsystems.survey.domain.Question.Type;
import com.gallatinsystems.survey.domain.Survey;
import com.gallatinsystems.surveyal.app.web.SurveyalRestRequest;
import com.gallatinsystems.surveyal.dao.SurveyedLocaleDao;
import com.gallatinsystems.surveyal.domain.SurveyedLocale;
import com.google.appengine.api.backends.BackendServiceFactory;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.taskqueue.Queue;
import com.google.appengine.api.taskqueue.QueueFactory;
import com.google.appengine.api.taskqueue.TaskOptions;
public class RawDataRestServlet extends AbstractRestApiServlet {
private static final Logger log = Logger.getLogger("RawDataRestServlet");
private static final long serialVersionUID = 2409014651721639814L;
private SurveyInstanceDAO instanceDao;
private SurveyDAO sDao;
private SurveyGroupDAO sgDao;
public RawDataRestServlet() {
instanceDao = new SurveyInstanceDAO();
sDao = new SurveyDAO();
sgDao = new SurveyGroupDAO();
}
@Override
protected RestRequest convertRequest() throws Exception {
HttpServletRequest req = getRequest();
RestRequest restRequest = new RawDataImportRequest();
restRequest.populateFromHttpRequest(req);
return restRequest;
}
@Override
protected RestResponse handleRequest(RestRequest req) throws Exception {
SurveyInstanceServiceImpl sisi = new SurveyInstanceServiceImpl();
RawDataImportRequest importReq = (RawDataImportRequest) req;
if (RawDataImportRequest.SAVE_SURVEY_INSTANCE_ACTION.equals(importReq
.getAction())) {
Survey s = null;
if (importReq.getSurveyId() != null) {
s = sDao.getByKey(importReq.getSurveyId());
}
if (s == null) {
updateMessageBoard(importReq.getSurveyId(), "Survey id [" + importReq.getSurveyId()
+ "] doesn't exist");
return null;
}
boolean isNewInstance = importReq.getSurveyInstanceId() == null;
SurveyInstanceDAO siDao = new SurveyInstanceDAO();
SurveyInstance instance = null;
if (isNewInstance) {
instance = createInstance(importReq);
} else {
instance = instanceDao.getByKey(importReq.getSurveyInstanceId());
if (instance == null) {
updateMessageBoard(importReq.getSurveyInstanceId(), "Survey instance id ["
+ importReq.getSurveyInstanceId() + "] doesn't exist");
return null;
}
}
if (!instance.getSurveyId().equals(importReq.getSurveyId())) {
updateMessageBoard(
importReq.getSurveyInstanceId(),
"Wrong survey selected when importing instance id ["
+ importReq.getSurveyInstanceId() + "]");
return null;
}
List<QuestionAnswerStoreDto> dtoList = new ArrayList<QuestionAnswerStoreDto>();
for (Map.Entry<Long, String[]> item : importReq
.getQuestionAnswerMap().entrySet()) {
QuestionAnswerStoreDto qasDto = new QuestionAnswerStoreDto();
qasDto.setQuestionID(item.getKey().toString());
qasDto.setSurveyInstanceId(instance.getKey().getId());
qasDto.setValue(item.getValue()[0]);
qasDto.setType(item.getValue()[1]);
qasDto.setSurveyId(importReq.getSurveyId());
qasDto.setCollectionDate(importReq.getCollectionDate());
dtoList.add(qasDto);
}
sisi.updateQuestions(dtoList, true, false);
if (isNewInstance) {
// create new surveyed locale and launch task to complete processing
SurveyedLocale locale = new SurveyedLocale();
locale.setIdentifier(SurveyedLocale.generateBase32Uuid());
instance.setSurveyedLocaleIdentifier(locale.getIdentifier());
SurveyGroup sg = null;
if (s.getSurveyGroupId() != null) {
sg = new SurveyGroupDAO().getByKey(s.getSurveyGroupId());
}
if (sg != null) {
String privacyLevel = sg.getPrivacyLevel() != null ? sg.getPrivacyLevel()
.toString() : SurveyGroup.PrivacyLevel.PRIVATE.toString();
locale.setLocaleType(privacyLevel);
locale.setSurveyGroupId(sg.getKey().getId());
locale.setCreationSurveyId(s.getKey().getId());
}
locale = new SurveyedLocaleDao().save(locale);
instance.setSurveyedLocaleId(locale.getKey().getId());
Queue defaultQueue = QueueFactory.getDefaultQueue();
TaskOptions processSurveyedLocaleOptions = TaskOptions.Builder
.withUrl("/app_worker/surveyalservlet")
.param(SurveyalRestRequest.ACTION_PARAM,
SurveyalRestRequest.INGEST_INSTANCE_ACTION)
.param(SurveyalRestRequest.SURVEY_INSTANCE_PARAM,
Long.toString(instance.getKey().getId()))
.countdownMillis(Constants.TASK_DELAY);
defaultQueue.add(processSurveyedLocaleOptions);
// data summarisation
List<QuestionAnswerStore> qasList = instanceDao.listQuestionAnswerStoreByType(
new Long(importReq.getSurveyInstanceId()), "GEO");
if (qasList != null && qasList.size() > 0) {
Queue summQueue = QueueFactory.getQueue("dataSummarization");
summQueue.add(TaskOptions.Builder
.withUrl("/app_worker/dataprocessor")
.param(
DataProcessorRequest.ACTION_PARAM,
DataProcessorRequest.SURVEY_INSTANCE_SUMMARIZER)
.param("surveyInstanceId", importReq.getSurveyInstanceId() + "")
.param("qasId", qasList.get(0).getKey().getId() + "")
.param("delta", 1 + ""));
}
}
} else if (RawDataImportRequest.RESET_SURVEY_INSTANCE_ACTION
.equals(importReq.getAction())) {
SurveyInstance instance = instanceDao.getByKey(importReq
.getSurveyInstanceId());
List<QuestionAnswerStore> oldAnswers = instanceDao
.listQuestionAnswerStore(importReq.getSurveyInstanceId(),
null);
if (oldAnswers != null && oldAnswers.size() > 0) {
instanceDao.delete(oldAnswers);
if (instance != null) {
instance.setLastUpdateDateTime(new Date());
if (importReq.getSubmitter() != null
&& importReq.getSubmitter().trim().length() > 0
&& !"null".equalsIgnoreCase(importReq
.getSubmitter().trim())) {
instance.setSubmitterName(importReq.getSubmitter());
}
instance.setSurveyId(importReq.getSurveyId());
if (importReq.getSurveyDuration() != null) {
instance.setSurveyalTime(importReq.getSurveyDuration());
}
instanceDao.save(instance);
}
} else {
if (instance == null) {
instance = new SurveyInstance();
instance.setKey(KeyFactory.createKey(
SurveyInstance.class.getSimpleName(),
importReq.getSurveyInstanceId()));
instance.setSurveyId(importReq.getSurveyId());
instance.setCollectionDate(importReq.getCollectionDate());
instance.setSubmitterName(importReq.getSubmitter());
instance.setUserID(1L);
instance.setUuid(UUID.randomUUID().toString());
if (importReq.getSurveyDuration() != null) {
instance.setSurveyalTime(importReq.getSurveyDuration());
}
instanceDao.save(instance);
} else {
instance.setLastUpdateDateTime(new Date());
if (importReq.getSubmitter() != null
&& importReq.getSubmitter().trim().length() > 0
&& !"null".equalsIgnoreCase(importReq
.getSubmitter().trim())) {
instance.setSubmitterName(importReq.getSubmitter());
}
instance.setSurveyId(importReq.getSurveyId());
if (importReq.getSurveyDuration() != null) {
instance.setSurveyalTime(importReq.getSurveyDuration());
}
instanceDao.save(instance);
}
}
} else if (RawDataImportRequest.SAVE_FIXED_FIELD_SURVEY_INSTANCE_ACTION
.equals(importReq.getAction())) {
if (importReq.getFixedFieldValues() != null
&& importReq.getFixedFieldValues().size() > 0) {
// this method assumes we're always creating a new instance
SurveyInstance inst = createInstance(importReq);
QuestionDao questionDao = new QuestionDao();
List<Question> questionList = questionDao
.listQuestionsBySurvey(importReq.getSurveyId());
if (questionList != null
&& questionList.size() >= importReq
.getFixedFieldValues().size()) {
List<QuestionAnswerStore> answers = new ArrayList<QuestionAnswerStore>();
for (int i = 0; i < importReq.getFixedFieldValues().size(); i++) {
String val = importReq.getFixedFieldValues().get(i);
if (val != null && val.trim().length() > 0) {
QuestionAnswerStore ans = new QuestionAnswerStore();
ans.setQuestionID(questionList.get(i).getKey()
.getId()
+ "");
ans.setValue(val);
Type type = questionList.get(i).getType();
if (Type.GEO == type) {
ans.setType(QuestionType.GEO.toString());
} else if (Type.PHOTO == type) {
ans.setType("IMAGE");
} else {
ans.setType("VALUE");
}
ans.setSurveyId(importReq.getSurveyId());
ans.setSurveyInstanceId(importReq
.getSurveyInstanceId());
ans.setCollectionDate(importReq.getCollectionDate());
answers.add(ans);
}
}
if (answers.size() > 0) {
QuestionAnswerStoreDao qasDao = new QuestionAnswerStoreDao();
qasDao.save(answers);
sisi.sendProcessingMessages(inst);
}
} else {
log("No questions found for the survey id "
+ importReq.getSurveyId());
}
// todo: send processing message
}
} else if (RawDataImportRequest.UPDATE_SUMMARIES_ACTION
.equalsIgnoreCase(importReq.getAction())) {
// first rebuild the summaries
log.log(Level.INFO, "Rebuilding summaries for surveyId "
+ importReq.getSurveyId().toString());
TaskOptions options = TaskOptions.Builder.withUrl(
"/app_worker/dataprocessor").param(
DataProcessorRequest.ACTION_PARAM,
DataProcessorRequest.REBUILD_QUESTION_SUMMARY_ACTION);
String backendPub = PropertyUtil.getProperty("backendpublish");
if (backendPub != null && "true".equals(backendPub)) {
// change the host so the queue invokes the backend
options = options
.header("Host",
BackendServiceFactory.getBackendService()
.getBackendAddress("dataprocessor"));
}
Long surveyId = importReq.getSurveyId();
if (surveyId != null && surveyId > 0) {
options.param(DataProcessorRequest.SURVEY_ID_PARAM,
surveyId.toString());
}
com.google.appengine.api.taskqueue.Queue queue = com.google.appengine.api.taskqueue.QueueFactory
.getDefaultQueue();
queue.add(options);
// now remap to access point
if (surveyId != null) {
SurveyServiceImpl ssi = new SurveyServiceImpl();
ssi.rerunAPMappings(surveyId);
}
} else if (RawDataImportRequest.SAVE_MESSAGE_ACTION
.equalsIgnoreCase(importReq.getAction())) {
List<Long> ids = new ArrayList<Long>();
ids.add(importReq.getSurveyId());
SurveyUtils.notifyReportService(ids, "invalidate");
MessageDao mdao = new MessageDao();
Message msg = new Message();
SurveyDAO sdao = new SurveyDAO();
Survey s = sdao.getById(importReq.getSurveyId());
msg.setShortMessage("Spreadsheet processed");
msg.setObjectId(importReq.getSurveyId());
msg.setObjectTitle(s.getPath() + "/" + s.getName());
msg.setActionAbout("spreadsheetProcessed");
mdao.save(msg);
}
return null;
}
/**
* constructs and persists a new surveyInstance using the data from the import request
*
* @param importReq
* @return
*/
private SurveyInstance createInstance(RawDataImportRequest importReq) {
SurveyInstance inst = new SurveyInstance();
inst.setUserID(1L);
inst.setSurveyId(importReq.getSurveyId());
inst.setCollectionDate(importReq.getCollectionDate() != null ? importReq
.getCollectionDate() : new Date());
inst.setApproximateLocationFlag("False");
inst.setDeviceIdentifier("IMPORTER");
inst.setUuid(UUID.randomUUID().toString());
inst.setSurveyedLocaleId(importReq.getSurveyedLocaleId());
inst.setUuid(UUID.randomUUID().toString());
inst.setSubmitterName(importReq.getSubmitter());
inst.setSurveyalTime(importReq.getSurveyDuration());
// set the key so the subsequent logic can populate it in the
// QuestionAnswerStore objects
SurveyInstanceDAO instDao = new SurveyInstanceDAO();
inst = instDao.save(inst);
importReq.setSurveyInstanceId(inst.getKey().getId());
if (importReq.getCollectionDate() == null) {
importReq.setCollectionDate(inst.getCollectionDate());
}
return inst;
}
private void updateMessageBoard(long objectId, String shortMessage) {
MessageDao mDao = new MessageDao();
Message message = new Message();
message.setObjectId(objectId);
message.setActionAbout("importData");
message.setShortMessage(shortMessage);
mDao.save(message);
}
@Override
protected void writeOkResponse(RestResponse resp) throws Exception {
// no-op
}
}
|
[#1400] Prevent uploading new data without survey instance id against monitoring surveys
|
GAE/src/org/waterforpeople/mapping/app/web/RawDataRestServlet.java
|
[#1400] Prevent uploading new data without survey instance id against monitoring surveys
|
|
Java
|
agpl-3.0
|
15f3a6dc87ce84a1d337df8864e63116ddf021d5
| 0
|
acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling,acontes/scheduling
|
/*
* ################################################################
*
* ProActive: The Java(TM) library for Parallel, Distributed,
* Concurrent computing with Security and Mobility
*
* Copyright (C) 1997-2008 INRIA/University of Nice-Sophia Antipolis
* Contact: proactive@ow2.org
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version
* 2 of the License, or any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s): ActiveEon Team - http://www.activeeon.com
*
* ################################################################
* $$ACTIVEEON_CONTRIBUTOR$$
*/
package nodesource;
import static junit.framework.Assert.assertTrue;
import java.io.File;
import nodestate.FunctionalTDefaultRM;
import nodestate.RMEventReceiver;
import org.objectweb.proactive.api.PAActiveObject;
import org.ow2.proactive.resourcemanager.RMFactory;
import org.ow2.proactive.resourcemanager.common.event.RMEventType;
import org.ow2.proactive.resourcemanager.nodesource.infrastructure.manager.GCMInfrastructure;
import org.ow2.proactive.resourcemanager.nodesource.policy.StaticPolicy;
import org.ow2.proactive.resourcemanager.nodesource.policy.TimeSlotPolicy;
import org.ow2.proactive.utils.FileToBytesConverter;
/**
*
* Test checks the correct behavior of node source consisted of GCM infrastructure manager
* and time slot acquisition policy.
*
* This test may failed by timeout if the machine is too slow, so gcm deployment takes more than 15 secs
*
*/
public class TestGCMInfrastructureTimeSlotPolicy extends FunctionalTDefaultRM {
protected byte[] GCMDeploymentData;
protected RMEventReceiver receiver;
protected int descriptorNodeNumber = 1;
protected Object[] getPolicyParams() {
return new Object[] { TimeSlotPolicy.dateFormat.format(System.currentTimeMillis()),
TimeSlotPolicy.dateFormat.format(System.currentTimeMillis() + 15000), "0", "true" };
}
protected void init() throws Exception {
String oneNodeescriptor = FunctionalTDefaultRM.class.getResource("/nodesource/1node.xml").getPath();
GCMDeploymentData = FileToBytesConverter.convertFileToByteArray((new File(oneNodeescriptor)));
}
protected void createEmptyNodeSource(String sourceName) throws Exception {
admin.createNodesource(sourceName, GCMInfrastructure.class.getName(), null, StaticPolicy.class
.getName(), null);
receiver.waitForNEvent(1);
assertTrue(receiver.cleanNgetNodeSourcesCreatedEvents().size() == 1);
}
protected void createDefaultNodeSource(String sourceName) throws Exception {
// creating node source
admin.createNodesource(sourceName, GCMInfrastructure.class.getName(),
new Object[] { GCMDeploymentData }, TimeSlotPolicy.class.getName(), getPolicyParams());
receiver.waitForNEvent(1);
assertTrue(receiver.cleanNgetNodeSourcesCreatedEvents().size() == 1);
}
protected void removeNodeSource(String sourceName) throws Exception {
// removing node source
admin.removeSource(sourceName, true);
//wait for the event of the node source removal
receiver.waitForNEvent(1);
assertTrue(receiver.cleanNgetNodeSourcesRemovedEvents().size() == 1);
}
/** Actions to be Perform by this test.
* The method is called automatically by Junit framework.
* @throws Exception If the test fails.
*/
@org.junit.Test
public void action() throws Exception {
init();
RMFactory.setOsJavaProperty();
RMEventType[] eventsList = { RMEventType.NODE_ADDED, RMEventType.NODE_BUSY, RMEventType.NODE_DOWN,
RMEventType.NODE_FREE, RMEventType.NODE_REMOVED, RMEventType.NODE_TO_RELEASE,
RMEventType.NODESOURCE_CREATED, RMEventType.NODESOURCE_REMOVED };
receiver = (RMEventReceiver) PAActiveObject.newActive(RMEventReceiver.class.getName(), new Object[] {
monitor, eventsList });
receiver.cleanEventLists();
String source1 = "Node source 1";
log("Test 1 - creation/removal of empty node source");
createEmptyNodeSource(source1);
removeNodeSource(source1);
log("Test 2 - creation/removal of the node source with nodes");
createDefaultNodeSource(source1);
receiver.waitForNEvent(descriptorNodeNumber);
assertTrue(receiver.cleanNgetNodesAddedEvents().size() == descriptorNodeNumber);
log("Test 2 - nodes will be removed in 15 secs");
// wait for the nodes release
receiver.waitForNEvent(descriptorNodeNumber);
assertTrue(receiver.cleanNgetNodesremovedEvents().size() == descriptorNodeNumber);
removeNodeSource(source1);
}
}
|
src/resource-manager/tests/nodesource/TestGCMInfrastructureTimeSlotPolicy.java
|
/*
* ################################################################
*
* ProActive: The Java(TM) library for Parallel, Distributed,
* Concurrent computing with Security and Mobility
*
* Copyright (C) 1997-2008 INRIA/University of Nice-Sophia Antipolis
* Contact: proactive@ow2.org
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version
* 2 of the License, or any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s): ActiveEon Team - http://www.activeeon.com
*
* ################################################################
* $$ACTIVEEON_CONTRIBUTOR$$
*/
package nodesource;
import static junit.framework.Assert.assertTrue;
import java.io.File;
import nodestate.FunctionalTDefaultRM;
import nodestate.RMEventReceiver;
import org.objectweb.proactive.api.PAActiveObject;
import org.ow2.proactive.resourcemanager.RMFactory;
import org.ow2.proactive.resourcemanager.common.event.RMEventType;
import org.ow2.proactive.resourcemanager.nodesource.infrastructure.manager.GCMInfrastructure;
import org.ow2.proactive.resourcemanager.nodesource.policy.StaticPolicy;
import org.ow2.proactive.resourcemanager.nodesource.policy.TimeSlotPolicy;
import org.ow2.proactive.utils.FileToBytesConverter;
/**
*
* Test checks the correct behavior of node source consisted of GCM infrastructure manager
* and time slot acquisition policy.
*
* This test may failed by timeout if the machine is too slow, so gcm deployment takes more than 15 secs
*
*/
public class TestGCMInfrastructureTimeSlotPolicy extends FunctionalTDefaultRM {
protected byte[] GCMDeploymentData;
protected RMEventReceiver receiver;
protected int descriptorNodeNumber = 1;
protected Object[] getPolicyParams() {
return new Object[] { TimeSlotPolicy.dateFormat.format(System.currentTimeMillis()),
TimeSlotPolicy.dateFormat.format(System.currentTimeMillis() + 15000), "0", "true" };
}
protected void init() throws Exception {
String oneNodeescriptor = FunctionalTDefaultRM.class.getResource(
"/nodesource/GCMNodeSourceDeployment.xml").getPath();
GCMDeploymentData = FileToBytesConverter.convertFileToByteArray((new File(oneNodeescriptor)));
}
protected void createEmptyNodeSource(String sourceName) throws Exception {
admin.createNodesource(sourceName, GCMInfrastructure.class.getName(), null, StaticPolicy.class
.getName(), null);
receiver.waitForNEvent(1);
assertTrue(receiver.cleanNgetNodeSourcesCreatedEvents().size() == 1);
}
protected void createDefaultNodeSource(String sourceName) throws Exception {
// creating node source
admin.createNodesource(sourceName, GCMInfrastructure.class.getName(),
new Object[] { GCMDeploymentData }, TimeSlotPolicy.class.getName(), getPolicyParams());
receiver.waitForNEvent(1);
assertTrue(receiver.cleanNgetNodeSourcesCreatedEvents().size() == 1);
}
protected void removeNodeSource(String sourceName) throws Exception {
// removing node source
admin.removeSource(sourceName, true);
//wait for the event of the node source removal
receiver.waitForNEvent(1);
assertTrue(receiver.cleanNgetNodeSourcesRemovedEvents().size() == 1);
}
/** Actions to be Perform by this test.
* The method is called automatically by Junit framework.
* @throws Exception If the test fails.
*/
@org.junit.Test
public void action() throws Exception {
init();
RMFactory.setOsJavaProperty();
RMEventType[] eventsList = { RMEventType.NODE_ADDED, RMEventType.NODE_BUSY, RMEventType.NODE_DOWN,
RMEventType.NODE_FREE, RMEventType.NODE_REMOVED, RMEventType.NODE_TO_RELEASE,
RMEventType.NODESOURCE_CREATED, RMEventType.NODESOURCE_REMOVED };
receiver = (RMEventReceiver) PAActiveObject.newActive(RMEventReceiver.class.getName(), new Object[] {
monitor, eventsList });
receiver.cleanEventLists();
String source1 = "Node source 1";
log("Test 1 - creation/removal of empty node source");
createEmptyNodeSource(source1);
removeNodeSource(source1);
log("Test 2 - creation/removal of the node source with nodes");
createDefaultNodeSource(source1);
receiver.waitForNEvent(descriptorNodeNumber);
assertTrue(receiver.cleanNgetNodesAddedEvents().size() == descriptorNodeNumber);
log("Test 2 - nodes will be removed in 15 secs");
// wait for the nodes release
receiver.waitForNEvent(descriptorNodeNumber);
assertTrue(receiver.cleanNgetNodesremovedEvents().size() == descriptorNodeNumber);
removeNodeSource(source1);
}
}
|
Fix path to gcm deployment descriptor in TestGCMInfrastructureTimeSlotPolicy
git-svn-id: 27916816d6cfa57849e9a885196bf7392b80e1ac@11846 28e8926c-6b08-0410-baaa-805c5e19b8d6
|
src/resource-manager/tests/nodesource/TestGCMInfrastructureTimeSlotPolicy.java
|
Fix path to gcm deployment descriptor in TestGCMInfrastructureTimeSlotPolicy
|
|
Java
|
lgpl-2.1
|
20d6eda5792e89f1f1d1545918511eb6cc872cf6
| 0
|
RockManJoe64/swingx,RockManJoe64/swingx
|
/*
* $Id$
*
* Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*/
package org.jdesktop.swingx;
import java.awt.Point;
import java.awt.event.MouseEvent;
import javax.swing.JTable;
import javax.swing.SwingUtilities;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import org.jdesktop.swingx.table.TableColumnExt;
import org.jdesktop.swingx.treetable.AbstractTreeTableModel;
import org.jdesktop.swingx.treetable.DefaultTreeTableModel;
import org.jdesktop.swingx.treetable.FileSystemModel;
import org.jdesktop.swingx.treetable.TreeTableModel;
import org.jdesktop.swingx.util.PropertyChangeReport;
public class JXTreeTableUnitTest extends InteractiveTestCase {
protected TreeTableModel treeTableModel;
protected TreeTableModel simpleTreeTableModel;
public JXTreeTableUnitTest() {
super("JXTreeTable Unit Test");
}
/**
* test if table and tree rowHeights are the same.
*
*/
public void testAdjustedRowHeights() {
JXTreeTable treeTable = new JXTreeTable(simpleTreeTableModel);
JXTree tree = (JXTree) treeTable.getCellRenderer(0, 0);
// sanity: same initially
assertEquals("table and tree rowHeights must be equal",
treeTable.getRowHeight(), tree.getRowHeight());
// change treeTable height
treeTable.setRowHeight(treeTable.getRowHeight() * 2);
assertEquals("table and tree rowHeights must be equal",
treeTable.getRowHeight(), tree.getRowHeight());
// change treeTable height
tree.setRowHeight(tree.getRowHeight() * 2);
assertEquals("table and tree rowHeights must be equal",
treeTable.getRowHeight(), tree.getRowHeight());
}
/**
* #321-swingx: missing tree property toggleClickCount, largeModel.
*
*/
public void testToggleClickCount() {
JXTreeTable treeTable = new JXTreeTable(simpleTreeTableModel);
int clickCount = treeTable.getToggleClickCount();
// asserting documented default clickCount == 2
assertEquals("default clickCount", 2, clickCount);
int newClickCount = clickCount + 1;
treeTable.setToggleClickCount(newClickCount);
assertEquals("toggleClickCount must be changed",
newClickCount, treeTable.getToggleClickCount());
boolean largeModel = treeTable.isLargeModel();
assertFalse("initial largeModel", largeModel);
treeTable.setLargeModel(!largeModel);
assertTrue("largeModel property must be toggled", treeTable.isLargeModel());
}
/**
* Issue #168-jdnc: dnd enabled breaks node collapse/expand.
* testing auto-detection of dragHackEnabled.
*
*/
public void testDragHackFlagOn() {
JXTreeTable treeTable = new JXTreeTable(simpleTreeTableModel);
assertNull(treeTable.getClientProperty(JXTreeTable.DRAG_HACK_FLAG_KEY));
treeTable.getTreeTableHacker().expandOrCollapseNode(0, new MouseEvent(treeTable, 0, 0, 0, 0, 0, 1, false));
Boolean dragHackFlag = (Boolean) treeTable.getClientProperty(JXTreeTable.DRAG_HACK_FLAG_KEY);
assertNotNull(dragHackFlag);
assertTrue(dragHackFlag);
}
/**
* Issue #168-jdnc: dnd enabled breaks node collapse/expand.
* testing auto-detection of dragHackEnabled.
*
*/
public void testDragHackFlagOff() {
System.setProperty("sun.swing.enableImprovedDragGesture", "true");
JXTreeTable treeTable = new JXTreeTable(simpleTreeTableModel);
assertNull(treeTable.getClientProperty(JXTreeTable.DRAG_HACK_FLAG_KEY));
treeTable.getTreeTableHacker().expandOrCollapseNode(0, new MouseEvent(treeTable, 0, 0, 0, 0, 0, 1, false));
Boolean dragHackFlag = (Boolean) treeTable.getClientProperty(JXTreeTable.DRAG_HACK_FLAG_KEY);
assertNotNull(dragHackFlag);
assertFalse(dragHackFlag);
System.getProperties().remove("sun.swing.enableImprovedDragGesture");
}
/**
* loosely related to Issue #248-swingx: setRootVisible (true) after
* initial rootInvisible didn't show the root.
*
* this here is a sanity test that there is exactly one row, the problem
* is a missing visual update of the table.
*
*/
public void testEmptyModelInitiallyInvisibleRoot() {
final DefaultMutableTreeNode root = new DefaultMutableTreeNode();
final InsertTreeTableModel model = new InsertTreeTableModel(root);
final JXTreeTable treeTable = new JXTreeTable(model);
// sanity...
assertFalse(treeTable.isRootVisible());
assertEquals("no rows with invisible root", 0, treeTable.getRowCount());
treeTable.setRootVisible(true);
// sanity...
assertTrue(treeTable.isRootVisible());
assertEquals("one row with visible root", 1, treeTable.getRowCount());
}
/**
* Issue #247-swingx: update probs with insert node.
* The insert under a collapsed node fires a dataChanged on the table
* which results in the usual total "memory" loss (f.i. selection)
*
* The tree model is after setup is (see the bug report as well):
* root
* childA
* childB
*
* In the view childA is collapsed:
* root
* childA
*
*/
public void testInsertUnderCollapsedNode() {
final DefaultMutableTreeNode root = new DefaultMutableTreeNode();
final InsertTreeTableModel model = new InsertTreeTableModel(root);
DefaultMutableTreeNode childA = model.addChild(root);
final DefaultMutableTreeNode childB = model.addChild(childA);
final JXTreeTable treeTable = new JXTreeTable(model);
treeTable.setRootVisible(true);
// sanity...
assertEquals(2, treeTable.getRowCount());
final int selected = 1;
// select childA
treeTable.setRowSelectionInterval(selected, selected);
model.addChild(childB);
// need to invoke - the tableEvent is fired delayed as well
// Note: doing so will make the test _appear_ to pass, the
// assertion failure can be seen as an output only!
// any idea how to make the test fail?
SwingUtilities.invokeLater(new Runnable() {
public void run() {
int selectedAfterInsert = treeTable.getSelectedRow();
assertEquals(selected, selectedAfterInsert);
}
});
}
/**
* Model used to show insert update issue.
*/
public static class InsertTreeTableModel extends DefaultTreeTableModel {
private boolean rootIsFolder;
public InsertTreeTableModel(TreeNode root) {
super(root);
}
public InsertTreeTableModel(TreeNode root, boolean rootIsFolder) {
super(root);
this.rootIsFolder = rootIsFolder;
}
@Override
public boolean isLeaf(Object node) {
if (rootIsFolder && (node == getRoot())) {
return false;
}
return super.isLeaf(node);
}
public int getColumnCount() {
return 2;
}
public DefaultMutableTreeNode addChild(DefaultMutableTreeNode parent) {
DefaultMutableTreeNode newNode = new DefaultMutableTreeNode("Child");
parent.add(newNode);
nodesWereInserted(parent, new int[] {parent.getIndex(newNode) });
// fireTreeNodesInserted(this, getPathToRoot(parent),
// new int[] { parent.getIndex(newNode) },
// new Object[] { newNode });
return newNode;
}
}
/**
* Issue #230-swingx:
* JXTreeTable should fire property change on setTreeTableModel.
*
*
*
*/
public void testTreeTableModelIsBoundProperty() {
JXTreeTable treeTable = new JXTreeTable();
PropertyChangeReport report = new PropertyChangeReport();
treeTable.addPropertyChangeListener(report);
treeTable.setTreeTableModel(simpleTreeTableModel);
int allPropertyCount = report.getEventCount();
int treeTMPropertyCount = report.getEventCount("treeTableModel");
assertEquals("treeTable must have fired exactly one event for property treeTableModel",
1, treeTMPropertyCount);
assertEquals("treeTable must have fired event for property treeTableModel only",
allPropertyCount, treeTMPropertyCount);
// sanity: must not fire when setting to same
report.clear();
treeTable.setTreeTableModel(simpleTreeTableModel);
assertEquals("treeTable must not have fired", 0, report.getEventCount());
}
/**
* Issue #54: hidden columns not removed on setModel.
* sanity test (make sure nothing evil introduced in treeTable as
* compared to table)
*/
public void testRemoveAllColumsAfterModelChanged() {
JXTreeTable table = new JXTreeTable(new FileSystemModel());
TableColumnExt columnX = table.getColumnExt(1);
columnX.setVisible(false);
int columnCount = table.getColumnCount(true);
assertEquals("total column count must be same as model", table.getModel().getColumnCount(), columnCount);
assertEquals("visible column count must one less as total", columnCount - 1, table.getColumnCount());
table.setTreeTableModel(new FileSystemModel());
assertEquals("visible columns must be same as total",
table.getColumnCount(), table.getColumnCount(true));
}
/**
* Issue #241: treeModelListeners not removed.
*
*/
public void testRemoveListeners() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
treeTable.setTreeTableModel(new FileSystemModel());
assertEquals(0, ((AbstractTreeTableModel) treeTableModel).getTreeModelListeners().length);
}
public void testRowForPath() {
JXTreeTable treeTable = new JXTreeTable(simpleTreeTableModel);
// @todo - make sure we find an expandible row instead of hardcoding
int rowCount = treeTable.getRowCount();
int row = 2;
TreePath path = treeTable.getPathForRow(row);
assertEquals("original row must be retrieved", row, treeTable.getRowForPath(path));
treeTable.expandRow(row - 1);
// sanity assert
assertTrue("really expanded", treeTable.getRowCount() > rowCount);
TreePath expanded = treeTable.getPathForRow(row);
assertNotSame("path at original row must be different when expanded", path, expanded);
assertEquals("original row must be retrieved", row, treeTable.getRowForPath(expanded));
}
public void testPathForRowContract() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
assertNull("row < 0 must return null path", treeTable.getPathForRow(-1));
assertNull("row >= getRowCount must return null path", treeTable.getPathForRow(treeTable.getRowCount()));
}
public void testTableRowAtNegativePoint() {
JXTable treeTable = new JXTable(1, 4);
int negativeYRowHeight = - treeTable.getRowHeight();
int negativeYRowHeightPlusOne = negativeYRowHeight + 1;
int negativeYMinimal = -1;
assertEquals("negative y location rowheight " + negativeYRowHeight + " must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeight)));
assertEquals("negative y location " + negativeYRowHeightPlusOne +" must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeightPlusOne)));
assertEquals("minimal negative y location must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYMinimal)));
}
public void testTableRowAtOutsidePoint() {
JTable treeTable = new JTable(2, 4);
int negativeYRowHeight = (treeTable.getRowHeight()+ treeTable.getRowMargin()) * treeTable.getRowCount() ;
int negativeYRowHeightPlusOne = negativeYRowHeight - 1;
int negativeYMinimal = -1;
assertEquals("negative y location rowheight " + negativeYRowHeight + " must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeight)));
assertEquals("negative y location " + negativeYRowHeightPlusOne +" must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeightPlusOne)));
// assertEquals("minimal negative y location must return row -1",
// -1, treeTable.rowAtPoint(new Point(-1, negativeYMinimal)));
}
public void testPathForLocationContract() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
// this is actually a JTable rowAtPoint bug: falsely calculates
// row == 0 if - 1 >= y > - getRowHeight()
//assertEquals("location outside must return null path", null, treeTable.getPathForLocation(-1, -(treeTable.getRowHeight() - 1)));
int negativeYRowHeight = - treeTable.getRowHeight();
int negativeYRowHeightPlusOne = negativeYRowHeight + 1;
int negativeYMinimal = -1;
assertEquals("negative y location rowheight " + negativeYRowHeight + " must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeight)));
assertEquals("negative y location " + negativeYRowHeightPlusOne +" must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeightPlusOne)));
assertEquals("minimal negative y location must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYMinimal)));
}
/**
* Issue #151: renderer properties ignored after setting treeTableModel.
*
*/
public void testRendererProperties() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
// storing negates of properties
boolean expandsSelected = !treeTable.getExpandsSelectedPaths();
boolean scrollsOnExpand = !treeTable.getScrollsOnExpand();
boolean showRootHandles = !treeTable.getShowsRootHandles();
boolean rootVisible = !treeTable.isRootVisible();
// setting negates properties
treeTable.setExpandsSelectedPaths(expandsSelected);
treeTable.setScrollsOnExpand(scrollsOnExpand);
treeTable.setShowsRootHandles(showRootHandles);
treeTable.setRootVisible(rootVisible);
// assert negates are set - sanity assert
assertEquals("expand selected", expandsSelected, treeTable
.getExpandsSelectedPaths());
assertEquals("scrolls expand", scrollsOnExpand, treeTable
.getScrollsOnExpand());
assertEquals("shows handles", showRootHandles, treeTable
.getShowsRootHandles());
assertEquals("root visible", rootVisible, treeTable.isRootVisible());
// setting a new model
treeTable.setTreeTableModel(new DefaultTreeTableModel());
// assert negates are set
assertEquals("expand selected", expandsSelected, treeTable
.getExpandsSelectedPaths());
assertEquals("scrolls expand", scrollsOnExpand, treeTable
.getScrollsOnExpand());
assertEquals("shows handles", showRootHandles, treeTable
.getShowsRootHandles());
assertEquals("root visible", rootVisible, treeTable.isRootVisible());
}
/**
* Issue #148: line style client property not respected by renderer.
*
*/
public void testLineStyle() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
String propertyName = "JTree.lineStyle";
treeTable.putClientProperty(propertyName, "Horizontal");
JXTree renderer = (JXTree) treeTable.getCellRenderer(0, 0);
assertEquals(propertyName + " set on renderer", "Horizontal", renderer
.getClientProperty(propertyName));
}
/**
* sanity test: arbitrary client properties not passed to renderer.
*
*/
public void testArbitraryClientProperty() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
String propertyName = "someproperty";
treeTable.putClientProperty(propertyName, "Horizontal");
JXTree renderer = (JXTree) treeTable.getCellRenderer(0, 0);
assertNull(propertyName + " not set on renderer", renderer
.getClientProperty(propertyName));
}
// ------------------ init
protected void setUp() throws Exception {
super.setUp();
JXTree tree = new JXTree();
DefaultTreeModel treeModel = (DefaultTreeModel) tree.getModel();
simpleTreeTableModel = new DefaultTreeTableModel((TreeNode) treeModel.getRoot());
treeTableModel = new FileSystemModel();
}
}
|
src/test/org/jdesktop/swingx/JXTreeTableUnitTest.java
|
/*
* $Id$
*
* Copyright 2004 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*/
package org.jdesktop.swingx;
import java.awt.Point;
import java.awt.event.MouseEvent;
import javax.swing.JTable;
import javax.swing.SwingUtilities;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import org.jdesktop.swingx.table.TableColumnExt;
import org.jdesktop.swingx.treetable.AbstractTreeTableModel;
import org.jdesktop.swingx.treetable.DefaultTreeTableModel;
import org.jdesktop.swingx.treetable.FileSystemModel;
import org.jdesktop.swingx.treetable.TreeTableModel;
import org.jdesktop.swingx.util.PropertyChangeReport;
public class JXTreeTableUnitTest extends InteractiveTestCase {
protected TreeTableModel treeTableModel;
protected TreeTableModel simpleTreeTableModel;
public JXTreeTableUnitTest() {
super("JXTreeTable Unit Test");
}
/**
* #321-swingx: missing tree property toggleClickCount, largeModel.
*
*/
public void testToggleClickCount() {
JXTreeTable treeTable = new JXTreeTable(simpleTreeTableModel);
int clickCount = treeTable.getToggleClickCount();
// asserting documented default clickCount == 2
assertEquals("default clickCount", 2, clickCount);
int newClickCount = clickCount + 1;
treeTable.setToggleClickCount(newClickCount);
assertEquals("toggleClickCount must be changed",
newClickCount, treeTable.getToggleClickCount());
boolean largeModel = treeTable.isLargeModel();
assertFalse("initial largeModel", largeModel);
treeTable.setLargeModel(!largeModel);
assertTrue("largeModel property must be toggled", treeTable.isLargeModel());
}
/**
* Issue #168-jdnc: dnd enabled breaks node collapse/expand.
* testing auto-detection of dragHackEnabled.
*
*/
public void testDragHackFlagOn() {
JXTreeTable treeTable = new JXTreeTable(simpleTreeTableModel);
assertNull(treeTable.getClientProperty(JXTreeTable.DRAG_HACK_FLAG_KEY));
treeTable.getTreeTableHacker().expandOrCollapseNode(0, new MouseEvent(treeTable, 0, 0, 0, 0, 0, 1, false));
Boolean dragHackFlag = (Boolean) treeTable.getClientProperty(JXTreeTable.DRAG_HACK_FLAG_KEY);
assertNotNull(dragHackFlag);
assertTrue(dragHackFlag);
}
/**
* Issue #168-jdnc: dnd enabled breaks node collapse/expand.
* testing auto-detection of dragHackEnabled.
*
*/
public void testDragHackFlagOff() {
System.setProperty("sun.swing.enableImprovedDragGesture", "true");
JXTreeTable treeTable = new JXTreeTable(simpleTreeTableModel);
assertNull(treeTable.getClientProperty(JXTreeTable.DRAG_HACK_FLAG_KEY));
treeTable.getTreeTableHacker().expandOrCollapseNode(0, new MouseEvent(treeTable, 0, 0, 0, 0, 0, 1, false));
Boolean dragHackFlag = (Boolean) treeTable.getClientProperty(JXTreeTable.DRAG_HACK_FLAG_KEY);
assertNotNull(dragHackFlag);
assertFalse(dragHackFlag);
System.getProperties().remove("sun.swing.enableImprovedDragGesture");
}
/**
* loosely related to Issue #248-swingx: setRootVisible (true) after
* initial rootInvisible didn't show the root.
*
* this here is a sanity test that there is exactly one row, the problem
* is a missing visual update of the table.
*
*/
public void testEmptyModelInitiallyInvisibleRoot() {
final DefaultMutableTreeNode root = new DefaultMutableTreeNode();
final InsertTreeTableModel model = new InsertTreeTableModel(root);
final JXTreeTable treeTable = new JXTreeTable(model);
// sanity...
assertFalse(treeTable.isRootVisible());
assertEquals("no rows with invisible root", 0, treeTable.getRowCount());
treeTable.setRootVisible(true);
// sanity...
assertTrue(treeTable.isRootVisible());
assertEquals("one row with visible root", 1, treeTable.getRowCount());
}
/**
* Issue #247-swingx: update probs with insert node.
* The insert under a collapsed node fires a dataChanged on the table
* which results in the usual total "memory" loss (f.i. selection)
*
* The tree model is after setup is (see the bug report as well):
* root
* childA
* childB
*
* In the view childA is collapsed:
* root
* childA
*
*/
public void testInsertUnderCollapsedNode() {
final DefaultMutableTreeNode root = new DefaultMutableTreeNode();
final InsertTreeTableModel model = new InsertTreeTableModel(root);
DefaultMutableTreeNode childA = model.addChild(root);
final DefaultMutableTreeNode childB = model.addChild(childA);
final JXTreeTable treeTable = new JXTreeTable(model);
treeTable.setRootVisible(true);
// sanity...
assertEquals(2, treeTable.getRowCount());
final int selected = 1;
// select childA
treeTable.setRowSelectionInterval(selected, selected);
model.addChild(childB);
// need to invoke - the tableEvent is fired delayed as well
// Note: doing so will make the test _appear_ to pass, the
// assertion failure can be seen as an output only!
// any idea how to make the test fail?
SwingUtilities.invokeLater(new Runnable() {
public void run() {
int selectedAfterInsert = treeTable.getSelectedRow();
assertEquals(selected, selectedAfterInsert);
}
});
}
/**
* Model used to show insert update issue.
*/
public static class InsertTreeTableModel extends DefaultTreeTableModel {
private boolean rootIsFolder;
public InsertTreeTableModel(TreeNode root) {
super(root);
}
public InsertTreeTableModel(TreeNode root, boolean rootIsFolder) {
super(root);
this.rootIsFolder = rootIsFolder;
}
@Override
public boolean isLeaf(Object node) {
if (rootIsFolder && (node == getRoot())) {
return false;
}
return super.isLeaf(node);
}
public int getColumnCount() {
return 2;
}
public DefaultMutableTreeNode addChild(DefaultMutableTreeNode parent) {
DefaultMutableTreeNode newNode = new DefaultMutableTreeNode("Child");
parent.add(newNode);
nodesWereInserted(parent, new int[] {parent.getIndex(newNode) });
// fireTreeNodesInserted(this, getPathToRoot(parent),
// new int[] { parent.getIndex(newNode) },
// new Object[] { newNode });
return newNode;
}
}
/**
* Issue #230-swingx:
* JXTreeTable should fire property change on setTreeTableModel.
*
*
*
*/
public void testTreeTableModelIsBoundProperty() {
JXTreeTable treeTable = new JXTreeTable();
PropertyChangeReport report = new PropertyChangeReport();
treeTable.addPropertyChangeListener(report);
treeTable.setTreeTableModel(simpleTreeTableModel);
int allPropertyCount = report.getEventCount();
int treeTMPropertyCount = report.getEventCount("treeTableModel");
assertEquals("treeTable must have fired exactly one event for property treeTableModel",
1, treeTMPropertyCount);
assertEquals("treeTable must have fired event for property treeTableModel only",
allPropertyCount, treeTMPropertyCount);
// sanity: must not fire when setting to same
report.clear();
treeTable.setTreeTableModel(simpleTreeTableModel);
assertEquals("treeTable must not have fired", 0, report.getEventCount());
}
/**
* Issue #54: hidden columns not removed on setModel.
* sanity test (make sure nothing evil introduced in treeTable as
* compared to table)
*/
public void testRemoveAllColumsAfterModelChanged() {
JXTreeTable table = new JXTreeTable(new FileSystemModel());
TableColumnExt columnX = table.getColumnExt(1);
columnX.setVisible(false);
int columnCount = table.getColumnCount(true);
assertEquals("total column count must be same as model", table.getModel().getColumnCount(), columnCount);
assertEquals("visible column count must one less as total", columnCount - 1, table.getColumnCount());
table.setTreeTableModel(new FileSystemModel());
assertEquals("visible columns must be same as total",
table.getColumnCount(), table.getColumnCount(true));
}
/**
* Issue #241: treeModelListeners not removed.
*
*/
public void testRemoveListeners() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
treeTable.setTreeTableModel(new FileSystemModel());
assertEquals(0, ((AbstractTreeTableModel) treeTableModel).getTreeModelListeners().length);
}
public void testRowForPath() {
JXTreeTable treeTable = new JXTreeTable(simpleTreeTableModel);
// @todo - make sure we find an expandible row instead of hardcoding
int rowCount = treeTable.getRowCount();
int row = 2;
TreePath path = treeTable.getPathForRow(row);
assertEquals("original row must be retrieved", row, treeTable.getRowForPath(path));
treeTable.expandRow(row - 1);
// sanity assert
assertTrue("really expanded", treeTable.getRowCount() > rowCount);
TreePath expanded = treeTable.getPathForRow(row);
assertNotSame("path at original row must be different when expanded", path, expanded);
assertEquals("original row must be retrieved", row, treeTable.getRowForPath(expanded));
}
public void testPathForRowContract() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
assertNull("row < 0 must return null path", treeTable.getPathForRow(-1));
assertNull("row >= getRowCount must return null path", treeTable.getPathForRow(treeTable.getRowCount()));
}
public void testTableRowAtNegativePoint() {
JXTable treeTable = new JXTable(1, 4);
int negativeYRowHeight = - treeTable.getRowHeight();
int negativeYRowHeightPlusOne = negativeYRowHeight + 1;
int negativeYMinimal = -1;
assertEquals("negative y location rowheight " + negativeYRowHeight + " must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeight)));
assertEquals("negative y location " + negativeYRowHeightPlusOne +" must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeightPlusOne)));
assertEquals("minimal negative y location must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYMinimal)));
}
public void testTableRowAtOutsidePoint() {
JTable treeTable = new JTable(2, 4);
int negativeYRowHeight = (treeTable.getRowHeight()+ treeTable.getRowMargin()) * treeTable.getRowCount() ;
int negativeYRowHeightPlusOne = negativeYRowHeight - 1;
int negativeYMinimal = -1;
assertEquals("negative y location rowheight " + negativeYRowHeight + " must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeight)));
assertEquals("negative y location " + negativeYRowHeightPlusOne +" must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeightPlusOne)));
// assertEquals("minimal negative y location must return row -1",
// -1, treeTable.rowAtPoint(new Point(-1, negativeYMinimal)));
}
public void testPathForLocationContract() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
// this is actually a JTable rowAtPoint bug: falsely calculates
// row == 0 if - 1 >= y > - getRowHeight()
//assertEquals("location outside must return null path", null, treeTable.getPathForLocation(-1, -(treeTable.getRowHeight() - 1)));
int negativeYRowHeight = - treeTable.getRowHeight();
int negativeYRowHeightPlusOne = negativeYRowHeight + 1;
int negativeYMinimal = -1;
assertEquals("negative y location rowheight " + negativeYRowHeight + " must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeight)));
assertEquals("negative y location " + negativeYRowHeightPlusOne +" must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYRowHeightPlusOne)));
assertEquals("minimal negative y location must return row -1",
-1, treeTable.rowAtPoint(new Point(-1, negativeYMinimal)));
}
/**
* Issue #151: renderer properties ignored after setting treeTableModel.
*
*/
public void testRendererProperties() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
// storing negates of properties
boolean expandsSelected = !treeTable.getExpandsSelectedPaths();
boolean scrollsOnExpand = !treeTable.getScrollsOnExpand();
boolean showRootHandles = !treeTable.getShowsRootHandles();
boolean rootVisible = !treeTable.isRootVisible();
// setting negates properties
treeTable.setExpandsSelectedPaths(expandsSelected);
treeTable.setScrollsOnExpand(scrollsOnExpand);
treeTable.setShowsRootHandles(showRootHandles);
treeTable.setRootVisible(rootVisible);
// assert negates are set - sanity assert
assertEquals("expand selected", expandsSelected, treeTable
.getExpandsSelectedPaths());
assertEquals("scrolls expand", scrollsOnExpand, treeTable
.getScrollsOnExpand());
assertEquals("shows handles", showRootHandles, treeTable
.getShowsRootHandles());
assertEquals("root visible", rootVisible, treeTable.isRootVisible());
// setting a new model
treeTable.setTreeTableModel(new DefaultTreeTableModel());
// assert negates are set
assertEquals("expand selected", expandsSelected, treeTable
.getExpandsSelectedPaths());
assertEquals("scrolls expand", scrollsOnExpand, treeTable
.getScrollsOnExpand());
assertEquals("shows handles", showRootHandles, treeTable
.getShowsRootHandles());
assertEquals("root visible", rootVisible, treeTable.isRootVisible());
}
/**
* Issue #148: line style client property not respected by renderer.
*
*/
public void testLineStyle() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
String propertyName = "JTree.lineStyle";
treeTable.putClientProperty(propertyName, "Horizontal");
JXTree renderer = (JXTree) treeTable.getCellRenderer(0, 0);
assertEquals(propertyName + " set on renderer", "Horizontal", renderer
.getClientProperty(propertyName));
}
/**
* sanity test: arbitrary client properties not passed to renderer.
*
*/
public void testArbitraryClientProperty() {
JXTreeTable treeTable = new JXTreeTable(treeTableModel);
String propertyName = "someproperty";
treeTable.putClientProperty(propertyName, "Horizontal");
JXTree renderer = (JXTree) treeTable.getCellRenderer(0, 0);
assertNull(propertyName + " not set on renderer", renderer
.getClientProperty(propertyName));
}
// ------------------ init
protected void setUp() throws Exception {
super.setUp();
JXTree tree = new JXTree();
DefaultTreeModel treeModel = (DefaultTreeModel) tree.getModel();
simpleTreeTableModel = new DefaultTreeTableModel((TreeNode) treeModel.getRoot());
treeTableModel = new FileSystemModel();
}
}
|
added test for rowHeights
|
src/test/org/jdesktop/swingx/JXTreeTableUnitTest.java
|
added test for rowHeights
|
|
Java
|
lgpl-2.1
|
b939b0ae648ad81bccca6642e6f5e65e8eebc95d
| 0
|
sewe/spotbugs,sewe/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,KengoTODA/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,sewe/spotbugs,sewe/spotbugs,spotbugs/spotbugs,KengoTODA/spotbugs,johnscancella/spotbugs,johnscancella/spotbugs,KengoTODA/spotbugs,KengoTODA/spotbugs
|
/*
* Bytecode Analysis Framework
* Copyright (C) 2003,2004 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs.ba.vna;
import java.util.Arrays;
import java.util.HashMap;
import org.apache.bcel.generic.InstructionHandle;
import edu.umd.cs.findbugs.annotations.SuppressWarnings;
/**
* A cache mapping instructions and input values to the output values they
* produce. We must always produce the same output given identical
* input, or else value number analysis will not terminate.
*
* @author David Hovemeyer
* @see ValueNumberAnalysis
*/
public class ValueNumberCache {
private static final boolean DEBUG = Boolean.getBoolean("vn.debug");
/**
* An entry in the cache.
* It represents an instruction with specific input values.
*/
public static class Entry {
public final InstructionHandle handle;
public final ValueNumber[] inputValueList;
private int cachedHashCode;
@SuppressWarnings("EI2")
public Entry(InstructionHandle handle, ValueNumber[] inputValueList) {
this.handle = handle;
this.inputValueList = inputValueList;
this.cachedHashCode = 0;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof Entry))
return false;
Entry other = (Entry) o;
if (handle.getPosition() != other.handle.getPosition())
return false;
ValueNumber[] myList = inputValueList;
ValueNumber[] otherList = other.inputValueList;
if (myList.length != otherList.length)
return false;
for (int i = 0; i < myList.length; ++i)
if (!myList[i].equals(otherList[i]))
return false;
return true;
}
@Override
public int hashCode() {
if (cachedHashCode == 0) {
int code = handle.getPosition();
for (ValueNumber aInputValueList : inputValueList) {
code *= 101;
ValueNumber valueNumber = aInputValueList;
code += valueNumber.hashCode();
}
cachedHashCode = code;
}
return cachedHashCode;
}
@Override
public String toString() {
StringBuffer buf = new StringBuffer();
buf.append(handle.toString());
for (ValueNumber aInputValueList : inputValueList) {
buf.append(", ");
buf.append(aInputValueList.toString());
}
return buf.toString();
}
}
/**
* Map of entries to output values.
*/
private HashMap<Entry, ValueNumber[]> entryToOutputMap = new HashMap<Entry, ValueNumber[]>();
/**
* Look up cached output values for given entry.
*
* @param entry the entry
* @return the list of output values, or null if there is no matching entry
* in the cache
*/
public ValueNumber[] lookupOutputValues(Entry entry) {
if (DEBUG) System.out.println("VN cache lookup: " + entry);
ValueNumber[] result = entryToOutputMap.get(entry);
if (DEBUG) System.out.println(" result ==> " + Arrays.toString(result));
return result;
}
/**
* Add output values for given entry.
* Assumes that lookupOutputValues() has determined that the entry
* is not in the cache.
*
* @param entry the entry
* @param outputValueList the list of output values produced
* by the entry's instruction and input values
*/
public void addOutputValues(Entry entry, ValueNumber[] outputValueList) {
ValueNumber[] old = entryToOutputMap.put(entry, outputValueList);
if (old != null)
throw new IllegalStateException("overwriting output values for entry!");
}
}
// vim:ts=4
|
findbugs/src/java/edu/umd/cs/findbugs/ba/vna/ValueNumberCache.java
|
/*
* Bytecode Analysis Framework
* Copyright (C) 2003,2004 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs.ba.vna;
import java.util.HashMap;
import org.apache.bcel.generic.InstructionHandle;
import edu.umd.cs.findbugs.annotations.SuppressWarnings;
/**
* A cache mapping instructions and input values to the output values they
* produce. We must always produce the same output given identical
* input, or else value number analysis will not terminate.
*
* @author David Hovemeyer
* @see ValueNumberAnalysis
*/
public class ValueNumberCache {
private static final boolean DEBUG = Boolean.getBoolean("vn.debug");
/**
* An entry in the cache.
* It represents an instruction with specific input values.
*/
public static class Entry {
public final InstructionHandle handle;
public final ValueNumber[] inputValueList;
private int cachedHashCode;
@SuppressWarnings("EI2")
public Entry(InstructionHandle handle, ValueNumber[] inputValueList) {
this.handle = handle;
this.inputValueList = inputValueList;
this.cachedHashCode = 0;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof Entry))
return false;
Entry other = (Entry) o;
if (handle.getPosition() != other.handle.getPosition())
return false;
ValueNumber[] myList = inputValueList;
ValueNumber[] otherList = other.inputValueList;
if (myList.length != otherList.length)
return false;
for (int i = 0; i < myList.length; ++i)
if (!myList[i].equals(otherList[i]))
return false;
return true;
}
@Override
public int hashCode() {
if (cachedHashCode == 0) {
int code = handle.getPosition();
for (ValueNumber aInputValueList : inputValueList) {
code *= 101;
ValueNumber valueNumber = aInputValueList;
code += valueNumber.hashCode();
}
cachedHashCode = code;
}
return cachedHashCode;
}
@Override
public String toString() {
StringBuffer buf = new StringBuffer();
buf.append(handle.toString());
for (ValueNumber aInputValueList : inputValueList) {
buf.append(", ");
buf.append(aInputValueList.toString());
}
return buf.toString();
}
}
/**
* Map of entries to output values.
*/
private HashMap<Entry, ValueNumber[]> entryToOutputMap = new HashMap<Entry, ValueNumber[]>();
/**
* Look up cached output values for given entry.
*
* @param entry the entry
* @return the list of output values, or null if there is no matching entry
* in the cache
*/
public ValueNumber[] lookupOutputValues(Entry entry) {
if (DEBUG) System.out.println("VN cache lookup: " + entry);
ValueNumber[] result = entryToOutputMap.get(entry);
if (DEBUG) System.out.println(" result ==> " + result);
return result;
}
/**
* Add output values for given entry.
* Assumes that lookupOutputValues() has determined that the entry
* is not in the cache.
*
* @param entry the entry
* @param outputValueList the list of output values produced
* by the entry's instruction and input values
*/
public void addOutputValues(Entry entry, ValueNumber[] outputValueList) {
ValueNumber[] old = entryToOutputMap.put(entry, outputValueList);
if (old != null)
throw new IllegalStateException("overwriting output values for entry!");
}
}
// vim:ts=4
|
fix bug found by FindBugs
git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@5771 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
|
findbugs/src/java/edu/umd/cs/findbugs/ba/vna/ValueNumberCache.java
|
fix bug found by FindBugs
|
|
Java
|
lgpl-2.1
|
e90722e183c0bf41dcb3e291eae02afd5f657873
| 0
|
CreativeMD/CreativeCore
|
package team.creative.creativecore.common.gui;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Consumer;
import org.apache.commons.lang3.ArrayUtils;
import com.mojang.blaze3d.systems.RenderSystem;
import com.mojang.blaze3d.vertex.PoseStack;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
import team.creative.creativecore.common.gui.event.GuiControlChangedEvent;
import team.creative.creativecore.common.gui.event.GuiControlClickEvent;
import team.creative.creativecore.common.gui.event.GuiEvent;
import team.creative.creativecore.common.gui.event.GuiEventManager;
import team.creative.creativecore.common.gui.event.GuiTooltipEvent;
import team.creative.creativecore.common.gui.flow.GuiFlow;
import team.creative.creativecore.common.gui.packet.LayerOpenPacket;
import team.creative.creativecore.common.gui.style.ControlFormatting;
import team.creative.creativecore.common.util.math.geo.Rect;
public class GuiParent extends GuiControl implements IGuiParent, Iterable<GuiChildControl> {
private GuiEventManager eventManager;
protected List<GuiChildControl> controls = new CopyOnWriteArrayList<>();
protected List<GuiChildControl> hoverControls = new CopyOnWriteArrayList<>();
public GuiFlow flow;
public Align align = Align.LEFT;
public VAlign valign = VAlign.TOP;
public int spacing = 2;
public GuiParent(String name, GuiFlow flow) {
super(name);
this.flow = flow;
}
public GuiParent(String name, GuiFlow flow, int width, int height) {
super(name, width, height);
this.flow = flow;
}
public GuiParent(String name, GuiFlow flow, Align align, VAlign valign) {
this(name, flow);
this.align = align;
this.valign = valign;
}
public GuiParent(String name, GuiFlow flow, int width, int height, Align align, VAlign valign) {
this(name, flow, width, height);
this.align = align;
this.valign = valign;
}
public GuiParent(String name, GuiFlow flow, int width, int height, VAlign valign) {
this(name, flow, width, height, Align.LEFT, valign);
}
public GuiParent(String name) {
this(name, GuiFlow.STACK_X);
}
public GuiParent() {
this("");
}
public GuiParent(GuiFlow flow) {
this("", flow);
}
@Override
public boolean isClient() {
return getParent().isClient();
}
public double getScaleFactor() {
return 1;
}
public double getOffsetY() {
return 0;
}
public double getOffsetX() {
return 0;
}
public GuiParent setAlign(Align align) {
this.align = align;
return this;
}
public GuiParent setVAlign(VAlign valign) {
this.valign = valign;
return this;
}
@Override
public boolean isExpandableX() {
if (super.isExpandableX())
return true;
for (GuiChildControl child : controls)
if (child.control.isExpandableX())
return true;
return false;
}
@Override
public boolean isExpandableY() {
if (super.isExpandableY())
return true;
for (GuiChildControl child : controls)
if (child.control.isExpandableY())
return true;
return false;
}
private static GuiControl get(String name, List<GuiChildControl> collection) {
for (int i = 0; i < collection.size(); i++) {
GuiControl control = collection.get(i).control;
if (control.name.equalsIgnoreCase(name))
return control;
else if (control instanceof GuiParent) {
if (control.name.isBlank()) {
GuiControl result = ((GuiParent) control).get(name);
if (result != null)
return result;
} else if (name.startsWith(control.name + "."))
return ((GuiParent) control).get(name.substring(control.name.length() + 1));
}
}
return null;
}
public GuiControl get(String name) {
if (name.isBlank())
return this;
GuiControl result = get(name, controls);
if (result != null)
return result;
return get(name, hoverControls);
}
public <T extends GuiControl> T get(String name, Class<T> clazz) {
GuiControl result = get(name);
if (clazz.isInstance(result))
return (T) result;
return null;
}
public boolean has(String name) {
return get(name) != null;
}
public GuiChildControl add(GuiControl control) {
control.setParent(this);
GuiChildControl child = new GuiChildControl(control);
controls.add(child);
return child;
}
public GuiChildControl addHover(GuiControl control) {
control.setParent(this);
GuiChildControl child = new GuiChildControl(control);
hoverControls.add(child);
return child;
}
public boolean remove(GuiChildControl control) {
return controls.remove(control) || hoverControls.remove(control);
}
public GuiChildControl remove(GuiControl control) {
for (int i = 0; i < controls.size(); i++) {
GuiChildControl child = controls.get(i);
if (child.control == control) {
controls.remove(i);
return child;
}
}
for (int i = 0; i < hoverControls.size(); i++) {
GuiChildControl child = hoverControls.get(i);
if (child.control == control) {
hoverControls.remove(i);
return child;
}
}
return null;
}
public void remove(String... include) {
controls.removeIf((x) -> ArrayUtils.contains(include, x.control.name));
hoverControls.removeIf((x) -> ArrayUtils.contains(include, x.control.name));
}
public void removeExclude(String... exclude) {
controls.removeIf((x) -> !ArrayUtils.contains(exclude, x.control.name));
hoverControls.removeIf((x) -> !ArrayUtils.contains(exclude, x.control.name));
}
public boolean isEmpty() {
return controls.isEmpty() && hoverControls.isEmpty();
}
public void clear() {
controls.clear();
hoverControls.clear();
}
public int size() {
return controls.size() + hoverControls.size();
}
@Override
public Iterator<GuiChildControl> iterator() {
return new Iterator<GuiChildControl>() {
Iterator<GuiChildControl> itr = hoverControls.iterator();
boolean first = true;
@Override
public boolean hasNext() {
if (itr.hasNext())
return true;
if (first) {
itr = controls.iterator();
first = false;
}
return itr.hasNext();
}
@Override
public GuiChildControl next() {
return itr.next();
}
};
}
protected void renderContent(PoseStack matrix, Rect contentRect, Rect realContentRect, int mouseX, int mouseY, List<GuiChildControl> collection, double scale, double xOffset, double yOffset, boolean hover) {
for (int i = collection.size() - 1; i >= 0; i--) {
GuiChildControl child = collection.get(i);
GuiControl control = child.control;
if (!control.visible)
continue;
Rect controlRect = contentRect.child(child.rect, scale, xOffset, yOffset);
Rect realRect = realContentRect.intersection(controlRect);
if (realRect != null || hover) {
if (hover)
RenderSystem.disableScissor();
else
realRect.scissor();
matrix.pushPose();
matrix.translate((child.getX() + xOffset) * scale, (child.getY() + yOffset) * scale, 10);
control.render(matrix, child, controlRect, hover ? controlRect : realRect, mouseX, mouseY);
matrix.popPose();
}
}
}
@Override
@Environment(EnvType.CLIENT)
@OnlyIn(Dist.CLIENT)
protected void renderContent(PoseStack matrix, GuiChildControl control, Rect contentRect, Rect realContentRect, int mouseX, int mouseY) {
if (realContentRect == null)
return;
double scale = getScaleFactor();
double xOffset = getOffsetX();
double yOffset = getOffsetY();
renderContent(matrix, contentRect, realContentRect, mouseX, mouseY, controls, scale, xOffset, yOffset, false);
renderContent(matrix, contentRect, realContentRect, mouseX, mouseY, hoverControls, scale, xOffset, yOffset, true);
super.renderContent(matrix, control, contentRect, realContentRect, mouseX, mouseY);
}
@Override
@Environment(EnvType.CLIENT)
@OnlyIn(Dist.CLIENT)
protected void renderContent(PoseStack matrix, GuiChildControl control, Rect rect, int mouseX, int mouseY) {}
@Override
public boolean isContainer() {
return getParent().isContainer();
}
@Override
public void init() {
for (GuiChildControl child : this)
child.control.init();
}
@Override
public void closed() {
for (GuiChildControl child : this)
child.control.closed();
}
@Override
public void tick() {
for (GuiChildControl child : this)
child.control.tick();
}
@Override
public void closeTopLayer() {
getParent().closeTopLayer();
}
public void closeThisLayer() {
getParent().closeLayer(getLayer());
}
@Override
public void closeLayer(GuiLayer layer) {
getParent().closeLayer(layer);
}
@Override
public GuiLayer openLayer(LayerOpenPacket packet) {
return getParent().openLayer(packet);
}
@Override
public GuiTooltipEvent getTooltipEvent(Rect rect, double x, double y) {
GuiTooltipEvent event = super.getTooltipEvent(rect, x, y);
if (event != null)
return event;
x *= getScaleFactor();
y *= getScaleFactor();
int offset = getContentOffset();
x += -getOffsetX() - offset;
y += -getOffsetY() - offset;
for (GuiChildControl child : this)
if (child.control.isInteractable() && child.isMouseOver(x, y)) {
event = child.control.getTooltipEvent(child.rect, x - child.getX(), y - child.getY());
if (event != null)
return event;
}
return null;
}
@Override
public boolean testForDoubleClick(Rect rect, double x, double y) {
x *= getScaleFactor();
y *= getScaleFactor();
int offset = getContentOffset();
x += -getOffsetX() - offset;
y += -getOffsetY() - offset;
for (GuiChildControl child : this)
if (child.control.isInteractable() && child.control.testForDoubleClick(child.rect, x - child.getX(), y - child.getY()))
return true;
return false;
}
@Override
public void mouseMoved(Rect rect, double x, double y) {
x *= getScaleFactor();
y *= getScaleFactor();
int offset = getContentOffset();
x += -getOffsetX() - offset;
y += -getOffsetY() - offset;
for (GuiChildControl child : this)
if (child.control.isInteractable())
child.control.mouseMoved(child.rect, x - child.getX(), y - child.getY());
}
@Override
public boolean mouseClicked(Rect rect, double x, double y, int button) {
x *= getScaleFactor();
y *= getScaleFactor();
int offset = getContentOffset();
x += -getOffsetX() - offset;
y += -getOffsetY() - offset;
boolean result = false;
for (GuiChildControl child : this)
if (!result && child.control.isInteractable() && child.isMouseOver(x, y) && child.control.mouseClicked(child.rect, x - child.getX(), y - child.getY(), button)) {
raiseEvent(new GuiControlClickEvent(child.control, button, false));
result = true;
} else
child.control.looseFocus();
return result;
}
@Override
public boolean mouseDoubleClicked(Rect rect, double x, double y, int button) {
x *= getScaleFactor();
y *= getScaleFactor();
int offset = getContentOffset();
x += -getOffsetX() - offset;
y += -getOffsetY() - offset;
boolean result = false;
for (GuiChildControl child : this)
if (!result && child.control.isInteractable() && child.isMouseOver(x, y) && child.control.mouseDoubleClicked(child.rect, x - child.getX(), y - child.getY(), button)) {
raiseEvent(new GuiControlClickEvent(child.control, button, false));
result = true;
} else
child.control.looseFocus();
return result;
}
@Override
public void mouseReleased(Rect rect, double x, double y, int button) {
x *= getScaleFactor();
y *= getScaleFactor();
int offset = getContentOffset();
x += -getOffsetX() - offset;
y += -getOffsetY() - offset;
for (GuiChildControl child : this)
if (child.control.isInteractable())
child.control.mouseReleased(child.rect, x - child.getX(), y - child.getY(), button);
}
@Override
public void mouseDragged(Rect rect, double x, double y, int button, double dragX, double dragY, double time) {
x *= getScaleFactor();
y *= getScaleFactor();
int offset = getContentOffset();
x += -getOffsetX() - offset;
y += -getOffsetY() - offset;
for (GuiChildControl child : this)
if (child.control.isInteractable())
child.control.mouseDragged(child.rect, x - child.getX(), y - child.getY(), button, dragX, dragY, time);
}
@Override
public boolean mouseScrolled(Rect rect, double x, double y, double delta) {
x *= getScaleFactor();
y *= getScaleFactor();
int offset = getContentOffset();
x += -getOffsetX() - offset;
y += -getOffsetY() - offset;
for (GuiChildControl child : this)
if (child.control.isInteractable() && child.isMouseOver(x, y) && child.control.mouseScrolled(child.rect, x - child.getX(), y - child.getY(), delta))
return true;
return false;
}
@Override
public boolean keyPressed(int keyCode, int scanCode, int modifiers) {
for (GuiChildControl child : this)
if (child.control.isInteractable() && child.control.keyPressed(keyCode, scanCode, modifiers))
return true;
return false;
}
@Override
public boolean keyReleased(int keyCode, int scanCode, int modifiers) {
for (GuiChildControl child : this)
if (child.control.isInteractable() && child.control.keyReleased(keyCode, scanCode, modifiers))
return true;
return false;
}
@Override
public boolean charTyped(char codePoint, int modifiers) {
for (GuiChildControl child : this)
if (child.control.isInteractable() && child.control.charTyped(codePoint, modifiers))
return true;
return false;
}
@Override
public void looseFocus() {
for (GuiChildControl child : this)
child.control.looseFocus();
}
@Override
public void raiseEvent(GuiEvent event) {
if (getParent() == null)
return;
if (eventManager != null)
eventManager.raiseEvent(event);
if (!event.isCanceled())
getParent().raiseEvent(event);
}
public void registerEventClick(Consumer<GuiControlClickEvent> consumer) {
registerEvent(GuiControlClickEvent.class, consumer);
}
public void registerEventChanged(Consumer<GuiControlChangedEvent> consumer) {
registerEvent(GuiControlChangedEvent.class, consumer);
}
public <T extends GuiEvent> void registerEvent(Class<T> clazz, Consumer<T> action) {
if (eventManager == null)
eventManager = new GuiEventManager();
eventManager.registerEvent(clazz, action);
}
@Override
public String getNestedName() {
if (name.isBlank()) {
if (getParent() instanceof GuiControl)
return ((GuiControl) getParent()).getNestedName();
return "";
}
return super.getNestedName();
}
@Override
public void flowX(int width, int preferred) {
flow.flowX(controls, spacing, align, width, preferred);
}
@Override
public void flowY(int height, int preferred) {
flow.flowY(controls, spacing, valign, height, preferred);
}
@Override
public int getMinWidth() {
return flow.minWidth(controls, spacing);
}
@Override
protected int preferredWidth() {
return flow.preferredWidth(controls, spacing);
}
@Override
public int getMinHeight() {
return flow.minHeight(controls, spacing);
}
@Override
protected int preferredHeight() {
return flow.preferredHeight(controls, spacing);
}
@Override
public ControlFormatting getControlFormatting() {
return ControlFormatting.TRANSPARENT;
}
}
|
src/main/java/team/creative/creativecore/common/gui/GuiParent.java
|
package team.creative.creativecore.common.gui;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Consumer;
import org.apache.commons.lang3.ArrayUtils;
import com.mojang.blaze3d.systems.RenderSystem;
import com.mojang.blaze3d.vertex.PoseStack;
import net.fabricmc.api.EnvType;
import net.fabricmc.api.Environment;
import net.minecraftforge.api.distmarker.Dist;
import net.minecraftforge.api.distmarker.OnlyIn;
import team.creative.creativecore.common.gui.event.GuiControlChangedEvent;
import team.creative.creativecore.common.gui.event.GuiControlClickEvent;
import team.creative.creativecore.common.gui.event.GuiEvent;
import team.creative.creativecore.common.gui.event.GuiEventManager;
import team.creative.creativecore.common.gui.event.GuiTooltipEvent;
import team.creative.creativecore.common.gui.flow.GuiFlow;
import team.creative.creativecore.common.gui.packet.LayerOpenPacket;
import team.creative.creativecore.common.gui.style.ControlFormatting;
import team.creative.creativecore.common.util.math.geo.Rect;
public class GuiParent extends GuiControl implements IGuiParent, Iterable<GuiChildControl> {
private GuiEventManager eventManager;
protected List<GuiChildControl> controls = new CopyOnWriteArrayList<>();
protected List<GuiChildControl> hoverControls = new CopyOnWriteArrayList<>();
public GuiFlow flow;
public Align align = Align.LEFT;
public VAlign valign = VAlign.TOP;
public int spacing = 2;
public GuiParent(String name, GuiFlow flow) {
super(name);
this.flow = flow;
}
public GuiParent(String name, GuiFlow flow, int width, int height) {
super(name, width, height);
this.flow = flow;
}
public GuiParent(String name, GuiFlow flow, Align align, VAlign valign) {
this(name, flow);
this.align = align;
this.valign = valign;
}
public GuiParent(String name, GuiFlow flow, int width, int height, Align align, VAlign valign) {
this(name, flow, width, height);
this.align = align;
this.valign = valign;
}
public GuiParent(String name, GuiFlow flow, int width, int height, VAlign valign) {
this(name, flow, width, height, Align.LEFT, valign);
}
public GuiParent(String name) {
this(name, GuiFlow.STACK_X);
}
public GuiParent() {
this("");
}
public GuiParent(GuiFlow flow) {
this("", flow);
}
@Override
public boolean isClient() {
return getParent().isClient();
}
public double getScaleFactor() {
return 1;
}
public double getOffsetY() {
return 0;
}
public double getOffsetX() {
return 0;
}
public GuiParent setAlign(Align align) {
this.align = align;
return this;
}
public GuiParent setVAlign(VAlign valign) {
this.valign = valign;
return this;
}
@Override
public boolean isExpandableX() {
if (super.isExpandableX())
return true;
for (GuiChildControl child : controls)
if (child.control.isExpandableX())
return true;
return false;
}
@Override
public boolean isExpandableY() {
if (super.isExpandableY())
return true;
for (GuiChildControl child : controls)
if (child.control.isExpandableY())
return true;
return false;
}
private static GuiControl get(String name, List<GuiChildControl> collection) {
for (int i = 0; i < collection.size(); i++) {
GuiControl control = collection.get(i).control;
if (control.name.equalsIgnoreCase(name))
return control;
else if (control instanceof GuiParent) {
if (control.name.isBlank()) {
GuiControl result = ((GuiParent) control).get(name);
if (result != null)
return result;
} else if (name.startsWith(control.name + "."))
return ((GuiParent) control).get(name.substring(control.name.length() + 1));
}
}
return null;
}
public GuiControl get(String name) {
GuiControl result = get(name, controls);
if (result != null)
return result;
return get(name, hoverControls);
}
public <T extends GuiControl> T get(String name, Class<T> clazz) {
GuiControl result = get(name);
if (clazz.isInstance(result))
return (T) result;
return null;
}
public boolean has(String name) {
return get(name) != null;
}
/**
 * Adds a control to the regular child list and wires its parent reference.
 *
 * @return the wrapper created for the control
 */
public GuiChildControl add(GuiControl control) {
    control.setParent(this);
    GuiChildControl child = new GuiChildControl(control);
    controls.add(child);
    return child;
}

/**
 * Adds a control to the hover list. Hover children are rendered above the
 * regular children and without scissoring (see renderContent).
 *
 * @return the wrapper created for the control
 */
public GuiChildControl addHover(GuiControl control) {
    control.setParent(this);
    GuiChildControl child = new GuiChildControl(control);
    hoverControls.add(child);
    return child;
}

/** Removes the wrapper from whichever list contains it. */
public boolean remove(GuiChildControl control) {
    return controls.remove(control) || hoverControls.remove(control);
}

/**
 * Removes the wrapper holding the given control (identity comparison).
 * Regular children are searched before hover children.
 *
 * @return the removed wrapper, or null if the control was not a child
 */
public GuiChildControl remove(GuiControl control) {
    for (int i = 0; i < controls.size(); i++) {
        GuiChildControl child = controls.get(i);
        if (child.control == control) {
            controls.remove(i);
            return child;
        }
    }
    for (int i = 0; i < hoverControls.size(); i++) {
        GuiChildControl child = hoverControls.get(i);
        if (child.control == control) {
            hoverControls.remove(i);
            return child;
        }
    }
    return null;
}

/** Removes every child whose name appears in the given list. */
public void remove(String... include) {
    controls.removeIf((x) -> ArrayUtils.contains(include, x.control.name));
    hoverControls.removeIf((x) -> ArrayUtils.contains(include, x.control.name));
}

/** Removes every child except those whose names appear in the given list. */
public void removeExclude(String... exclude) {
    controls.removeIf((x) -> !ArrayUtils.contains(exclude, x.control.name));
    hoverControls.removeIf((x) -> !ArrayUtils.contains(exclude, x.control.name));
}
/** @return true if this parent holds no children in either list. */
public boolean isEmpty() {
    return controls.isEmpty() && hoverControls.isEmpty();
}

/** Removes all regular and hover children. */
public void clear() {
    controls.clear();
    hoverControls.clear();
}

/** @return total number of children across both lists. */
public int size() {
    return controls.size() + hoverControls.size();
}

/**
 * Iterates hover children first, then regular children, as one sequence.
 * hasNext() lazily swaps to the regular list once the hover list is
 * exhausted; calling next() without hasNext() would therefore skip the
 * regular list. NOTE(review): the in-class loops all use for-each (which
 * calls hasNext()), so this is safe here — confirm before driving the
 * iterator manually.
 */
@Override
public Iterator<GuiChildControl> iterator() {
    return new Iterator<GuiChildControl>() {

        Iterator<GuiChildControl> itr = hoverControls.iterator(); // start with hover children
        boolean first = true; // still on the hover iterator

        @Override
        public boolean hasNext() {
            if (itr.hasNext())
                return true;
            if (first) {
                // Hover list exhausted: switch to the regular child list (once).
                itr = controls.iterator();
                first = false;
            }
            return itr.hasNext();
        }

        @Override
        public GuiChildControl next() {
            return itr.next();
        }
    };
}
/**
 * Renders one child list. Iterates in reverse so the child at index 0 is
 * drawn last and therefore appears on top.
 *
 * @param hover true for the hover list: scissoring is disabled and the raw
 *        control rect is used, so hover children may draw outside the
 *        clipped content area
 */
protected void renderContent(PoseStack matrix, Rect contentRect, Rect realContentRect, int mouseX, int mouseY, List<GuiChildControl> collection, double scale, double xOffset, double yOffset, boolean hover) {
    for (int i = collection.size() - 1; i >= 0; i--) {
        GuiChildControl child = collection.get(i);
        GuiControl control = child.control;
        if (!control.visible)
            continue;
        Rect controlRect = contentRect.child(child.rect, scale, xOffset, yOffset);
        Rect realRect = realContentRect.intersection(controlRect);
        // Children entirely outside the visible area are skipped, except
        // hover children, which are always drawn.
        if (realRect != null || hover) {
            if (hover)
                RenderSystem.disableScissor();
            else
                realRect.scissor(); // realRect is non-null in this branch
            matrix.pushPose();
            // z offset of 10 keeps the child above its parent's layer
            matrix.translate((child.getX() + xOffset) * scale, (child.getY() + yOffset) * scale, 10);
            control.render(matrix, child, controlRect, hover ? controlRect : realRect, mouseX, mouseY);
            matrix.popPose();
        }
    }
}

/** Renders regular children (clipped), then hover children (unclipped) on top. */
@Override
@Environment(EnvType.CLIENT)
@OnlyIn(Dist.CLIENT)
protected void renderContent(PoseStack matrix, GuiChildControl control, Rect contentRect, Rect realContentRect, int mouseX, int mouseY) {
    if (realContentRect == null)
        return;
    double scale = getScaleFactor();
    double xOffset = getOffsetX();
    double yOffset = getOffsetY();
    renderContent(matrix, contentRect, realContentRect, mouseX, mouseY, controls, scale, xOffset, yOffset, false);
    renderContent(matrix, contentRect, realContentRect, mouseX, mouseY, hoverControls, scale, xOffset, yOffset, true);
    super.renderContent(matrix, control, contentRect, realContentRect, mouseX, mouseY);
}

/** Intentionally empty: a parent draws nothing of its own besides its children. */
@Override
@Environment(EnvType.CLIENT)
@OnlyIn(Dist.CLIENT)
protected void renderContent(PoseStack matrix, GuiChildControl control, Rect rect, int mouseX, int mouseY) {}
/** Delegates to the enclosing control; a plain parent is a container only if its parent is. */
@Override
public boolean isContainer() {
    return getParent().isContainer();
}

/** Forwards init to every child (hover children first — see iterator()). */
@Override
public void init() {
    for (GuiChildControl child : this)
        child.control.init();
}

/** Forwards closed to every child. */
@Override
public void closed() {
    for (GuiChildControl child : this)
        child.control.closed();
}

/** Forwards tick to every child. */
@Override
public void tick() {
    for (GuiChildControl child : this)
        child.control.tick();
}

/** Layer management is delegated upwards; only the root actually owns layers. */
@Override
public void closeTopLayer() {
    getParent().closeTopLayer();
}

/** Closes the layer this control lives on. */
public void closeThisLayer() {
    getParent().closeLayer(getLayer());
}

@Override
public void closeLayer(GuiLayer layer) {
    getParent().closeLayer(layer);
}

@Override
public GuiLayer openLayer(LayerOpenPacket packet) {
    return getParent().openLayer(packet);
}
/**
 * Returns this control's own tooltip if any, otherwise the tooltip of the
 * first interactable child under the mouse. Mouse coordinates are converted
 * into content space: scaled, then shifted by scroll offsets and the
 * content padding.
 */
@Override
public GuiTooltipEvent getTooltipEvent(Rect rect, double x, double y) {
    GuiTooltipEvent event = super.getTooltipEvent(rect, x, y);
    if (event != null)
        return event;
    // convert to content-space coordinates
    x *= getScaleFactor();
    y *= getScaleFactor();
    int offset = getContentOffset();
    x += -getOffsetX() - offset;
    y += -getOffsetY() - offset;
    for (GuiChildControl child : this)
        if (child.control.isInteractable() && child.isMouseOver(x, y)) {
            event = child.control.getTooltipEvent(child.rect, x - child.getX(), y - child.getY());
            if (event != null)
                return event;
        }
    return null;
}

/** @return true if any interactable child treats this position as a double-click target. */
@Override
public boolean testForDoubleClick(Rect rect, double x, double y) {
    // convert to content-space coordinates
    x *= getScaleFactor();
    y *= getScaleFactor();
    int offset = getContentOffset();
    x += -getOffsetX() - offset;
    y += -getOffsetY() - offset;
    for (GuiChildControl child : this)
        if (child.control.isInteractable() && child.control.testForDoubleClick(child.rect, x - child.getX(), y - child.getY()))
            return true;
    return false;
}

/** Propagates mouse movement to all interactable children in child-local coordinates. */
@Override
public void mouseMoved(Rect rect, double x, double y) {
    x *= getScaleFactor();
    y *= getScaleFactor();
    int offset = getContentOffset();
    x += -getOffsetX() - offset;
    y += -getOffsetY() - offset;
    for (GuiChildControl child : this)
        if (child.control.isInteractable())
            child.control.mouseMoved(child.rect, x - child.getX(), y - child.getY());
}

/**
 * Dispatches a click to the first interactable child under the mouse that
 * accepts it, raising a GuiControlClickEvent for that child. Every other
 * child loses focus — including those visited after the handling one.
 */
@Override
public boolean mouseClicked(Rect rect, double x, double y, int button) {
    x *= getScaleFactor();
    y *= getScaleFactor();
    int offset = getContentOffset();
    x += -getOffsetX() - offset;
    y += -getOffsetY() - offset;
    boolean result = false;
    for (GuiChildControl child : this)
        if (!result && child.control.isInteractable() && child.isMouseOver(x, y) && child.control.mouseClicked(child.rect, x - child.getX(), y - child.getY(), button)) {
            raiseEvent(new GuiControlClickEvent(child.control, button, false));
            result = true;
        } else
            child.control.looseFocus();
    return result;
}
/**
 * Same dispatch logic as mouseClicked, for double-clicks.
 * NOTE(review): raises GuiControlClickEvent with the same third argument
 * (false) as a single click — confirm whether double-clicks should be
 * flagged differently in the event.
 */
@Override
public boolean mouseDoubleClicked(Rect rect, double x, double y, int button) {
    x *= getScaleFactor();
    y *= getScaleFactor();
    int offset = getContentOffset();
    x += -getOffsetX() - offset;
    y += -getOffsetY() - offset;
    boolean result = false;
    for (GuiChildControl child : this)
        if (!result && child.control.isInteractable() && child.isMouseOver(x, y) && child.control.mouseDoubleClicked(child.rect, x - child.getX(), y - child.getY(), button)) {
            raiseEvent(new GuiControlClickEvent(child.control, button, false));
            result = true;
        } else
            child.control.looseFocus();
    return result;
}

/**
 * Broadcasts the release to all interactable children, without a hit test,
 * so a child that captured the mouse during a drag still gets the release.
 */
@Override
public void mouseReleased(Rect rect, double x, double y, int button) {
    x *= getScaleFactor();
    y *= getScaleFactor();
    int offset = getContentOffset();
    x += -getOffsetX() - offset;
    y += -getOffsetY() - offset;
    for (GuiChildControl child : this)
        if (child.control.isInteractable())
            child.control.mouseReleased(child.rect, x - child.getX(), y - child.getY(), button);
}

/** Broadcasts the drag to all interactable children in child-local coordinates. */
@Override
public void mouseDragged(Rect rect, double x, double y, int button, double dragX, double dragY, double time) {
    x *= getScaleFactor();
    y *= getScaleFactor();
    int offset = getContentOffset();
    x += -getOffsetX() - offset;
    y += -getOffsetY() - offset;
    for (GuiChildControl child : this)
        if (child.control.isInteractable())
            child.control.mouseDragged(child.rect, x - child.getX(), y - child.getY(), button, dragX, dragY, time);
}

/** Offers the scroll to interactable children under the mouse; first consumer wins. */
@Override
public boolean mouseScrolled(Rect rect, double x, double y, double delta) {
    x *= getScaleFactor();
    y *= getScaleFactor();
    int offset = getContentOffset();
    x += -getOffsetX() - offset;
    y += -getOffsetY() - offset;
    for (GuiChildControl child : this)
        if (child.control.isInteractable() && child.isMouseOver(x, y) && child.control.mouseScrolled(child.rect, x - child.getX(), y - child.getY(), delta))
            return true;
    return false;
}
/** Offers the key press to every interactable child until one consumes it. */
@Override
public boolean keyPressed(int keyCode, int scanCode, int modifiers) {
    for (GuiChildControl child : this)
        if (child.control.isInteractable() && child.control.keyPressed(keyCode, scanCode, modifiers))
            return true;
    return false;
}

/** Offers the key release to every interactable child until one consumes it. */
@Override
public boolean keyReleased(int keyCode, int scanCode, int modifiers) {
    for (GuiChildControl child : this)
        if (child.control.isInteractable() && child.control.keyReleased(keyCode, scanCode, modifiers))
            return true;
    return false;
}

/** Offers the typed character to every interactable child until one consumes it. */
@Override
public boolean charTyped(char codePoint, int modifiers) {
    for (GuiChildControl child : this)
        if (child.control.isInteractable() && child.control.charTyped(codePoint, modifiers))
            return true;
    return false;
}

/** Clears focus on every child. */
@Override
public void looseFocus() {
    for (GuiChildControl child : this)
        child.control.looseFocus();
}

/**
 * Dispatches an event to listeners registered on this parent and, unless a
 * listener cancels it, bubbles it up to the enclosing control. Events are
 * dropped when this control is detached (no parent).
 */
@Override
public void raiseEvent(GuiEvent event) {
    if (getParent() == null)
        return;
    if (eventManager != null)
        eventManager.raiseEvent(event);
    if (!event.isCanceled())
        getParent().raiseEvent(event);
}
/** Convenience: listens for click events bubbling through this parent. */
public void registerEventClick(Consumer<GuiControlClickEvent> consumer) {
    registerEvent(GuiControlClickEvent.class, consumer);
}

/** Convenience: listens for value-change events bubbling through this parent. */
public void registerEventChanged(Consumer<GuiControlChangedEvent> consumer) {
    registerEvent(GuiControlChangedEvent.class, consumer);
}

/** Registers a typed event listener; the event manager is created lazily. */
public <T extends GuiEvent> void registerEvent(Class<T> clazz, Consumer<T> action) {
    if (eventManager == null)
        eventManager = new GuiEventManager();
    eventManager.registerEvent(clazz, action);
}

/**
 * Unnamed parents are transparent in nested names: they delegate to the
 * enclosing control instead of contributing a path segment.
 */
@Override
public String getNestedName() {
    if (name.isBlank()) {
        if (getParent() instanceof GuiControl)
            return ((GuiControl) getParent()).getNestedName();
        return "";
    }
    return super.getNestedName();
}

// Layout is delegated to the configured flow strategy; hover children are
// not part of the flow (only "controls" is passed).

@Override
public void flowX(int width, int preferred) {
    flow.flowX(controls, spacing, align, width, preferred);
}

@Override
public void flowY(int height, int preferred) {
    flow.flowY(controls, spacing, valign, height, preferred);
}

@Override
public int getMinWidth() {
    return flow.minWidth(controls, spacing);
}

@Override
protected int preferredWidth() {
    return flow.preferredWidth(controls, spacing);
}

@Override
public int getMinHeight() {
    return flow.minHeight(controls, spacing);
}

@Override
protected int preferredHeight() {
    return flow.preferredHeight(controls, spacing);
}

/** A plain parent has no visible frame or background of its own. */
@Override
public ControlFormatting getControlFormatting() {
    return ControlFormatting.TRANSPARENT;
}
}
|
Fixed GuiSync by fixing GuiControl.get(String)
|
src/main/java/team/creative/creativecore/common/gui/GuiParent.java
|
Fixed GuiSync by fixing GuiControl.get(String)
|
|
Java
|
apache-2.0
|
d4cc8828ca7b08ec341eafcf6dd547b2e45b252e
| 0
|
ebyhr/presto,smartnews/presto,ebyhr/presto,ebyhr/presto,smartnews/presto,smartnews/presto,ebyhr/presto,smartnews/presto,smartnews/presto,ebyhr/presto
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.sql.planner;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import io.trino.Session;
import io.trino.connector.CatalogName;
import io.trino.execution.scheduler.BucketNodeMap;
import io.trino.execution.scheduler.FixedBucketNodeMap;
import io.trino.execution.scheduler.NodeScheduler;
import io.trino.execution.scheduler.group.DynamicBucketNodeMap;
import io.trino.metadata.InternalNode;
import io.trino.metadata.Split;
import io.trino.operator.BucketPartitionFunction;
import io.trino.operator.PartitionFunction;
import io.trino.spi.connector.BucketFunction;
import io.trino.spi.connector.ConnectorBucketNodeMap;
import io.trino.spi.connector.ConnectorNodePartitioningProvider;
import io.trino.spi.connector.ConnectorPartitionHandle;
import io.trino.spi.connector.ConnectorSplit;
import io.trino.spi.type.Type;
import io.trino.split.EmptySplit;
import io.trino.type.BlockTypeOperators;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.ToIntFunction;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.util.Objects.requireNonNull;
/**
 * Maps partitioning handles to partition functions, bucket functions and
 * bucket-to-node assignments. System partitionings are handled directly;
 * connector partitionings are delegated to the
 * {@link ConnectorNodePartitioningProvider} registered for the catalog.
 */
public class NodePartitioningManager
{
    private final NodeScheduler nodeScheduler;
    private final BlockTypeOperators blockTypeOperators;
    // One provider per catalog; registered/removed as connectors come and go.
    private final ConcurrentMap<CatalogName, ConnectorNodePartitioningProvider> partitioningProviders = new ConcurrentHashMap<>();

    @Inject
    public NodePartitioningManager(NodeScheduler nodeScheduler, BlockTypeOperators blockTypeOperators)
    {
        this.nodeScheduler = requireNonNull(nodeScheduler, "nodeScheduler is null");
        this.blockTypeOperators = requireNonNull(blockTypeOperators, "blockTypeOperators is null");
    }

    /**
     * Registers the partitioning provider for a catalog.
     *
     * @throws IllegalArgumentException if a provider is already registered for the catalog
     */
    public void addPartitioningProvider(CatalogName catalogName, ConnectorNodePartitioningProvider nodePartitioningProvider)
    {
        requireNonNull(catalogName, "catalogName is null");
        requireNonNull(nodePartitioningProvider, "nodePartitioningProvider is null");
        checkArgument(partitioningProviders.putIfAbsent(catalogName, nodePartitioningProvider) == null,
                "NodePartitioningProvider for connector '%s' is already registered", catalogName);
    }

    public void removePartitioningProvider(CatalogName catalogName)
    {
        partitioningProviders.remove(catalogName);
    }

    /**
     * Creates the partition function used to route rows to output partitions.
     * The scheme's bucket-to-partition mapping must already be assigned.
     */
    public PartitionFunction getPartitionFunction(
            Session session,
            PartitioningScheme partitioningScheme,
            List<Type> partitionChannelTypes)
    {
        int[] bucketToPartition = partitioningScheme.getBucketToPartition()
                .orElseThrow(() -> new IllegalArgumentException("Bucket to partition must be set before a partition function can be created"));
        PartitioningHandle partitioningHandle = partitioningScheme.getPartitioning().getHandle();
        // System partitionings (hash, round-robin, ...) are computed locally.
        if (partitioningHandle.getConnectorHandle() instanceof SystemPartitioningHandle) {
            return ((SystemPartitioningHandle) partitioningHandle.getConnectorHandle()).getPartitionFunction(
                    partitionChannelTypes,
                    partitioningScheme.getHashColumn().isPresent(),
                    bucketToPartition,
                    blockTypeOperators);
        }
        // Connector partitionings: wrap the connector-provided bucket function.
        BucketFunction bucketFunction = getBucketFunction(session, partitioningHandle, partitionChannelTypes, bucketToPartition.length);
        return new BucketPartitionFunction(bucketFunction, bucketToPartition);
    }

    /** Obtains the connector's bucket function for the given partitioning. */
    public BucketFunction getBucketFunction(Session session, PartitioningHandle partitioningHandle, List<Type> partitionChannelTypes, int bucketCount)
    {
        ConnectorNodePartitioningProvider partitioningProvider = getPartitioningProvider(partitioningHandle.getConnectorId().get());
        BucketFunction bucketFunction = partitioningProvider.getBucketFunction(
                partitioningHandle.getTransactionHandle().orElse(null),
                session.toConnectorSession(),
                partitioningHandle.getConnectorHandle(),
                partitionChannelTypes,
                bucketCount);
        checkArgument(bucketFunction != null, "No bucket function for partitioning: %s", partitioningHandle);
        return bucketFunction;
    }

    /** Lists the partition handles used for grouped execution of this partitioning. */
    public List<ConnectorPartitionHandle> listPartitionHandles(
            Session session,
            PartitioningHandle partitioningHandle)
    {
        ConnectorNodePartitioningProvider partitioningProvider = getPartitioningProvider(partitioningHandle.getConnectorId().get());
        return partitioningProvider.listPartitionHandles(
                partitioningHandle.getTransactionHandle().orElse(null),
                session.toConnectorSession(partitioningHandle.getConnectorId().get()),
                partitioningHandle.getConnectorHandle());
    }

    /**
     * Builds the node partition map: assigns every bucket to a node — using
     * the connector's fixed mapping when present, otherwise a shuffled
     * round-robin over the catalog's nodes — and compacts the distinct nodes
     * into consecutive partition ids.
     */
    public NodePartitionMap getNodePartitioningMap(Session session, PartitioningHandle partitioningHandle)
    {
        requireNonNull(session, "session is null");
        requireNonNull(partitioningHandle, "partitioningHandle is null");
        if (partitioningHandle.getConnectorHandle() instanceof SystemPartitioningHandle) {
            return ((SystemPartitioningHandle) partitioningHandle.getConnectorHandle()).getNodePartitionMap(session, nodeScheduler);
        }
        ConnectorBucketNodeMap connectorBucketNodeMap = getConnectorBucketNodeMap(session, partitioningHandle);
        // safety check for crazy partitioning
        checkArgument(connectorBucketNodeMap.getBucketCount() < 1_000_000, "Too many buckets in partitioning: %s", connectorBucketNodeMap.getBucketCount());
        List<InternalNode> bucketToNode;
        if (connectorBucketNodeMap.hasFixedMapping()) {
            bucketToNode = getFixedMapping(connectorBucketNodeMap);
        }
        else {
            CatalogName catalogName = partitioningHandle.getConnectorId()
                    .orElseThrow(() -> new IllegalArgumentException("No connector ID for partitioning handle: " + partitioningHandle));
            bucketToNode = createArbitraryBucketToNode(
                    nodeScheduler.createNodeSelector(session, Optional.of(catalogName)).allNodes(),
                    connectorBucketNodeMap.getBucketCount());
        }
        // Assign partition ids in order of first appearance of each node.
        int[] bucketToPartition = new int[connectorBucketNodeMap.getBucketCount()];
        BiMap<InternalNode, Integer> nodeToPartition = HashBiMap.create();
        int nextPartitionId = 0;
        for (int bucket = 0; bucket < bucketToNode.size(); bucket++) {
            InternalNode node = bucketToNode.get(bucket);
            Integer partitionId = nodeToPartition.get(node);
            if (partitionId == null) {
                partitionId = nextPartitionId++;
                nodeToPartition.put(node, partitionId);
            }
            bucketToPartition[bucket] = partitionId;
        }
        List<InternalNode> partitionToNode = IntStream.range(0, nodeToPartition.size())
                .mapToObj(partitionId -> nodeToPartition.inverse().get(partitionId))
                .collect(toImmutableList());
        return new NodePartitionMap(partitionToNode, bucketToPartition, getSplitToBucket(session, partitioningHandle));
    }

    /**
     * Returns the bucket-to-node map. When {@code preferDynamic} is set and
     * the connector has no fixed mapping, buckets are assigned to nodes
     * lazily during scheduling.
     */
    public BucketNodeMap getBucketNodeMap(Session session, PartitioningHandle partitioningHandle, boolean preferDynamic)
    {
        ConnectorBucketNodeMap connectorBucketNodeMap = getConnectorBucketNodeMap(session, partitioningHandle);
        if (connectorBucketNodeMap.hasFixedMapping()) {
            return new FixedBucketNodeMap(getSplitToBucket(session, partitioningHandle), getFixedMapping(connectorBucketNodeMap));
        }
        if (preferDynamic) {
            return new DynamicBucketNodeMap(getSplitToBucket(session, partitioningHandle), connectorBucketNodeMap.getBucketCount());
        }
        Optional<CatalogName> catalogName = partitioningHandle.getConnectorId();
        checkArgument(catalogName.isPresent(), "No connector ID for partitioning handle: %s", partitioningHandle);
        return new FixedBucketNodeMap(
                getSplitToBucket(session, partitioningHandle),
                createArbitraryBucketToNode(
                        new ArrayList<>(nodeScheduler.createNodeSelector(session, catalogName).allNodes()),
                        connectorBucketNodeMap.getBucketCount()));
    }

    /** Narrows the connector's fixed node mapping to internal nodes. */
    private static List<InternalNode> getFixedMapping(ConnectorBucketNodeMap connectorBucketNodeMap)
    {
        return connectorBucketNodeMap.getFixedMapping().stream()
                .map(InternalNode.class::cast)
                .collect(toImmutableList());
    }

    /** Fetches the connector's bucket-node map for the given partitioning. */
    public ConnectorBucketNodeMap getConnectorBucketNodeMap(Session session, PartitioningHandle partitioningHandle)
    {
        ConnectorNodePartitioningProvider partitioningProvider = getPartitioningProvider(partitioningHandle.getConnectorId().get());
        ConnectorBucketNodeMap connectorBucketNodeMap = partitioningProvider.getBucketNodeMap(
                partitioningHandle.getTransactionHandle().orElse(null),
                session.toConnectorSession(partitioningHandle.getConnectorId().get()),
                partitioningHandle.getConnectorHandle());
        checkArgument(connectorBucketNodeMap != null, "No partition map %s", partitioningHandle);
        return connectorBucketNodeMap;
    }

    /**
     * Returns a function mapping a split to its bucket. Empty splits derive
     * the bucket from their lifespan; other splits ask the connector.
     */
    private ToIntFunction<Split> getSplitToBucket(Session session, PartitioningHandle partitioningHandle)
    {
        ConnectorNodePartitioningProvider partitioningProvider = getPartitioningProvider(partitioningHandle.getConnectorId().get());
        ToIntFunction<ConnectorSplit> splitBucketFunction = partitioningProvider.getSplitBucketFunction(
                partitioningHandle.getTransactionHandle().orElse(null),
                session.toConnectorSession(partitioningHandle.getConnectorId().get()),
                partitioningHandle.getConnectorHandle());
        checkArgument(splitBucketFunction != null, "No partitioning %s", partitioningHandle);
        return split -> {
            int bucket;
            if (split.getConnectorSplit() instanceof EmptySplit) {
                bucket = split.getLifespan().isTaskWide() ? 0 : split.getLifespan().getId();
            }
            else {
                bucket = splitBucketFunction.applyAsInt(split.getConnectorSplit());
            }
            // For grouped execution the split's lifespan must match its bucket.
            if (!split.getLifespan().isTaskWide()) {
                checkArgument(split.getLifespan().getId() == bucket);
            }
            return bucket;
        };
    }

    private ConnectorNodePartitioningProvider getPartitioningProvider(CatalogName catalogName)
    {
        ConnectorNodePartitioningProvider partitioningProvider = partitioningProviders.get(requireNonNull(catalogName, "catalogName is null"));
        checkArgument(partitioningProvider != null, "No partitioning provider for connector %s", catalogName);
        return partitioningProvider;
    }

    /** Assigns buckets round-robin over a shuffled copy of the node list. */
    private static List<InternalNode> createArbitraryBucketToNode(List<InternalNode> nodes, int bucketCount)
    {
        return cyclingShuffledStream(nodes)
                .limit(bucketCount)
                .collect(toImmutableList());
    }

    /** Infinite stream cycling over a shuffled copy of the collection. */
    private static <T> Stream<T> cyclingShuffledStream(Collection<T> collection)
    {
        List<T> list = new ArrayList<>(collection);
        Collections.shuffle(list);
        return Stream.generate(() -> list).flatMap(List::stream);
    }
}
|
core/trino-main/src/main/java/io/trino/sql/planner/NodePartitioningManager.java
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.sql.planner;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import io.trino.Session;
import io.trino.connector.CatalogName;
import io.trino.execution.scheduler.BucketNodeMap;
import io.trino.execution.scheduler.FixedBucketNodeMap;
import io.trino.execution.scheduler.NodeScheduler;
import io.trino.execution.scheduler.group.DynamicBucketNodeMap;
import io.trino.metadata.InternalNode;
import io.trino.metadata.Split;
import io.trino.operator.BucketPartitionFunction;
import io.trino.operator.PartitionFunction;
import io.trino.spi.connector.BucketFunction;
import io.trino.spi.connector.ConnectorBucketNodeMap;
import io.trino.spi.connector.ConnectorNodePartitioningProvider;
import io.trino.spi.connector.ConnectorPartitionHandle;
import io.trino.spi.connector.ConnectorSplit;
import io.trino.spi.type.Type;
import io.trino.split.EmptySplit;
import io.trino.type.BlockTypeOperators;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.ToIntFunction;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.util.Objects.requireNonNull;
/**
 * Maps partitioning handles to partition functions, bucket functions and
 * bucket-to-node assignments. System partitionings are handled directly;
 * connector partitionings are delegated to the
 * {@link ConnectorNodePartitioningProvider} registered for the catalog.
 */
public class NodePartitioningManager
{
    private final NodeScheduler nodeScheduler;
    private final BlockTypeOperators blockTypeOperators;
    // One provider per catalog; registered/removed as connectors come and go.
    private final ConcurrentMap<CatalogName, ConnectorNodePartitioningProvider> partitioningProviders = new ConcurrentHashMap<>();

    @Inject
    public NodePartitioningManager(NodeScheduler nodeScheduler, BlockTypeOperators blockTypeOperators)
    {
        this.nodeScheduler = requireNonNull(nodeScheduler, "nodeScheduler is null");
        this.blockTypeOperators = requireNonNull(blockTypeOperators, "blockTypeOperators is null");
    }

    /**
     * Registers the partitioning provider for a catalog.
     *
     * @throws IllegalArgumentException if a provider is already registered for the catalog
     */
    public void addPartitioningProvider(CatalogName catalogName, ConnectorNodePartitioningProvider nodePartitioningProvider)
    {
        requireNonNull(catalogName, "catalogName is null");
        requireNonNull(nodePartitioningProvider, "nodePartitioningProvider is null");
        checkArgument(partitioningProviders.putIfAbsent(catalogName, nodePartitioningProvider) == null,
                "NodePartitioningProvider for connector '%s' is already registered", catalogName);
    }

    public void removePartitioningProvider(CatalogName catalogName)
    {
        partitioningProviders.remove(catalogName);
    }

    /**
     * Creates the partition function used to route rows to output partitions.
     * The scheme's bucket-to-partition mapping must already be assigned.
     */
    public PartitionFunction getPartitionFunction(
            Session session,
            PartitioningScheme partitioningScheme,
            List<Type> partitionChannelTypes)
    {
        int[] bucketToPartition = partitioningScheme.getBucketToPartition()
                .orElseThrow(() -> new IllegalArgumentException("Bucket to partition must be set before a partition function can be created"));
        PartitioningHandle partitioningHandle = partitioningScheme.getPartitioning().getHandle();
        // System partitionings (hash, round-robin, ...) are computed locally.
        if (partitioningHandle.getConnectorHandle() instanceof SystemPartitioningHandle) {
            return ((SystemPartitioningHandle) partitioningHandle.getConnectorHandle()).getPartitionFunction(
                    partitionChannelTypes,
                    partitioningScheme.getHashColumn().isPresent(),
                    bucketToPartition,
                    blockTypeOperators);
        }
        // Connector partitionings: wrap the connector-provided bucket function.
        BucketFunction bucketFunction = getBucketFunction(session, partitioningHandle, partitionChannelTypes, bucketToPartition.length);
        return new BucketPartitionFunction(bucketFunction, bucketToPartition);
    }

    /**
     * Obtains the connector's bucket function for the given partitioning.
     * NOTE(review): parameter is named {@code partitioning} while sibling
     * methods use {@code partitioningHandle} — consider renaming for consistency.
     */
    public BucketFunction getBucketFunction(Session session, PartitioningHandle partitioning, List<Type> partitionChannelTypes, int bucketCount)
    {
        ConnectorNodePartitioningProvider partitioningProvider = getPartitioningProvider(partitioning.getConnectorId().get());
        BucketFunction bucketFunction = partitioningProvider.getBucketFunction(
                partitioning.getTransactionHandle().orElse(null),
                session.toConnectorSession(),
                partitioning.getConnectorHandle(),
                partitionChannelTypes,
                bucketCount);
        checkArgument(bucketFunction != null, "No bucket function for partitioning: %s", partitioning);
        return bucketFunction;
    }

    /** Lists the partition handles used for grouped execution of this partitioning. */
    public List<ConnectorPartitionHandle> listPartitionHandles(
            Session session,
            PartitioningHandle partitioningHandle)
    {
        ConnectorNodePartitioningProvider partitioningProvider = getPartitioningProvider(partitioningHandle.getConnectorId().get());
        return partitioningProvider.listPartitionHandles(
                partitioningHandle.getTransactionHandle().orElse(null),
                session.toConnectorSession(partitioningHandle.getConnectorId().get()),
                partitioningHandle.getConnectorHandle());
    }

    /**
     * Builds the node partition map: assigns every bucket to a node — using
     * the connector's fixed mapping when present, otherwise a shuffled
     * round-robin over the catalog's nodes — and compacts the distinct nodes
     * into consecutive partition ids.
     */
    public NodePartitionMap getNodePartitioningMap(Session session, PartitioningHandle partitioningHandle)
    {
        requireNonNull(session, "session is null");
        requireNonNull(partitioningHandle, "partitioningHandle is null");
        if (partitioningHandle.getConnectorHandle() instanceof SystemPartitioningHandle) {
            return ((SystemPartitioningHandle) partitioningHandle.getConnectorHandle()).getNodePartitionMap(session, nodeScheduler);
        }
        ConnectorBucketNodeMap connectorBucketNodeMap = getConnectorBucketNodeMap(session, partitioningHandle);
        // safety check for crazy partitioning
        checkArgument(connectorBucketNodeMap.getBucketCount() < 1_000_000, "Too many buckets in partitioning: %s", connectorBucketNodeMap.getBucketCount());
        List<InternalNode> bucketToNode;
        if (connectorBucketNodeMap.hasFixedMapping()) {
            bucketToNode = getFixedMapping(connectorBucketNodeMap);
        }
        else {
            CatalogName catalogName = partitioningHandle.getConnectorId()
                    .orElseThrow(() -> new IllegalArgumentException("No connector ID for partitioning handle: " + partitioningHandle));
            bucketToNode = createArbitraryBucketToNode(
                    nodeScheduler.createNodeSelector(session, Optional.of(catalogName)).allNodes(),
                    connectorBucketNodeMap.getBucketCount());
        }
        // Assign partition ids in order of first appearance of each node.
        int[] bucketToPartition = new int[connectorBucketNodeMap.getBucketCount()];
        BiMap<InternalNode, Integer> nodeToPartition = HashBiMap.create();
        int nextPartitionId = 0;
        for (int bucket = 0; bucket < bucketToNode.size(); bucket++) {
            InternalNode node = bucketToNode.get(bucket);
            Integer partitionId = nodeToPartition.get(node);
            if (partitionId == null) {
                partitionId = nextPartitionId++;
                nodeToPartition.put(node, partitionId);
            }
            bucketToPartition[bucket] = partitionId;
        }
        List<InternalNode> partitionToNode = IntStream.range(0, nodeToPartition.size())
                .mapToObj(partitionId -> nodeToPartition.inverse().get(partitionId))
                .collect(toImmutableList());
        return new NodePartitionMap(partitionToNode, bucketToPartition, getSplitToBucket(session, partitioningHandle));
    }

    /**
     * Returns the bucket-to-node map. When {@code preferDynamic} is set and
     * the connector has no fixed mapping, buckets are assigned to nodes
     * lazily during scheduling.
     */
    public BucketNodeMap getBucketNodeMap(Session session, PartitioningHandle partitioningHandle, boolean preferDynamic)
    {
        ConnectorBucketNodeMap connectorBucketNodeMap = getConnectorBucketNodeMap(session, partitioningHandle);
        if (connectorBucketNodeMap.hasFixedMapping()) {
            return new FixedBucketNodeMap(getSplitToBucket(session, partitioningHandle), getFixedMapping(connectorBucketNodeMap));
        }
        if (preferDynamic) {
            return new DynamicBucketNodeMap(getSplitToBucket(session, partitioningHandle), connectorBucketNodeMap.getBucketCount());
        }
        Optional<CatalogName> catalogName = partitioningHandle.getConnectorId();
        checkArgument(catalogName.isPresent(), "No connector ID for partitioning handle: %s", partitioningHandle);
        return new FixedBucketNodeMap(
                getSplitToBucket(session, partitioningHandle),
                createArbitraryBucketToNode(
                        new ArrayList<>(nodeScheduler.createNodeSelector(session, catalogName).allNodes()),
                        connectorBucketNodeMap.getBucketCount()));
    }

    /** Narrows the connector's fixed node mapping to internal nodes. */
    private static List<InternalNode> getFixedMapping(ConnectorBucketNodeMap connectorBucketNodeMap)
    {
        return connectorBucketNodeMap.getFixedMapping().stream()
                .map(InternalNode.class::cast)
                .collect(toImmutableList());
    }

    /**
     * Fetches the connector's bucket-node map for the given partitioning.
     * NOTE(review): parameter is named {@code partitioning} while sibling
     * methods use {@code partitioningHandle} — consider renaming for consistency.
     */
    public ConnectorBucketNodeMap getConnectorBucketNodeMap(Session session, PartitioningHandle partitioning)
    {
        ConnectorNodePartitioningProvider partitioningProvider = getPartitioningProvider(partitioning.getConnectorId().get());
        ConnectorBucketNodeMap connectorBucketNodeMap = partitioningProvider.getBucketNodeMap(
                partitioning.getTransactionHandle().orElse(null),
                session.toConnectorSession(partitioning.getConnectorId().get()),
                partitioning.getConnectorHandle());
        checkArgument(connectorBucketNodeMap != null, "No partition map %s", partitioning);
        return connectorBucketNodeMap;
    }

    /**
     * Returns a function mapping a split to its bucket. Empty splits derive
     * the bucket from their lifespan; other splits ask the connector.
     */
    private ToIntFunction<Split> getSplitToBucket(Session session, PartitioningHandle partitioningHandle)
    {
        ConnectorNodePartitioningProvider partitioningProvider = getPartitioningProvider(partitioningHandle.getConnectorId().get());
        ToIntFunction<ConnectorSplit> splitBucketFunction = partitioningProvider.getSplitBucketFunction(
                partitioningHandle.getTransactionHandle().orElse(null),
                session.toConnectorSession(partitioningHandle.getConnectorId().get()),
                partitioningHandle.getConnectorHandle());
        checkArgument(splitBucketFunction != null, "No partitioning %s", partitioningHandle);
        return split -> {
            int bucket;
            if (split.getConnectorSplit() instanceof EmptySplit) {
                bucket = split.getLifespan().isTaskWide() ? 0 : split.getLifespan().getId();
            }
            else {
                bucket = splitBucketFunction.applyAsInt(split.getConnectorSplit());
            }
            // For grouped execution the split's lifespan must match its bucket.
            if (!split.getLifespan().isTaskWide()) {
                checkArgument(split.getLifespan().getId() == bucket);
            }
            return bucket;
        };
    }

    private ConnectorNodePartitioningProvider getPartitioningProvider(CatalogName catalogName)
    {
        ConnectorNodePartitioningProvider partitioningProvider = partitioningProviders.get(requireNonNull(catalogName, "catalogName is null"));
        checkArgument(partitioningProvider != null, "No partitioning provider for connector %s", catalogName);
        return partitioningProvider;
    }

    /** Assigns buckets round-robin over a shuffled copy of the node list. */
    private static List<InternalNode> createArbitraryBucketToNode(List<InternalNode> nodes, int bucketCount)
    {
        return cyclingShuffledStream(nodes)
                .limit(bucketCount)
                .collect(toImmutableList());
    }

    /** Infinite stream cycling over a shuffled copy of the collection. */
    private static <T> Stream<T> cyclingShuffledStream(Collection<T> collection)
    {
        List<T> list = new ArrayList<>(collection);
        Collections.shuffle(list);
        return Stream.generate(() -> list).flatMap(List::stream);
    }
}
|
Rename argument for consistency
Apparently most methods in the class call it `partitioningHandle`, so
let's be it.
|
core/trino-main/src/main/java/io/trino/sql/planner/NodePartitioningManager.java
|
Rename argument for consistency
|
|
Java
|
apache-2.0
|
f4fb318d9291bb1328d4da26873cdb166ec75666
| 0
|
mduerig/jackrabbit-oak,mduerig/jackrabbit-oak,mduerig/jackrabbit-oak,mduerig/jackrabbit-oak,mduerig/jackrabbit-oak
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.segment;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkPositionIndexes;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static com.google.common.collect.Maps.newConcurrentMap;
import static org.apache.jackrabbit.oak.plugins.segment.SegmentVersion.V_11;
import static org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.BLOCK_SIZE;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import javax.annotation.Nullable;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.blob.ReferenceCollector;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
/**
* A list of records.
* <p>
* Record data is not kept in memory, but some entries are cached (templates,
* all strings in the segment).
* <p>
* This class includes method to read records from the raw bytes.
*/
public class Segment {

    /**
     * Number of bytes used for storing a record identifier. One byte
     * is used for identifying the segment and two for the record offset
     * within that segment.
     */
    static final int RECORD_ID_BYTES = 1 + 2;

    /**
     * The limit on segment references within one segment. Since record
     * identifiers use one byte to indicate the referenced segment, a single
     * segment can hold references to up to 255 segments plus itself.
     */
    static final int SEGMENT_REFERENCE_LIMIT = (1 << 8) - 1; // 255

    /**
     * The number of bytes (or bits of address space) to use for the
     * alignment boundary of segment records.
     */
    public static final int RECORD_ALIGN_BITS = 2; // align at the four-byte boundary

    /**
     * Maximum segment size. Record identifiers are stored as three-byte
     * sequences with the first byte indicating the segment and the next
     * two the offset within that segment. Since all records are aligned
     * at four-byte boundaries, the two bytes can address up to 256kB of
     * record data.
     */
    public static final int MAX_SEGMENT_SIZE = 1 << (16 + RECORD_ALIGN_BITS); // 256kB

    /**
     * The size limit for small values. The variable length of small values
     * is encoded as a single byte with the high bit as zero, which gives us
     * seven bits for encoding the length of the value.
     */
    static final int SMALL_LIMIT = 1 << 7;

    /**
     * The size limit for medium values. The variable length of medium values
     * is encoded as two bytes with the highest bits of the first byte set to
     * one and zero, which gives us 14 bits for encoding the length of the
     * value. And since small values are never stored as medium ones, we can
     * extend the size range to cover that many longer values.
     */
    public static final int MEDIUM_LIMIT = (1 << (16 - 2)) + SMALL_LIMIT;

    /**
     * Maximum size of small blob IDs. A small blob ID is stored in a value
     * record whose length field contains the pattern "1110" in its most
     * significant bits. Since two bytes are used to store both the bit pattern
     * and the actual length of the blob ID, a maximum of 2^12 values can be
     * stored in the length field.
     */
    public static final int BLOB_ID_SMALL_LIMIT = 1 << 12;

    /** Header offset of the byte holding (reference count - 1). */
    public static final int REF_COUNT_OFFSET = 5;

    /** Header offset of the two-byte root record count. */
    static final int ROOT_COUNT_OFFSET = 6;

    /** Header offset of the two-byte blob reference count. */
    static final int BLOBREF_COUNT_OFFSET = 8;

    /** Tracker used to resolve referenced segment ids and shared caches. */
    private final SegmentTracker tracker;

    /** Identifier of this segment. */
    private final SegmentId id;

    /**
     * Raw segment data. Record offsets are mapped relative to the buffer
     * limit (see {@link #pos(int, int)}), so partially loaded segments work.
     */
    private final ByteBuffer data;

    /**
     * Version of the segment storage format.
     */
    private final SegmentVersion version;

    /**
     * Referenced segment identifiers. Entries are initialized lazily in
     * {@link #getRefId(int)}. Set to {@code null} for bulk segments.
     */
    private final SegmentId[] refids;

    /**
     * String records read from segment. Used to avoid duplicate
     * copies and repeated parsing of the same strings.
     *
     * @deprecated Superseded by {@link #stringCache} unless
     * {@link SegmentTracker#DISABLE_STRING_CACHE} is {@code true}.
     */
    @Deprecated
    private final ConcurrentMap<Integer, String> strings;

    /** Loader callback used to populate the string cache on a cache miss. */
    private final Function<Integer, String> loadString = new Function<Integer, String>() {
        @Nullable
        @Override
        public String apply(Integer offset) {
            return loadString(offset);
        }
    };

    /**
     * Cache for string records or {@code null} if {@link #strings} is used for caching
     */
    private final StringCache stringCache;

    /**
     * Template records read from segment. Used to avoid duplicate
     * copies and repeated parsing of the same templates.
     */
    private final ConcurrentMap<Integer, Template> templates = newConcurrentMap();

    // Access marker; neither read nor written in this excerpt —
    // presumably maintained by callers elsewhere (TODO confirm).
    private volatile long accessed;

    /**
     * Decode a 4 byte aligned segment offset.
     * @param offset 4 byte aligned segment offset
     * @return decoded segment offset
     */
    public static int decode(short offset) {
        return (offset & 0xffff) << RECORD_ALIGN_BITS;
    }

    /**
     * Encode a segment offset into a 4 byte aligned address packed into a {@code short}.
     * @param offset segment offset
     * @return encoded segment offset packed into a {@code short}
     */
    public static short encode(int offset) {
        return (short) (offset >> RECORD_ALIGN_BITS);
    }

    /**
     * Creates a segment assuming the current storage format version
     * ({@link SegmentVersion#V_11}).
     */
    public Segment(SegmentTracker tracker, SegmentId id, ByteBuffer data) {
        this(tracker, id, data, V_11);
    }

    /**
     * Creates a segment over the given raw data. For data segments the
     * "0aK" magic header and the stored version byte are validated and
     * take precedence; the {@code version} argument is only used for
     * bulk segments, which carry no header.
     *
     * @param tracker tracker used for resolving segment references
     * @param id identifier of this segment
     * @param data raw segment data
     * @param version fallback storage format version for bulk segments
     */
    public Segment(SegmentTracker tracker, SegmentId id, ByteBuffer data, SegmentVersion version) {
        this.tracker = checkNotNull(tracker);
        this.id = checkNotNull(id);
        // Use the tracker-level string cache when enabled, otherwise fall
        // back to the deprecated per-segment map.
        if (tracker.getStringCache() == null) {
            strings = newConcurrentMap();
            stringCache = null;
        } else {
            strings = null;
            stringCache = tracker.getStringCache();
        }
        this.data = checkNotNull(data);
        if (id.isDataSegmentId()) {
            byte segmentVersion = data.get(3);
            // Validate the "0aK" magic bytes and a known version byte.
            checkState(data.get(0) == '0'
                    && data.get(1) == 'a'
                    && data.get(2) == 'K'
                    && SegmentVersion.isValid(segmentVersion));
            this.refids = new SegmentId[getRefCount()];
            this.refids[0] = id; // index 0 always refers to this segment itself
            this.version = SegmentVersion.fromByte(segmentVersion);
        } else {
            this.refids = null;
            this.version = version;
        }
    }

    /**
     * Creates a new data segment wrapping the given buffer and registers a
     * fresh data segment id with the tracker.
     */
    Segment(SegmentTracker tracker, byte[] buffer) {
        this.tracker = checkNotNull(tracker);
        this.id = tracker.newDataSegmentId();
        if (tracker.getStringCache() == null) {
            strings = newConcurrentMap();
            stringCache = null;
        } else {
            strings = null;
            stringCache = tracker.getStringCache();
        }
        this.data = ByteBuffer.wrap(checkNotNull(buffer));
        this.refids = new SegmentId[SEGMENT_REFERENCE_LIMIT + 1];
        this.refids[0] = id;
        this.version = SegmentVersion.fromByte(buffer[3]);
        this.id.setSegment(this);
    }

    /** Returns the storage format version of this segment. */
    SegmentVersion getSegmentVersion() {
        return version;
    }

    /**
     * Maps the given record offset to the respective position within the
     * internal {@link #data} array. The validity of a record with the given
     * length at the given offset is also verified.
     *
     * @param offset record offset
     * @param length record length
     * @return position within the data array
     */
    private int pos(int offset, int length) {
        checkPositionIndexes(offset, offset + length, MAX_SEGMENT_SIZE);
        int pos = data.limit() - MAX_SEGMENT_SIZE + offset;
        checkState(pos >= data.position());
        return pos;
    }

    /** Returns the identifier of this segment. */
    public SegmentId getSegmentId() {
        return id;
    }

    /**
     * Returns the number of referenced segments, including this segment
     * itself (hence the {@code + 1} over the stored header byte).
     */
    int getRefCount() {
        return (data.get(REF_COUNT_OFFSET) & 0xff) + 1;
    }

    /** Returns the number of root records listed in the segment header. */
    public int getRootCount() {
        return data.getShort(ROOT_COUNT_OFFSET) & 0xffff;
    }

    /**
     * Returns the type of the root record at the given index. The root table
     * follows the reference table (16 bytes per reference) and uses three
     * bytes per entry: one type byte plus a two-byte encoded offset.
     */
    public RecordType getRootType(int index) {
        int refCount = getRefCount();
        checkArgument(index < getRootCount());
        return RecordType.values()[data.get(data.position() + refCount * 16 + index * 3) & 0xff];
    }

    /** Returns the decoded (4-byte aligned) offset of the root record at the given index. */
    public int getRootOffset(int index) {
        int refCount = getRefCount();
        checkArgument(index < getRootCount());
        return (data.getShort(data.position() + refCount * 16 + index * 3 + 1) & 0xffff)
                << RECORD_ALIGN_BITS;
    }

    /**
     * Resolves the referenced segment id at the given index of the reference
     * table. Entries are parsed lazily and memoized in {@link #refids}; the
     * parse is guarded by a re-check under {@code synchronized (this)} so
     * concurrent callers do not duplicate work.
     *
     * @throws IllegalStateException if the index is out of range, e.g. for
     *         bulk segments which have no reference table
     */
    SegmentId getRefId(int index) {
        if (refids == null || index >= refids.length) {
            String type = "data";
            if (!id.isDataSegmentId()) {
                type = "bulk";
            }
            long delta = System.currentTimeMillis() - id.getCreationTime();
            throw new IllegalStateException("RefId '" + index
                    + "' doesn't exist in " + type + " segment " + id
                    + ". Creation date delta is " + delta + " ms.");
        }
        SegmentId refid = refids[index];
        if (refid == null) {
            synchronized (this) {
                refid = refids[index];
                if (refid == null) {
                    // Each reference table entry is a 16-byte UUID (msb, lsb).
                    int refpos = data.position() + index * 16;
                    long msb = data.getLong(refpos);
                    long lsb = data.getLong(refpos + 8);
                    refid = tracker.getSegmentId(msb, lsb);
                    refids[index] = refid;
                }
            }
        }
        return refid;
    }

    /** Returns the ids of all segments referenced by this segment (self included). */
    public List<SegmentId> getReferencedIds() {
        int refcount = getRefCount();
        List<SegmentId> ids = newArrayListWithCapacity(refcount);
        for (int refid = 0; refid < refcount; refid++) {
            ids.add(getRefId(refid));
        }
        return ids;
    }

    /** Returns the size of this segment in bytes. */
    public int size() {
        return data.remaining();
    }

    /**
     * Returns an estimate of the memory retained by this segment for cache
     * accounting purposes: a fixed overhead, plus the data size when the
     * buffer is on-heap, plus the data size again for data segments —
     * presumably to account for parsed structures (TODO confirm rationale).
     */
    public long getCacheSize() {
        int size = 1024;
        if (!data.isDirect()) {
            size += size();
        }
        if (id.isDataSegmentId()) {
            size += size();
        }
        return size;
    }

    /**
     * Writes this segment to the given output stream.
     *
     * @param stream stream to which this segment will be written
     * @throws IOException on an IO error
     */
    public void writeTo(OutputStream stream) throws IOException {
        ByteBuffer buffer = data.duplicate();
        WritableByteChannel channel = Channels.newChannel(stream);
        while (buffer.hasRemaining()) {
            channel.write(buffer);
        }
    }

    /**
     * Reports the blob id of every blob reference recorded in this segment's
     * header to the given collector.
     */
    void collectBlobReferences(ReferenceCollector collector) {
        int refcount = getRefCount();
        int rootcount =
                data.getShort(data.position() + ROOT_COUNT_OFFSET) & 0xffff;
        int blobrefcount =
                data.getShort(data.position() + BLOBREF_COUNT_OFFSET) & 0xffff;
        // The blob reference table follows the reference and root tables.
        int blobrefpos = data.position() + refcount * 16 + rootcount * 3;
        for (int i = 0; i < blobrefcount; i++) {
            // Each entry is a two-byte encoded (4-byte aligned) record offset.
            int offset = (data.getShort(blobrefpos + i * 2) & 0xffff) << 2;
            SegmentBlob blob = new SegmentBlob(new RecordId(id, offset));
            collector.addReference(blob.getBlobId(), null);
        }
    }

    /** Reads the byte at the given record offset. */
    byte readByte(int offset) {
        return data.get(pos(offset, 1));
    }

    /** Reads the big-endian short at the given record offset. */
    short readShort(int offset) {
        return data.getShort(pos(offset, 2));
    }

    /** Reads the big-endian int at the given record offset. */
    int readInt(int offset) {
        return data.getInt(pos(offset, 4));
    }

    /** Reads the big-endian long at the given record offset. */
    long readLong(int offset) {
        return data.getLong(pos(offset, 8));
    }

    /**
     * Reads the given number of bytes starting from the given position
     * in this segment.
     *
     * @param position position within segment
     * @param buffer target buffer
     * @param offset offset within target buffer
     * @param length number of bytes to read
     */
    void readBytes(int position, byte[] buffer, int offset, int length) {
        checkNotNull(buffer);
        checkPositionIndexes(offset, offset + length, buffer.length);
        // Work on a duplicate so the shared buffer's position is untouched.
        ByteBuffer d = data.duplicate();
        d.position(pos(position, length));
        d.get(buffer, offset, length);
    }

    /** Reads the record identifier stored at the given record offset. */
    RecordId readRecordId(int offset) {
        int pos = pos(offset, RECORD_ID_BYTES);
        return internalReadRecordId(pos);
    }

    /**
     * Parses a record identifier at the given buffer position: one byte of
     * reference-table index followed by a two-byte encoded aligned offset.
     */
    private RecordId internalReadRecordId(int pos) {
        SegmentId refid = getRefId(data.get(pos) & 0xff);
        int offset = ((data.get(pos + 1) & 0xff) << 8) | (data.get(pos + 2) & 0xff);
        return new RecordId(refid, offset << RECORD_ALIGN_BITS);
    }

    /**
     * Reads the string record with the given id, going through the
     * tracker-level {@link StringCache} when one is configured and
     * falling back to the owning segment's own lookup otherwise.
     */
    static String readString(final RecordId id) {
        final SegmentId segmentId = id.getSegmentId();
        StringCache cache = segmentId.getTracker().getStringCache();
        if (cache == null) {
            return segmentId.getSegment().readString(id.getOffset());
        } else {
            long msb = segmentId.getMostSignificantBits();
            long lsb = segmentId.getLeastSignificantBits();
            return cache.getString(msb, lsb, id.getOffset(), new Function<Integer, String>() {
                @Nullable
                @Override
                public String apply(Integer offset) {
                    return segmentId.getSegment().loadString(offset);
                }
            });
        }
    }

    /**
     * Reads the string record at the given offset, consulting either the
     * shared string cache or the deprecated per-segment map.
     */
    private String readString(int offset) {
        if (stringCache != null) {
            long msb = id.getMostSignificantBits();
            long lsb = id.getLeastSignificantBits();
            return stringCache.getString(msb, lsb, offset, loadString);
        } else {
            String string = strings.get(offset);
            if (string == null) {
                string = loadString(offset);
                strings.putIfAbsent(offset, string); // only keep the first copy
            }
            return string;
        }
    }

    /**
     * Loads and decodes the string record at the given offset. Small and
     * medium values are decoded inline from the segment data; long values
     * are streamed through a {@link SegmentStream} over a {@link ListRecord}
     * of value blocks.
     */
    private String loadString(int offset) {
        int pos = pos(offset, 1);
        long length = internalReadLength(pos);
        if (length < SMALL_LIMIT) {
            // 1-byte length header; UTF-8 bytes follow directly.
            byte[] bytes = new byte[(int) length];
            ByteBuffer buffer = data.duplicate();
            buffer.position(pos + 1);
            buffer.get(bytes);
            return new String(bytes, Charsets.UTF_8);
        } else if (length < MEDIUM_LIMIT) {
            // 2-byte length header.
            byte[] bytes = new byte[(int) length];
            ByteBuffer buffer = data.duplicate();
            buffer.position(pos + 2);
            buffer.get(bytes);
            return new String(bytes, Charsets.UTF_8);
        } else if (length < Integer.MAX_VALUE) {
            // 8-byte length header followed by the id of the block list.
            int size = (int) ((length + BLOCK_SIZE - 1) / BLOCK_SIZE);
            ListRecord list =
                    new ListRecord(internalReadRecordId(pos + 8), size);
            SegmentStream stream = new SegmentStream(
                    new RecordId(id, offset), list, length);
            try {
                return stream.getString();
            } finally {
                stream.close();
            }
        } else {
            throw new IllegalStateException("String is too long: " + length);
        }
    }

    /** Wraps the given record id as a map record. */
    MapRecord readMap(RecordId id) {
        return new MapRecord(id);
    }

    /** Reads the template record with the given id from its owning segment. */
    Template readTemplate(final RecordId id) {
        return id.getSegment().readTemplate(id.getOffset());
    }

    /**
     * Reads the template record at the given offset, memoizing parsed
     * templates in {@link #templates}.
     */
    private Template readTemplate(int offset) {
        Template template = templates.get(offset);
        if (template == null) {
            template = loadTemplate(offset);
            templates.putIfAbsent(offset, template); // only keep the first copy
        }
        return template;
    }

    /**
     * Parses the template record at the given offset. The 32-bit header
     * packs: bit 31 = has primary type, bit 30 = has mixin types,
     * bit 29 = zero child nodes, bit 28 = many child nodes,
     * bits 18-27 = mixin count, bits 0-17 = property count.
     */
    private Template loadTemplate(int offset) {
        int head = readInt(offset);
        boolean hasPrimaryType = (head & (1 << 31)) != 0;
        boolean hasMixinTypes = (head & (1 << 30)) != 0;
        boolean zeroChildNodes = (head & (1 << 29)) != 0;
        boolean manyChildNodes = (head & (1 << 28)) != 0;
        int mixinCount = (head >> 18) & ((1 << 10) - 1);
        int propertyCount = head & ((1 << 18) - 1);
        offset += 4;
        PropertyState primaryType = null;
        if (hasPrimaryType) {
            RecordId primaryId = readRecordId(offset);
            primaryType = PropertyStates.createProperty(
                    "jcr:primaryType", readString(primaryId), Type.NAME);
            offset += RECORD_ID_BYTES;
        }
        PropertyState mixinTypes = null;
        if (hasMixinTypes) {
            String[] mixins = new String[mixinCount];
            for (int i = 0; i < mixins.length; i++) {
                RecordId mixinId = readRecordId(offset);
                mixins[i] = readString(mixinId);
                offset += RECORD_ID_BYTES;
            }
            mixinTypes = PropertyStates.createProperty(
                    "jcr:mixinTypes", Arrays.asList(mixins), Type.NAMES);
        }
        String childName = Template.ZERO_CHILD_NODES;
        if (manyChildNodes) {
            childName = Template.MANY_CHILD_NODES;
        } else if (!zeroChildNodes) {
            // Exactly one child node: its name follows as a record id.
            RecordId childNameId = readRecordId(offset);
            childName = readString(childNameId);
            offset += RECORD_ID_BYTES;
        }
        PropertyTemplate[] properties;
        if (version.onOrAfter(V_11)) {
            properties = readPropsV11(propertyCount, offset);
        } else {
            properties = readPropsV10(propertyCount, offset);
        }
        return new Template(primaryType, mixinTypes, properties, childName);
    }

    /**
     * Reads property templates in the pre-V11 layout: each entry is a
     * property-name record id followed by a type byte (negative = multi-valued).
     */
    private PropertyTemplate[] readPropsV10(int propertyCount, int offset) {
        PropertyTemplate[] properties = new PropertyTemplate[propertyCount];
        for (int i = 0; i < propertyCount; i++) {
            RecordId propertyNameId = readRecordId(offset);
            offset += RECORD_ID_BYTES;
            byte type = readByte(offset++);
            properties[i] = new PropertyTemplate(i, readString(propertyNameId),
                    Type.fromTag(Math.abs(type), type < 0));
        }
        return properties;
    }

    /**
     * Reads property templates in the V11 layout: a single list record holds
     * all property names, followed by one type byte per property
     * (negative = multi-valued).
     */
    private PropertyTemplate[] readPropsV11(int propertyCount, int offset) {
        PropertyTemplate[] properties = new PropertyTemplate[propertyCount];
        if (propertyCount > 0) {
            RecordId id = readRecordId(offset);
            ListRecord propertyNames = new ListRecord(id, properties.length);
            offset += RECORD_ID_BYTES;
            for (int i = 0; i < propertyCount; i++) {
                byte type = readByte(offset++);
                properties[i] = new PropertyTemplate(i,
                        readString(propertyNames.getEntry(i)), Type.fromTag(
                        Math.abs(type), type < 0));
            }
        }
        return properties;
    }

    /** Reads the length field of the value record with the given id. */
    long readLength(RecordId id) {
        return id.getSegment().readLength(id.getOffset());
    }

    /** Reads the length field of the value record at the given offset. */
    long readLength(int offset) {
        return internalReadLength(pos(offset, 1));
    }

    /**
     * Decodes a variable-length length field at the given buffer position:
     * one byte (high bit 0) for small values, two bytes (high bits 10,
     * biased by {@link #SMALL_LIMIT}) for medium values, and eight bytes
     * (high bits 11, biased by {@link #MEDIUM_LIMIT}) for long values.
     */
    private long internalReadLength(int pos) {
        int length = data.get(pos++) & 0xff;
        if ((length & 0x80) == 0) {
            return length;
        } else if ((length & 0x40) == 0) {
            return ((length & 0x3f) << 8
                    | data.get(pos++) & 0xff)
                    + SMALL_LIMIT;
        } else {
            return (((long) length & 0x3f) << 56
                    | ((long) (data.get(pos++) & 0xff)) << 48
                    | ((long) (data.get(pos++) & 0xff)) << 40
                    | ((long) (data.get(pos++) & 0xff)) << 32
                    | ((long) (data.get(pos++) & 0xff)) << 24
                    | ((long) (data.get(pos++) & 0xff)) << 16
                    | ((long) (data.get(pos++) & 0xff)) << 8
                    | ((long) (data.get(pos++) & 0xff)))
                    + MEDIUM_LIMIT;
        }
    }

    //------------------------------------------------------------< Object >--

    /**
     * Renders a human-readable dump of this segment: the reference, root and
     * blob-reference tables (for data segments) followed by a hex/ASCII dump
     * of the raw data. Intended for debugging only.
     */
    @Override
    public String toString() {
        StringWriter string = new StringWriter();
        PrintWriter writer = new PrintWriter(string);
        int length = data.remaining();
        writer.format("Segment %s (%d bytes)%n", id, length);
        if (id.isDataSegmentId()) {
            writer.println("--------------------------------------------------------------------------");
            int refcount = getRefCount();
            for (int refid = 0; refid < refcount; refid++) {
                writer.format("reference %02x: %s%n", refid, getRefId(refid));
            }
            int rootcount = data.getShort(ROOT_COUNT_OFFSET) & 0xffff;
            int pos = data.position() + refcount * 16;
            for (int rootid = 0; rootid < rootcount; rootid++) {
                writer.format(
                        "root %d: %s at %04x%n", rootid,
                        RecordType.values()[data.get(pos + rootid * 3) & 0xff],
                        data.getShort(pos + rootid * 3 + 1) & 0xffff);
            }
            int blobrefcount = data.getShort(BLOBREF_COUNT_OFFSET) & 0xffff;
            pos += rootcount * 3;
            for (int blobrefid = 0; blobrefid < blobrefcount; blobrefid++) {
                int offset = data.getShort(pos + blobrefid * 2) & 0xffff;
                SegmentBlob blob = new SegmentBlob(
                        new RecordId(id, offset << RECORD_ALIGN_BITS));
                writer.format(
                        "blobref %d: %s at %04x%n", blobrefid,
                        blob.getBlobId(), offset);
            }
        }
        writer.println("--------------------------------------------------------------------------");
        // Hex dump, 16 bytes per row, addressed in encoded (aligned) offsets.
        int pos = data.limit() - ((length + 15) & ~15);
        while (pos < data.limit()) {
            writer.format("%04x: ", (MAX_SEGMENT_SIZE - data.limit() + pos) >> RECORD_ALIGN_BITS);
            for (int i = 0; i < 16; i++) {
                if (i > 0 && i % 4 == 0) {
                    writer.append(' ');
                }
                if (pos + i >= data.position()) {
                    byte b = data.get(pos + i);
                    writer.format("%02x ", b & 0xff);
                } else {
                    writer.append("   ");
                }
            }
            writer.append(' ');
            for (int i = 0; i < 16; i++) {
                if (pos + i >= data.position()) {
                    byte b = data.get(pos + i);
                    if (b >= ' ' && b < 127) {
                        writer.append((char) b);
                    } else {
                        writer.append('.');
                    }
                } else {
                    writer.append(' ');
                }
            }
            writer.println();
            pos += 16;
        }
        writer.println("--------------------------------------------------------------------------");
        writer.close();
        return string.toString();
    }

}
|
oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.segment;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkPositionIndexes;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static com.google.common.collect.Maps.newConcurrentMap;
import static org.apache.jackrabbit.oak.plugins.segment.SegmentVersion.V_11;
import static org.apache.jackrabbit.oak.plugins.segment.SegmentWriter.BLOCK_SIZE;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import javax.annotation.Nullable;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.blob.ReferenceCollector;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;
/**
* A list of records.
* <p>
* Record data is not kept in memory, but some entries are cached (templates,
* all strings in the segment).
* <p>
* This class includes method to read records from the raw bytes.
*/
public class Segment {
/**
* Number of bytes used for storing a record identifier. One byte
* is used for identifying the segment and two for the record offset
* within that segment.
*/
static final int RECORD_ID_BYTES = 1 + 2;
/**
* The limit on segment references within one segment. Since record
* identifiers use one byte to indicate the referenced segment, a single
* segment can hold references to up to 255 segments plus itself.
*/
static final int SEGMENT_REFERENCE_LIMIT = (1 << 8) - 1; // 255
/**
* The number of bytes (or bits of address space) to use for the
* alignment boundary of segment records.
*/
public static final int RECORD_ALIGN_BITS = 2; // align at the four-byte boundary
/**
* Maximum segment size. Record identifiers are stored as three-byte
* sequences with the first byte indicating the segment and the next
* two the offset within that segment. Since all records are aligned
* at four-byte boundaries, the two bytes can address up to 256kB of
* record data.
*/
public static final int MAX_SEGMENT_SIZE = 1 << (16 + RECORD_ALIGN_BITS); // 256kB
/**
* The size limit for small values. The variable length of small values
* is encoded as a single byte with the high bit as zero, which gives us
* seven bits for encoding the length of the value.
*/
static final int SMALL_LIMIT = 1 << 7;
/**
* The size limit for medium values. The variable length of medium values
* is encoded as two bytes with the highest bits of the first byte set to
* one and zero, which gives us 14 bits for encoding the length of the
* value. And since small values are never stored as medium ones, we can
* extend the size range to cover that many longer values.
*/
public static final int MEDIUM_LIMIT = (1 << (16 - 2)) + SMALL_LIMIT;
/**
* Maximum size of small blob IDs. A small blob ID is stored in a value
* record whose length field contains the pattern "1110" in its most
* significant bits. Since two bytes are used to store both the bit pattern
* and the actual length of the blob ID, a maximum of 2^12 values can be
* stored in the length field.
*/
public static final int BLOB_ID_SMALL_LIMIT = 1 << 12;
public static final int REF_COUNT_OFFSET = 5;
static final int ROOT_COUNT_OFFSET = 6;
static final int BLOBREF_COUNT_OFFSET = 8;
private final SegmentTracker tracker;
private final SegmentId id;
private final ByteBuffer data;
/**
* Version of the segment storage format.
*/
private final SegmentVersion version;
/**
* Referenced segment identifiers. Entries are initialized lazily in
* {@link #getRefId(int)}. Set to {@code null} for bulk segments.
*/
private final SegmentId[] refids;
/**
* String records read from segment. Used to avoid duplicate
* copies and repeated parsing of the same strings.
*
* @deprecated Superseded by {@link #stringCache} unless
* {@link SegmentTracker#DISABLE_STRING_CACHE} is {@code true}.
*/
@Deprecated
private final ConcurrentMap<Integer, String> strings;
private final Function<Integer, String> loadString = new Function<Integer, String>() {
@Nullable
@Override
public String apply(Integer offset) {
return loadString(offset);
}
};
/**
* Cache for string records or {@code null} if {@link #strings} is used for caching
*/
private final StringCache stringCache;
/**
* Template records read from segment. Used to avoid duplicate
* copies and repeated parsing of the same templates.
*/
private final ConcurrentMap<Integer, Template> templates = newConcurrentMap();
private volatile long accessed;
/**
* Decode a 4 byte aligned segment offset.
* @param offset 4 byte aligned segment offset
* @return decoded segment offset
*/
public static int decode(short offset) {
return (offset & 0xffff) << RECORD_ALIGN_BITS;
}
/**
* Encode a segment offset into a 4 byte aligned address packed into a {@code short}.
* @param offset segment offset
* @return encoded segment offset packed into a {@code short}
*/
public static short encode(int offset) {
return (short) (offset >> RECORD_ALIGN_BITS);
}
public Segment(SegmentTracker tracker, SegmentId id, ByteBuffer data) {
this(tracker, id, data, V_11);
}
public Segment(SegmentTracker tracker, SegmentId id, ByteBuffer data, SegmentVersion version) {
this.tracker = checkNotNull(tracker);
this.id = checkNotNull(id);
if (tracker.getStringCache() == null) {
strings = newConcurrentMap();
stringCache = null;
} else {
strings = null;
stringCache = tracker.getStringCache();
}
this.data = checkNotNull(data);
if (id.isDataSegmentId()) {
byte segmentVersion = data.get(3);
checkState(data.get(0) == '0'
&& data.get(1) == 'a'
&& data.get(2) == 'K'
&& SegmentVersion.isValid(segmentVersion));
this.refids = new SegmentId[getRefCount()];
this.refids[0] = id;
this.version = SegmentVersion.fromByte(segmentVersion);
} else {
this.refids = null;
this.version = version;
}
}
Segment(SegmentTracker tracker, byte[] buffer) {
this.tracker = checkNotNull(tracker);
this.id = tracker.newDataSegmentId();
if (tracker.getStringCache() == null) {
strings = newConcurrentMap();
stringCache = null;
} else {
strings = null;
stringCache = tracker.getStringCache();
}
this.data = ByteBuffer.wrap(checkNotNull(buffer));
this.refids = new SegmentId[SEGMENT_REFERENCE_LIMIT + 1];
this.refids[0] = id;
this.version = SegmentVersion.fromByte(buffer[3]);
this.id.setSegment(this);
}
SegmentVersion getSegmentVersion() {
return version;
}
/**
* Maps the given record offset to the respective position within the
* internal {@link #data} array. The validity of a record with the given
* length at the given offset is also verified.
*
* @param offset record offset
* @param length record length
* @return position within the data array
*/
private int pos(int offset, int length) {
checkPositionIndexes(offset, offset + length, MAX_SEGMENT_SIZE);
int pos = data.limit() - MAX_SEGMENT_SIZE + offset;
checkState(pos >= data.position());
return pos;
}
public SegmentId getSegmentId() {
return id;
}
int getRefCount() {
return (data.get(REF_COUNT_OFFSET) & 0xff) + 1;
}
public int getRootCount() {
return data.getShort(ROOT_COUNT_OFFSET) & 0xffff;
}
public RecordType getRootType(int index) {
int refCount = getRefCount();
checkArgument(index < getRootCount());
return RecordType.values()[data.get(data.position() + refCount * 16 + index * 3) & 0xff];
}
public int getRootOffset(int index) {
int refCount = getRefCount();
checkArgument(index < getRootCount());
return (data.getShort(data.position() + refCount * 16 + index * 3 + 1) & 0xffff)
<< RECORD_ALIGN_BITS;
}
SegmentId getRefId(int index) {
if (refids == null || index >= refids.length) {
String type = "data";
if (!id.isDataSegmentId()) {
type = "bulk";
}
long delta = System.currentTimeMillis() - id.getCreationTime();
throw new IllegalStateException("RefId '" + index
+ "' doesn't exist in " + type + " segment " + id
+ ". Creation date delta is " + delta + " ms.");
}
SegmentId refid = refids[index];
if (refid == null) {
synchronized (this) {
refid = refids[index];
if (refid == null) {
int refpos = data.position() + index * 16;
long msb = data.getLong(refpos);
long lsb = data.getLong(refpos + 8);
refid = tracker.getSegmentId(msb, lsb);
refids[index] = refid;
}
}
}
return refid;
}
public List<SegmentId> getReferencedIds() {
int refcount = getRefCount();
List<SegmentId> ids = newArrayListWithCapacity(refcount);
for (int refid = 0; refid < refcount; refid++) {
ids.add(getRefId(refid));
}
return ids;
}
public int size() {
return data.remaining();
}
public long getCacheSize() {
int size = 1024;
if (!data.isDirect()) {
size += size();
}
if (id.isDataSegmentId()) {
size += size();
}
return size;
}
/**
* Writes this segment to the given output stream.
*
* @param stream stream to which this segment will be written
* @throws IOException on an IO error
*/
public void writeTo(OutputStream stream) throws IOException {
ByteBuffer buffer = data.duplicate();
WritableByteChannel channel = Channels.newChannel(stream);
while (buffer.hasRemaining()) {
channel.write(buffer);
}
}
void collectBlobReferences(ReferenceCollector collector) {
int refcount = getRefCount();
int rootcount =
data.getShort(data.position() + ROOT_COUNT_OFFSET) & 0xffff;
int blobrefcount =
data.getShort(data.position() + BLOBREF_COUNT_OFFSET) & 0xffff;
int blobrefpos = data.position() + refcount * 16 + rootcount * 3;
for (int i = 0; i < blobrefcount; i++) {
int offset = (data.getShort(blobrefpos + i * 2) & 0xffff) << 2;
SegmentBlob blob = new SegmentBlob(new RecordId(id, offset));
collector.addReference(blob.getBlobId(), null);
}
}
byte readByte(int offset) {
return data.get(pos(offset, 1));
}
short readShort(int offset) {
return data.getShort(pos(offset, 2));
}
int readInt(int offset) {
return data.getInt(pos(offset, 4));
}
long readLong(int offset) {
return data.getLong(pos(offset, 8));
}
/**
* Reads the given number of bytes starting from the given position
* in this segment.
*
* @param position position within segment
* @param buffer target buffer
* @param offset offset within target buffer
* @param length number of bytes to read
*/
void readBytes(int position, byte[] buffer, int offset, int length) {
checkNotNull(buffer);
checkPositionIndexes(offset, offset + length, buffer.length);
ByteBuffer d = data.duplicate();
d.position(pos(position, length));
d.get(buffer, offset, length);
}
RecordId readRecordId(int offset) {
int pos = pos(offset, RECORD_ID_BYTES);
return internalReadRecordId(pos);
}
private RecordId internalReadRecordId(int pos) {
SegmentId refid = getRefId(data.get(pos) & 0xff);
int offset = ((data.get(pos + 1) & 0xff) << 8) | (data.get(pos + 2) & 0xff);
return new RecordId(refid, offset << RECORD_ALIGN_BITS);
}
static String readString(final RecordId id) {
return id.getSegmentId().getSegment().readString(id.getOffset());
}
private String readString(int offset) {
if (stringCache != null) {
long msb = id.getMostSignificantBits();
long lsb = id.getLeastSignificantBits();
return stringCache.getString(msb, lsb, offset, loadString);
} else {
String string = strings.get(offset);
if (string == null) {
string = loadString(offset);
strings.putIfAbsent(offset, string); // only keep the first copy
}
return string;
}
}
private String loadString(int offset) {
int pos = pos(offset, 1);
long length = internalReadLength(pos);
if (length < SMALL_LIMIT) {
byte[] bytes = new byte[(int) length];
ByteBuffer buffer = data.duplicate();
buffer.position(pos + 1);
buffer.get(bytes);
return new String(bytes, Charsets.UTF_8);
} else if (length < MEDIUM_LIMIT) {
byte[] bytes = new byte[(int) length];
ByteBuffer buffer = data.duplicate();
buffer.position(pos + 2);
buffer.get(bytes);
return new String(bytes, Charsets.UTF_8);
} else if (length < Integer.MAX_VALUE) {
int size = (int) ((length + BLOCK_SIZE - 1) / BLOCK_SIZE);
ListRecord list =
new ListRecord(internalReadRecordId(pos + 8), size);
SegmentStream stream = new SegmentStream(
new RecordId(id, offset), list, length);
try {
return stream.getString();
} finally {
stream.close();
}
} else {
throw new IllegalStateException("String is too long: " + length);
}
}
MapRecord readMap(RecordId id) {
return new MapRecord(id);
}
Template readTemplate(final RecordId id) {
return id.getSegment().readTemplate(id.getOffset());
}
/**
 * Returns the template at the given offset, caching the first loaded
 * instance per offset in this segment's template map.
 */
private Template readTemplate(int offset) {
    Template cached = templates.get(offset);
    if (cached != null) {
        return cached;
    }
    Template loaded = loadTemplate(offset);
    templates.putIfAbsent(offset, loaded); // only keep the first copy
    return loaded;
}
/**
 * Decodes a template record. The first 4 bytes form a header word:
 * bit 31 = has primary type, bit 30 = has mixin types,
 * bit 29 = zero child nodes, bit 28 = many child nodes,
 * bits 18-27 = mixin count, bits 0-17 = property count.
 * The header is followed by the optional primary type name id, the
 * mixin name ids, the optional single child node name id, and finally
 * the property templates in a version-dependent layout.
 */
private Template loadTemplate(int offset) {
    int head = readInt(offset);
    boolean hasPrimaryType = (head & (1 << 31)) != 0;
    boolean hasMixinTypes = (head & (1 << 30)) != 0;
    boolean zeroChildNodes = (head & (1 << 29)) != 0;
    boolean manyChildNodes = (head & (1 << 28)) != 0;
    int mixinCount = (head >> 18) & ((1 << 10) - 1);
    int propertyCount = head & ((1 << 18) - 1);
    offset += 4;
    PropertyState primaryType = null;
    if (hasPrimaryType) {
        RecordId primaryId = readRecordId(offset);
        primaryType = PropertyStates.createProperty(
                "jcr:primaryType", readString(primaryId), Type.NAME);
        offset += RECORD_ID_BYTES;
    }
    PropertyState mixinTypes = null;
    if (hasMixinTypes) {
        String[] mixins = new String[mixinCount];
        for (int i = 0; i < mixins.length; i++) {
            RecordId mixinId = readRecordId(offset);
            mixins[i] = readString(mixinId);
            offset += RECORD_ID_BYTES;
        }
        mixinTypes = PropertyStates.createProperty(
                "jcr:mixinTypes", Arrays.asList(mixins), Type.NAMES);
    }
    // child name is a marker constant unless there is exactly one child,
    // in which case its name record follows
    String childName = Template.ZERO_CHILD_NODES;
    if (manyChildNodes) {
        childName = Template.MANY_CHILD_NODES;
    } else if (!zeroChildNodes) {
        RecordId childNameId = readRecordId(offset);
        childName = readString(childNameId);
        offset += RECORD_ID_BYTES;
    }
    PropertyTemplate[] properties;
    if (version.onOrAfter(V_11)) {
        properties = readPropsV11(propertyCount, offset);
    } else {
        properties = readPropsV10(propertyCount, offset);
    }
    return new Template(primaryType, mixinTypes, properties, childName);
}
/**
 * Reads the property templates in the pre-V11 layout, where each
 * property stores its own name record id followed by a type tag byte
 * (the tag's sign feeds the boolean flag of Type.fromTag).
 */
private PropertyTemplate[] readPropsV10(int propertyCount, int offset) {
    PropertyTemplate[] result = new PropertyTemplate[propertyCount];
    for (int index = 0; index < propertyCount; index++) {
        RecordId nameId = readRecordId(offset);
        offset += RECORD_ID_BYTES;
        byte typeTag = readByte(offset++);
        result[index] = new PropertyTemplate(index, readString(nameId),
                Type.fromTag(Math.abs(typeTag), typeTag < 0));
    }
    return result;
}
/**
 * Reads the property templates in the V11 layout: a single list record
 * holding all property names, followed by one type tag byte per
 * property (the tag's sign feeds the boolean flag of Type.fromTag).
 */
private PropertyTemplate[] readPropsV11(int propertyCount, int offset) {
    PropertyTemplate[] result = new PropertyTemplate[propertyCount];
    if (propertyCount > 0) {
        RecordId namesId = readRecordId(offset);
        offset += RECORD_ID_BYTES;
        ListRecord propertyNames = new ListRecord(namesId, result.length);
        for (int index = 0; index < propertyCount; index++) {
            byte typeTag = readByte(offset++);
            result[index] = new PropertyTemplate(index,
                    readString(propertyNames.getEntry(index)),
                    Type.fromTag(Math.abs(typeTag), typeTag < 0));
        }
    }
    return result;
}
/**
 * Reads the length record referenced by the given identifier, from
 * whichever segment contains it.
 */
long readLength(RecordId id) {
    Segment segment = id.getSegment();
    return segment.readLength(id.getOffset());
}
/**
 * Reads a variable-length length value (see internalReadLength) stored
 * at the given record offset in this segment.
 */
long readLength(int offset) {
    return internalReadLength(pos(offset, 1));
}
/**
 * Decodes a variable-length length field at the given buffer position.
 * The top bits of the first byte select the encoding:
 * <ul>
 * <li>{@code 0xxxxxxx}: the value itself (1 byte total)</li>
 * <li>{@code 10xxxxxx}: 14-bit value plus SMALL_LIMIT (2 bytes)</li>
 * <li>{@code 11xxxxxx}: 62-bit value plus MEDIUM_LIMIT (8 bytes)</li>
 * </ul>
 * Note: the pos++ side effects make the read order significant.
 */
private long internalReadLength(int pos) {
    int length = data.get(pos++) & 0xff;
    if ((length & 0x80) == 0) {
        // single-byte encoding
        return length;
    } else if ((length & 0x40) == 0) {
        // two-byte encoding, biased by SMALL_LIMIT
        return ((length & 0x3f) << 8
                | data.get(pos++) & 0xff)
                + SMALL_LIMIT;
    } else {
        // eight-byte encoding, biased by MEDIUM_LIMIT
        return (((long) length & 0x3f) << 56
                | ((long) (data.get(pos++) & 0xff)) << 48
                | ((long) (data.get(pos++) & 0xff)) << 40
                | ((long) (data.get(pos++) & 0xff)) << 32
                | ((long) (data.get(pos++) & 0xff)) << 24
                | ((long) (data.get(pos++) & 0xff)) << 16
                | ((long) (data.get(pos++) & 0xff)) << 8
                | ((long) (data.get(pos++) & 0xff)))
                + MEDIUM_LIMIT;
    }
}
//------------------------------------------------------------< Object >--
/**
 * Renders a human-readable dump of this segment: the header (segment
 * references, root records and blob references for data segments)
 * followed by a 16-bytes-per-row hex dump of the buffer contents.
 */
@Override
public String toString() {
    StringWriter string = new StringWriter();
    PrintWriter writer = new PrintWriter(string);
    int length = data.remaining();
    writer.format("Segment %s (%d bytes)%n", id, length);
    if (id.isDataSegmentId()) {
        writer.println("--------------------------------------------------------------------------");
        // referenced segment ids
        int refcount = getRefCount();
        for (int refid = 0; refid < refcount; refid++) {
            writer.format("reference %02x: %s%n", refid, getRefId(refid));
        }
        // root records: 3 bytes each (1-byte record type + 2-byte offset)
        int rootcount = data.getShort(ROOT_COUNT_OFFSET) & 0xffff;
        int pos = data.position() + refcount * 16;
        for (int rootid = 0; rootid < rootcount; rootid++) {
            writer.format(
                    "root %d: %s at %04x%n", rootid,
                    RecordType.values()[data.get(pos + rootid * 3) & 0xff],
                    data.getShort(pos + rootid * 3 + 1) & 0xffff);
        }
        // blob references: 2-byte aligned offsets following the roots
        int blobrefcount = data.getShort(BLOBREF_COUNT_OFFSET) & 0xffff;
        pos += rootcount * 3;
        for (int blobrefid = 0; blobrefid < blobrefcount; blobrefid++) {
            int offset = data.getShort(pos + blobrefid * 2) & 0xffff;
            SegmentBlob blob = new SegmentBlob(
                    new RecordId(id, offset << RECORD_ALIGN_BITS));
            writer.format(
                    "blobref %d: %s at %04x%n", blobrefid,
                    blob.getBlobId(), offset);
        }
    }
    writer.println("--------------------------------------------------------------------------");
    // hex dump, aligned down to a 16-byte boundary before the data start
    int pos = data.limit() - ((length + 15) & ~15);
    while (pos < data.limit()) {
        writer.format("%04x: ", (MAX_SEGMENT_SIZE - data.limit() + pos) >> RECORD_ALIGN_BITS);
        for (int i = 0; i < 16; i++) {
            if (i > 0 && i % 4 == 0) {
                writer.append(' ');
            }
            if (pos + i >= data.position()) {
                byte b = data.get(pos + i);
                writer.format("%02x ", b & 0xff);
            } else {
                writer.append("   ");
            }
        }
        writer.append(' ');
        // printable-ASCII rendering of the same 16 bytes
        for (int i = 0; i < 16; i++) {
            if (pos + i >= data.position()) {
                byte b = data.get(pos + i);
                if (b >= ' ' && b < 127) {
                    writer.append((char) b);
                } else {
                    writer.append('.');
                }
            } else {
                writer.append(' ');
            }
        }
        writer.println();
        pos += 16;
    }
    writer.println("--------------------------------------------------------------------------");
    writer.close();
    return string.toString();
}
}
|
OAK-3330: FileStore lock contention with concurrent writers
Avoid reading segment when reading strings and the string is in the cache
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1708401 13f79535-47bb-0310-9956-ffa450edef68
|
oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/segment/Segment.java
|
OAK-3330: FileStore lock contention with concurrent writers Avoid reading segment when reading strings and the string is in the cache
|
|
Java
|
apache-2.0
|
f1ec3bc3b9a8b5b732ad17f040afa8587e6ddbe4
| 0
|
wcm-io-qa/wcm-io-qa-galenium,wcm-io-qa/wcm-io-qa-galenium,wcm-io-qa/wcm-io-qa-galenium
|
/*
* #%L
* wcm.io
* %%
* Copyright (C) 2018 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.qa.galenium.interaction;
import static io.wcm.qa.galenium.reporting.GaleniumReportUtil.getLogger;
import static io.wcm.qa.galenium.util.GaleniumContext.getDriver;
import java.net.URL;
import org.apache.commons.lang3.StringUtils;
import io.wcm.qa.galenium.util.GaleniumContext;
/**
* Alert related convenience methods.
*/
/**
 * Browser related convenience methods for navigation and URL checks.
 */
public final class Browser {

  private static final String ABOUT_BLANK = "about:blank";

  private Browser() {
    // static utility class, not to be instantiated
  }

  /**
   * Navigates one step back in the browser history.
   */
  public static void back() {
    getLogger().info("navigating back");
    getDriver().navigate().back();
  }

  /**
   * Navigates one step forward in the browser history.
   */
  public static void forward() {
    getLogger().info("navigating forward");
    getDriver().navigate().forward();
  }

  /**
   * @param url to check against
   * @return whether the driver's current URL equals the given one
   */
  public static boolean isCurrentUrl(String url) {
    getLogger().debug("checking current URL");
    String currentUrl = getDriver().getCurrentUrl();
    return StringUtils.equals(url, currentUrl);
  }

  /**
   * Loads the given URL in the browser.
   * @param url to load
   */
  public static void load(String url) {
    getLogger().info("loading URL: '" + url + "'");
    getDriver().get(url);
  }

  /**
   * Loads the blank page 'about:blank'.
   */
  public static void loadBlankPage() {
    load(ABOUT_BLANK);
  }

  /**
   * Loads the URL and asserts the browser actually ended up there.
   * @param url to load
   */
  public static void loadExactly(String url) {
    load(url);
    String currentUrl = getDriver().getCurrentUrl();
    GaleniumContext.getAssertion().assertEquals(url, currentUrl, "Current URL should match.");
  }

  /**
   * Navigates the browser to the given URL.
   * @param url URL to navigate to
   */
  public static void navigateTo(String url) {
    getLogger().info("navigating to URL: '" + url + "'");
    getDriver().navigate().to(url);
  }

  /**
   * Navigates the browser to the given URL.
   * @param url to navigate to
   */
  public static void navigateTo(URL url) {
    getLogger().info("navigating to URL: '" + url + "'");
    getDriver().navigate().to(url);
  }

  /**
   * Reloads the current page.
   */
  public static void refresh() {
    getLogger().info("refreshing browser");
    getDriver().navigate().refresh();
  }
}
|
convenience/src/main/java/io/wcm/qa/galenium/interaction/Browser.java
|
/*
* #%L
* wcm.io
* %%
* Copyright (C) 2018 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.qa.galenium.interaction;
import static io.wcm.qa.galenium.reporting.GaleniumReportUtil.getLogger;
import static io.wcm.qa.galenium.util.GaleniumContext.getDriver;
import java.net.URL;
import org.apache.commons.lang3.StringUtils;
import io.wcm.qa.galenium.util.GaleniumContext;
/**
* Alert related convenience methods.
*/
/**
 * Browser related convenience methods for navigation and URL checks.
 */
public final class Browser {

  private static final String ABOUT_BLANK = "about:blank";

  private Browser() {
    // static utility class, not to be instantiated
  }

  /**
   * Navigates one step back in the browser history.
   */
  public static void back() {
    getLogger().debug("navigating back");
    getDriver().navigate().back();
  }

  /**
   * Navigates one step forward in the browser history.
   */
  public static void forward() {
    getLogger().debug("navigating forward");
    getDriver().navigate().forward();
  }

  /**
   * @param url to check against
   * @return whether the driver's current URL equals the given one
   */
  public static boolean isCurrentUrl(String url) {
    getLogger().trace("checking current URL");
    String currentUrl = getDriver().getCurrentUrl();
    return StringUtils.equals(url, currentUrl);
  }

  /**
   * Loads the given URL in the browser.
   * @param url to load
   */
  public static void load(String url) {
    getLogger().trace("loading URL: '" + url + "'");
    getDriver().get(url);
  }

  /**
   * Loads the blank page 'about:blank'.
   */
  public static void loadBlankPage() {
    load(ABOUT_BLANK);
  }

  /**
   * Loads the URL and asserts the browser actually ended up there.
   * @param url to load
   */
  public static void loadExactly(String url) {
    load(url);
    String currentUrl = getDriver().getCurrentUrl();
    GaleniumContext.getAssertion().assertEquals(url, currentUrl, "Current URL should match.");
  }

  /**
   * Navigates the browser to the given URL.
   * @param url URL to navigate to
   */
  public static void navigateTo(String url) {
    getLogger().debug("navigating to URL: '" + url + "'");
    getDriver().navigate().to(url);
  }

  /**
   * Navigates the browser to the given URL.
   * @param url to navigate to
   */
  public static void navigateTo(URL url) {
    getLogger().debug("navigating to URL: '" + url + "'");
    getDriver().navigate().to(url);
  }

  /**
   * Reloads the current page.
   */
  public static void refresh() {
    getLogger().debug("refreshing browser");
    getDriver().navigate().refresh();
  }
}
|
browser navigation is important info
|
convenience/src/main/java/io/wcm/qa/galenium/interaction/Browser.java
|
browser navigation is important info
|
|
Java
|
apache-2.0
|
1b889ae8119cba916b3ee8a6c69f7672bcaf138f
| 0
|
adejanovski/java-driver,tolbertam/java-driver,adejanovski/java-driver,tommystendahl/java-driver,tommystendahl/java-driver,kjniemi/java-driver,mcanini/java-driver,mebigfatguy/java-driver,DICL/cassandra-driver,tommystendahl/java-driver,DICL/cassandra-driver,mebigfatguy/java-driver,kjniemi/java-driver,tolbertam/java-driver,mike-tr-adamson/java-driver,tolbertam/java-driver,mike-tr-adamson/java-driver,DICL/cassandra-driver,adejanovski/java-driver,kjniemi/java-driver,fromanator/java-driver,mcanini/java-driver,mebigfatguy/java-driver,fromanator/java-driver,mcanini/java-driver,fromanator/java-driver,mike-tr-adamson/java-driver
|
/*
* Copyright (C) 2012-2015 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core;
import java.util.*;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Describes a Table.
*/
public class TableMetadata {
private static final Logger logger = LoggerFactory.getLogger(TableMetadata.class);
static final String CF_NAME = "columnfamily_name";
private static final String CF_ID = "cf_id";
private static final String KEY_VALIDATOR = "key_validator";
private static final String COMPARATOR = "comparator";
private static final String VALIDATOR = "default_validator";
private static final String KEY_ALIASES = "key_aliases";
private static final String COLUMN_ALIASES = "column_aliases";
private static final String VALUE_ALIAS = "value_alias";
private static final String DEFAULT_KEY_ALIAS = "key";
private static final String DEFAULT_COLUMN_ALIAS = "column";
private static final String DEFAULT_VALUE_ALIAS = "value";
// Orders columns lexicographically by name; build() uses this so that
// non-PK columns appear in the same order as a 'SELECT * FROM ...'.
private static final Comparator<ColumnMetadata> columnMetadataComparator = new Comparator<ColumnMetadata>() {
    public int compare(ColumnMetadata c1, ColumnMetadata c2) {
        return c1.getName().compareTo(c2.getName());
    }
};
private final KeyspaceMetadata keyspace;
private final String name;
private final UUID id;
private final List<ColumnMetadata> partitionKey;
private final List<ColumnMetadata> clusteringColumns;
private final Map<String, ColumnMetadata> columns;
private final Options options;
private final List<Order> clusteringOrder;
private final VersionNumber cassandraVersion;
/**
 * Clustering orders.
 * <p>
 * This is used by {@link #getClusteringOrder} to indicate the clustering
 * order of a table.
 */
public static enum Order {
    ASC, DESC;

    // Guava predicate used by asCQLQuery to detect the all-ascending
    // default, in which case no CLUSTERING ORDER BY clause is emitted.
    static final Predicate<Order> isAscending = new Predicate<Order>() {
        public boolean apply(Order o) {
            return o == ASC;
        }
    };
}
/**
 * Creates the metadata holder; instances are created via {@link #build}.
 * The passed collections are stored by reference, not copied (build
 * continues to fill them after construction).
 */
private TableMetadata(KeyspaceMetadata keyspace,
                      String name,
                      UUID id,
                      List<ColumnMetadata> partitionKey,
                      List<ColumnMetadata> clusteringColumns,
                      LinkedHashMap<String, ColumnMetadata> columns,
                      Options options,
                      List<Order> clusteringOrder,
                      VersionNumber cassandraVersion) {
    this.keyspace = keyspace;
    this.name = name;
    this.id = id;
    this.partitionKey = partitionKey;
    this.clusteringColumns = clusteringColumns;
    this.columns = columns;
    this.options = options;
    this.clusteringOrder = clusteringOrder;
    this.cassandraVersion = cassandraVersion;
}
/**
 * Builds table metadata from a row of the schema table plus the raw
 * column definitions, handling the layout differences between
 * Cassandra 1.2 and 2.x (aliases vs. explicit PK columns).
 */
static TableMetadata build(KeyspaceMetadata ksm, Row row, Map<String, ColumnMetadata.Raw> rawCols, VersionNumber cassandraVersion) {
    String name = row.getString(CF_NAME);
    // cf_id is only present from C* 2.1 onwards
    UUID id = (cassandraVersion.getMajor() > 2 || (cassandraVersion.getMajor() == 2 && cassandraVersion.getMinor() >= 1))
            ? row.getUUID(CF_ID)
            : null;
    CassandraTypeParser.ParseResult keyValidator = CassandraTypeParser.parseWithComposite(row.getString(KEY_VALIDATOR));
    CassandraTypeParser.ParseResult comparator = CassandraTypeParser.parseWithComposite(row.getString(COMPARATOR));
    // column aliases only matter for the C* 1.2 schema layout
    List<String> columnAliases = cassandraVersion.getMajor() >= 2 || row.getString(COLUMN_ALIASES) == null
            ? Collections.<String>emptyList()
            : SimpleJSONParser.parseStringList(row.getString(COLUMN_ALIASES));
    int clusteringSize = findClusteringSize(comparator, rawCols.values(), columnAliases, cassandraVersion);
    boolean isDense = clusteringSize != comparator.types.size() - 1;
    boolean isCompact = isDense || !comparator.isComposite;
    // PK slots are pre-sized and filled by index below
    List<ColumnMetadata> partitionKey = nullInitializedList(keyValidator.types.size());
    List<ColumnMetadata> clusteringColumns = nullInitializedList(clusteringSize);
    List<Order> clusteringOrder = nullInitializedList(clusteringSize);
    // We use a linked hashmap because we will keep this in the order of a 'SELECT * FROM ...'.
    LinkedHashMap<String, ColumnMetadata> columns = new LinkedHashMap<String, ColumnMetadata>();
    Options options = null;
    try {
        options = new Options(row, isCompact, cassandraVersion);
    } catch (RuntimeException e) {
        // See ControlConnection#refreshSchema for why we'd rather not probably this further. Since table options is one thing
        // that tends to change often in Cassandra, it's worth special casing this.
        logger.error(String.format("Error parsing schema options for table %s.%s: "
                        + "Cluster.getMetadata().getKeyspace(\"%s\").getTable(\"%s\").getOptions() will return null",
                ksm.getName(), name, ksm.getName(), name), e);
    }
    TableMetadata tm = new TableMetadata(ksm, name, id, partitionKey, clusteringColumns, columns, options, clusteringOrder, cassandraVersion);
    // We use this temporary set just so non PK columns are added in lexicographical order, which is the one of a
    // 'SELECT * FROM ...'
    Set<ColumnMetadata> otherColumns = new TreeSet<ColumnMetadata>(columnMetadataComparator);
    if (cassandraVersion.getMajor() < 2) {
        // In C* 1.2, only the REGULAR columns are in the columns schema table, so we need to add the names from
        // the aliases (and make sure we handle default aliases).
        List<String> keyAliases = row.getString(KEY_ALIASES) == null
                ? Collections.<String>emptyList()
                : SimpleJSONParser.parseStringList(row.getString(KEY_ALIASES));
        for (int i = 0; i < partitionKey.size(); i++) {
            String alias = keyAliases.size() > i ? keyAliases.get(i) : (i == 0 ? DEFAULT_KEY_ALIAS : DEFAULT_KEY_ALIAS + (i + 1));
            partitionKey.set(i, ColumnMetadata.forAlias(tm, alias, keyValidator.types.get(i)));
        }
        for (int i = 0; i < clusteringSize; i++) {
            String alias = columnAliases.size() > i ? columnAliases.get(i) : DEFAULT_COLUMN_ALIAS + (i + 1);
            clusteringColumns.set(i, ColumnMetadata.forAlias(tm, alias, comparator.types.get(i)));
            clusteringOrder.set(i, comparator.reversed.get(i) ? Order.DESC : Order.ASC);
        }
        // We have a value alias if we're dense
        if (isDense) {
            String alias = row.isNull(VALUE_ALIAS) ? DEFAULT_VALUE_ALIAS : row.getString(VALUE_ALIAS);
            DataType type = CassandraTypeParser.parseOne(row.getString(VALIDATOR));
            otherColumns.add(ColumnMetadata.forAlias(tm, alias, type));
        }
    }
    // slot each raw column into its PK position, or collect it as regular
    for (ColumnMetadata.Raw rawCol : rawCols.values()) {
        ColumnMetadata col = ColumnMetadata.fromRaw(tm, rawCol);
        switch (rawCol.kind) {
            case PARTITION_KEY:
                partitionKey.set(rawCol.componentIndex, col);
                break;
            case CLUSTERING_KEY:
                clusteringColumns.set(rawCol.componentIndex, col);
                clusteringOrder.set(rawCol.componentIndex, rawCol.isReversed ? Order.DESC : Order.ASC);
                break;
            default:
                otherColumns.add(col);
                break;
        }
    }
    // final order: partition key, clustering columns, then the rest
    for (ColumnMetadata c : partitionKey)
        columns.put(c.getName(), c);
    for (ColumnMetadata c : clusteringColumns)
        columns.put(c.getName(), c);
    for (ColumnMetadata c : otherColumns)
        columns.put(c.getName(), c);
    return tm;
}
/**
 * Infers the number of clustering columns: for C* 2.0+ it is the highest
 * clustering 'componentIndex' plus one; for 1.2 it is derived from the
 * comparator shape, the column aliases and the regular columns.
 */
private static int findClusteringSize(CassandraTypeParser.ParseResult comparator,
                                      Collection<ColumnMetadata.Raw> cols,
                                      List<String> columnAliases,
                                      VersionNumber cassandraVersion) {
    // In 2.0, this is relatively easy, we just find the biggest 'componentIndex' amongst the clustering columns.
    // For 1.2 however, this is slightly more subtle: we need to infer it based on whether the comparator is composite or not, and whether we have
    // regular columns or not.
    if (cassandraVersion.getMajor() >= 2) {
        int maxId = -1;
        for (ColumnMetadata.Raw col : cols)
            if (col.kind == ColumnMetadata.Raw.Kind.CLUSTERING_KEY)
                maxId = Math.max(maxId, col.componentIndex);
        return maxId + 1;
    } else {
        int size = comparator.types.size();
        if (comparator.isComposite)
            return !comparator.collections.isEmpty() || (columnAliases.size() == size - 1 && comparator.types.get(size - 1).equals(DataType.text())) ? size - 1 : size;
        else
            // We know cols only has the REGULAR ones for 1.2
            return !columnAliases.isEmpty() || cols.isEmpty() ? size : 0;
    }
}
/**
 * Creates a mutable list of the given size with every slot set to null,
 * so callers can later fill positions by index.
 */
private static <T> List<T> nullInitializedList(int size) {
    return new ArrayList<T>(Collections.<T>nCopies(size, null));
}
/**
 * The simple name of this CQL table.
 *
 * @return the table name.
 */
public String getName() {
    return this.name;
}
/**
 * The unique identifier of this table.
 * <p>
 * Note: this id is only populated for Cassandra 2.1 and above; it is
 * {@code null} for earlier versions.
 *
 * @return the unique id of the table, or {@code null} if unavailable.
 */
public UUID getId() {
    return this.id;
}
/**
 * The keyspace containing this table.
 *
 * @return the metadata of the keyspace this table belongs to.
 */
public KeyspaceMetadata getKeyspace() {
    return this.keyspace;
}
/**
 * Looks up a column of this table by name.
 *
 * @param name the column name; treated as a case-insensitive identifier
 *        unless enclosed in double-quotes (see {@link Metadata#quote}).
 * @return the column's metadata, or {@code null} if no such column exists.
 */
public ColumnMetadata getColumn(String name) {
    String key = Metadata.handleId(name);
    return columns.get(key);
}
/**
 * All columns of this table, as a fresh list the caller may modify.
 * <p>
 * The order matches a {@code SELECT * FROM thisTable}: partition key
 * first, then clustering columns in defined order, then the remaining
 * columns alphabetically.
 *
 * @return a new list with the metadata of every column of this table.
 */
public List<ColumnMetadata> getColumns() {
    List<ColumnMetadata> copy = new ArrayList<ColumnMetadata>(columns.values());
    return copy;
}
/**
 * The columns composing the primary key: partition key columns followed
 * by clustering columns.
 * <p>
 * A table always has at least a partition key, so the result is never
 * empty.
 *
 * @return a new list with the primary key columns of this table.
 */
public List<ColumnMetadata> getPrimaryKey() {
    int size = partitionKey.size() + clusteringColumns.size();
    List<ColumnMetadata> pk = new ArrayList<ColumnMetadata>(size);
    pk.addAll(partitionKey);
    pk.addAll(clusteringColumns);
    return pk;
}
/**
 * The columns composing the partition key of this table.
 * <p>
 * A table always has a partition key, so the result has at least one
 * element.
 *
 * @return an unmodifiable view of the partition key columns.
 */
public List<ColumnMetadata> getPartitionKey() {
    List<ColumnMetadata> view = Collections.unmodifiableList(partitionKey);
    return view;
}
/**
 * The clustering columns of this table.
 *
 * @return an unmodifiable view of the clustering columns; empty when the
 *         table has none.
 */
public List<ColumnMetadata> getClusteringColumns() {
    List<ColumnMetadata> view = Collections.unmodifiableList(clusteringColumns);
    return view;
}
/**
 * Returns the clustering order for this table.
 * <p>
 * The returned list contains the clustering order of each clustering
 * column: the {@code i}th element corresponds to the order (ascending or
 * descending) of the {@code i}th clustering column (see
 * {@link #getClusteringColumns}). Note that a table defined without any
 * particular clustering order is equivalent to one for which all the
 * clustering keys are in ascending order.
 *
 * @return an unmodifiable list with the clustering order for each
 *         clustering column.
 */
public List<Order> getClusteringOrder() {
    // Wrap like getPartitionKey()/getClusteringColumns() so callers cannot
    // mutate this table's internal state through the returned list.
    return Collections.unmodifiableList(clusteringOrder);
}
/**
 * The configured options of this table.
 *
 * @return the table options ({@code null} when option parsing failed, as
 *         logged during metadata construction).
 */
public Options getOptions() {
    return this.options;
}
// Package-private mutator: adds (or replaces) a column in this table's
// column map, keyed by column name.
void add(ColumnMetadata column) {
    columns.put(column.getName(), column);
}
/**
 * Returns CQL queries representing this table and its indexes.
 * <p>
 * The result allows recreating the schema of this table together with
 * the indexes defined on its columns, formatted for human readability.
 *
 * @return the CQL queries representing this table schema as a
 *         {@code String}.
 */
public String exportAsString() {
    StringBuilder sb = new StringBuilder();
    sb.append(asCQLQuery(true));
    // append one CREATE INDEX statement per indexed column
    for (ColumnMetadata column : columns.values()) {
        ColumnMetadata.IndexMetadata index = column.getIndex();
        if (index != null) {
            sb.append('\n').append(index.asCQLQuery());
        }
    }
    return sb.toString();
}
/**
 * Returns a single-line 'CREATE TABLE' query with the options
 * corresponding to this table definition.
 * <p>
 * The returned string is not formatted in any way; use
 * {@link #exportAsString} for a human-readable variant that also
 * includes indexes.
 *
 * @return the 'CREATE TABLE' query corresponding to this table.
 * @see #exportAsString
 */
public String asCQLQuery() {
    boolean formatted = false;
    return asCQLQuery(formatted);
}
/**
 * Two table metadata instances are equal when every compared field
 * (name, key structure, columns, options, clustering order and
 * Cassandra version) is equal.
 */
@Override
public boolean equals(Object o) {
    if (this == o)
        return true;
    if (o == null || getClass() != o.getClass())
        return false;
    TableMetadata other = (TableMetadata) o;
    return name.equals(other.name)
            && partitionKey.equals(other.partitionKey)
            && clusteringColumns.equals(other.clusteringColumns)
            && columns.equals(other.columns)
            && options.equals(other.options)
            && clusteringOrder.equals(other.clusteringOrder)
            && cassandraVersion.equals(other.cassandraVersion);
}
/**
 * Hash consistent with {@link #equals}: a 31-based accumulation over the
 * same fields, in the same order, producing identical values as before.
 */
@Override
public int hashCode() {
    Object[] fields = {
            name, partitionKey, clusteringColumns, columns,
            options, clusteringOrder, cassandraVersion
    };
    int result = fields[0].hashCode();
    for (int i = 1; i < fields.length; i++) {
        result = 31 * result + fields[i].hashCode();
    }
    return result;
}
/**
 * Builds the 'CREATE TABLE' statement for this table: column list,
 * PRIMARY KEY clause, then the WITH options whose presence depends on
 * the connected Cassandra version.
 *
 * @param formatted whether to emit newlines and indentation.
 */
private String asCQLQuery(boolean formatted) {
    StringBuilder sb = new StringBuilder();
    sb.append("CREATE TABLE ").append(Metadata.escapeId(keyspace.getName())).append('.').append(Metadata.escapeId(name)).append(" (");
    newLine(sb, formatted);
    for (ColumnMetadata cm : columns.values())
        newLine(sb.append(spaces(4, formatted)).append(cm).append(','), formatted);
    // PK
    sb.append(spaces(4, formatted)).append("PRIMARY KEY (");
    if (partitionKey.size() == 1) {
        sb.append(partitionKey.get(0).getName());
    } else {
        // composite partition key: wrap in an extra pair of parentheses
        sb.append('(');
        boolean first = true;
        for (ColumnMetadata cm : partitionKey) {
            if (first) first = false; else sb.append(", ");
            sb.append(Metadata.escapeId(cm.getName()));
        }
        sb.append(')');
    }
    for (ColumnMetadata cm : clusteringColumns)
        sb.append(", ").append(Metadata.escapeId(cm.getName()));
    sb.append(')');
    newLine(sb, formatted);
    // end PK
    // Options
    sb.append(") WITH ");
    if (options.isCompactStorage)
        and(sb.append("COMPACT STORAGE"), formatted);
    // CLUSTERING ORDER BY is only emitted when some column is descending
    if (!Iterables.all(clusteringOrder, Order.isAscending))
        and(appendClusteringOrder(sb), formatted);
    sb.append("read_repair_chance = ").append(options.readRepair);
    and(sb, formatted).append("dclocal_read_repair_chance = ").append(options.localReadRepair);
    // replicate_on_write is only emitted for versions before 2.1
    if (cassandraVersion.getMajor() < 2 || (cassandraVersion.getMajor() == 2 && cassandraVersion.getMinor() == 0))
        and(sb, formatted).append("replicate_on_write = ").append(options.replicateOnWrite);
    and(sb, formatted).append("gc_grace_seconds = ").append(options.gcGrace);
    and(sb, formatted).append("bloom_filter_fp_chance = ").append(options.bfFpChance);
    // caching: plain string form before 2.1, map form from 2.1 onwards
    if (cassandraVersion.getMajor() < 2 || cassandraVersion.getMajor() == 2 && cassandraVersion.getMinor() < 1)
        and(sb, formatted).append("caching = '").append(options.caching.get("keys")).append('\'');
    else
        and(sb, formatted).append("caching = ").append(formatOptionMap(options.caching));
    if (options.comment != null)
        and(sb, formatted).append("comment = '").append(options.comment.replace("'","''")).append('\'');
    and(sb, formatted).append("compaction = ").append(formatOptionMap(options.compaction));
    and(sb, formatted).append("compression = ").append(formatOptionMap(options.compression));
    if (cassandraVersion.getMajor() >= 2) {
        and(sb, formatted).append("default_time_to_live = ").append(options.defaultTTL);
        and(sb, formatted).append("speculative_retry = '").append(options.speculativeRetry).append('\'');
        if (options.indexInterval != null)
            and(sb, formatted).append("index_interval = ").append(options.indexInterval);
    }
    if (cassandraVersion.getMajor() > 2 || (cassandraVersion.getMajor() == 2 && cassandraVersion.getMinor() >= 1)) {
        and(sb, formatted).append("min_index_interval = ").append(options.minIndexInterval);
        and(sb, formatted).append("max_index_interval = ").append(options.maxIndexInterval);
    }
    sb.append(';');
    return sb.toString();
}
// Renders this table as its single-line 'CREATE TABLE' statement.
@Override
public String toString() {
    return asCQLQuery();
}
/**
 * Appends the 'CLUSTERING ORDER BY (col ASC, ...)' fragment, listing
 * each clustering column with its order.
 */
private StringBuilder appendClusteringOrder(StringBuilder sb) {
    sb.append("CLUSTERING ORDER BY (");
    String separator = "";
    for (int i = 0; i < clusteringColumns.size(); i++) {
        sb.append(separator);
        separator = ", ";
        sb.append(clusteringColumns.get(i).getName()).append(' ').append(clusteringOrder.get(i));
    }
    return sb.append(')');
}
/**
 * Renders an option map as a CQL map literal; keys are quoted, values
 * are quoted unless they parse as an integer.
 */
private static String formatOptionMap(Map<String, String> m) {
    StringBuilder sb = new StringBuilder("{ ");
    boolean needsSeparator = false;
    for (Map.Entry<String, String> entry : m.entrySet()) {
        if (needsSeparator)
            sb.append(", ");
        needsSeparator = true;
        sb.append('\'').append(entry.getKey()).append("' : ");
        String value = entry.getValue();
        try {
            // integers are emitted unquoted
            sb.append(Integer.parseInt(value));
        } catch (NumberFormatException e) {
            sb.append('\'').append(value).append('\'');
        }
    }
    return sb.append(" }").toString();
}
// Appends the separator between WITH options: an optional newline plus
// indent (when formatted), then " AND ".
private StringBuilder and(StringBuilder sb, boolean formatted) {
    return newLine(sb, formatted).append(spaces(2, formatted)).append(" AND ");
}
/**
 * Returns {@code n} spaces when formatted output is requested, and an
 * empty string otherwise.
 */
static String spaces(int n, boolean formatted) {
    if (!formatted)
        return "";
    char[] blanks = new char[n];
    Arrays.fill(blanks, ' ');
    return new String(blanks);
}
/**
 * Appends a newline to the builder when formatted output is requested;
 * always returns the builder for chaining.
 */
static StringBuilder newLine(StringBuilder sb, boolean formatted) {
    return formatted ? sb.append('\n') : sb;
}
    /**
     * The options of a table (compaction, compression, read repair, caching, ...)
     * as read from the system schema tables. Which options are populated depends
     * on the Cassandra version the driver is connected to; options missing on a
     * given version fall back to the DEFAULT_* constants below.
     */
    public static class Options {
        // Names of the option columns in the system schema tables.
        private static final String COMMENT = "comment";
        private static final String READ_REPAIR = "read_repair_chance";
        private static final String LOCAL_READ_REPAIR = "local_read_repair_chance";
        private static final String REPLICATE_ON_WRITE = "replicate_on_write";
        private static final String GC_GRACE = "gc_grace_seconds";
        private static final String BF_FP_CHANCE = "bloom_filter_fp_chance";
        private static final String CACHING = "caching";
        private static final String COMPACTION_CLASS = "compaction_strategy_class";
        private static final String COMPACTION_OPTIONS = "compaction_strategy_options";
        // NOTE(review): the two compaction-threshold constants appear unused in this
        // class -- confirm whether other code reads them before removing.
        private static final String MIN_COMPACTION_THRESHOLD = "min_compaction_threshold";
        private static final String MAX_COMPACTION_THRESHOLD = "max_compaction_threshold";
        private static final String POPULATE_CACHE_ON_FLUSH = "populate_io_cache_on_flush";
        private static final String COMPRESSION_PARAMS = "compression_parameters";
        private static final String MEMTABLE_FLUSH_PERIOD_MS = "memtable_flush_period_in_ms";
        private static final String DEFAULT_TTL = "default_time_to_live";
        private static final String SPECULATIVE_RETRY = "speculative_retry";
        private static final String INDEX_INTERVAL = "index_interval";
        private static final String MIN_INDEX_INTERVAL = "min_index_interval";
        private static final String MAX_INDEX_INTERVAL = "max_index_interval";
        // Defaults used when an option column is absent or null (typically because
        // the connected Cassandra version does not expose that option).
        private static final boolean DEFAULT_REPLICATE_ON_WRITE = true;
        private static final double DEFAULT_BF_FP_CHANCE = 0.01;
        private static final boolean DEFAULT_POPULATE_CACHE_ON_FLUSH = false;
        private static final int DEFAULT_MEMTABLE_FLUSH_PERIOD = 0;
        private static final int DEFAULT_DEFAULT_TTL = 0;
        private static final String DEFAULT_SPECULATIVE_RETRY = "NONE";
        private static final int DEFAULT_INDEX_INTERVAL = 128;
        private static final int DEFAULT_MIN_INDEX_INTERVAL = 128;
        private static final int DEFAULT_MAX_INDEX_INTERVAL = 2048;
        private final boolean isCompactStorage;
        private final String comment;
        private final double readRepair;
        private final double localReadRepair;
        private final boolean replicateOnWrite;
        private final int gcGrace;
        private final double bfFpChance;
        private final Map<String, String> caching;
        private final boolean populateCacheOnFlush;
        private final int memtableFlushPeriodMs;
        private final int defaultTTL;
        private final String speculativeRetry;
        // Integer (not int) for the three interval options: null means "not
        // applicable on this Cassandra version" (see the accessors' Javadoc).
        private final Integer indexInterval;
        private final Integer minIndexInterval;
        private final Integer maxIndexInterval;
        private final Map<String, String> compaction = new HashMap<String, String>();
        private final Map<String, String> compression = new HashMap<String, String>();
        Options(Row row, boolean isCompactStorage, VersionNumber version) {
            this.isCompactStorage = isCompactStorage;
            // An absent comment is normalized to the empty string, so getComment()
            // never returns null.
            this.comment = isNullOrAbsent(row, COMMENT) ? "" : row.getString(COMMENT);
            this.readRepair = row.getDouble(READ_REPAIR);
            this.localReadRepair = row.getDouble(LOCAL_READ_REPAIR);
            boolean is210OrMore = version.getMajor() > 2 || (version.getMajor() == 2 && version.getMinor() >= 1);
            // replicate_on_write was removed in C* 2.1; report the default there.
            this.replicateOnWrite = is210OrMore || isNullOrAbsent(row, REPLICATE_ON_WRITE) ? DEFAULT_REPLICATE_ON_WRITE : row.getBool(REPLICATE_ON_WRITE);
            this.gcGrace = row.getInt(GC_GRACE);
            this.bfFpChance = isNullOrAbsent(row, BF_FP_CHANCE) ? DEFAULT_BF_FP_CHANCE : row.getDouble(BF_FP_CHANCE);
            // C* 2.1+ stores caching as a JSON map; earlier versions store a single
            // string, which is exposed here under the "keys" key.
            this.caching = is210OrMore
                ? SimpleJSONParser.parseStringMap(row.getString(CACHING))
                : ImmutableMap.of("keys", row.getString(CACHING));
            this.populateCacheOnFlush = isNullOrAbsent(row, POPULATE_CACHE_ON_FLUSH) ? DEFAULT_POPULATE_CACHE_ON_FLUSH : row.getBool(POPULATE_CACHE_ON_FLUSH);
            // The three options below do not exist before C* 2.0.
            this.memtableFlushPeriodMs = version.getMajor() < 2 || isNullOrAbsent(row, MEMTABLE_FLUSH_PERIOD_MS) ? DEFAULT_MEMTABLE_FLUSH_PERIOD : row.getInt(MEMTABLE_FLUSH_PERIOD_MS);
            this.defaultTTL = version.getMajor() < 2 || isNullOrAbsent(row, DEFAULT_TTL) ? DEFAULT_DEFAULT_TTL : row.getInt(DEFAULT_TTL);
            this.speculativeRetry = version.getMajor() < 2 || isNullOrAbsent(row, SPECULATIVE_RETRY) ? DEFAULT_SPECULATIVE_RETRY : row.getString(SPECULATIVE_RETRY);
            // index_interval is a per-table option only in C* 2.0.x; it is replaced
            // by min/max_index_interval in 2.1+ and does not exist in 1.2.
            if (version.getMajor() >= 2 && !is210OrMore)
                this.indexInterval = isNullOrAbsent(row, INDEX_INTERVAL) ? DEFAULT_INDEX_INTERVAL : row.getInt(INDEX_INTERVAL);
            else
                this.indexInterval = null;
            if (is210OrMore) {
                this.minIndexInterval = isNullOrAbsent(row, MIN_INDEX_INTERVAL)
                    ? DEFAULT_MIN_INDEX_INTERVAL
                    : row.getInt(MIN_INDEX_INTERVAL);
                this.maxIndexInterval = isNullOrAbsent(row, MAX_INDEX_INTERVAL)
                    ? DEFAULT_MAX_INDEX_INTERVAL
                    : row.getInt(MAX_INDEX_INTERVAL);
            } else {
                this.minIndexInterval = null;
                this.maxIndexInterval = null;
            }
            // The compaction strategy class is merged into the same map as its
            // options, under the "class" key.
            this.compaction.put("class", row.getString(COMPACTION_CLASS));
            this.compaction.putAll(SimpleJSONParser.parseStringMap(row.getString(COMPACTION_OPTIONS)));
            this.compression.putAll(SimpleJSONParser.parseStringMap(row.getString(COMPRESSION_PARAMS)));
        }
        // Returns true when the column is either not part of the row's metadata at
        // all (older Cassandra version) or present but null.
        private static boolean isNullOrAbsent(Row row, String name) {
            return row.getColumnDefinitions().getIndexOf(name) < 0
                || row.isNull(name);
        }
        /**
         * Returns whether the table uses the {@code COMPACT STORAGE} option.
         *
         * @return whether the table uses the {@code COMPACT STORAGE} option.
         */
        public boolean isCompactStorage() {
            return isCompactStorage;
        }
        /**
         * Returns the commentary set for this table.
         *
         * @return the commentary set for this table, or an empty string if none has been set.
         */
        public String getComment() {
            return comment;
        }
        /**
         * Returns the chance with which a read repair is triggered for this table.
         *
         * @return the read repair chance set for this table (in [0.0, 1.0]).
         */
        public double getReadRepairChance() {
            return readRepair;
        }
        /**
         * Returns the cluster local read repair chance set for this table.
         *
         * @return the local read repair chance set for this table (in [0.0, 1.0]).
         */
        public double getLocalReadRepairChance() {
            return localReadRepair;
        }
        /**
         * Returns whether replicateOnWrite is set for this table.
         *
         * This is only meaningful for tables holding counters.
         *
         * @return whether replicateOnWrite is set for this table.
         */
        public boolean getReplicateOnWrite() {
            return replicateOnWrite;
        }
        /**
         * Returns the tombstone garbage collection grace time in seconds for this table.
         *
         * @return the tombstone garbage collection grace time in seconds for this table.
         */
        public int getGcGraceInSeconds() {
            return gcGrace;
        }
        /**
         * Returns the false positive chance for the Bloom filter of this table.
         *
         * @return the Bloom filter false positive chance for this table (in [0.0, 1.0]).
         */
        public double getBloomFilterFalsePositiveChance() {
            return bfFpChance;
        }
        /**
         * Returns the caching options for this table.
         *
         * @return the caching options for this table.
         */
        public Map<String, String> getCaching() {
            return caching;
        }
        /**
         * Whether the populate I/O cache on flush is set on this table.
         *
         * @return whether the populate I/O cache on flush is set on this table.
         */
        public boolean getPopulateIOCacheOnFlush() {
            return populateCacheOnFlush;
        }
        /**
         * Returns the memtable flush period (in milliseconds) option for this table.
         * <p>
         * Note: this option is not available in Cassandra 1.2 and will return 0 (no periodic
         * flush) when connected to 1.2 nodes.
         *
         * @return the memtable flush period option for this table or 0 if no
         * periodic flush is configured.
         */
        public int getMemtableFlushPeriodInMs() {
            return memtableFlushPeriodMs;
        }
        /**
         * Returns the default TTL for this table.
         * <p>
         * Note: this option is not available in Cassandra 1.2 and will return 0 (no default
         * TTL) when connected to 1.2 nodes.
         *
         * @return the default TTL for this table or 0 if no default TTL is
         * configured.
         */
        public int getDefaultTimeToLive() {
            return defaultTTL;
        }
        /**
         * Returns the speculative retry option for this table.
         * <p>
         * Note: this option is not available in Cassandra 1.2 and will return "NONE" (no
         * speculative retry) when connected to 1.2 nodes.
         *
         * @return the speculative retry option this table.
         */
        public String getSpeculativeRetry() {
            return speculativeRetry;
        }
        /**
         * Returns the index interval option for this table.
         * <p>
         * Note: this option is not available in Cassandra 1.2 (more precisely, it is not
         * configurable per-table) and will return 128 (the default index interval) when
         * connected to 1.2 nodes. It is deprecated in Cassandra 2.1 and above, and will
         * therefore return {@code null} for 2.1 nodes.
         *
         * @return the index interval option for this table.
         */
        public Integer getIndexInterval() {
            return indexInterval;
        }
        /**
         * Returns the minimum index interval option for this table.
         * <p>
         * Note: this option is available in Cassandra 2.1 and above, and will return
         * {@code null} for earlier versions.
         *
         * @return the minimum index interval option for this table.
         */
        public Integer getMinIndexInterval() {
            return minIndexInterval;
        }
        /**
         * Returns the maximum index interval option for this table.
         * <p>
         * Note: this option is available in Cassandra 2.1 and above, and will return
         * {@code null} for earlier versions.
         *
         * @return the maximum index interval option for this table.
         */
        public Integer getMaxIndexInterval() {
            return maxIndexInterval;
        }
        /**
         * Returns the compaction options for this table.
         *
         * @return a map containing the compaction options for this table (a defensive
         * copy; mutating it does not affect this object).
         */
        public Map<String, String> getCompaction() {
            return new HashMap<String, String>(compaction);
        }
        /**
         * Returns the compression options for this table.
         *
         * @return a map containing the compression options for this table (a defensive
         * copy; mutating it does not affect this object).
         */
        public Map<String, String> getCompression() {
            return new HashMap<String, String>(compression);
        }
        @Override
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;
            Options options = (Options)o;
            if (isCompactStorage != options.isCompactStorage)
                return false;
            if (Double.compare(options.readRepair, readRepair) != 0)
                return false;
            if (Double.compare(options.localReadRepair, localReadRepair) != 0)
                return false;
            if (replicateOnWrite != options.replicateOnWrite)
                return false;
            if (gcGrace != options.gcGrace)
                return false;
            if (Double.compare(options.bfFpChance, bfFpChance) != 0)
                return false;
            if (populateCacheOnFlush != options.populateCacheOnFlush)
                return false;
            if (memtableFlushPeriodMs != options.memtableFlushPeriodMs)
                return false;
            if (defaultTTL != options.defaultTTL)
                return false;
            if (indexInterval != null ? !indexInterval.equals(options.indexInterval) : options.indexInterval != null)
                return false;
            if (minIndexInterval != null ? !minIndexInterval.equals(options.minIndexInterval) : options.minIndexInterval != null)
                return false;
            if (maxIndexInterval != null ? !maxIndexInterval.equals(options.maxIndexInterval) : options.maxIndexInterval != null)
                return false;
            if (comment != null ? !comment.equals(options.comment) : options.comment != null)
                return false;
            if (caching != null ? !caching.equals(options.caching) : options.caching != null)
                return false;
            if (speculativeRetry != null ? !speculativeRetry.equals(options.speculativeRetry) : options.speculativeRetry != null)
                return false;
            // Note: compaction/compression are final and always initialized, so the
            // null guards here are redundant but harmless.
            if (compaction != null ? !compaction.equals(options.compaction) : options.compaction != null)
                return false;
            return !(compression != null ? !compression.equals(options.compression) : options.compression != null);
        }
        @Override
        public int hashCode() {
            int result;
            long temp;
            result = (isCompactStorage ? 1 : 0);
            result = 31 * result + (comment != null ? comment.hashCode() : 0);
            temp = Double.doubleToLongBits(readRepair);
            result = 31 * result + (int)(temp ^ (temp >>> 32));
            temp = Double.doubleToLongBits(localReadRepair);
            result = 31 * result + (int)(temp ^ (temp >>> 32));
            result = 31 * result + (replicateOnWrite ? 1 : 0);
            result = 31 * result + gcGrace;
            temp = Double.doubleToLongBits(bfFpChance);
            result = 31 * result + (int)(temp ^ (temp >>> 32));
            result = 31 * result + (caching != null ? caching.hashCode() : 0);
            result = 31 * result + (populateCacheOnFlush ? 1 : 0);
            result = 31 * result + memtableFlushPeriodMs;
            result = 31 * result + defaultTTL;
            result = 31 * result + (speculativeRetry != null ? speculativeRetry.hashCode() : 0);
            result = 31 * result + (indexInterval != null ? indexInterval.hashCode() : 0);
            result = 31 * result + (minIndexInterval != null ? minIndexInterval.hashCode() : 0);
            result = 31 * result + (maxIndexInterval != null ? maxIndexInterval.hashCode() : 0);
            result = 31 * result + (compaction != null ? compaction.hashCode() : 0);
            result = 31 * result + (compression != null ? compression.hashCode() : 0);
            return result;
        }
    }
}
|
driver-core/src/main/java/com/datastax/driver/core/TableMetadata.java
|
/*
* Copyright (C) 2012-2015 DataStax Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.driver.core;
import java.util.*;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Describes a Table.
*/
public class TableMetadata {
    private static final Logger logger = LoggerFactory.getLogger(TableMetadata.class);
    // Names of the columns read from the schema_columnfamilies system table.
    static final String CF_NAME = "columnfamily_name";
    private static final String CF_ID = "cf_id";
    private static final String KEY_VALIDATOR = "key_validator";
    private static final String COMPARATOR = "comparator";
    private static final String VALIDATOR = "default_validator";
    private static final String KEY_ALIASES = "key_aliases";
    private static final String COLUMN_ALIASES = "column_aliases";
    private static final String VALUE_ALIAS = "value_alias";
    // Fallback names used in C* 1.2 when a table declares no explicit aliases.
    private static final String DEFAULT_KEY_ALIAS = "key";
    private static final String DEFAULT_COLUMN_ALIAS = "column";
    private static final String DEFAULT_VALUE_ALIAS = "value";
    // Orders columns lexicographically by name; used in build() so that non-PK
    // columns are inserted in the order of a 'SELECT * FROM ...'.
    private static final Comparator<ColumnMetadata> columnMetadataComparator = new Comparator<ColumnMetadata>() {
        public int compare(ColumnMetadata c1, ColumnMetadata c2) {
            return c1.getName().compareTo(c2.getName());
        }
    };
    private final KeyspaceMetadata keyspace;
    private final String name;
    // Table id; only available on C* 2.1+, null otherwise (see getId()).
    private final UUID id;
    private final List<ColumnMetadata> partitionKey;
    private final List<ColumnMetadata> clusteringColumns;
    // Keyed by column name, in 'SELECT *' order (a LinkedHashMap; see build()).
    private final Map<String, ColumnMetadata> columns;
    private final Options options;
    private final List<Order> clusteringOrder;
    private final VersionNumber cassandraVersion;
    /**
     * Clustering orders.
     * <p>
     * This is used by {@link #getClusteringOrder} to indicate the clustering
     * order of a table.
     */
    public static enum Order {
        ASC, DESC;
        // Matches ascending orders; used with Iterables.all to decide whether a
        // CLUSTERING ORDER BY clause must be emitted at all (see asCQLQuery).
        static final Predicate<Order> isAscending = new Predicate<Order>() {
            public boolean apply(Order o) {
                return o == ASC;
            }
        };
    }
    // Stores all fields as-is; callers (see build()) are responsible for filling
    // the partitionKey/clusteringColumns/columns structures, which may still be
    // partially populated when this constructor runs.
    private TableMetadata(KeyspaceMetadata keyspace,
                          String name,
                          UUID id,
                          List<ColumnMetadata> partitionKey,
                          List<ColumnMetadata> clusteringColumns,
                          LinkedHashMap<String, ColumnMetadata> columns,
                          Options options,
                          List<Order> clusteringOrder,
                          VersionNumber cassandraVersion) {
        this.keyspace = keyspace;
        this.name = name;
        this.id = id;
        this.partitionKey = partitionKey;
        this.clusteringColumns = clusteringColumns;
        this.columns = columns;
        this.options = options;
        this.clusteringOrder = clusteringOrder;
        this.cassandraVersion = cassandraVersion;
    }
    /**
     * Builds a TableMetadata from a row of the schema tables plus the raw column
     * definitions of the table.
     * <p>
     * If the table options cannot be parsed, the metadata is still built but its
     * options are left {@code null} and an error is logged (see the catch below).
     */
    static TableMetadata build(KeyspaceMetadata ksm, Row row, Map<String, ColumnMetadata.Raw> rawCols, VersionNumber cassandraVersion) {
        String name = row.getString(CF_NAME);
        // The table id (cf_id) is only exposed by Cassandra 2.1+.
        UUID id = (cassandraVersion.getMajor() > 2 || (cassandraVersion.getMajor() == 2 && cassandraVersion.getMinor() >= 1))
            ? row.getUUID(CF_ID)
            : null;
        CassandraTypeParser.ParseResult keyValidator = CassandraTypeParser.parseWithComposite(row.getString(KEY_VALIDATOR));
        CassandraTypeParser.ParseResult comparator = CassandraTypeParser.parseWithComposite(row.getString(COMPARATOR));
        // column_aliases is a 1.2-only concept; in 2.0+ clustering columns come
        // from the columns schema table instead.
        List<String> columnAliases = cassandraVersion.getMajor() >= 2 || row.getString(COLUMN_ALIASES) == null
            ? Collections.<String>emptyList()
            : SimpleJSONParser.parseStringList(row.getString(COLUMN_ALIASES));
        int clusteringSize = findClusteringSize(comparator, rawCols.values(), columnAliases, cassandraVersion);
        // isCompact drives the COMPACT STORAGE flag passed to Options below.
        boolean isDense = clusteringSize != comparator.types.size() - 1;
        boolean isCompact = isDense || !comparator.isComposite;
        List<ColumnMetadata> partitionKey = nullInitializedList(keyValidator.types.size());
        List<ColumnMetadata> clusteringColumns = nullInitializedList(clusteringSize);
        List<Order> clusteringOrder = nullInitializedList(clusteringSize);
        // We use a linked hashmap because we will keep this in the order of a 'SELECT * FROM ...'.
        LinkedHashMap<String, ColumnMetadata> columns = new LinkedHashMap<String, ColumnMetadata>();
        Options options = null;
        try {
            options = new Options(row, isCompact, cassandraVersion);
        } catch (RuntimeException e) {
            // See ControlConnection#refreshSchema for why we'd rather not probably this further. Since table options is one thing
            // that tends to change often in Cassandra, it's worth special casing this.
            logger.error(String.format("Error parsing schema options for table %s.%s: "
                    + "Cluster.getMetadata().getKeyspace(\"%s\").getTable(\"%s\").getOptions() will return null",
                ksm.getName(), name, ksm.getName(), name), e);
        }
        TableMetadata tm = new TableMetadata(ksm, name, id, partitionKey, clusteringColumns, columns, options, clusteringOrder, cassandraVersion);
        // We use this temporary set just so non PK columns are added in lexicographical order, which is the one of a
        // 'SELECT * FROM ...'
        Set<ColumnMetadata> otherColumns = new TreeSet<ColumnMetadata>(columnMetadataComparator);
        if (cassandraVersion.getMajor() < 2) {
            // In C* 1.2, only the REGULAR columns are in the columns schema table, so we need to add the names from
            // the aliases (and make sure we handle default aliases).
            List<String> keyAliases = row.getString(KEY_ALIASES) == null
                ? Collections.<String>emptyList()
                : SimpleJSONParser.parseStringList(row.getString(KEY_ALIASES));
            for (int i = 0; i < partitionKey.size(); i++) {
                // Default alias is "key" for the first component, "key2", "key3", ... after that.
                String alias = keyAliases.size() > i ? keyAliases.get(i) : (i == 0 ? DEFAULT_KEY_ALIAS : DEFAULT_KEY_ALIAS + (i + 1));
                partitionKey.set(i, ColumnMetadata.forAlias(tm, alias, keyValidator.types.get(i)));
            }
            for (int i = 0; i < clusteringSize; i++) {
                String alias = columnAliases.size() > i ? columnAliases.get(i) : DEFAULT_COLUMN_ALIAS + (i + 1);
                clusteringColumns.set(i, ColumnMetadata.forAlias(tm, alias, comparator.types.get(i)));
                clusteringOrder.set(i, comparator.reversed.get(i) ? Order.DESC : Order.ASC);
            }
            // We have a value alias if we're dense
            if (isDense) {
                String alias = row.isNull(VALUE_ALIAS) ? DEFAULT_VALUE_ALIAS : row.getString(VALUE_ALIAS);
                DataType type = CassandraTypeParser.parseOne(row.getString(VALIDATOR));
                otherColumns.add(ColumnMetadata.forAlias(tm, alias, type));
            }
        }
        // Dispatch the raw columns into the PK slots (by component index) or the
        // sorted non-PK set.
        for (ColumnMetadata.Raw rawCol : rawCols.values()) {
            ColumnMetadata col = ColumnMetadata.fromRaw(tm, rawCol);
            switch (rawCol.kind) {
                case PARTITION_KEY:
                    partitionKey.set(rawCol.componentIndex, col);
                    break;
                case CLUSTERING_KEY:
                    clusteringColumns.set(rawCol.componentIndex, col);
                    clusteringOrder.set(rawCol.componentIndex, rawCol.isReversed ? Order.DESC : Order.ASC);
                    break;
                default:
                    otherColumns.add(col);
                    break;
            }
        }
        // Final insertion order of the columns map: partition key, clustering
        // columns, then the remaining columns alphabetically.
        for (ColumnMetadata c : partitionKey)
            columns.put(c.getName(), c);
        for (ColumnMetadata c : clusteringColumns)
            columns.put(c.getName(), c);
        for (ColumnMetadata c : otherColumns)
            columns.put(c.getName(), c);
        return tm;
    }
    /**
     * Infers the number of clustering columns of a table from its comparator and
     * raw column definitions.
     */
    private static int findClusteringSize(CassandraTypeParser.ParseResult comparator,
                                          Collection<ColumnMetadata.Raw> cols,
                                          List<String> columnAliases,
                                          VersionNumber cassandraVersion) {
        // In 2.0, this is relatively easy, we just find the biggest 'componentIndex' amongst the clustering columns.
        // For 1.2 however, this is slightly more subtle: we need to infer it based on whether the comparator is composite or not, and whether we have
        // regular columns or not.
        if (cassandraVersion.getMajor() >= 2) {
            int maxId = -1;
            for (ColumnMetadata.Raw col : cols)
                if (col.kind == ColumnMetadata.Raw.Kind.CLUSTERING_KEY)
                    maxId = Math.max(maxId, col.componentIndex);
            return maxId + 1;
        } else {
            int size = comparator.types.size();
            if (comparator.isComposite)
                // The last comparator component is the (text) column-name component,
                // not a clustering column, when there are collections or when all
                // but one component is aliased.
                return !comparator.collections.isEmpty() || (columnAliases.size() == size - 1 && comparator.types.get(size - 1).equals(DataType.text())) ? size - 1 : size;
            else
                // We know cols only has the REGULAR ones for 1.2
                return !columnAliases.isEmpty() || cols.isEmpty() ? size : 0;
        }
    }
private static <T> List<T> nullInitializedList(int size) {
List<T> l = new ArrayList<T>(size);
for (int i = 0; i < size; ++i)
l.add(null);
return l;
}
    /**
     * Returns the name of this table.
     *
     * @return the name of this CQL table (as stored, i.e. without any quoting).
     */
    public String getName() {
        return name;
    }
    /**
     * Returns the unique id of this table.
     * <p>
     * Note: this id is available in Cassandra 2.1 and above. It will be
     * {@code null} for earlier versions.
     *
     * @return the unique id of the table.
     */
    public UUID getId() {
        return id;
    }
    /**
     * Returns the keyspace this table belongs to.
     *
     * @return the keyspace metadata of the keyspace this table belongs to.
     */
    public KeyspaceMetadata getKeyspace() {
        return keyspace;
    }
    /**
     * Returns metadata on a column of this table.
     *
     * @param name the name of the column to retrieve ({@code name} will be
     * interpreted as a case-insensitive identifier unless enclosed in double-quotes,
     * see {@link Metadata#quote}).
     * @return the metadata for the {@code name} column if it exists, or
     * {@code null} otherwise.
     */
    public ColumnMetadata getColumn(String name) {
        return columns.get(Metadata.handleId(name));
    }
    /**
     * Returns a list containing all the columns of this table.
     *
     * The order of the columns in the list is consistent with
     * the order of the columns returned by a {@code SELECT * FROM thisTable}:
     * the first column is the partition key, next are the clustering
     * columns in their defined order, and then the rest of the
     * columns follow in alphabetic order.
     *
     * @return a list containing the metadata for the columns of this table
     * (a fresh copy; mutating it does not affect this object).
     */
    public List<ColumnMetadata> getColumns() {
        return new ArrayList<ColumnMetadata>(columns.values());
    }
/**
* Returns the list of columns composing the primary key for this table.
*
* A table will always at least have a partition key (that
* may itself be one or more columns), so the returned list at least
* has one element.
*
* @return the list of columns composing the primary key for this table.
*/
public List<ColumnMetadata> getPrimaryKey() {
List<ColumnMetadata> pk = new ArrayList<ColumnMetadata>(partitionKey.size() + clusteringColumns.size());
pk.addAll(partitionKey);
pk.addAll(clusteringColumns);
return pk;
}
    /**
     * Returns the list of columns composing the partition key for this table.
     *
     * A table always has a partition key so the returned list has
     * at least one element.
     *
     * @return an unmodifiable list of the columns composing the partition key
     * for this table.
     */
    public List<ColumnMetadata> getPartitionKey() {
        return Collections.unmodifiableList(partitionKey);
    }
    /**
     * Returns the list of clustering columns for this table.
     *
     * @return an unmodifiable list of the clustering columns for this table.
     * If there is no clustering columns, an empty list is returned.
     */
    public List<ColumnMetadata> getClusteringColumns() {
        return Collections.unmodifiableList(clusteringColumns);
    }
/**
* Returns the clustering order for this table.
* <p>
* The returned contains the clustering order of each clustering column. The
* {@code i}th element of the result correspond to the order (ascending or
* descending) of the {@code i}th clustering column (see
* {@link #getClusteringColumns}). Note that a table defined without any
* particular clustering order is equivalent to one for which all the
* clustering key are in ascending order.
*
* @return a list with the clustering order for each clustering column.
*/
public List<Order> getClusteringOrder() {
return clusteringOrder;
}
    /**
     * Returns the options for this table.
     *
     * @return the options for this table, or {@code null} if the options could
     * not be parsed when the metadata was built (an error is logged in that
     * case; see {@code build}).
     */
    public Options getOptions() {
        return options;
    }
    // Registers an additional column on this table, keyed by its name.
    // Package-private; presumably invoked by schema-refresh code outside this
    // file -- TODO confirm callers.
    void add(ColumnMetadata column) {
        columns.put(column.getName(), column);
    }
/**
* Returns a {@code String} containing CQL queries representing this
* table and the index on it.
* <p>
* In other words, this method returns the queries that would allow you to
* recreate the schema of this table, along with the index defined on
* columns of this table.
* <p>
* Note that the returned String is formatted to be human readable (for
* some definition of human readable at least).
*
* @return the CQL queries representing this table schema as a {code
* String}.
*/
public String exportAsString() {
StringBuilder sb = new StringBuilder();
sb.append(asCQLQuery(true));
for (ColumnMetadata column : columns.values()) {
ColumnMetadata.IndexMetadata index = column.getIndex();
if (index == null)
continue;
sb.append('\n').append(index.asCQLQuery());
}
return sb.toString();
}
    /**
     * Returns a CQL query representing this table.
     * <p>
     * This method returns a single 'CREATE TABLE' query with the options
     * corresponding to this table definition.
     * <p>
     * Note that the returned string is a single line; the returned query
     * is not formatted in any way.
     *
     * @return the 'CREATE TABLE' query corresponding to this table.
     * @see #exportAsString
     */
    public String asCQLQuery() {
        // Delegates to the private overload with formatting disabled.
        return asCQLQuery(false);
    }
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
TableMetadata that = (TableMetadata)o;
if (!name.equals(that.name))
return false;
if (!partitionKey.equals(that.partitionKey))
return false;
if (!clusteringColumns.equals(that.clusteringColumns))
return false;
if (!columns.equals(that.columns))
return false;
if (!options.equals(that.options))
return false;
if (!clusteringOrder.equals(that.clusteringOrder))
return false;
return cassandraVersion.equals(that.cassandraVersion);
}
@Override
public int hashCode() {
int result = name.hashCode();
result = 31 * result + partitionKey.hashCode();
result = 31 * result + clusteringColumns.hashCode();
result = 31 * result + columns.hashCode();
result = 31 * result + options.hashCode();
result = 31 * result + clusteringOrder.hashCode();
result = 31 * result + cassandraVersion.hashCode();
return result;
}
    /**
     * Builds the 'CREATE TABLE' CQL statement for this table.
     * <p>
     * NOTE(review): this method dereferences {@code options}, which can be
     * {@code null} when option parsing failed in {@code build} -- confirm
     * callers guard against that before invoking this.
     *
     * @param formatted whether to produce a multi-line, indented statement
     * ({@code true}) or a single line ({@code false}).
     * @return the CQL statement.
     */
    private String asCQLQuery(boolean formatted) {
        StringBuilder sb = new StringBuilder();
        sb.append("CREATE TABLE ").append(Metadata.escapeId(keyspace.getName())).append('.').append(Metadata.escapeId(name)).append(" (");
        newLine(sb, formatted);
        // Column definitions, one per line when formatted.
        for (ColumnMetadata cm : columns.values())
            newLine(sb.append(spaces(4, formatted)).append(cm).append(','), formatted);
        // PK
        sb.append(spaces(4, formatted)).append("PRIMARY KEY (");
        if (partitionKey.size() == 1) {
            // NOTE(review): the single-column case does not go through
            // Metadata.escapeId, unlike the composite branch below -- confirm
            // whether case-sensitive partition key names are handled correctly.
            sb.append(partitionKey.get(0).getName());
        } else {
            // Composite partition key: wrap its components in parentheses.
            sb.append('(');
            boolean first = true;
            for (ColumnMetadata cm : partitionKey) {
                if (first) first = false; else sb.append(", ");
                sb.append(Metadata.escapeId(cm.getName()));
            }
            sb.append(')');
        }
        for (ColumnMetadata cm : clusteringColumns)
            sb.append(", ").append(Metadata.escapeId(cm.getName()));
        sb.append(')');
        newLine(sb, formatted);
        // end PK
        // Options
        sb.append(") WITH ");
        if (options.isCompactStorage)
            and(sb.append("COMPACT STORAGE"), formatted);
        // Only emit CLUSTERING ORDER BY when at least one column is descending.
        if (!Iterables.all(clusteringOrder, Order.isAscending))
            and(appendClusteringOrder(sb), formatted);
        sb.append("read_repair_chance = ").append(options.readRepair);
        and(sb, formatted).append("dclocal_read_repair_chance = ").append(options.localReadRepair);
        // replicate_on_write only exists up to C* 2.0.
        if (cassandraVersion.getMajor() < 2 || (cassandraVersion.getMajor() == 2 && cassandraVersion.getMinor() == 0))
            and(sb, formatted).append("replicate_on_write = ").append(options.replicateOnWrite);
        and(sb, formatted).append("gc_grace_seconds = ").append(options.gcGrace);
        and(sb, formatted).append("bloom_filter_fp_chance = ").append(options.bfFpChance);
        // Pre-2.1 caching is a single string; 2.1+ is a map.
        if (cassandraVersion.getMajor() < 2 || cassandraVersion.getMajor() == 2 && cassandraVersion.getMinor() < 1)
            and(sb, formatted).append("caching = '").append(options.caching.get("keys")).append('\'');
        else
            and(sb, formatted).append("caching = ").append(formatOptionMap(options.caching));
        if (options.comment != null)
            // Single quotes in the comment are escaped by doubling, per CQL.
            and(sb, formatted).append("comment = '").append(options.comment.replace("'","''")).append('\'');
        and(sb, formatted).append("compaction = ").append(formatOptionMap(options.compaction));
        and(sb, formatted).append("compression = ").append(formatOptionMap(options.compression));
        if (cassandraVersion.getMajor() >= 2) {
            and(sb, formatted).append("default_time_to_live = ").append(options.defaultTTL);
            and(sb, formatted).append("speculative_retry = '").append(options.speculativeRetry).append('\'');
            if (options.indexInterval != null)
                and(sb, formatted).append("index_interval = ").append(options.indexInterval);
        }
        if (cassandraVersion.getMajor() > 2 || (cassandraVersion.getMajor() == 2 && cassandraVersion.getMinor() >= 1)) {
            and(sb, formatted).append("min_index_interval = ").append(options.minIndexInterval);
            and(sb, formatted).append("max_index_interval = ").append(options.maxIndexInterval);
        }
        sb.append(';');
        return sb.toString();
    }
    /**
     * Returns the single-line 'CREATE TABLE' statement for this table
     * (same as {@link #asCQLQuery()}).
     */
    @Override
    public String toString() {
        return asCQLQuery();
    }
private StringBuilder appendClusteringOrder(StringBuilder sb) {
sb.append("CLUSTERING ORDER BY (");
for (int i = 0; i < clusteringColumns.size(); i++) {
if (i > 0) sb.append(", ");
sb.append(clusteringColumns.get(i).getName()).append(' ').append(clusteringOrder.get(i));
}
return sb.append(')');
}
private static String formatOptionMap(Map<String, String> m) {
StringBuilder sb = new StringBuilder();
sb.append("{ ");
boolean first = true;
for (Map.Entry<String, String> entry : m.entrySet()) {
if (first) first = false; else sb.append(", ");
sb.append('\'').append(entry.getKey()).append('\'');
sb.append(" : ");
try {
sb.append(Integer.parseInt(entry.getValue()));
} catch (NumberFormatException e) {
sb.append('\'').append(entry.getValue()).append('\'');
}
}
sb.append(" }");
return sb.toString();
}
    // Appends the " AND " separator between WITH options, preceded by a line
    // break and indentation when formatted output is requested.
    private StringBuilder and(StringBuilder sb, boolean formatted) {
        return newLine(sb, formatted).append(spaces(2, formatted)).append(" AND ");
    }
static String spaces(int n, boolean formatted) {
if (!formatted)
return "";
StringBuilder sb = new StringBuilder();
for (int i = 0; i < n; i++)
sb.append(' ');
return sb.toString();
}
static StringBuilder newLine(StringBuilder sb, boolean formatted) {
if (formatted)
sb.append('\n');
return sb;
}
public static class Options {
private static final String COMMENT = "comment";
private static final String READ_REPAIR = "read_repair_chance";
private static final String LOCAL_READ_REPAIR = "local_read_repair_chance";
private static final String REPLICATE_ON_WRITE = "replicate_on_write";
private static final String GC_GRACE = "gc_grace_seconds";
private static final String BF_FP_CHANCE = "bloom_filter_fp_chance";
private static final String CACHING = "caching";
private static final String COMPACTION_CLASS = "compaction_strategy_class";
// ---- Names of the system-schema columns that hold each table option ----
private static final String COMPACTION_OPTIONS = "compaction_strategy_options";
private static final String MIN_COMPACTION_THRESHOLD = "min_compaction_threshold";
private static final String MAX_COMPACTION_THRESHOLD = "max_compaction_threshold";
private static final String POPULATE_CACHE_ON_FLUSH = "populate_io_cache_on_flush";
private static final String COMPRESSION_PARAMS = "compression_parameters";
private static final String MEMTABLE_FLUSH_PERIOD_MS = "memtable_flush_period_in_ms";
private static final String DEFAULT_TTL = "default_time_to_live";
private static final String SPECULATIVE_RETRY = "speculative_retry";
private static final String INDEX_INTERVAL = "index_interval";
private static final String MIN_INDEX_INTERVAL = "min_index_interval";
private static final String MAX_INDEX_INTERVAL = "max_index_interval";
// ---- Fallbacks used when a column is absent or null (e.g. on Cassandra versions
// that do not expose the option; see the constructor for the version checks) ----
private static final boolean DEFAULT_REPLICATE_ON_WRITE = true;
private static final double DEFAULT_BF_FP_CHANCE = 0.01;
private static final boolean DEFAULT_POPULATE_CACHE_ON_FLUSH = false;
private static final int DEFAULT_MEMTABLE_FLUSH_PERIOD = 0;
private static final int DEFAULT_DEFAULT_TTL = 0;
private static final String DEFAULT_SPECULATIVE_RETRY = "NONE";
private static final int DEFAULT_INDEX_INTERVAL = 128;
private static final int DEFAULT_MIN_INDEX_INTERVAL = 128;
private static final int DEFAULT_MAX_INDEX_INTERVAL = 2048;
// ---- Decoded option values for one table ----
private final boolean isCompactStorage;
private final String comment;
private final double readRepair;
private final double localReadRepair;
private final boolean replicateOnWrite;
private final int gcGrace;
private final double bfFpChance;
private final Map<String, String> caching;
private final boolean populateCacheOnFlush;
private final int memtableFlushPeriodMs;
private final int defaultTTL;
private final String speculativeRetry;
// Nullable by design: indexInterval is only set for Cassandra 2.0 nodes,
// min/maxIndexInterval only for 2.1+ nodes (see the constructor).
private final Integer indexInterval;
private final Integer minIndexInterval;
private final Integer maxIndexInterval;
private final Map<String, String> compaction = new HashMap<String, String>();
private final Map<String, String> compression = new HashMap<String, String>();
/**
 * Decodes the per-table options from a row of the system schema table.
 * <p>
 * Several options only exist on certain Cassandra versions; when a column is
 * absent (or null), the corresponding {@code DEFAULT_*} fallback is used, and
 * options that are version-specific are stored as {@code null} Integers when
 * they do not apply to the connected version.
 *
 * @param row the system schema row describing this table.
 * @param isCompactStorage whether the table uses {@code COMPACT STORAGE}.
 * @param version the Cassandra version of the node the row was read from.
 */
Options(Row row, boolean isCompactStorage, VersionNumber version) {
this.isCompactStorage = isCompactStorage;
this.comment = isNullOrAbsent(row, COMMENT) ? "" : row.getString(COMMENT);
this.readRepair = row.getDouble(READ_REPAIR);
this.localReadRepair = row.getDouble(LOCAL_READ_REPAIR);
boolean is210OrMore = version.getMajor() > 2 || (version.getMajor() == 2 && version.getMinor() >= 1);
// replicate_on_write was removed in 2.1: always report the default (true) there.
this.replicateOnWrite = is210OrMore || isNullOrAbsent(row, REPLICATE_ON_WRITE) ? DEFAULT_REPLICATE_ON_WRITE : row.getBool(REPLICATE_ON_WRITE);
this.gcGrace = row.getInt(GC_GRACE);
this.bfFpChance = isNullOrAbsent(row, BF_FP_CHANCE) ? DEFAULT_BF_FP_CHANCE : row.getDouble(BF_FP_CHANCE);
// 2.1+ stores caching as a JSON map; older versions store a single string.
this.caching = is210OrMore
? SimpleJSONParser.parseStringMap(row.getString(CACHING))
: ImmutableMap.of("keys", row.getString(CACHING));
this.populateCacheOnFlush = isNullOrAbsent(row, POPULATE_CACHE_ON_FLUSH) ? DEFAULT_POPULATE_CACHE_ON_FLUSH : row.getBool(POPULATE_CACHE_ON_FLUSH);
// The next three options only exist from Cassandra 2.0 on.
this.memtableFlushPeriodMs = version.getMajor() < 2 || isNullOrAbsent(row, MEMTABLE_FLUSH_PERIOD_MS) ? DEFAULT_MEMTABLE_FLUSH_PERIOD : row.getInt(MEMTABLE_FLUSH_PERIOD_MS);
this.defaultTTL = version.getMajor() < 2 || isNullOrAbsent(row, DEFAULT_TTL) ? DEFAULT_DEFAULT_TTL : row.getInt(DEFAULT_TTL);
this.speculativeRetry = version.getMajor() < 2 || isNullOrAbsent(row, SPECULATIVE_RETRY) ? DEFAULT_SPECULATIVE_RETRY : row.getString(SPECULATIVE_RETRY);
// index_interval is a per-table option in 2.0 only; null on 1.2 and 2.1+.
if (version.getMajor() >= 2 && !is210OrMore)
this.indexInterval = isNullOrAbsent(row, INDEX_INTERVAL) ? DEFAULT_INDEX_INTERVAL : row.getInt(INDEX_INTERVAL);
else
this.indexInterval = null;
// min/max_index_interval replace index_interval from 2.1 on; null before that.
if (is210OrMore) {
this.minIndexInterval = isNullOrAbsent(row, MIN_INDEX_INTERVAL)
? DEFAULT_MIN_INDEX_INTERVAL
: row.getInt(MIN_INDEX_INTERVAL);
this.maxIndexInterval = isNullOrAbsent(row, MAX_INDEX_INTERVAL)
? DEFAULT_MAX_INDEX_INTERVAL
: row.getInt(MAX_INDEX_INTERVAL);
} else {
this.minIndexInterval = null;
this.maxIndexInterval = null;
}
// Compaction: strategy class plus its JSON-encoded options, flattened into one map.
this.compaction.put("class", row.getString(COMPACTION_CLASS));
this.compaction.putAll(SimpleJSONParser.parseStringMap(row.getString(COMPACTION_OPTIONS)));
this.compression.putAll(SimpleJSONParser.parseStringMap(row.getString(COMPRESSION_PARAMS)));
}
/**
 * Returns whether the given column is either missing from the row's metadata
 * entirely, or present but holding a null value.
 */
private static boolean isNullOrAbsent(Row row, String name) {
    int columnIndex = row.getColumnDefinitions().getIndexOf(name);
    return columnIndex < 0 || row.isNull(name);
}
/**
 * Returns whether the table uses the {@code COMPACT STORAGE} option.
 *
 * @return whether the table uses the {@code COMPACT STORAGE} option.
 */
public boolean isCompactStorage() {
return isCompactStorage;
}
/**
 * Returns the commentary set for this table.
 *
 * @return the commentary set for this table, or an empty string if none has
 * been set (the constructor substitutes {@code ""} when the option is absent).
 */
public String getComment() {
return comment;
}
/**
 * Returns the chance with which a read repair is triggered for this table.
 *
 * @return the read repair chance set for this table (in [0.0, 1.0]).
 */
public double getReadRepairChance() {
return readRepair;
}
/**
 * Returns the cluster local read repair chance set for this table.
 *
 * @return the local read repair chance set for this table (in [0.0, 1.0]).
 */
public double getLocalReadRepairChance() {
return localReadRepair;
}
/**
 * Returns whether replicateOnWrite is set for this table.
 *
 * This is only meaningful for tables holding counters.
 * <p>
 * Note: this option was removed in Cassandra 2.1, where this method always
 * returns {@code true} (see the constructor).
 *
 * @return whether replicateOnWrite is set for this table.
 */
public boolean getReplicateOnWrite() {
return replicateOnWrite;
}
/**
 * Returns the tombstone garbage collection grace time in seconds for this table.
 *
 * @return the tombstone garbage collection grace time in seconds for this table.
 */
public int getGcGraceInSeconds() {
return gcGrace;
}
/**
 * Returns the false positive chance for the Bloom filter of this table.
 *
 * @return the Bloom filter false positive chance for this table (in [0.0, 1.0]).
 */
public double getBloomFilterFalsePositiveChance() {
return bfFpChance;
}
/**
 * Returns the caching options for this table.
 *
 * @return the caching options for this table.
 */
public Map<String, String> getCaching() {
return caching;
}
/**
 * Whether the populate I/O cache on flush is set on this table.
 *
 * @return whether the populate I/O cache on flush is set on this table.
 */
public boolean getPopulateIOCacheOnFlush() {
return populateCacheOnFlush;
}
/**
 * Returns the memtable flush period (in milliseconds) option for this table.
 * <p>
 * Note: this option is not available in Cassandra 1.2 and will return 0 (no periodic
 * flush) when connected to 1.2 nodes.
 *
 * @return the memtable flush period option for this table or 0 if no
 * periodic flush is configured.
 */
public int getMemtableFlushPeriodInMs() {
return memtableFlushPeriodMs;
}
/**
 * Returns the default TTL for this table.
 * <p>
 * Note: this option is not available in Cassandra 1.2 and will return 0 (no default
 * TTL) when connected to 1.2 nodes.
 *
 * @return the default TTL for this table or 0 if no default TTL is
 * configured.
 */
public int getDefaultTimeToLive() {
return defaultTTL;
}
/**
 * Returns the speculative retry option for this table.
 * <p>
 * Note: this option is not available in Cassandra 1.2 and will return "NONE" (no
 * speculative retry) when connected to 1.2 nodes.
 *
 * @return the speculative retry option for this table.
 */
public String getSpeculativeRetry() {
return speculativeRetry;
}
/**
 * Returns the index interval option for this table.
 * <p>
 * Note: this option is not available in Cassandra 1.2 (more precisely, it is not
 * configurable per-table) and will return 128 (the default index interval) when
 * connected to 1.2 nodes. It is deprecated in Cassandra 2.1 and above, and will
 * therefore return {@code null} for 2.1 nodes.
 *
 * @return the index interval option for this table.
 */
public Integer getIndexInterval() {
return indexInterval;
}
/**
 * Returns the minimum index interval option for this table.
 * <p>
 * Note: this option is available in Cassandra 2.1 and above, and will return
 * {@code null} for earlier versions.
 *
 * @return the minimum index interval option for this table.
 */
public Integer getMinIndexInterval() {
return minIndexInterval;
}
/**
 * Returns the maximum index interval option for this table.
 * <p>
 * Note: this option is available in Cassandra 2.1 and above, and will return
 * {@code null} for earlier versions.
 *
 * @return the maximum index interval option for this table.
 */
public Integer getMaxIndexInterval() {
return maxIndexInterval;
}
/**
 * Returns the compaction options for this table.
 *
 * @return a map containing the compaction options for this table. The map is a
 * fresh copy; modifying it does not affect this metadata object.
 */
public Map<String, String> getCompaction() {
return new HashMap<String, String>(compaction);
}
/**
 * Returns the compression options for this table.
 *
 * @return a map containing the compression options for this table. The map is a
 * fresh copy; modifying it does not affect this metadata object.
 */
public Map<String, String> getCompression() {
return new HashMap<String, String>(compression);
}
/**
 * Compares all table options for equality.
 * <p>
 * {@code indexInterval}, {@code minIndexInterval} and {@code maxIndexInterval} are
 * boxed {@link Integer}s that may be {@code null} depending on the Cassandra
 * version, so they must be compared with {@code equals} rather than reference
 * identity ({@code ==}/{@code !=} only works for values in the Integer cache).
 * The min/max interval fields also participate, keeping this method consistent
 * with {@link #hashCode()}.
 */
@Override
public boolean equals(Object o) {
    if (this == o)
        return true;
    if (o == null || getClass() != o.getClass())
        return false;
    Options options = (Options)o;
    if (isCompactStorage != options.isCompactStorage)
        return false;
    if (Double.compare(options.readRepair, readRepair) != 0)
        return false;
    if (Double.compare(options.localReadRepair, localReadRepair) != 0)
        return false;
    if (replicateOnWrite != options.replicateOnWrite)
        return false;
    if (gcGrace != options.gcGrace)
        return false;
    if (Double.compare(options.bfFpChance, bfFpChance) != 0)
        return false;
    if (populateCacheOnFlush != options.populateCacheOnFlush)
        return false;
    if (memtableFlushPeriodMs != options.memtableFlushPeriodMs)
        return false;
    if (defaultTTL != options.defaultTTL)
        return false;
    // Null-safe comparison of the boxed, version-dependent interval options (JAVA-929).
    if (indexInterval != null ? !indexInterval.equals(options.indexInterval) : options.indexInterval != null)
        return false;
    if (minIndexInterval != null ? !minIndexInterval.equals(options.minIndexInterval) : options.minIndexInterval != null)
        return false;
    if (maxIndexInterval != null ? !maxIndexInterval.equals(options.maxIndexInterval) : options.maxIndexInterval != null)
        return false;
    if (comment != null ? !comment.equals(options.comment) : options.comment != null)
        return false;
    if (caching != null ? !caching.equals(options.caching) : options.caching != null)
        return false;
    if (speculativeRetry != null ? !speculativeRetry.equals(options.speculativeRetry) : options.speculativeRetry != null)
        return false;
    if (compaction != null ? !compaction.equals(options.compaction) : options.compaction != null)
        return false;
    return !(compression != null ? !compression.equals(options.compression) : options.compression != null);
}
/**
 * Computes a hash code consistent with {@link #equals(Object)}.
 * <p>
 * The nullable {@link Integer} interval fields are hashed null-safely: the
 * previous direct addition of {@code indexInterval} auto-unboxed it and threw a
 * {@link NullPointerException} on versions where it is {@code null}.
 * {@code minIndexInterval}/{@code maxIndexInterval} are included so that equal
 * objects always hash equally.
 */
@Override
public int hashCode() {
    int result;
    long temp;
    result = (isCompactStorage ? 1 : 0);
    result = 31 * result + (comment != null ? comment.hashCode() : 0);
    temp = Double.doubleToLongBits(readRepair);
    result = 31 * result + (int)(temp ^ (temp >>> 32));
    temp = Double.doubleToLongBits(localReadRepair);
    result = 31 * result + (int)(temp ^ (temp >>> 32));
    result = 31 * result + (replicateOnWrite ? 1 : 0);
    result = 31 * result + gcGrace;
    temp = Double.doubleToLongBits(bfFpChance);
    result = 31 * result + (int)(temp ^ (temp >>> 32));
    result = 31 * result + (caching != null ? caching.hashCode() : 0);
    result = 31 * result + (populateCacheOnFlush ? 1 : 0);
    result = 31 * result + memtableFlushPeriodMs;
    result = 31 * result + defaultTTL;
    result = 31 * result + (speculativeRetry != null ? speculativeRetry.hashCode() : 0);
    // Null-safe hashing of the boxed, version-dependent interval options.
    result = 31 * result + (indexInterval != null ? indexInterval.hashCode() : 0);
    result = 31 * result + (minIndexInterval != null ? minIndexInterval.hashCode() : 0);
    result = 31 * result + (maxIndexInterval != null ? maxIndexInterval.hashCode() : 0);
    result = 31 * result + (compaction != null ? compaction.hashCode() : 0);
    result = 31 * result + (compression != null ? compression.hashCode() : 0);
    return result;
}
}
}
|
JAVA-929 Fix equals method for index_interval columns.
|
driver-core/src/main/java/com/datastax/driver/core/TableMetadata.java
|
JAVA-929 Fix equals method for index_interval columns.
|
|
Java
|
apache-2.0
|
5e0b0b3e0013d898ab3e25244c6bbb114db35dde
| 0
|
acieslinski/MovieSearcher
|
package pl.acieslinski.moviefun.fragments;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import com.daimajia.androidanimations.library.Techniques;
import com.daimajia.androidanimations.library.YoYo;
import butterknife.Bind;
import butterknife.ButterKnife;
import de.greenrobot.event.EventBus;
import pl.acieslinski.moviefun.R;
import pl.acieslinski.moviefun.models.Search;
import pl.acieslinski.moviefun.models.SearchEvent;
import pl.acieslinski.moviefun.views.TypeSpinner;
/**
 * Form fragment that collects a movie search query: a free-text search term, an
 * optional four-digit year, and a type picked from a spinner. When the search
 * button is clicked and the input validates, the current form state is posted as
 * a {@link SearchEvent} on the global EventBus.
 *
 * @author Arkadiusz Cieśliński 14.11.15.
 * <acieslinski@gmail.com>
 */
public class SearchForm extends Fragment {
private static final String TAG = SearchForm.class.getSimpleName();
// Views injected by ButterKnife in onViewCreated().
@Bind(R.id.et_search)
protected EditText mSearchEditText;
@Bind(R.id.et_year)
protected EditText mYearEditText;
@Bind(R.id.sp_type)
protected TypeSpinner mTypeSpinner;
@Bind(R.id.btn_search)
protected Button mSearchButton;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_search_form, container, false);
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
ButterKnife.bind(this, view);
mSearchButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// Only publish the search event when the form content passes validation.
if (validateForm()) {
EventBus.getDefault().post(new SearchEvent(getSearch()));
}
}
});
}
/** Builds a {@link Search} model from the current state of the form fields. */
public Search getSearch() {
Search search = new Search();
search.setSearch(mSearchEditText.getText().toString());
search.setYear(mYearEditText.getText().toString());
search.setType(mTypeSpinner.getSelectedType());
return search;
}
/** Populates the form fields from an existing {@link Search} model. */
public void setSearch(Search search) {
mSearchEditText.setText(search.getSearch());
mYearEditText.setText(search.getYear());
mTypeSpinner.setSelection(search.getType());
}
/**
 * Validates the form: the search text must be non-empty and the year, when
 * present, must be exactly four digits. Each invalid field is highlighted with
 * a shake animation (only while the fragment is attached). Both fields are
 * checked even if the first fails, so all errors are shown at once.
 *
 * @return {@code true} if the input may be submitted.
 */
private boolean validateForm() {
Search search = getSearch();
boolean valid = true;
if (search.getSearch().isEmpty()) {
if (isAdded()) {
YoYo.with(Techniques.Shake).delay(300).playOn(mSearchEditText);
}
valid = false;
}
if (!search.getYear().isEmpty()) {
if (!(search.getYear().matches("\\d{4}"))) {
if (isAdded()) {
YoYo.with(Techniques.Shake).delay(300).playOn(mYearEditText);
}
valid = false;
}
}
return valid;
}
}
|
app/src/main/java/pl/acieslinski/moviefun/fragments/SearchForm.java
|
package pl.acieslinski.moviefun.fragments;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import butterknife.Bind;
import butterknife.ButterKnife;
import de.greenrobot.event.EventBus;
import pl.acieslinski.moviefun.R;
import pl.acieslinski.moviefun.models.Search;
import pl.acieslinski.moviefun.models.SearchEvent;
import pl.acieslinski.moviefun.views.TypeSpinner;
/**
 * Form fragment that collects a movie search query: a free-text search term, an
 * optional four-digit year, and a type picked from a spinner. Clicking the
 * search button posts the current form state as a {@link SearchEvent} on the
 * global EventBus (note: this version posts without calling validateForm()).
 *
 * @author Arkadiusz Cieśliński 14.11.15.
 * <acieslinski@gmail.com>
 */
public class SearchForm extends Fragment {
private static final String TAG = SearchForm.class.getSimpleName();
// Views injected by ButterKnife in onViewCreated().
@Bind(R.id.et_search)
protected EditText mSearchEditText;
@Bind(R.id.et_year)
protected EditText mYearEditText;
@Bind(R.id.sp_type)
protected TypeSpinner mTypeSpinner;
@Bind(R.id.btn_search)
protected Button mSearchButton;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_search_form, container, false);
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
ButterKnife.bind(this, view);
mSearchButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// NOTE(review): posts unconditionally; validateForm() below is never called here.
EventBus.getDefault().post(new SearchEvent(getSearch()));
}
});
}
/** Builds a {@link Search} model from the current state of the form fields. */
public Search getSearch() {
Search search = new Search();
search.setSearch(mSearchEditText.getText().toString());
search.setYear(mYearEditText.getText().toString());
search.setType(mTypeSpinner.getSelectedType());
return search;
}
/** Populates the form fields from an existing {@link Search} model. */
public void setSearch(Search search) {
mSearchEditText.setText(search.getSearch());
mYearEditText.setText(search.getYear());
mTypeSpinner.setSelection(search.getType());
}
/** Enables or disables all input widgets, making the form read-only. */
public void setReadOnlyMode(boolean readOnly) {
boolean enabled = !readOnly;
mSearchEditText.setEnabled(enabled);
mYearEditText.setEnabled(enabled);
mTypeSpinner.setEnabled(enabled);
}
/**
 * Validates the form: the search text must be non-empty and the year, when
 * present, must be exactly four digits. Shows a toast (only while the fragment
 * is attached) and returns on the first failing field.
 *
 * @return {@code true} if the input may be submitted.
 */
private boolean validateForm() {
Search search = getSearch();
if (search.getSearch().isEmpty()) {
if (isAdded()) {
Toast.makeText(getContext(), R.string.message_validate_search_form_search,
Toast.LENGTH_LONG).show();
}
return false;
}
if (!search.getYear().isEmpty()) {
// TODO make the constrain on the input
if (!(search.getYear().matches("\\d{4}"))) {
if (isAdded()) {
Toast.makeText(getContext(), R.string.message_validate_search_form_year,
Toast.LENGTH_LONG).show();
}
return false;
}
}
return true;
}
}
|
animations for validation
|
app/src/main/java/pl/acieslinski/moviefun/fragments/SearchForm.java
|
animations for validation
|
|
Java
|
apache-2.0
|
c57940e98b82d3b05a356a6483880fdd694e1851
| 0
|
functional-reactive/functional-reactive-lib,functional-reactive/functional-reactive-lib
|
package org.rapidpm.frp.model;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
import org.rapidpm.frp.functions.CheckedFunction;
/**
* Copyright (C) 2017 RapidPM - Sven Ruppert
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* <p>
* Created by Sven Ruppert - RapidPM - Team on 16.03.17.
*/
/**
 * A success/failure result container: either a {@link Success} wrapping a
 * non-null value, or a {@link Failure} wrapping an error message.
 * <p>
 * Fixes: {@link #fromOptional(Optional)} no longer throws
 * {@code NoSuchElementException} on an empty Optional (it now yields a failure,
 * as its message always intended), and {@link #toOptional()} no longer throws a
 * {@code NullPointerException} on a failure (it now yields an empty Optional).
 *
 * @param <T> the type of the wrapped success value.
 */
public interface Result<T> {

    /** Runs {@code action} with the value on success, {@code emptyAction} on failure. */
    void ifPresentOrElse(Consumer<? super T> action , Runnable emptyAction);

    /** Runs {@code success} with the value on success, {@code failure} with the error message on failure. */
    void ifPresentOrElse(Consumer<T> success , Consumer<String> failure);

    /** Asynchronous variant of {@link #ifPresentOrElse(Consumer, Runnable)}. */
    void ifPresentOrElseAsync(Consumer<? super T> action , Runnable emptyAction);

    /** Asynchronous variant of {@link #ifPresentOrElse(Consumer, Consumer)}. */
    void ifPresentOrElseAsync(Consumer<T> success , Consumer<String> failure);

    /**
     * Creates a failed result.
     *
     * @param errorMessage the error message; must not be null.
     */
    static <T> Result<T> failure(String errorMessage) {
        Objects.requireNonNull(errorMessage);
        return new Result.Failure<>(errorMessage);
    }

    /** Creates a successful result wrapping {@code value}. */
    static <T> Result<T> success(T value) {
        return new Result.Success<>(value);
    }

    /** Wraps {@code value}; yields a failure with a generic message when it is null. */
    static <T> Result<T> ofNullable(T value) {
        return ofNullable(value , "Object was null");
    }

    /** Wraps {@code value}; yields a failure with {@code failedMessage} when it is null. */
    static <T> Result<T> ofNullable(T value , String failedMessage) {
        return (Objects.nonNull(value))
            ? Result.success(value)
            : Result.failure(failedMessage);
    }

    /** Returns the wrapped value; throws {@link NullPointerException} on a failure. */
    T get();

    /** Returns the wrapped value, or the supplier's (non-null) result on a failure. */
    T getOrElse(Supplier<T> supplier);

    /** Returns {@code true} when this result holds a value. */
    Boolean isPresent();

    /** Returns {@code true} when this result is a failure. */
    Boolean isAbsent();

    /** Runs {@code consumer} with the value if present. */
    void ifPresent(Consumer<T> consumer);

    /** Runs {@code action} if this result is a failure. */
    void ifAbsent(Runnable action);

    /** Runs {@code failed} with the error message if this result is a failure. */
    void ifFailed(Consumer<String> failed);

    /** Returns a one-element stream of the value, or an empty stream on failure. */
    default Stream<T> stream() {
        if (! isPresent()) {
            return Stream.empty();
        } else {
            return Stream.of(get());
        }
    }

    /** Returns this result if successful, otherwise the supplier's result. */
    default Result<T> or(Supplier<? extends Result<? extends T>> supplier) {
        Objects.requireNonNull(supplier);
        if (isPresent()) {
            return this;
        } else {
            @SuppressWarnings("unchecked")
            Result<T> r = (Result<T>) supplier.get();
            return Objects.requireNonNull(r);
        }
    }

    /** Converts to an {@link Optional}: the value on success, empty on failure. */
    default Optional<T> toOptional() {
        // Previously delegated unconditionally to get(), which throws on a failure.
        return isPresent() ? Optional.ofNullable(get()) : Optional.empty();
    }

    /**
     * Converts an {@link Optional} to a {@code Result}.
     * <p>
     * Uses {@code orElse(null)} rather than {@code get()} so that an empty
     * Optional becomes a failure instead of throwing NoSuchElementException.
     */
    static <T> Result<T> fromOptional(Optional<T> optional) {
        Objects.requireNonNull(optional);
        return ofNullable(optional.orElse(null) , "Optional hold a null value");
    }

    /** Combines the wrapped value with {@code value} via {@code func}; throws on a failure (via get()). */
    default <V, R> Result<R> thenCombine(V value , BiFunction<T, V, Result<R>> func) {
        return func.apply(get() , value);
    }

    /** Asynchronous variant of {@link #thenCombine(Object, BiFunction)} (common pool). */
    default <V, R> CompletableFuture<Result<R>> thenCombineAsync(V value , BiFunction<T, V, Result<R>> func) {
        return CompletableFuture.supplyAsync(() -> func.apply(get() , value));
    }

    /**
     * Maps the wrapped value; failures propagate unchanged. The mapper is routed
     * through {@link CheckedFunction} — presumably to capture thrown exceptions
     * as a failure; confirm against CheckedFunction's contract.
     */
    default <U> Result<U> map(Function<? super T, ? extends U> mapper) {
        Objects.requireNonNull(mapper);
        return isPresent()
            ? ((CheckedFunction<T, U>) mapper::apply).apply(get())
            : this.asFailure();
    }

    /** Flat-maps the wrapped value into a new {@code Result}; failures propagate unchanged. */
    default <U> Result<U> flatMap(Function<? super T, Result<U>> mapper) {
        Objects.requireNonNull(mapper);
        return this.isPresent()
            ? mapper.apply(get())
            : this.asFailure();
    }

    /** Re-types this result as a failure of a different value type. */
    @SuppressWarnings("unchecked")
    default <U> Result<U> asFailure() {
        return (isAbsent())
            ? (Result<U>) this
            : Result.failure("converted to Failure orig was " + this);
    }

    /** Shared state and value-based behavior for {@link Success} and {@link Failure}. */
    abstract class AbstractResult<T> implements Result<T> {
        // The wrapped value; null exactly when this is a Failure.
        protected final T value;

        public AbstractResult(T value) {
            this.value = value;
        }

        @Override
        public void ifPresent(Consumer<T> consumer) {
            Objects.requireNonNull(consumer);
            if (value != null) consumer.accept(value);
        }

        @Override
        public void ifAbsent(Runnable action) {
            Objects.requireNonNull(action);
            if (value == null) action.run();
        }

        public Boolean isPresent() {
            return (value != null) ? Boolean.TRUE : Boolean.FALSE;
        }

        public Boolean isAbsent() {
            return (value == null) ? Boolean.TRUE : Boolean.FALSE;
        }

        @Override
        public T get() {
            return Objects.requireNonNull(value);
        }

        @Override
        public T getOrElse(Supplier<T> supplier) {
            Objects.requireNonNull(supplier);
            return (value != null) ? value : Objects.requireNonNull(supplier.get());
        }
    }

    /** The success case: wraps a value and invokes the success-side callbacks. */
    class Success<T> extends AbstractResult<T> {
        public Success(T value) {
            super(value);
        }

        @Override
        public void ifPresentOrElse(Consumer<? super T> action , Runnable emptyAction) {
            action.accept(value);
        }

        @Override
        public void ifPresentOrElse(final Consumer<T> success , final Consumer<String> failure) {
            // TODO check if usefull -> Objects.requireNonNull(value);
            success.accept(value);
        }

        @Override
        public void ifPresentOrElseAsync(Consumer<? super T> action , Runnable emptyAction) {
            CompletableFuture.runAsync(() -> action.accept(value));
        }

        @Override
        public void ifPresentOrElseAsync(Consumer<T> success , Consumer<String> failure) {
            CompletableFuture.runAsync(() -> success.accept(value));
        }

        /** A success has no error message: validate the callback but never call it. */
        public void ifFailed(Consumer<String> failed) {
            Objects.requireNonNull(failed);
        }
    }

    /** The failure case: wraps an error message and invokes the failure-side callbacks. */
    class Failure<T> extends AbstractResult<T> {
        private final String errorMessage;

        public Failure(final String errorMessage) {
            super(null);
            this.errorMessage = errorMessage;
        }

        @Override
        public void ifPresentOrElse(Consumer<? super T> action , Runnable emptyAction) {
            emptyAction.run();
        }

        @Override
        public void ifPresentOrElse(final Consumer<T> success , final Consumer<String> failure) {
            failure.accept(errorMessage);
        }

        @Override
        public void ifPresentOrElseAsync(Consumer<? super T> action , Runnable emptyAction) {
            CompletableFuture.runAsync(emptyAction);
        }

        @Override
        public void ifPresentOrElseAsync(Consumer<T> success , Consumer<String> failure) {
            CompletableFuture.runAsync(() -> failure.accept(errorMessage));
        }

        /** Delivers the error message to {@code failed}. */
        public void ifFailed(Consumer<String> failed) {
            Objects.requireNonNull(failed);
            failed.accept(errorMessage);
        }
    }
}
|
functional-reactive/src/main/java/org/rapidpm/frp/model/Result.java
|
package org.rapidpm.frp.model;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Stream;
import org.rapidpm.frp.functions.CheckedFunction;
/**
* Copyright (C) 2017 RapidPM - Sven Ruppert
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* <p>
* Created by Sven Ruppert - RapidPM - Team on 16.03.17.
*/
/**
 * A success/failure result container: either a {@link Success} wrapping a
 * non-null value, or a {@link Failure} wrapping an error message.
 * <p>
 * Fixes: {@link #fromOptional(Optional)} no longer throws
 * {@code NoSuchElementException} on an empty Optional (it now yields a failure,
 * as its message always intended), and {@link #toOptional()} no longer throws a
 * {@code NullPointerException} on a failure (it now yields an empty Optional).
 *
 * @param <T> the type of the wrapped success value.
 */
public interface Result<T> {

    /** Runs {@code action} with the value on success, {@code emptyAction} on failure. */
    void ifPresentOrElse(Consumer<? super T> action , Runnable emptyAction);

    /** Runs {@code success} with the value on success, {@code failure} with the error message on failure. */
    void ifPresentOrElse(Consumer<T> success , Consumer<String> failure);

    /** Asynchronous variant of {@link #ifPresentOrElse(Consumer, Runnable)}. */
    void ifPresentOrElseAsync(Consumer<? super T> action , Runnable emptyAction);

    /** Asynchronous variant of {@link #ifPresentOrElse(Consumer, Consumer)}. */
    void ifPresentOrElseAsync(Consumer<T> success , Consumer<String> failure);

    /**
     * Creates a failed result.
     *
     * @param errorMessage the error message; must not be null.
     */
    static <T> Result<T> failure(String errorMessage) {
        Objects.requireNonNull(errorMessage);
        return new Result.Failure<>(errorMessage);
    }

    /** Creates a successful result wrapping {@code value}. */
    static <T> Result<T> success(T value) {
        return new Result.Success<>(value);
    }

    /** Wraps {@code value}; yields a failure with a generic message when it is null. */
    static <T> Result<T> ofNullable(T value) {
        return ofNullable(value , "Object was null");
    }

    /** Wraps {@code value}; yields a failure with {@code failedMessage} when it is null. */
    static <T> Result<T> ofNullable(T value , String failedMessage) {
        return (Objects.nonNull(value))
            ? Result.success(value)
            : Result.failure(failedMessage);
    }

    /** Returns the wrapped value; throws {@link NullPointerException} on a failure. */
    T get();

    /** Returns the wrapped value, or the supplier's (non-null) result on a failure. */
    T getOrElse(Supplier<T> supplier);

    /** Returns {@code true} when this result holds a value. */
    Boolean isPresent();

    /** Returns {@code true} when this result is a failure. */
    Boolean isAbsent();

    /** Runs {@code consumer} with the value if present. */
    void ifPresent(Consumer<T> consumer);

    /** Runs {@code action} if this result is a failure. */
    void ifAbsent(Runnable action);

    /** Returns a one-element stream of the value, or an empty stream on failure. */
    default Stream<T> stream() {
        if (! isPresent()) {
            return Stream.empty();
        } else {
            return Stream.of(get());
        }
    }

    /** Returns this result if successful, otherwise the supplier's result. */
    default Result<T> or(Supplier<? extends Result<? extends T>> supplier) {
        Objects.requireNonNull(supplier);
        if (isPresent()) {
            return this;
        } else {
            @SuppressWarnings("unchecked")
            Result<T> r = (Result<T>) supplier.get();
            return Objects.requireNonNull(r);
        }
    }

    /** Converts to an {@link Optional}: the value on success, empty on failure. */
    default Optional<T> toOptional() {
        // Previously delegated unconditionally to get(), which throws on a failure.
        return isPresent() ? Optional.ofNullable(get()) : Optional.empty();
    }

    /**
     * Converts an {@link Optional} to a {@code Result}.
     * <p>
     * Uses {@code orElse(null)} rather than {@code get()} so that an empty
     * Optional becomes a failure instead of throwing NoSuchElementException.
     */
    static <T> Result<T> fromOptional(Optional<T> optional) {
        Objects.requireNonNull(optional);
        return ofNullable(optional.orElse(null) , "Optional hold a null value");
    }

    /** Combines the wrapped value with {@code value} via {@code func}; throws on a failure (via get()). */
    default <V, R> Result<R> thenCombine(V value , BiFunction<T, V, Result<R>> func) {
        return func.apply(get() , value);
    }

    /** Asynchronous variant of {@link #thenCombine(Object, BiFunction)} (common pool). */
    default <V, R> CompletableFuture<Result<R>> thenCombineAsync(V value , BiFunction<T, V, Result<R>> func) {
        return CompletableFuture.supplyAsync(() -> func.apply(get() , value));
    }

    /**
     * Maps the wrapped value; failures propagate unchanged. The mapper is routed
     * through {@link CheckedFunction} — presumably to capture thrown exceptions
     * as a failure; confirm against CheckedFunction's contract.
     */
    default <U> Result<U> map(Function<? super T, ? extends U> mapper) {
        Objects.requireNonNull(mapper);
        return isPresent()
            ? ((CheckedFunction<T, U>) mapper::apply).apply(get())
            : this.asFailure();
    }

    /** Flat-maps the wrapped value into a new {@code Result}; failures propagate unchanged. */
    default <U> Result<U> flatMap(Function<? super T, Result<U>> mapper) {
        Objects.requireNonNull(mapper);
        return this.isPresent()
            ? mapper.apply(get())
            : this.asFailure();
    }

    /** Re-types this result as a failure of a different value type. */
    @SuppressWarnings("unchecked")
    default <U> Result<U> asFailure() {
        return (isAbsent())
            ? (Result<U>) this
            : Result.failure("converted to Failure orig was " + this);
    }

    /** Shared state and value-based behavior for {@link Success} and {@link Failure}. */
    abstract class AbstractResult<T> implements Result<T> {
        // The wrapped value; null exactly when this is a Failure.
        protected final T value;

        public AbstractResult(T value) {
            this.value = value;
        }

        @Override
        public void ifPresent(Consumer<T> consumer) {
            Objects.requireNonNull(consumer);
            if (value != null) consumer.accept(value);
        }

        @Override
        public void ifAbsent(Runnable action) {
            Objects.requireNonNull(action);
            if (value == null) action.run();
        }

        public Boolean isPresent() {
            return (value != null) ? Boolean.TRUE : Boolean.FALSE;
        }

        public Boolean isAbsent() {
            return (value == null) ? Boolean.TRUE : Boolean.FALSE;
        }

        @Override
        public T get() {
            return Objects.requireNonNull(value);
        }

        @Override
        public T getOrElse(Supplier<T> supplier) {
            Objects.requireNonNull(supplier);
            return (value != null) ? value : Objects.requireNonNull(supplier.get());
        }
    }

    /** The success case: wraps a value and invokes the success-side callbacks. */
    class Success<T> extends AbstractResult<T> {
        public Success(T value) {
            super(value);
        }

        @Override
        public void ifPresentOrElse(Consumer<? super T> action , Runnable emptyAction) {
            action.accept(value);
        }

        @Override
        public void ifPresentOrElse(final Consumer<T> success , final Consumer<String> failure) {
            // TODO check if usefull -> Objects.requireNonNull(value);
            success.accept(value);
        }

        @Override
        public void ifPresentOrElseAsync(Consumer<? super T> action , Runnable emptyAction) {
            CompletableFuture.runAsync(() -> action.accept(value));
        }

        @Override
        public void ifPresentOrElseAsync(Consumer<T> success , Consumer<String> failure) {
            CompletableFuture.runAsync(() -> success.accept(value));
        }
    }

    /** The failure case: wraps an error message and invokes the failure-side callbacks. */
    class Failure<T> extends AbstractResult<T> {
        private final String errorMessage;

        public Failure(final String errorMessage) {
            super(null);
            this.errorMessage = errorMessage;
        }

        @Override
        public void ifPresentOrElse(Consumer<? super T> action , Runnable emptyAction) {
            emptyAction.run();
        }

        @Override
        public void ifPresentOrElse(final Consumer<T> success , final Consumer<String> failure) {
            failure.accept(errorMessage);
        }

        @Override
        public void ifPresentOrElseAsync(Consumer<? super T> action , Runnable emptyAction) {
            CompletableFuture.runAsync(emptyAction);
        }

        @Override
        public void ifPresentOrElseAsync(Consumer<T> success , Consumer<String> failure) {
            CompletableFuture.runAsync(() -> failure.accept(errorMessage));
        }
    }
}
|
added Result.ifFailed
|
functional-reactive/src/main/java/org/rapidpm/frp/model/Result.java
|
added Result.ifFailed
|
|
Java
|
apache-2.0
|
5c82f07c960eb70a419827ffa53c727b112b66cd
| 0
|
googleinterns/step204-2020,googleinterns/step204-2020,googleinterns/step204-2020
|
package com.google.job.data;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.appengine.repackaged.com.google.common.collect.ImmutableSet;
import com.google.cloud.firestore.*;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.utils.FireStoreUtils;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
import org.apache.commons.lang3.Range;
import java.lang.UnsupportedOperationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.Future;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import java.util.logging.Level;
/** Helps persist and retrieve job posts. */
public final class JobsDatabase {
private static final Logger log = Logger.getLogger(JobsDatabase.class.getName());
// Cloud Firestore collection names.
private static final String JOB_COLLECTION = "Jobs";
private static final String APPLICANT_ACCOUNTS_COLLECTION = "ApplicantAccounts";
// Field paths used in queries; "a.b" addresses a nested document field.
private static final String SALARY_FIELD = "jobPay.annualMax";
private static final String REGION_FIELD = "jobLocation.region";
private static final String JOB_STATUS_FIELD = "jobStatus";
private static final String JOB_REQUIREMENTS_FIELD = "requirements";
private static final String INTERESTED_JOBS_FIELD = "interestedJobs";
private static final String JOB_ID_FIELD = "jobId";
// NOTE(review): not referenced by the methods visible here — presumably used by
// the query/paging helpers further down; confirm before removing.
private static final long TIMEOUT_SECONDS = 5;
/**
 * Adds a newly created job post.
 *
 * @param newJob Newly created job post. Assumes that it is non-nullable.
 * @return A future of the detailed information of the writing.
 */
public Future<WriteResult> addJob(Job newJob) throws IOException {
// Add document data after generating an id
// (document() with no argument lets Firestore auto-generate the document id).
DocumentReference addedDocRef = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION).document();
String jobId = addedDocRef.getId();
// Updates the Job with cloud firestore id
// Status is already set when parsing the job post
Job job = newJob.toBuilder()
.setJobId(jobId)
.build();
return addedDocRef.set(job);
}
/**
 * Edits the job post.
 *
 * @param jobId Id for the target job post in the database.
 * @param updatedJob Updated job post (with cloud firestore id).
 * @return A future of document reference for the updated job post.
 * @throws IllegalArgumentException If the job id is invalid.
 */
public Future<DocumentReference> setJob(String jobId, Job updatedJob) throws IllegalArgumentException, IOException {
if (jobId.isEmpty()) {
throw new IllegalArgumentException("Job Id should be an non-empty string");
}
// Runs an asynchronous transaction
ApiFuture<DocumentReference> futureTransaction = FireStoreUtils.getFireStore().runTransaction(transaction -> {
final DocumentReference documentReference = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION).document(jobId);
// Verifies if the current user can update the job post with this job id
// TODO(issue/25): incorporate the account stuff into job post.
// NOTE(review): the trailing .get() blocks on the ApiFuture inside the
// transaction function — confirm this is acceptable for the transaction runner.
DocumentSnapshot documentSnapshot = transaction.get(documentReference).get();
// Job does not exist
if (!documentSnapshot.exists()) {
throw new IllegalArgumentException("Invalid jobId");
}
// Overwrites the whole job post
transaction.set(documentReference, updatedJob);
return documentReference;
});
return futureTransaction;
}
/**
 * Marks a job post as DELETED (soft delete: the document is kept and only its
 * {@code jobStatus} field is changed).
 *
 * @param jobId Cloud Firestore Id of the job post.
 * @return A future of document reference for the updated job post.
 * @throws IllegalArgumentException If the job id is empty or does not exist.
 */
public Future<DocumentReference> markJobPostAsDeleted(String jobId) throws IOException {
if (jobId.isEmpty()) {
throw new IllegalArgumentException("Job Id should be an non-empty string");
}
// Runs an asynchronous transaction
// (same existence-check pattern as setJob above).
ApiFuture<DocumentReference> futureTransaction = FireStoreUtils.getFireStore().runTransaction(transaction -> {
final DocumentReference documentReference = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION).document(jobId);
// Verifies if the current user can update the job post with this job id
// TODO(issue/25): incorporate the account stuff into job post.
DocumentSnapshot documentSnapshot = transaction.get(documentReference).get();
// Job does not exist
if (!documentSnapshot.exists()) {
throw new IllegalArgumentException("Invalid jobId");
}
// Updates the jobStatus field to DELETED
transaction.update(documentReference, JOB_STATUS_FIELD, JobStatus.DELETED);
return documentReference;
});
return futureTransaction;
}
/**
 * Fetches the snapshot future of a specific job post.
 *
 * @param jobId Id for the job post in the database.
 * @return Future of the target job post; the Optional is empty when the
 * snapshot cannot be converted to a {@code Job} (see
 * FireStoreUtils.convertDocumentSnapshotToPOJO — confirm its missing-document
 * behavior).
 * @throws IllegalArgumentException If the job id is invalid.
 */
public Future<Optional<Job>> fetchJob(String jobId) throws IllegalArgumentException, IOException {
DocumentReference docRef = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION).document(jobId);
// Asynchronously retrieves the document
ApiFuture<DocumentSnapshot> snapshotFuture = docRef.get();
// Converts the raw snapshot to Optional<Job> without blocking, on the calling thread.
ApiFunction<DocumentSnapshot, Optional<Job>> jobFunction = new ApiFunction<DocumentSnapshot, Optional<Job>>() {
@NullableDecl
public Optional<Job> apply(@NullableDecl DocumentSnapshot documentSnapshot) {
return FireStoreUtils.convertDocumentSnapshotToPOJO(documentSnapshot, Job.class);
}
};
return ApiFutures.transform(snapshotFuture, jobFunction, MoreExecutors.directExecutor());
}
/** Returns future of all ACTIVE and eligible job posts in database. */
public Future<Collection<Job>> fetchAllEligibleJobs(List<String> skills) throws IOException {
// Gets all requirements stable id
List<String> requirementsList = Requirement.getAllRequirementIds();
// Gets a list of requirements which the applicant does not have
List<String> excludedSkills = new ArrayList<>(requirementsList);
excludedSkills.removeAll(skills);
final Query activeJobsQuery = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION)
.whereEqualTo(JOB_STATUS_FIELD, JobStatus.ACTIVE);
// Eligible post: post whose requirements do not contain (field is false)
// the skills that applicant does not have
Query eligiblePostQuery = activeJobsQuery;
for (String negateSkill : excludedSkills) {
String fieldPath = String.format("%s.%s", JOB_REQUIREMENTS_FIELD, negateSkill);
eligiblePostQuery = eligiblePostQuery.whereEqualTo(fieldPath, false);
}
ApiFuture<QuerySnapshot> querySnapshotFuture = eligiblePostQuery.get();
ApiFunction<QuerySnapshot, Collection<Job>> function = documents -> {
ImmutableSet.Builder<Job> jobs = ImmutableSet.builder();
for (DocumentSnapshot document : documents) {
Job job = document.toObject(Job.class);
jobs.add(job);
}
return jobs.build();
};
return ApiFutures.transform(querySnapshotFuture, function, MoreExecutors.directExecutor());
}
/**
* Gets all the jobs given the params from the database.
* Currently, they can only be sorted/filtered by salary.
*
* @param jobQuery The job query object with all the filtering/sorting params.
* @return Future of the JobPage object.
*/
public static Future<JobPage> fetchJobPage(JobQuery jobQuery) throws IOException {
CollectionReference jobsCollection = FireStoreUtils.getFireStore().collection(JOB_COLLECTION);
// TODO(issue/62): support other filters
if (!jobQuery.getSortBy().equals(Filter.SALARY)) {
throw new UnsupportedOperationException("currently this app only supports sorting/filtering by salary");
}
Query query = jobsCollection.whereEqualTo(JOB_STATUS_FIELD, JobStatus.ACTIVE.name())
.whereGreaterThanOrEqualTo(SALARY_FIELD, jobQuery.getMinLimit())
.whereLessThanOrEqualTo(SALARY_FIELD, jobQuery.getMaxLimit())
.orderBy(SALARY_FIELD, Order.getQueryDirection(jobQuery.getOrder()));
SingaporeRegion region = jobQuery.getRegion();
if (!region.equals(SingaporeRegion.ENTIRE)) {
query = query.whereEqualTo(REGION_FIELD, region.name());
}
// TODO(issue/34): add to the query to include pagination (using pageSize and pageIndex)
return ApiFutures.transform(
query.get(),
querySnapshot -> {
if (querySnapshot == null) {
return new JobPage(/* jobList= */ ImmutableList.of(), /* totalCount= */ 0, Range.between(0, 0));
}
List<QueryDocumentSnapshot> documents = querySnapshot.getDocuments();
if (documents.size() == 0) {
return new JobPage(ImmutableList.of(), 0, Range.between(0, 0));
}
ImmutableList.Builder<Job> jobList = ImmutableList.builder();
for (QueryDocumentSnapshot document : documents) {
Job job = document.toObject(Job.class);
jobList.add(job);
}
// TODO(issue/34): adjust range/total count based on pagination
long totalCount = documents.size();
Range<Integer> range = Range.between(1, documents.size());
return new JobPage(jobList.build(), totalCount, range);
},
MoreExecutors.directExecutor()
);
}
/**
* Gets all the applicant's interested jobs given the params. If there's an error in getting a particular job
* from the jobId, then that specific job will not be returned.
*
* @param applicantId The applicant's userId.
* @param pageSize The the number of jobs to be shown on the page.
* @param pageIndex The page number on which we are at.
* @return Future of the JobPage object.
* @throws IllegalArgumentException If the applicantId doesn't have a corresponding document.
*/
public Future<JobPage> fetchInterestedJobPage(String applicantId, int pageSize, int pageIndex) throws IOException, IllegalArgumentException {
CollectionReference applicantAccountsCollection = FireStoreUtils.getFireStore().collection(APPLICANT_ACCOUNTS_COLLECTION);
DocumentReference docRef = applicantAccountsCollection.document(applicantId);
return ApiFutures.transform(
docRef.get(),
documentSnapshot -> {
if (!documentSnapshot.exists()) {
throw new IllegalArgumentException("Invalid applicantId");
}
List<String> interestedList = (List<String>) documentSnapshot.get(INTERESTED_JOBS_FIELD);
// TODO(issue/34): adjust the interestedList the be looked at based on pagination
ImmutableList.Builder<Job> jobListBuilder = ImmutableList.builder();
try {
int counter = 0;
// Handles the jobIds 10 at a time.
// TODO(issue/34): pagination could also be included here.
while (counter + 10 < interestedList.size()) {
List<String> subList = interestedList.subList(/* inclusive */ counter, /* exclusive */ counter + 10);
List<Job> fetchedList = fetchJobsFromIds(subList).get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
jobListBuilder.addAll(fetchedList);
counter = counter + 10;
}
List<String> subList = interestedList.subList(/* inclusive */ counter, /* exclusive */ interestedList.size());
List<Job> fetchedList = fetchJobsFromIds(subList).get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
jobListBuilder.addAll(fetchedList);
} catch (IOException | InterruptedException | ExecutionException | TimeoutException e) {
log.log(Level.SEVERE, "error while getting interested job list ", e);
}
List<Job> jobList = jobListBuilder.build();
// TODO(issue/34): adjust range/total count based on pagination
long totalCount = jobList.size();
Range<Integer> range = Range.between(1, jobList.size());
return new JobPage(jobList, totalCount, range);
},
MoreExecutors.directExecutor()
);
}
/**
* This will return a future of a list of jobs given the list of jobIds.
* Any jobs that are not active or invalid will not be included in the list returned.
* This only handles jobIds of max 10 at a time.
*
* @param jobIds The list of jobIds.
* @return Future of the list of jobs.
*/
public Future<List<Job>> fetchJobsFromIds(List<String> jobIds) throws IOException {
CollectionReference jobsCollection = FireStoreUtils.getFireStore().collection(JOB_COLLECTION);
Query query = jobsCollection.whereEqualTo(JOB_STATUS_FIELD, JobStatus.ACTIVE.name())
.whereIn(JOB_ID_FIELD, jobIds);
return ApiFutures.transform(
query.get(),
querySnapshot -> {
if (querySnapshot == null) {
return ImmutableList.of();
}
List<QueryDocumentSnapshot> documents = querySnapshot.getDocuments();
if (documents.size() == 0) {
return ImmutableList.of();
}
ImmutableList.Builder<Job> jobList = ImmutableList.builder();
for (QueryDocumentSnapshot document : documents) {
Job job = document.toObject(Job.class);
jobList.add(job);
}
return jobList.build();
},
MoreExecutors.directExecutor()
);
}
/*
* Updates the applicant's interested list to add or remove the job.
*
* @param applicantId The applicant's userId.
* @param jobId The job id.
* @param interested Whether the applicant is currently interested in it or not.
* @return A future of document reference for the applicant's update job list.
*/
public static Future<DocumentReference> updateInterestedJobsList(String applicantId, String jobId, boolean interested) throws IOException, IllegalArgumentException, Exception {
return FireStoreUtils.getFireStore().runTransaction(transaction -> {
final DocumentReference documentReference = FireStoreUtils.getFireStore()
.collection(APPLICANT_ACCOUNTS_COLLECTION).document(applicantId);
if (interested) {
// Remove the jobId
// Does nothing if it already isn't there
documentReference.update(INTERESTED_JOBS_FIELD, FieldValue.arrayRemove(jobId));
} else {
// Add the jobId
// Does nothing if it already exists
documentReference.update(INTERESTED_JOBS_FIELD, FieldValue.arrayUnion(jobId));
}
return documentReference;
});
}
}
|
walk-in-interview/src/main/java/com/google/job/data/JobsDatabase.java
|
package com.google.job.data;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.appengine.repackaged.com.google.common.collect.ImmutableSet;
import com.google.cloud.firestore.*;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.utils.FireStoreUtils;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
import org.apache.commons.lang3.Range;
import java.lang.UnsupportedOperationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.Future;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import java.util.logging.Level;
/** Helps persist and retrieve job posts. */
public final class JobsDatabase {
private static final Logger log = Logger.getLogger(JobsDatabase.class.getName());
private static final String JOB_COLLECTION = "Jobs";
private static final String APPLICANT_ACCOUNTS_COLLECTION = "ApplicantAccounts";
private static final String SALARY_FIELD = "jobPay.annualMax";
private static final String REGION_FIELD = "jobLocation.region";
private static final String JOB_STATUS_FIELD = "jobStatus";
private static final String JOB_REQUIREMENTS_FIELD = "requirements";
private static final String INTERESTED_JOBS_FIELD = "interestedJobs";
private static final String JOB_ID_FIELD = "jobId";
private static final long TIMEOUT_SECONDS = 5;
/**
* Adds a newly created job post.
*
* @param newJob Newly created job post. Assumes that it is non-nullable.
* @return A future of the detailed information of the writing.
*/
public Future<WriteResult> addJob(Job newJob) throws IOException {
// Add document data after generating an id
DocumentReference addedDocRef = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION).document();
String jobId = addedDocRef.getId();
// Updates the Job with cloud firestore id
// Status is already set when parsing the job post
Job job = newJob.toBuilder()
.setJobId(jobId)
.build();
return addedDocRef.set(job);
}
/**
* Edits the job post.
*
* @param jobId Id for the target job post in the database.
* @param updatedJob Updated job post (with cloud firestore id).
* @return A future of document reference for the updated job post.
* @throws IllegalArgumentException If the job id is invalid.
*/
public Future<DocumentReference> setJob(String jobId, Job updatedJob) throws IllegalArgumentException, IOException {
if (jobId.isEmpty()) {
throw new IllegalArgumentException("Job Id should be an non-empty string");
}
// Runs an asynchronous transaction
ApiFuture<DocumentReference> futureTransaction = FireStoreUtils.getFireStore().runTransaction(transaction -> {
final DocumentReference documentReference = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION).document(jobId);
// Verifies if the current user can update the job post with this job id
// TODO(issue/25): incorporate the account stuff into job post.
DocumentSnapshot documentSnapshot = transaction.get(documentReference).get();
// Job does not exist
if (!documentSnapshot.exists()) {
throw new IllegalArgumentException("Invalid jobId");
}
// Overwrites the whole job post
transaction.set(documentReference, updatedJob);
return documentReference;
});
return futureTransaction;
}
/**
* Marks a job post as DELETED.
*
* @param jobId Cloud Firestore Id of the job post.
* @return A future of document reference for the updated job post.
*/
public Future<DocumentReference> markJobPostAsDeleted(String jobId) throws IOException {
if (jobId.isEmpty()) {
throw new IllegalArgumentException("Job Id should be an non-empty string");
}
// Runs an asynchronous transaction
ApiFuture<DocumentReference> futureTransaction = FireStoreUtils.getFireStore().runTransaction(transaction -> {
final DocumentReference documentReference = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION).document(jobId);
// Verifies if the current user can update the job post with this job id
// TODO(issue/25): incorporate the account stuff into job post.
DocumentSnapshot documentSnapshot = transaction.get(documentReference).get();
// Job does not exist
if (!documentSnapshot.exists()) {
throw new IllegalArgumentException("Invalid jobId");
}
// Updates the jobStatus field to DELETED
transaction.update(documentReference, JOB_STATUS_FIELD, JobStatus.DELETED);
return documentReference;
});
return futureTransaction;
}
/**
* Fetches the snapshot future of a specific job post.
*
* @param jobId Id for the job post in the database.
* @return Future of the target job post.
* @throws IllegalArgumentException If the job id is invalid.
*/
public Future<Optional<Job>> fetchJob(String jobId) throws IllegalArgumentException, IOException {
DocumentReference docRef = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION).document(jobId);
// Asynchronously retrieves the document
ApiFuture<DocumentSnapshot> snapshotFuture = docRef.get();
ApiFunction<DocumentSnapshot, Optional<Job>> jobFunction = new ApiFunction<DocumentSnapshot, Optional<Job>>() {
@NullableDecl
public Optional<Job> apply(@NullableDecl DocumentSnapshot documentSnapshot) {
return FireStoreUtils.convertDocumentSnapshotToPOJO(documentSnapshot, Job.class);
}
};
return ApiFutures.transform(snapshotFuture, jobFunction, MoreExecutors.directExecutor());
}
/** Returns future of all ACTIVE and eligible job posts in database. */
public Future<Collection<Job>> fetchAllEligibleJobs(List<String> skills) throws IOException {
// Gets all requirements stable id
List<String> requirementsList = Requirement.getAllRequirementIds();
// Gets a list of requirements which the applicant does not have
List<String> excludedSkills = new ArrayList<>(requirementsList);
excludedSkills.removeAll(skills);
final Query activeJobsQuery = FireStoreUtils.getFireStore()
.collection(JOB_COLLECTION)
.whereEqualTo(JOB_STATUS_FIELD, JobStatus.ACTIVE);
// Eligible post: post whose requirements do not contain (field is false)
// the skills that applicant does not have
Query eligiblePostQuery = activeJobsQuery;
for (String negateSkill : excludedSkills) {
String fieldPath = String.format("%s.%s", JOB_REQUIREMENTS_FIELD, negateSkill);
eligiblePostQuery = eligiblePostQuery.whereEqualTo(fieldPath, false);
}
ApiFuture<QuerySnapshot> querySnapshotFuture = eligiblePostQuery.get();
ApiFunction<QuerySnapshot, Collection<Job>> function = documents -> {
ImmutableSet.Builder<Job> jobs = ImmutableSet.builder();
for (DocumentSnapshot document : documents) {
Job job = document.toObject(Job.class);
jobs.add(job);
}
return jobs.build();
};
return ApiFutures.transform(querySnapshotFuture, function, MoreExecutors.directExecutor());
}
/**
* Gets all the jobs given the params from the database.
* Currently, they can only be sorted/filtered by salary.
*
* @param jobQuery The job query object with all the filtering/sorting params.
* @return Future of the JobPage object.
*/
public static Future<JobPage> fetchJobPage(JobQuery jobQuery) throws IOException {
CollectionReference jobsCollection = FireStoreUtils.getFireStore().collection(JOB_COLLECTION);
// TODO(issue/62): support other filters
if (!jobQuery.getSortBy().equals(Filter.SALARY)) {
throw new UnsupportedOperationException("currently this app only supports sorting/filtering by salary");
}
Query query = jobsCollection.whereEqualTo(JOB_STATUS_FIELD, JobStatus.ACTIVE.name())
.whereGreaterThanOrEqualTo(SALARY_FIELD, jobQuery.getMinLimit())
.whereLessThanOrEqualTo(SALARY_FIELD, jobQuery.getMaxLimit())
.orderBy(SALARY_FIELD, Order.getQueryDirection(jobQuery.getOrder()));
SingaporeRegion region = jobQuery.getRegion();
if (!region.equals(SingaporeRegion.ENTIRE)) {
query = query.whereEqualTo(REGION_FIELD, region.name());
}
// TODO(issue/34): add to the query to include pagination (using pageSize and pageIndex)
return ApiFutures.transform(
query.get(),
querySnapshot -> {
if (querySnapshot == null) {
return new JobPage(/* jobList= */ ImmutableList.of(), /* totalCount= */ 0, Range.between(0, 0));
}
List<QueryDocumentSnapshot> documents = querySnapshot.getDocuments();
if (documents.size() == 0) {
return new JobPage(ImmutableList.of(), 0, Range.between(0, 0));
}
ImmutableList.Builder<Job> jobList = ImmutableList.builder();
for (QueryDocumentSnapshot document : documents) {
Job job = document.toObject(Job.class);
jobList.add(job);
}
// TODO(issue/34): adjust range/total count based on pagination
long totalCount = documents.size();
Range<Integer> range = Range.between(1, documents.size());
return new JobPage(jobList.build(), totalCount, range);
},
MoreExecutors.directExecutor()
);
}
/**
* Gets all the applicant's interested jobs given the params. If there's an error in getting a particular job
* from the jobId, then that specific job will not be returned.
*
* @param applicantId The applicant's userId.
* @param pageSize The the number of jobs to be shown on the page.
* @param pageIndex The page number on which we are at.
* @return Future of the JobPage object.
* @throws IllegalArgumentException If the applicantId doesn't have a corresponding document.
*/
public Future<JobPage> fetchInterestedJobPage(String applicantId, int pageSize, int pageIndex) throws IOException, IllegalArgumentException {
CollectionReference applicantAccountsCollection = FireStoreUtils.getFireStore().collection(APPLICANT_ACCOUNTS_COLLECTION);
DocumentReference docRef = applicantAccountsCollection.document(applicantId);
return ApiFutures.transform(
docRef.get(),
documentSnapshot -> {
if (!documentSnapshot.exists()) {
throw new IllegalArgumentException("Invalid applicantId");
}
List<String> interestedList = (List<String>) documentSnapshot.get(INTERESTED_JOBS_FIELD);
// TODO(issue/34): adjust the interestedList the be looked at based on pagination
ImmutableList.Builder<Job> jobListBuilder = ImmutableList.builder();
try {
int counter = 0;
while (counter + 10 < interestedList.size()) {
List<String> subList = interestedList.subList(/* inclusive */ counter, /* exclusive */ counter + 10);
List<Job> fetchedList = fetchJobsFromIds(subList).get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
jobListBuilder.addAll(fetchedList);
counter = counter + 10;
}
List<String> subList = interestedList.subList(/* inclusive */ counter, /* exclusive */ interestedList.size());
List<Job> fetchedList = fetchJobsFromIds(subList).get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
jobListBuilder.addAll(fetchedList);
} catch (IOException | InterruptedException | ExecutionException | TimeoutException e) {
log.log(Level.SEVERE, "error while getting interested job list ", e);
}
List<Job> jobList = jobListBuilder.build();
// TODO(issue/34): adjust range/total count based on pagination
long totalCount = jobList.size();
Range<Integer> range = Range.between(1, jobList.size());
return new JobPage(jobList, totalCount, range);
},
MoreExecutors.directExecutor()
);
}
/**
* This will return a future of a list of jobs given the list of jobIds.
* Any jobs that are not active or invalid will not be included in the list returned.
*
* @param jobIds The list of jobIds.
* @return Future of the list of jobs.
*/
public Future<List<Job>> fetchJobsFromIds(List<String> jobIds) throws IOException {
CollectionReference jobsCollection = FireStoreUtils.getFireStore().collection(JOB_COLLECTION);
// TODO(issue/xx): this will need to be done 10 jobIds at a time
Query query = jobsCollection.whereEqualTo(JOB_STATUS_FIELD, JobStatus.ACTIVE.name())
.whereIn(JOB_ID_FIELD, jobIds);
return ApiFutures.transform(
query.get(),
querySnapshot -> {
if (querySnapshot == null) {
return ImmutableList.of();
}
List<QueryDocumentSnapshot> documents = querySnapshot.getDocuments();
if (documents.size() == 0) {
return ImmutableList.of();
}
ImmutableList.Builder<Job> jobList = ImmutableList.builder();
for (QueryDocumentSnapshot document : documents) {
Job job = document.toObject(Job.class);
jobList.add(job);
}
return jobList.build();
},
MoreExecutors.directExecutor()
);
}
/*
* Updates the applicant's interested list to add or remove the job.
*
* @param applicantId The applicant's userId.
* @param jobId The job id.
* @param interested Whether the applicant is currently interested in it or not.
* @return A future of document reference for the applicant's update job list.
*/
public static Future<DocumentReference> updateInterestedJobsList(String applicantId, String jobId, boolean interested) throws IOException, IllegalArgumentException, Exception {
return FireStoreUtils.getFireStore().runTransaction(transaction -> {
final DocumentReference documentReference = FireStoreUtils.getFireStore()
.collection(APPLICANT_ACCOUNTS_COLLECTION).document(applicantId);
if (interested) {
// Remove the jobId
// Does nothing if it already isn't there
documentReference.update(INTERESTED_JOBS_FIELD, FieldValue.arrayRemove(jobId));
} else {
// Add the jobId
// Does nothing if it already exists
documentReference.update(INTERESTED_JOBS_FIELD, FieldValue.arrayUnion(jobId));
}
return documentReference;
});
}
}
|
add comments
|
walk-in-interview/src/main/java/com/google/job/data/JobsDatabase.java
|
add comments
|
|
Java
|
apache-2.0
|
6a9dfa5cdd116fcad4e6ee265cdae8782a2b3cb6
| 0
|
webfirmframework/wff,webfirmframework/wff
|
/*
* Copyright 2014-2016 Web Firm Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* @author WFF
*/
package com.webfirmframework.wffweb.css;
import com.webfirmframework.wffweb.InvalidValueException;
import com.webfirmframework.wffweb.data.AbstractBean;
/**
*
* @author WFF
* @since 1.0.0
*/
public class RgbCssValue extends AbstractBean<RgbCssValue> {
private static final long serialVersionUID = 1_0_0L;
private String rgb = "rgb(0, 0, 0)";
private int r = 0;
private int g = 0;
private int b = 0;
public RgbCssValue() {
}
/**
* @param rgbCssValue
* eg:- <code> rgb(15, 55, 155) </code>
*/
public RgbCssValue(final String rgbCssValue) {
super();
extractAndAssign(rgbCssValue);
}
public RgbCssValue(final RgbCssValue rgbCssValue) {
super();
extractAndAssign(rgbCssValue.getValue());
}
/**
* @param rgb
* eg:- <code> rgb(15, 55, 155) </code>
*
* @since 1.0.0
* @author WFF
*/
public void setRgb(final String rgb) {
extractAndAssign(rgb);
}
/**
* @param rgbString
* @since 1.0.0
* @author WFF
*/
private void extractAndAssign(final String rgbString) {
final String rgbStringLowerCase = rgbString.replace(" ", "")
.toLowerCase();
if (rgbStringLowerCase.startsWith("rgb(")
&& rgbStringLowerCase.contains(")")) {
final String rgb = rgbStringLowerCase.replace(",", ", ");
final String[] rgbStringParts = rgbStringLowerCase
.substring(rgbStringLowerCase.indexOf('(') + 1,
rgbStringLowerCase.lastIndexOf(')'))
.split("[,]");
if (rgbStringParts.length == 3) {
r = Integer.parseInt(rgbStringParts[0]);
if (r < 0 || r > 255) {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. r value in rgbString should be inbetween 0 to 255, eg: rgb(255, 25, 15)");
}
g = Integer.parseInt(rgbStringParts[1]);
if (g < 0 || g > 255) {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. g value in rgbString should be inbetween 0 to 255, eg: rgb(255, 25, 15)");
}
b = Integer.parseInt(rgbStringParts[2]);
if (b < 0 || b > 255) {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. b value in rgbString should be inbetween 0 to 255, eg: rgb(255, 25, 15)");
}
this.rgb = rgb;
} else {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. It should be in the format of rgb(255, 25, 15)");
}
} else {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. It should be in the format of rgb(255, 25, 15)");
}
}
/**
* @param r
* red value. accepts values only from 0 to 255.
* @param g
* green value. accepts values only from 0 to 255.
* @param b
* blue value. accepts values only from 0 to 255.
*/
public RgbCssValue(final int r, final int g, final int b) {
super();
if ((r < 0 || r > 255) || (g < 0 || g > 255) || (b < 0 || b > 255)) {
throw new InvalidValueException(
"r, b and g paramater accept values only from 0 to 255.");
}
this.r = r;
this.g = g;
this.b = b;
rgb = "rgb(" + r + ", " + g + ", " + b + ")";
}
public int getR() {
return r;
}
public void setR(final int r) {
if (r < 0 || r > 255) {
throw new InvalidValueException(
"r paramater accept values only from 0 to 255.");
}
this.r = r;
rgb = "rgb(" + r + ", " + g + ", " + b + ")";
if (getStateChangeInformer() != null) {
getStateChangeInformer().stateChanged(this);
}
}
public int getG() {
return g;
}
public void setG(final int g) {
if (g < 0 || g > 255) {
throw new InvalidValueException(
"g paramater accept values only from 0 to 255.");
}
this.g = g;
rgb = "rgb(" + r + ", " + g + ", " + b + ")";
if (getStateChangeInformer() != null) {
getStateChangeInformer().stateChanged(this);
}
}
public int getB() {
return b;
}
public void setB(final int b) {
if (b < 0 || b > 255) {
throw new InvalidValueException(
"b paramater accept values only from 0 to 255.");
}
this.b = b;
rgb = "rgb(" + r + ", " + g + ", " + b + ")";
if (getStateChangeInformer() != null) {
getStateChangeInformer().stateChanged(this);
}
}
@Override
public String toString() {
return rgb;
}
/**
* @return the print format of these values as a css value.
* @since 1.0.0
* @author WFF
*/
@Override
public String getValue() {
return rgb;
}
/**
* @param rgbString
* eg:- rgb(25, 155, 55)
* @return true if valid and false for invalid.
* @since 1.0.0
* @author WFF
*/
public static boolean isValid(final String rgbString) {
try {
final String rgbStringLowerCase = rgbString.replace(" ", "")
.toLowerCase();
if (rgbStringLowerCase.startsWith("rgb(")
&& rgbStringLowerCase.contains(")")) {
final String[] rgbStringParts = rgbStringLowerCase
.substring(rgbStringLowerCase.indexOf('(') + 1,
rgbStringLowerCase.lastIndexOf(')'))
.split("[,]");
if (rgbStringParts.length == 3) {
final int r = Integer.parseInt(rgbStringParts[0]);
if (r < 0 || r > 255) {
return false;
}
final int g = Integer.parseInt(rgbStringParts[1]);
if (g < 0 || g > 255) {
return false;
}
final int b = Integer.parseInt(rgbStringParts[2]);
return !(b < 0 || b > 255);
}
return false;
}
} catch (final NumberFormatException e) {
}
return false;
}
}
|
wffweb/src/main/java/com/webfirmframework/wffweb/css/RgbCssValue.java
|
/*
* Copyright 2014-2016 Web Firm Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* @author WFF
*/
package com.webfirmframework.wffweb.css;
import com.webfirmframework.wffweb.InvalidValueException;
import com.webfirmframework.wffweb.data.AbstractBean;
/**
*
* @author WFF
* @since 1.0.0
*/
public class RgbCssValue extends AbstractBean<RgbCssValue> {
private static final long serialVersionUID = 1_0_0L;
private String rgb = "rgb(0, 0, 0)";
private int r = 0;
private int g = 0;
private int b = 0;
public RgbCssValue() {
}
/**
* @param rgbCssValue
* eg:- <code> rgb(15, 55, 155) </code>
*/
public RgbCssValue(final String rgbCssValue) {
super();
extractAndAssign(rgbCssValue);
}
public RgbCssValue(final RgbCssValue rgbCssValue) {
super();
extractAndAssign(rgbCssValue.getValue());
}
/**
* @param rgb
* eg:- <code> rgb(15, 55, 155) </code>
*
* @since 1.0.0
* @author WFF
*/
public void setRgb(final String rgb) {
extractAndAssign(rgb);
}
/**
* @param rgbString
* @since 1.0.0
* @author WFF
*/
private void extractAndAssign(final String rgbString) {
final String rgbStringLowerCase = rgbString.replace(" ", "")
.toLowerCase();
if (rgbStringLowerCase.startsWith("rgb(")
&& rgbStringLowerCase.contains(")")) {
final String rgb = rgbStringLowerCase.replace(",", ", ");
final String[] rgbStringParts = rgbStringLowerCase
.substring(rgbStringLowerCase.indexOf('(') + 1,
rgbStringLowerCase.lastIndexOf(')'))
.split("[,]");
if (rgbStringParts.length == 3) {
r = Integer.parseInt(rgbStringParts[0]);
if (r < 0 || r > 255) {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. r value in rgbString should be inbetween 0 to 255, eg: rgb(255, 25, 15)");
}
g = Integer.parseInt(rgbStringParts[1]);
if (g < 0 || g > 255) {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. g value in rgbString should be inbetween 0 to 255, eg: rgb(255, 25, 15)");
}
b = Integer.parseInt(rgbStringParts[2]);
if (b < 0 || b > 255) {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. b value in rgbString should be inbetween 0 to 255, eg: rgb(255, 25, 15)");
}
this.rgb = rgb;
} else {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. It should be in the format of rgb(255, 25, 15)");
}
} else {
throw new InvalidValueException(rgbStringLowerCase
+ " is not a valid rgb string. It should be in the format of rgb(255, 25, 15)");
}
}
/**
* @param r
* red value. accepts values only from 0 to 255.
* @param g
* green value. accepts values only from 0 to 255.
* @param b
* blue value. accepts values only from 0 to 255.
*/
public RgbCssValue(final int r, final int g, final int b) {
super();
if ((r < 0 || r > 255) || (g < 0 || g > 255) || (b < 0 || b > 255)) {
throw new InvalidValueException(
"r, b and g paramater accept values only from 0 to 255.");
}
this.r = r;
this.g = g;
this.b = b;
rgb = "rgb(" + r + ", " + g + ", " + b + ")";
}
public int getR() {
return r;
}
public void setR(final int r) {
if (r < 0 || r > 255) {
throw new InvalidValueException(
"r paramater accept values only from 0 to 255.");
}
this.r = r;
rgb = "rgb(" + r + ", " + g + ", " + b + ")";
if (getStateChangeInformer() != null) {
getStateChangeInformer().stateChanged(this);
}
}
public int getG() {
return g;
}
public void setG(final int g) {
if (g < 0 || g > 255) {
throw new InvalidValueException(
"g paramater accept values only from 0 to 255.");
}
this.g = g;
rgb = "rgb(" + r + ", " + g + ", " + b + ")";
if (getStateChangeInformer() != null) {
getStateChangeInformer().stateChanged(this);
}
}
public int getB() {
return b;
}
public void setB(final int b) {
if (b < 0 || b > 255) {
throw new InvalidValueException(
"b paramater accept values only from 0 to 255.");
}
this.b = b;
rgb = "rgb(" + r + ", " + g + ", " + b + ")";
if (getStateChangeInformer() != null) {
getStateChangeInformer().stateChanged(this);
}
}
@Override
public String toString() {
return rgb;
}
/**
* @return the print format of these values as a css value.
* @since 1.0.0
* @author WFF
*/
@Override
public String getValue() {
return rgb;
}
/**
 * Checks whether the given string is a valid css rgb value. Whitespace is
 * ignored and matching is case-insensitive.
 *
 * @param rgbString
 *            eg:- rgb(25, 155, 55)
 * @return true if valid and false for invalid (including {@code null}).
 * @since 1.0.0
 * @author WFF
 */
public static boolean isValid(final String rgbString) {
    // a null string is simply invalid rather than an error
    if (rgbString == null) {
        return false;
    }
    try {
        // normalize into a local variable instead of reassigning the
        // parameter
        final String normalized = rgbString.replace(" ", "").toLowerCase();
        if (normalized.startsWith("rgb(") && normalized.contains(")")) {
            final String[] parts = normalized
                    .substring(normalized.indexOf('(') + 1,
                            normalized.lastIndexOf(')'))
                    .split("[,]");
            if (parts.length == 3) {
                // each channel must parse as an int in 0..255
                final int r = Integer.parseInt(parts[0]);
                if (r < 0 || r > 255) {
                    return false;
                }
                final int g = Integer.parseInt(parts[1]);
                if (g < 0 || g > 255) {
                    return false;
                }
                final int b = Integer.parseInt(parts[2]);
                return !(b < 0 || b > 255);
            }
            return false;
        }
    } catch (final NumberFormatException ignored) {
        // non-numeric channel values just make the string invalid
    }
    return false;
}
}
|
Code improvement
Avoided reassigning parameters such as 'rgbString'
|
wffweb/src/main/java/com/webfirmframework/wffweb/css/RgbCssValue.java
|
Code improvement
|
|
Java
|
apache-2.0
|
d95420cc67aa25d12a9a320dd190bc3687ec3abf
| 0
|
geidies/elasticsearch,mkis-/elasticsearch,jbertouch/elasticsearch,sc0ttkclark/elasticsearch,kingaj/elasticsearch,coding0011/elasticsearch,aparo/elasticsearch,clintongormley/elasticsearch,dpursehouse/elasticsearch,rhoml/elasticsearch,mgalushka/elasticsearch,fernandozhu/elasticsearch,humandb/elasticsearch,koxa29/elasticsearch,zhiqinghuang/elasticsearch,fekaputra/elasticsearch,loconsolutions/elasticsearch,lzo/elasticsearch-1,hydro2k/elasticsearch,robin13/elasticsearch,umeshdangat/elasticsearch,queirozfcom/elasticsearch,Charlesdong/elasticsearch,cwurm/elasticsearch,marcuswr/elasticsearch-dateline,sscarduzio/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,strapdata/elassandra,naveenhooda2000/elasticsearch,LewayneNaidoo/elasticsearch,amaliujia/elasticsearch,girirajsharma/elasticsearch,kalburgimanjunath/elasticsearch,kaneshin/elasticsearch,Collaborne/elasticsearch,martinstuga/elasticsearch,LeoYao/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,s1monw/elasticsearch,jango2015/elasticsearch,strapdata/elassandra,nezirus/elasticsearch,gmarz/elasticsearch,fekaputra/elasticsearch,AleksKochev/elasticsearch,hanswang/elasticsearch,fred84/elasticsearch,nrkkalyan/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,geidies/elasticsearch,areek/elasticsearch,xuzha/elasticsearch,gingerwizard/elasticsearch,sauravmondallive/elasticsearch,Charlesdong/elasticsearch,vroyer/elassandra,nellicus/elasticsearch,nomoa/elasticsearch,hydro2k/elasticsearch,ckclark/elasticsearch,lchennup/elasticsearch,strapdata/elassandra5-rc,fekaputra/elasticsearch,hydro2k/elasticsearch,slavau/elasticsearch,wangyuxue/elasticsearch,jpountz/elasticsearch,Shekharrajak/elasticsearch,pablocastro/elasticsearch,Asimov4/elasticsearch,rmuir/elasticsearch,pranavraman/elasticsearch,nellicus/elasticsearch,schonfeld/elasticsearch,girirajsharma/elasticsearch,uboness/elasticsearch,Ansh90/elasticsearch,libosu/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,vvcephei/elasticsearch,mbrukman/elasticsearch,rlugojr/elasticsearch,liw
einan0423/elasticsearch,vietlq/elasticsearch,Helen-Zhao/elasticsearch,libosu/elasticsearch,awislowski/elasticsearch,elasticdog/elasticsearch,18098924759/elasticsearch,achow/elasticsearch,lzo/elasticsearch-1,vrkansagara/elasticsearch,franklanganke/elasticsearch,fooljohnny/elasticsearch,Rygbee/elasticsearch,zhiqinghuang/elasticsearch,abibell/elasticsearch,mapr/elasticsearch,pritishppai/elasticsearch,masaruh/elasticsearch,mohit/elasticsearch,sarwarbhuiyan/elasticsearch,weipinghe/elasticsearch,sposam/elasticsearch,cwurm/elasticsearch,martinstuga/elasticsearch,likaiwalkman/elasticsearch,polyfractal/elasticsearch,mjason3/elasticsearch,andrejserafim/elasticsearch,gmarz/elasticsearch,tsohil/elasticsearch,jimczi/elasticsearch,mjason3/elasticsearch,gingerwizard/elasticsearch,camilojd/elasticsearch,TonyChai24/ESSource,sscarduzio/elasticsearch,Chhunlong/elasticsearch,ivansun1010/elasticsearch,clintongormley/elasticsearch,brandonkearby/elasticsearch,Asimov4/elasticsearch,JackyMai/elasticsearch,fubuki/elasticsearch,wimvds/elasticsearch,adrianbk/elasticsearch,ydsakyclguozi/elasticsearch,lks21c/elasticsearch,iacdingping/elasticsearch,sjohnr/elasticsearch,Flipkart/elasticsearch,elancom/elasticsearch,vingupta3/elasticsearch,jeteve/elasticsearch,wayeast/elasticsearch,kkirsche/elasticsearch,alexbrasetvik/elasticsearch,MjAbuz/elasticsearch,abibell/elasticsearch,sauravmondallive/elasticsearch,brwe/elasticsearch,beiske/elasticsearch,ulkas/elasticsearch,thecocce/elasticsearch,snikch/elasticsearch,davidvgalbraith/elasticsearch,gingerwizard/elasticsearch,rmuir/elasticsearch,EasonYi/elasticsearch,fubuki/elasticsearch,anti-social/elasticsearch,truemped/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,onegambler/elasticsearch,xuzha/elasticsearch,ydsakyclguozi/elasticsearch,yongminxia/elasticsearch,koxa29/elasticsearch,drewr/elasticsearch,umeshdangat/elasticsearch,acchen97/elasticsearch,ydsakyclguozi/elasticsearch,karthikjaps/elasticsearch,mikemccand/elasticsearch,aglne/elasticsearch,andrestc/
elasticsearch,Helen-Zhao/elasticsearch,ImpressTV/elasticsearch,LewayneNaidoo/elasticsearch,HarishAtGitHub/elasticsearch,chrismwendt/elasticsearch,AndreKR/elasticsearch,YosuaMichael/elasticsearch,polyfractal/elasticsearch,mmaracic/elasticsearch,luiseduardohdbackup/elasticsearch,huanzhong/elasticsearch,jbertouch/elasticsearch,rmuir/elasticsearch,naveenhooda2000/elasticsearch,Kakakakakku/elasticsearch,petabytedata/elasticsearch,jimhooker2002/elasticsearch,wimvds/elasticsearch,elancom/elasticsearch,socialrank/elasticsearch,djschny/elasticsearch,hafkensite/elasticsearch,acchen97/elasticsearch,szroland/elasticsearch,djschny/elasticsearch,Charlesdong/elasticsearch,aparo/elasticsearch,MjAbuz/elasticsearch,fred84/elasticsearch,jango2015/elasticsearch,JackyMai/elasticsearch,Siddartha07/elasticsearch,lzo/elasticsearch-1,sc0ttkclark/elasticsearch,cnfire/elasticsearch-1,palecur/elasticsearch,elasticdog/elasticsearch,ThiagoGarciaAlves/elasticsearch,kingaj/elasticsearch,Shepard1212/elasticsearch,mgalushka/elasticsearch,wayeast/elasticsearch,vorce/es-metrics,andrestc/elasticsearch,tahaemin/elasticsearch,thecocce/elasticsearch,gfyoung/elasticsearch,masterweb121/elasticsearch,wittyameta/elasticsearch,milodky/elasticsearch,nazarewk/elasticsearch,lzo/elasticsearch-1,ThalaivaStars/OrgRepo1,MaineC/elasticsearch,mikemccand/elasticsearch,mm0/elasticsearch,henakamaMSFT/elasticsearch,uboness/elasticsearch,markwalkom/elasticsearch,amaliujia/elasticsearch,wbowling/elasticsearch,opendatasoft/elasticsearch,zeroctu/elasticsearch,SergVro/elasticsearch,Brijeshrpatel9/elasticsearch,boliza/elasticsearch,jimczi/elasticsearch,VukDukic/elasticsearch,Chhunlong/elasticsearch,martinstuga/elasticsearch,MisterAndersen/elasticsearch,Clairebi/ElasticsearchClone,scorpionvicky/elasticsearch,nezirus/elasticsearch,obourgain/elasticsearch,golubev/elasticsearch,karthikjaps/elasticsearch,springning/elasticsearch,kevinkluge/elasticsearch,JSCooke/elasticsearch,yongminxia/elasticsearch,LeoYao/elasticsearch,KimTaehee/ela
sticsearch,rhoml/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,strapdata/elassandra,jprante/elasticsearch,kunallimaye/elasticsearch,MichaelLiZhou/elasticsearch,xingguang2013/elasticsearch,iamjakob/elasticsearch,scottsom/elasticsearch,cnfire/elasticsearch-1,kkirsche/elasticsearch,fubuki/elasticsearch,sdauletau/elasticsearch,jaynblue/elasticsearch,dongjoon-hyun/elasticsearch,koxa29/elasticsearch,tahaemin/elasticsearch,MetSystem/elasticsearch,luiseduardohdbackup/elasticsearch,huypx1292/elasticsearch,rajanm/elasticsearch,thecocce/elasticsearch,markllama/elasticsearch,kcompher/elasticsearch,apepper/elasticsearch,iamjakob/elasticsearch,kingaj/elasticsearch,sdauletau/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,nrkkalyan/elasticsearch,lmtwga/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,ivansun1010/elasticsearch,wittyameta/elasticsearch,jimhooker2002/elasticsearch,maddin2016/elasticsearch,huypx1292/elasticsearch,masaruh/elasticsearch,btiernay/elasticsearch,nomoa/elasticsearch,bawse/elasticsearch,lightslife/elasticsearch,i-am-Nathan/elasticsearch,drewr/elasticsearch,strapdata/elassandra-test,pablocastro/elasticsearch,mohsinh/elasticsearch,yuy168/elasticsearch,jsgao0/elasticsearch,ydsakyclguozi/elasticsearch,hanswang/elasticsearch,fernandozhu/elasticsearch,glefloch/elasticsearch,sarwarbhuiyan/elasticsearch,markharwood/elasticsearch,kalburgimanjunath/elasticsearch,markharwood/elasticsearch,areek/elasticsearch,iacdingping/elasticsearch,knight1128/elasticsearch,overcome/elasticsearch,trangvh/elasticsearch,obourgain/elasticsearch,sarwarbhuiyan/elasticsearch,codebunt/elasticsearch,xuzha/elasticsearch,YosuaMichael/elasticsearch,elasticdog/elasticsearch,NBSW/elasticsearch,Collaborne/elasticsearch,lmtwga/elasticsearch,AndreKR/elasticsearch,xpandan/elasticsearch,jeteve/elasticsearch,beiske/elasticsearch,YosuaMichael/elasticsearch,vrkansagara/elasticsearch,ImpressTV/elasticsearch,hydro2k/elasticsearch,tahaemin/elasticsearch,nknize/elasticsearch,Uiho/elasticsearch,brwe/
elasticsearch,marcuswr/elasticsearch-dateline,tcucchietti/elasticsearch,easonC/elasticsearch,golubev/elasticsearch,yuy168/elasticsearch,nknize/elasticsearch,peschlowp/elasticsearch,mm0/elasticsearch,mmaracic/elasticsearch,anti-social/elasticsearch,ricardocerq/elasticsearch,HonzaKral/elasticsearch,fred84/elasticsearch,xingguang2013/elasticsearch,yynil/elasticsearch,rmuir/elasticsearch,ulkas/elasticsearch,smflorentino/elasticsearch,ThiagoGarciaAlves/elasticsearch,Kakakakakku/elasticsearch,a2lin/elasticsearch,iacdingping/elasticsearch,opendatasoft/elasticsearch,kunallimaye/elasticsearch,golubev/elasticsearch,mohsinh/elasticsearch,abibell/elasticsearch,socialrank/elasticsearch,NBSW/elasticsearch,mm0/elasticsearch,ZTE-PaaS/elasticsearch,jchampion/elasticsearch,JervyShi/elasticsearch,umeshdangat/elasticsearch,slavau/elasticsearch,achow/elasticsearch,ulkas/elasticsearch,franklanganke/elasticsearch,yanjunh/elasticsearch,Uiho/elasticsearch,Liziyao/elasticsearch,vroyer/elassandra,djschny/elasticsearch,hechunwen/elasticsearch,xingguang2013/elasticsearch,koxa29/elasticsearch,MjAbuz/elasticsearch,qwerty4030/elasticsearch,ThalaivaStars/OrgRepo1,hechunwen/elasticsearch,anti-social/elasticsearch,scottsom/elasticsearch,queirozfcom/elasticsearch,smflorentino/elasticsearch,andrestc/elasticsearch,iantruslove/elasticsearch,uboness/elasticsearch,davidvgalbraith/elasticsearch,chrismwendt/elasticsearch,Helen-Zhao/elasticsearch,linglaiyao1314/elasticsearch,mohsinh/elasticsearch,weipinghe/elasticsearch,cwurm/elasticsearch,petabytedata/elasticsearch,MisterAndersen/elasticsearch,feiqitian/elasticsearch,jimhooker2002/elasticsearch,Widen/elasticsearch,huanzhong/elasticsearch,ivansun1010/elasticsearch,Widen/elasticsearch,mm0/elasticsearch,micpalmia/elasticsearch,jaynblue/elasticsearch,smflorentino/elasticsearch,alexshadow007/elasticsearch,mrorii/elasticsearch,bestwpw/elasticsearch,winstonewert/elasticsearch,janmejay/elasticsearch,tebriel/elasticsearch,abibell/elasticsearch,zhaocloud/elasticsearch
,tcucchietti/elasticsearch,gingerwizard/elasticsearch,chirilo/elasticsearch,Widen/elasticsearch,vvcephei/elasticsearch,hafkensite/elasticsearch,kingaj/elasticsearch,hanswang/elasticsearch,vvcephei/elasticsearch,amit-shar/elasticsearch,jango2015/elasticsearch,fforbeck/elasticsearch,bestwpw/elasticsearch,Brijeshrpatel9/elasticsearch,davidvgalbraith/elasticsearch,hirdesh2008/elasticsearch,wenpos/elasticsearch,mute/elasticsearch,VukDukic/elasticsearch,xingguang2013/elasticsearch,Shekharrajak/elasticsearch,camilojd/elasticsearch,zhiqinghuang/elasticsearch,HonzaKral/elasticsearch,nazarewk/elasticsearch,mgalushka/elasticsearch,sneivandt/elasticsearch,abibell/elasticsearch,snikch/elasticsearch,phani546/elasticsearch,phani546/elasticsearch,mnylen/elasticsearch,iacdingping/elasticsearch,JSCooke/elasticsearch,sc0ttkclark/elasticsearch,pozhidaevak/elasticsearch,zkidkid/elasticsearch,Uiho/elasticsearch,easonC/elasticsearch,MetSystem/elasticsearch,mute/elasticsearch,cwurm/elasticsearch,mnylen/elasticsearch,markharwood/elasticsearch,sarwarbhuiyan/elasticsearch,diendt/elasticsearch,skearns64/elasticsearch,alexksikes/elasticsearch,pritishppai/elasticsearch,himanshuag/elasticsearch,khiraiwa/elasticsearch,cnfire/elasticsearch-1,caengcjd/elasticsearch,TonyChai24/ESSource,fooljohnny/elasticsearch,MetSystem/elasticsearch,vingupta3/elasticsearch,beiske/elasticsearch,sjohnr/elasticsearch,ricardocerq/elasticsearch,ESamir/elasticsearch,s1monw/elasticsearch,queirozfcom/elasticsearch,raishiv/elasticsearch,mortonsykes/elasticsearch,mikemccand/elasticsearch,JSCooke/elasticsearch,wenpos/elasticsearch,andrejserafim/elasticsearch,PhaedrusTheGreek/elasticsearch,diendt/elasticsearch,PhaedrusTheGreek/elasticsearch,kenshin233/elasticsearch,mohsinh/elasticsearch,jeteve/elasticsearch,F0lha/elasticsearch,gingerwizard/elasticsearch,dpursehouse/elasticsearch,sreeramjayan/elasticsearch,micpalmia/elasticsearch,gingerwizard/elasticsearch,sposam/elasticsearch,MichaelLiZhou/elasticsearch,lchennup/elasticsearch,S
tacey-Gammon/elasticsearch,ulkas/elasticsearch,jaynblue/elasticsearch,winstonewert/elasticsearch,alexkuk/elasticsearch,sreeramjayan/elasticsearch,tkssharma/elasticsearch,mnylen/elasticsearch,EasonYi/elasticsearch,LewayneNaidoo/elasticsearch,adrianbk/elasticsearch,jpountz/elasticsearch,kimimj/elasticsearch,pranavraman/elasticsearch,feiqitian/elasticsearch,apepper/elasticsearch,palecur/elasticsearch,andrejserafim/elasticsearch,ajhalani/elasticsearch,alexksikes/elasticsearch,nrkkalyan/elasticsearch,Kakakakakku/elasticsearch,aparo/elasticsearch,mjason3/elasticsearch,Widen/elasticsearch,javachengwc/elasticsearch,alexshadow007/elasticsearch,kubum/elasticsearch,abhijitiitr/es,jsgao0/elasticsearch,kubum/elasticsearch,aglne/elasticsearch,nellicus/elasticsearch,wangtuo/elasticsearch,vroyer/elassandra,LeoYao/elasticsearch,caengcjd/elasticsearch,bawse/elasticsearch,AndreKR/elasticsearch,strapdata/elassandra-test,JackyMai/elasticsearch,adrianbk/elasticsearch,petmit/elasticsearch,opendatasoft/elasticsearch,geidies/elasticsearch,xingguang2013/elasticsearch,ZTE-PaaS/elasticsearch,boliza/elasticsearch,heng4fun/elasticsearch,jchampion/elasticsearch,wuranbo/elasticsearch,loconsolutions/elasticsearch,tsohil/elasticsearch,yynil/elasticsearch,aparo/elasticsearch,winstonewert/elasticsearch,wimvds/elasticsearch,fernandozhu/elasticsearch,yuy168/elasticsearch,lightslife/elasticsearch,janmejay/elasticsearch,humandb/elasticsearch,kkirsche/elasticsearch,himanshuag/elasticsearch,myelin/elasticsearch,henakamaMSFT/elasticsearch,aglne/elasticsearch,winstonewert/elasticsearch,hechunwen/elasticsearch,vietlq/elasticsearch,huanzhong/elasticsearch,kimimj/elasticsearch,wimvds/elasticsearch,Microsoft/elasticsearch,infusionsoft/elasticsearch,naveenhooda2000/elasticsearch,vietlq/elasticsearch,girirajsharma/elasticsearch,vingupta3/elasticsearch,socialrank/elasticsearch,naveenhooda2000/elasticsearch,F0lha/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,schonfeld/elasticsearch,caengcjd/elasticsearch,ouyangk
ongtong/elasticsearch,KimTaehee/elasticsearch,alexbrasetvik/elasticsearch,vvcephei/elasticsearch,sdauletau/elasticsearch,vingupta3/elasticsearch,ricardocerq/elasticsearch,kenshin233/elasticsearch,MaineC/elasticsearch,masterweb121/elasticsearch,salyh/elasticsearch,F0lha/elasticsearch,AshishThakur/elasticsearch,ajhalani/elasticsearch,iacdingping/elasticsearch,ImpressTV/elasticsearch,jeteve/elasticsearch,peschlowp/elasticsearch,NBSW/elasticsearch,spiegela/elasticsearch,MjAbuz/elasticsearch,alexbrasetvik/elasticsearch,aglne/elasticsearch,huanzhong/elasticsearch,Flipkart/elasticsearch,hanst/elasticsearch,sneivandt/elasticsearch,F0lha/elasticsearch,combinatorist/elasticsearch,LeoYao/elasticsearch,YosuaMichael/elasticsearch,coding0011/elasticsearch,ydsakyclguozi/elasticsearch,jsgao0/elasticsearch,dpursehouse/elasticsearch,SergVro/elasticsearch,janmejay/elasticsearch,kevinkluge/elasticsearch,dpursehouse/elasticsearch,strapdata/elassandra-test,C-Bish/elasticsearch,JackyMai/elasticsearch,TonyChai24/ESSource,achow/elasticsearch,huypx1292/elasticsearch,kenshin233/elasticsearch,rmuir/elasticsearch,linglaiyao1314/elasticsearch,mute/elasticsearch,palecur/elasticsearch,naveenhooda2000/elasticsearch,diendt/elasticsearch,wittyameta/elasticsearch,luiseduardohdbackup/elasticsearch,milodky/elasticsearch,kaneshin/elasticsearch,tebriel/elasticsearch,MichaelLiZhou/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,Widen/elasticsearch,huypx1292/elasticsearch,rajanm/elasticsearch,Flipkart/elasticsearch,jimhooker2002/elasticsearch,chirilo/elasticsearch,iantruslove/elasticsearch,Charlesdong/elasticsearch,hirdesh2008/elasticsearch,jw0201/elastic,kalburgimanjunath/elasticsearch,strapdata/elassandra-test,truemped/elasticsearch,i-am-Nathan/elasticsearch,wangtuo/elasticsearch,kubum/elasticsearch,ThiagoGarciaAlves/elasticsearch,GlenRSmith/elasticsearch,ESamir/elasticsearch,MjAbuz/elasticsearch,ivansun1010/elasticsearch,zeroctu/elasticsearch,hirdesh2008/elasticsearch,bestwpw/elasticsearch,davidvgalbr
aith/elasticsearch,18098924759/elasticsearch,amaliujia/elasticsearch,jchampion/elasticsearch,petmit/elasticsearch,zhaocloud/elasticsearch,kcompher/elasticsearch,zeroctu/elasticsearch,brandonkearby/elasticsearch,jaynblue/elasticsearch,trangvh/elasticsearch,nezirus/elasticsearch,TonyChai24/ESSource,milodky/elasticsearch,aparo/elasticsearch,abhijitiitr/es,elasticdog/elasticsearch,truemped/elasticsearch,geidies/elasticsearch,winstonewert/elasticsearch,EasonYi/elasticsearch,lydonchandra/elasticsearch,acchen97/elasticsearch,mapr/elasticsearch,rajanm/elasticsearch,mikemccand/elasticsearch,lightslife/elasticsearch,dylan8902/elasticsearch,artnowo/elasticsearch,mikemccand/elasticsearch,mrorii/elasticsearch,kubum/elasticsearch,yynil/elasticsearch,zkidkid/elasticsearch,jbertouch/elasticsearch,wayeast/elasticsearch,MaineC/elasticsearch,jw0201/elastic,scottsom/elasticsearch,PhaedrusTheGreek/elasticsearch,yuy168/elasticsearch,ouyangkongtong/elasticsearch,elancom/elasticsearch,linglaiyao1314/elasticsearch,mrorii/elasticsearch,HarishAtGitHub/elasticsearch,wittyameta/elasticsearch,ulkas/elasticsearch,nilabhsagar/elasticsearch,infusionsoft/elasticsearch,fekaputra/elasticsearch,HonzaKral/elasticsearch,rento19962/elasticsearch,achow/elasticsearch,gmarz/elasticsearch,btiernay/elasticsearch,springning/elasticsearch,vroyer/elasticassandra,TonyChai24/ESSource,Shepard1212/elasticsearch,combinatorist/elasticsearch,iamjakob/elasticsearch,alexbrasetvik/elasticsearch,kcompher/elasticsearch,ImpressTV/elasticsearch,cnfire/elasticsearch-1,Brijeshrpatel9/elasticsearch,ricardocerq/elasticsearch,thecocce/elasticsearch,JSCooke/elasticsearch,rajanm/elasticsearch,iamjakob/elasticsearch,nazarewk/elasticsearch,mute/elasticsearch,nrkkalyan/elasticsearch,janmejay/elasticsearch,sdauletau/elasticsearch,kunallimaye/elasticsearch,Ansh90/elasticsearch,kenshin233/elasticsearch,karthikjaps/elasticsearch,franklanganke/elasticsearch,hanst/elasticsearch,maddin2016/elasticsearch,IanvsPoplicola/elasticsearch,robin13/ela
sticsearch,HarishAtGitHub/elasticsearch,markllama/elasticsearch,shreejay/elasticsearch,vrkansagara/elasticsearch,btiernay/elasticsearch,sdauletau/elasticsearch,uschindler/elasticsearch,salyh/elasticsearch,himanshuag/elasticsearch,tsohil/elasticsearch,luiseduardohdbackup/elasticsearch,strapdata/elassandra-test,geidies/elasticsearch,vvcephei/elasticsearch,LeoYao/elasticsearch,xpandan/elasticsearch,ckclark/elasticsearch,njlawton/elasticsearch,Brijeshrpatel9/elasticsearch,mcku/elasticsearch,dataduke/elasticsearch,queirozfcom/elasticsearch,opendatasoft/elasticsearch,mrorii/elasticsearch,StefanGor/elasticsearch,weipinghe/elasticsearch,heng4fun/elasticsearch,Liziyao/elasticsearch,NBSW/elasticsearch,masaruh/elasticsearch,snikch/elasticsearch,sjohnr/elasticsearch,sauravmondallive/elasticsearch,chirilo/elasticsearch,MichaelLiZhou/elasticsearch,peschlowp/elasticsearch,pozhidaevak/elasticsearch,pranavraman/elasticsearch,hanst/elasticsearch,tkssharma/elasticsearch,nilabhsagar/elasticsearch,ckclark/elasticsearch,scorpionvicky/elasticsearch,C-Bish/elasticsearch,jprante/elasticsearch,jprante/elasticsearch,yuy168/elasticsearch,mbrukman/elasticsearch,wangyuxue/elasticsearch,phani546/elasticsearch,himanshuag/elasticsearch,ThiagoGarciaAlves/elasticsearch,scottsom/elasticsearch,socialrank/elasticsearch,fubuki/elasticsearch,trangvh/elasticsearch,szroland/elasticsearch,petabytedata/elasticsearch,heng4fun/elasticsearch,kubum/elasticsearch,markwalkom/elasticsearch,dongjoon-hyun/elasticsearch,vorce/es-metrics,overcome/elasticsearch,Brijeshrpatel9/elasticsearch,Shepard1212/elasticsearch,acchen97/elasticsearch,nrkkalyan/elasticsearch,hydro2k/elasticsearch,hanst/elasticsearch,Shekharrajak/elasticsearch,humandb/elasticsearch,acchen97/elasticsearch,javachengwc/elasticsearch,nellicus/elasticsearch,vietlq/elasticsearch,Fsero/elasticsearch,mute/elasticsearch,truemped/elasticsearch,kingaj/elasticsearch,sc0ttkclark/elasticsearch,fooljohnny/elasticsearch,qwerty4030/elasticsearch,KimTaehee/elasticsearch
,tsohil/elasticsearch,yongminxia/elasticsearch,lks21c/elasticsearch,henakamaMSFT/elasticsearch,davidvgalbraith/elasticsearch,wimvds/elasticsearch,StefanGor/elasticsearch,mcku/elasticsearch,Siddartha07/elasticsearch,ulkas/elasticsearch,alexshadow007/elasticsearch,springning/elasticsearch,lzo/elasticsearch-1,shreejay/elasticsearch,mkis-/elasticsearch,PhaedrusTheGreek/elasticsearch,obourgain/elasticsearch,petabytedata/elasticsearch,tcucchietti/elasticsearch,djschny/elasticsearch,pablocastro/elasticsearch,kevinkluge/elasticsearch,Rygbee/elasticsearch,acchen97/elasticsearch,nrkkalyan/elasticsearch,mm0/elasticsearch,loconsolutions/elasticsearch,jpountz/elasticsearch,apepper/elasticsearch,sposam/elasticsearch,kingaj/elasticsearch,awislowski/elasticsearch,bawse/elasticsearch,hanst/elasticsearch,vrkansagara/elasticsearch,Fsero/elasticsearch,kunallimaye/elasticsearch,fooljohnny/elasticsearch,maddin2016/elasticsearch,sposam/elasticsearch,abhijitiitr/es,pozhidaevak/elasticsearch,opendatasoft/elasticsearch,Ansh90/elasticsearch,brwe/elasticsearch,sscarduzio/elasticsearch,mnylen/elasticsearch,hafkensite/elasticsearch,NBSW/elasticsearch,vroyer/elasticassandra,robin13/elasticsearch,MichaelLiZhou/elasticsearch,Kakakakakku/elasticsearch,djschny/elasticsearch,chirilo/elasticsearch,andrejserafim/elasticsearch,tebriel/elasticsearch,raishiv/elasticsearch,Ansh90/elasticsearch,kcompher/elasticsearch,lchennup/elasticsearch,truemped/elasticsearch,overcome/elasticsearch,Flipkart/elasticsearch,bestwpw/elasticsearch,jpountz/elasticsearch,hechunwen/elasticsearch,dantuffery/elasticsearch,camilojd/elasticsearch,rhoml/elasticsearch,nilabhsagar/elasticsearch,yuy168/elasticsearch,pritishppai/elasticsearch,scottsom/elasticsearch,sscarduzio/elasticsearch,masterweb121/elasticsearch,skearns64/elasticsearch,micpalmia/elasticsearch,masterweb121/elasticsearch,pritishppai/elasticsearch,ouyangkongtong/elasticsearch,anti-social/elasticsearch,rhoml/elasticsearch,adrianbk/elasticsearch,mgalushka/elasticsearch,yon
gminxia/elasticsearch,i-am-Nathan/elasticsearch,overcome/elasticsearch,beiske/elasticsearch,drewr/elasticsearch,hechunwen/elasticsearch,alexksikes/elasticsearch,ivansun1010/elasticsearch,codebunt/elasticsearch,Uiho/elasticsearch,rhoml/elasticsearch,karthikjaps/elasticsearch,ckclark/elasticsearch,knight1128/elasticsearch,mohit/elasticsearch,SergVro/elasticsearch,raishiv/elasticsearch,onegambler/elasticsearch,wenpos/elasticsearch,ajhalani/elasticsearch,franklanganke/elasticsearch,amit-shar/elasticsearch,elancom/elasticsearch,MichaelLiZhou/elasticsearch,bestwpw/elasticsearch,jchampion/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,Collaborne/elasticsearch,infusionsoft/elasticsearch,s1monw/elasticsearch,kaneshin/elasticsearch,Chhunlong/elasticsearch,sjohnr/elasticsearch,mkis-/elasticsearch,mjhennig/elasticsearch,jimczi/elasticsearch,Shekharrajak/elasticsearch,Liziyao/elasticsearch,knight1128/elasticsearch,alexkuk/elasticsearch,beiske/elasticsearch,uschindler/elasticsearch,mute/elasticsearch,lmtwga/elasticsearch,overcome/elasticsearch,C-Bish/elasticsearch,sjohnr/elasticsearch,jsgao0/elasticsearch,Widen/elasticsearch,ESamir/elasticsearch,marcuswr/elasticsearch-dateline,myelin/elasticsearch,jw0201/elastic,tebriel/elasticsearch,a2lin/elasticsearch,humandb/elasticsearch,rlugojr/elasticsearch,ajhalani/elasticsearch,PhaedrusTheGreek/elasticsearch,hanswang/elasticsearch,GlenRSmith/elasticsearch,iantruslove/elasticsearch,zhaocloud/elasticsearch,franklanganke/elasticsearch,LewayneNaidoo/elasticsearch,tsohil/elasticsearch,chrismwendt/elasticsearch,pablocastro/elasticsearch,yynil/elasticsearch,kalimatas/elasticsearch,apepper/elasticsearch,nomoa/elasticsearch,tahaemin/elasticsearch,clintongormley/elasticsearch,iantruslove/elasticsearch,elancom/elasticsearch,shreejay/elasticsearch,huypx1292/elasticsearch,szroland/elasticsearch,Fsero/elasticsearch,AshishThakur/elasticsearch,achow/elasticsearch,Flipkart/elasticsearch,episerver/elasticsearch,rento19962/elasticsearch,tahaemin/elastic
search,rento19962/elasticsearch,fekaputra/elasticsearch,javachengwc/elasticsearch,zeroctu/elasticsearch,obourgain/elasticsearch,xpandan/elasticsearch,likaiwalkman/elasticsearch,mjhennig/elasticsearch,kalimatas/elasticsearch,khiraiwa/elasticsearch,chirilo/elasticsearch,maddin2016/elasticsearch,mgalushka/elasticsearch,pablocastro/elasticsearch,sarwarbhuiyan/elasticsearch,LewayneNaidoo/elasticsearch,janmejay/elasticsearch,mcku/elasticsearch,yongminxia/elasticsearch,mohit/elasticsearch,wuranbo/elasticsearch,heng4fun/elasticsearch,AndreKR/elasticsearch,pranavraman/elasticsearch,YosuaMichael/elasticsearch,hafkensite/elasticsearch,mute/elasticsearch,cnfire/elasticsearch-1,libosu/elasticsearch,Asimov4/elasticsearch,markharwood/elasticsearch,jbertouch/elasticsearch,TonyChai24/ESSource,jsgao0/elasticsearch,markllama/elasticsearch,liweinan0423/elasticsearch,sc0ttkclark/elasticsearch,lydonchandra/elasticsearch,vvcephei/elasticsearch,Clairebi/ElasticsearchClone,lightslife/elasticsearch,khiraiwa/elasticsearch,rhoml/elasticsearch,ThalaivaStars/OrgRepo1,adrianbk/elasticsearch,polyfractal/elasticsearch,fooljohnny/elasticsearch,dataduke/elasticsearch,feiqitian/elasticsearch,jimhooker2002/elasticsearch,caengcjd/elasticsearch,fforbeck/elasticsearch,Stacey-Gammon/elasticsearch,mmaracic/elasticsearch,sarwarbhuiyan/elasticsearch,spiegela/elasticsearch,andrestc/elasticsearch,amaliujia/elasticsearch,Collaborne/elasticsearch,Collaborne/elasticsearch,queirozfcom/elasticsearch,hanswang/elasticsearch,jango2015/elasticsearch,ZTE-PaaS/elasticsearch,snikch/elasticsearch,Chhunlong/elasticsearch,xpandan/elasticsearch,wimvds/elasticsearch,rlugojr/elasticsearch,EasonYi/elasticsearch,dataduke/elasticsearch,loconsolutions/elasticsearch,Asimov4/elasticsearch,lks21c/elasticsearch,Charlesdong/elasticsearch,ulkas/elasticsearch,episerver/elasticsearch,bawse/elasticsearch,GlenRSmith/elasticsearch,kimimj/elasticsearch,EasonYi/elasticsearch,slavau/elasticsearch,mjason3/elasticsearch,andrejserafim/elasticsearch,
wenpos/elasticsearch,loconsolutions/elasticsearch,zhiqinghuang/elasticsearch,rlugojr/elasticsearch,masterweb121/elasticsearch,bawse/elasticsearch,Uiho/elasticsearch,fforbeck/elasticsearch,Brijeshrpatel9/elasticsearch,petabytedata/elasticsearch,MjAbuz/elasticsearch,vorce/es-metrics,ckclark/elasticsearch,huanzhong/elasticsearch,VukDukic/elasticsearch,njlawton/elasticsearch,C-Bish/elasticsearch,HonzaKral/elasticsearch,snikch/elasticsearch,wittyameta/elasticsearch,slavau/elasticsearch,weipinghe/elasticsearch,njlawton/elasticsearch,nellicus/elasticsearch,jchampion/elasticsearch,boliza/elasticsearch,sscarduzio/elasticsearch,MetSystem/elasticsearch,himanshuag/elasticsearch,loconsolutions/elasticsearch,drewr/elasticsearch,alexkuk/elasticsearch,ESamir/elasticsearch,phani546/elasticsearch,Brijeshrpatel9/elasticsearch,lmtwga/elasticsearch,Ansh90/elasticsearch,markwalkom/elasticsearch,HarishAtGitHub/elasticsearch,mcku/elasticsearch,luiseduardohdbackup/elasticsearch,yongminxia/elasticsearch,amit-shar/elasticsearch,petmit/elasticsearch,nazarewk/elasticsearch,Clairebi/ElasticsearchClone,libosu/elasticsearch,Stacey-Gammon/elasticsearch,javachengwc/elasticsearch,mjason3/elasticsearch,mapr/elasticsearch,YosuaMichael/elasticsearch,dongjoon-hyun/elasticsearch,MetSystem/elasticsearch,rajanm/elasticsearch,s1monw/elasticsearch,artnowo/elasticsearch,areek/elasticsearch,kaneshin/elasticsearch,jpountz/elasticsearch,smflorentino/elasticsearch,mbrukman/elasticsearch,xuzha/elasticsearch,heng4fun/elasticsearch,uschindler/elasticsearch,Chhunlong/elasticsearch,libosu/elasticsearch,Collaborne/elasticsearch,socialrank/elasticsearch,dantuffery/elasticsearch,Liziyao/elasticsearch,kalimatas/elasticsearch,kubum/elasticsearch,JSCooke/elasticsearch,Fsero/elasticsearch,dpursehouse/elasticsearch,umeshdangat/elasticsearch,khiraiwa/elasticsearch,Charlesdong/elasticsearch,lmtwga/elasticsearch,tkssharma/elasticsearch,i-am-Nathan/elasticsearch,easonC/elasticsearch,vingupta3/elasticsearch,hanswang/elasticsearch,a
mit-shar/elasticsearch,socialrank/elasticsearch,elancom/elasticsearch,boliza/elasticsearch,Liziyao/elasticsearch,szroland/elasticsearch,likaiwalkman/elasticsearch,kcompher/elasticsearch,mnylen/elasticsearch,Clairebi/ElasticsearchClone,mrorii/elasticsearch,cnfire/elasticsearch-1,tebriel/elasticsearch,franklanganke/elasticsearch,caengcjd/elasticsearch,areek/elasticsearch,AshishThakur/elasticsearch,JervyShi/elasticsearch,yongminxia/elasticsearch,Helen-Zhao/elasticsearch,hanswang/elasticsearch,apepper/elasticsearch,petmit/elasticsearch,linglaiyao1314/elasticsearch,LeoYao/elasticsearch,hafkensite/elasticsearch,Rygbee/elasticsearch,dongjoon-hyun/elasticsearch,petabytedata/elasticsearch,iacdingping/elasticsearch,EasonYi/elasticsearch,knight1128/elasticsearch,feiqitian/elasticsearch,dylan8902/elasticsearch,likaiwalkman/elasticsearch,nazarewk/elasticsearch,SergVro/elasticsearch,feiqitian/elasticsearch,coding0011/elasticsearch,pranavraman/elasticsearch,linglaiyao1314/elasticsearch,sreeramjayan/elasticsearch,clintongormley/elasticsearch,AshishThakur/elasticsearch,fernandozhu/elasticsearch,ckclark/elasticsearch,nknize/elasticsearch,kevinkluge/elasticsearch,gfyoung/elasticsearch,mortonsykes/elasticsearch,ThalaivaStars/OrgRepo1,MisterAndersen/elasticsearch,rlugojr/elasticsearch,pritishppai/elasticsearch,JervyShi/elasticsearch,jchampion/elasticsearch,sreeramjayan/elasticsearch,brwe/elasticsearch,vietlq/elasticsearch,mkis-/elasticsearch,alexksikes/elasticsearch,jeteve/elasticsearch,alexkuk/elasticsearch,milodky/elasticsearch,camilojd/elasticsearch,nezirus/elasticsearch,rajanm/elasticsearch,Rygbee/elasticsearch,btiernay/elasticsearch,MjAbuz/elasticsearch,awislowski/elasticsearch,lightslife/elasticsearch,lchennup/elasticsearch,Microsoft/elasticsearch,kalimatas/elasticsearch,davidvgalbraith/elasticsearch,gfyoung/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,pozhidaevak/elasticsearch,brandonkearby/elasticsearch,knight1128/elasticsearch,djschny/elasticsearch,zeroctu/elasticsearch,M
icrosoft/elasticsearch,Shepard1212/elasticsearch,karthikjaps/elasticsearch,dataduke/elasticsearch,fekaputra/elasticsearch,apepper/elasticsearch,IanvsPoplicola/elasticsearch,kaneshin/elasticsearch,Microsoft/elasticsearch,alexbrasetvik/elasticsearch,humandb/elasticsearch,mnylen/elasticsearch,lightslife/elasticsearch,Siddartha07/elasticsearch,kkirsche/elasticsearch,abibell/elasticsearch,jaynblue/elasticsearch,gfyoung/elasticsearch,18098924759/elasticsearch,kalburgimanjunath/elasticsearch,nomoa/elasticsearch,kimimj/elasticsearch,ESamir/elasticsearch,ivansun1010/elasticsearch,myelin/elasticsearch,glefloch/elasticsearch,kalimatas/elasticsearch,C-Bish/elasticsearch,janmejay/elasticsearch,kenshin233/elasticsearch,mmaracic/elasticsearch,avikurapati/elasticsearch,mohit/elasticsearch,F0lha/elasticsearch,linglaiyao1314/elasticsearch,codebunt/elasticsearch,aglne/elasticsearch,wayeast/elasticsearch,schonfeld/elasticsearch,djschny/elasticsearch,amit-shar/elasticsearch,artnowo/elasticsearch,wuranbo/elasticsearch,mapr/elasticsearch,kenshin233/elasticsearch,clintongormley/elasticsearch,iantruslove/elasticsearch,kevinkluge/elasticsearch,mjhennig/elasticsearch,a2lin/elasticsearch,vietlq/elasticsearch,Stacey-Gammon/elasticsearch,mbrukman/elasticsearch,a2lin/elasticsearch,Kakakakakku/elasticsearch,pablocastro/elasticsearch,petmit/elasticsearch,yanjunh/elasticsearch,mgalushka/elasticsearch,polyfractal/elasticsearch,zkidkid/elasticsearch,caengcjd/elasticsearch,trangvh/elasticsearch,easonC/elasticsearch,dantuffery/elasticsearch,smflorentino/elasticsearch,ZTE-PaaS/elasticsearch,hydro2k/elasticsearch,ThiagoGarciaAlves/elasticsearch,fubuki/elasticsearch,Collaborne/elasticsearch,apepper/elasticsearch,mapr/elasticsearch,zeroctu/elasticsearch,drewr/elasticsearch,btiernay/elasticsearch,libosu/elasticsearch,Rygbee/elasticsearch,bestwpw/elasticsearch,AleksKochev/elasticsearch,qwerty4030/elasticsearch,slavau/elasticsearch,nellicus/elasticsearch,Uiho/elasticsearch,StefanGor/elasticsearch,AndreKR/elast
icsearch,ouyangkongtong/elasticsearch,huypx1292/elasticsearch,skearns64/elasticsearch,hafkensite/elasticsearch,mkis-/elasticsearch,schonfeld/elasticsearch,mm0/elasticsearch,awislowski/elasticsearch,salyh/elasticsearch,coding0011/elasticsearch,jw0201/elastic,tkssharma/elasticsearch,wittyameta/elasticsearch,MaineC/elasticsearch,18098924759/elasticsearch,springning/elasticsearch,ThalaivaStars/OrgRepo1,vorce/es-metrics,mjhennig/elasticsearch,AleksKochev/elasticsearch,masaruh/elasticsearch,easonC/elasticsearch,martinstuga/elasticsearch,chirilo/elasticsearch,sneivandt/elasticsearch,avikurapati/elasticsearch,jsgao0/elasticsearch,polyfractal/elasticsearch,IanvsPoplicola/elasticsearch,MisterAndersen/elasticsearch,snikch/elasticsearch,ImpressTV/elasticsearch,lks21c/elasticsearch,geidies/elasticsearch,avikurapati/elasticsearch,elancom/elasticsearch,nezirus/elasticsearch,huanzhong/elasticsearch,alexkuk/elasticsearch,maddin2016/elasticsearch,TonyChai24/ESSource,rento19962/elasticsearch,kimimj/elasticsearch,sneivandt/elasticsearch,Shepard1212/elasticsearch,kkirsche/elasticsearch,Helen-Zhao/elasticsearch,hanst/elasticsearch,umeshdangat/elasticsearch,AleksKochev/elasticsearch,koxa29/elasticsearch,bestwpw/elasticsearch,ZTE-PaaS/elasticsearch,zhiqinghuang/elasticsearch,jw0201/elastic,mjhennig/elasticsearch,zkidkid/elasticsearch,khiraiwa/elasticsearch,qwerty4030/elasticsearch,jango2015/elasticsearch,kalburgimanjunath/elasticsearch,YosuaMichael/elasticsearch,martinstuga/elasticsearch,AndreKR/elasticsearch,artnowo/elasticsearch,golubev/elasticsearch,franklanganke/elasticsearch,phani546/elasticsearch,kcompher/elasticsearch,dataduke/elasticsearch,uschindler/elasticsearch,micpalmia/elasticsearch,nomoa/elasticsearch,pranavraman/elasticsearch,andrestc/elasticsearch,Fsero/elasticsearch,brwe/elasticsearch,zhaocloud/elasticsearch,ajhalani/elasticsearch,tahaemin/elasticsearch,masterweb121/elasticsearch,strapdata/elassandra5-rc,overcome/elasticsearch,scorpionvicky/elasticsearch,girirajsharma/elas
ticsearch,luiseduardohdbackup/elasticsearch,wayeast/elasticsearch,amit-shar/elasticsearch,JervyShi/elasticsearch,aglne/elasticsearch,mcku/elasticsearch,marcuswr/elasticsearch-dateline,smflorentino/elasticsearch,vingupta3/elasticsearch,lydonchandra/elasticsearch,F0lha/elasticsearch,golubev/elasticsearch,Siddartha07/elasticsearch,episerver/elasticsearch,jprante/elasticsearch,vroyer/elasticassandra,kevinkluge/elasticsearch,fubuki/elasticsearch,peschlowp/elasticsearch,salyh/elasticsearch,hirdesh2008/elasticsearch,fekaputra/elasticsearch,shreejay/elasticsearch,xpandan/elasticsearch,combinatorist/elasticsearch,trangvh/elasticsearch,liweinan0423/elasticsearch,yuy168/elasticsearch,likaiwalkman/elasticsearch,springning/elasticsearch,myelin/elasticsearch,thecocce/elasticsearch,kalburgimanjunath/elasticsearch,uschindler/elasticsearch,AleksKochev/elasticsearch,coding0011/elasticsearch,wangtuo/elasticsearch,Liziyao/elasticsearch,martinstuga/elasticsearch,sdauletau/elasticsearch,mapr/elasticsearch,MetSystem/elasticsearch,mbrukman/elasticsearch,iamjakob/elasticsearch,nellicus/elasticsearch,HarishAtGitHub/elasticsearch,dataduke/elasticsearch,javachengwc/elasticsearch,himanshuag/elasticsearch,mgalushka/elasticsearch,kunallimaye/elasticsearch,hafkensite/elasticsearch,drewr/elasticsearch,Siddartha07/elasticsearch,Ansh90/elasticsearch,socialrank/elasticsearch,wuranbo/elasticsearch,andrestc/elasticsearch,strapdata/elassandra5-rc,areek/elasticsearch,njlawton/elasticsearch,boliza/elasticsearch,markllama/elasticsearch,sauravmondallive/elasticsearch,tcucchietti/elasticsearch,dantuffery/elasticsearch,sarwarbhuiyan/elasticsearch,robin13/elasticsearch,Charlesdong/elasticsearch,lzo/elasticsearch-1,onegambler/elasticsearch,Siddartha07/elasticsearch,hirdesh2008/elasticsearch,shreejay/elasticsearch,jpountz/elasticsearch,jeteve/elasticsearch,lchennup/elasticsearch,vrkansagara/elasticsearch,camilojd/elasticsearch,knight1128/elasticsearch,lchennup/elasticsearch,Flipkart/elasticsearch,ThiagoGarciaAlve
s/elasticsearch,Kakakakakku/elasticsearch,mortonsykes/elasticsearch,markharwood/elasticsearch,sreeramjayan/elasticsearch,brandonkearby/elasticsearch,springning/elasticsearch,robin13/elasticsearch,sreeramjayan/elasticsearch,zhaocloud/elasticsearch,tkssharma/elasticsearch,pablocastro/elasticsearch,humandb/elasticsearch,mcku/elasticsearch,alexshadow007/elasticsearch,kingaj/elasticsearch,brandonkearby/elasticsearch,jimczi/elasticsearch,glefloch/elasticsearch,fooljohnny/elasticsearch,yanjunh/elasticsearch,amit-shar/elasticsearch,xuzha/elasticsearch,tsohil/elasticsearch,Chhunlong/elasticsearch,uboness/elasticsearch,hirdesh2008/elasticsearch,combinatorist/elasticsearch,spiegela/elasticsearch,ckclark/elasticsearch,Asimov4/elasticsearch,avikurapati/elasticsearch,Fsero/elasticsearch,AshishThakur/elasticsearch,markllama/elasticsearch,sposam/elasticsearch,StefanGor/elasticsearch,strapdata/elassandra5-rc,jango2015/elasticsearch,kalburgimanjunath/elasticsearch,pritishppai/elasticsearch,palecur/elasticsearch,wbowling/elasticsearch,JackyMai/elasticsearch,MisterAndersen/elasticsearch,gmarz/elasticsearch,kevinkluge/elasticsearch,JervyShi/elasticsearch,JervyShi/elasticsearch,strapdata/elassandra,obourgain/elasticsearch,yynil/elasticsearch,kaneshin/elasticsearch,acchen97/elasticsearch,jimczi/elasticsearch,sposam/elasticsearch,kunallimaye/elasticsearch,a2lin/elasticsearch,khiraiwa/elasticsearch,MaineC/elasticsearch,ouyangkongtong/elasticsearch,dylan8902/elasticsearch,chrismwendt/elasticsearch,liweinan0423/elasticsearch,mkis-/elasticsearch,wuranbo/elasticsearch,dylan8902/elasticsearch,18098924759/elasticsearch,polyfractal/elasticsearch,PhaedrusTheGreek/elasticsearch,kunallimaye/elasticsearch,ouyangkongtong/elasticsearch,lks21c/elasticsearch,tsohil/elasticsearch,pritishppai/elasticsearch,sdauletau/elasticsearch,Uiho/elasticsearch,palecur/elasticsearch,schonfeld/elasticsearch,fforbeck/elasticsearch,hydro2k/elasticsearch,kkirsche/elasticsearch,zhaocloud/elasticsearch,infusionsoft/elasticsea
rch,mrorii/elasticsearch,sc0ttkclark/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,onegambler/elasticsearch,wenpos/elasticsearch,jbertouch/elasticsearch,anti-social/elasticsearch,codebunt/elasticsearch,mjhennig/elasticsearch,dylan8902/elasticsearch,PhaedrusTheGreek/elasticsearch,aparo/elasticsearch,wbowling/elasticsearch,skearns64/elasticsearch,mcku/elasticsearch,achow/elasticsearch,zhiqinghuang/elasticsearch,dylan8902/elasticsearch,andrestc/elasticsearch,sjohnr/elasticsearch,achow/elasticsearch,Asimov4/elasticsearch,koxa29/elasticsearch,mohit/elasticsearch,girirajsharma/elasticsearch,xingguang2013/elasticsearch,MetSystem/elasticsearch,camilojd/elasticsearch,schonfeld/elasticsearch,lchennup/elasticsearch,infusionsoft/elasticsearch,Liziyao/elasticsearch,18098924759/elasticsearch,tkssharma/elasticsearch,infusionsoft/elasticsearch,vingupta3/elasticsearch,schonfeld/elasticsearch,jango2015/elasticsearch,GlenRSmith/elasticsearch,Siddartha07/elasticsearch,luiseduardohdbackup/elasticsearch,strapdata/elassandra-test,mjhennig/elasticsearch,codebunt/elasticsearch,truemped/elasticsearch,kimimj/elasticsearch,lydonchandra/elasticsearch,drewr/elasticsearch,himanshuag/elasticsearch,wbowling/elasticsearch,thecocce/elasticsearch,slavau/elasticsearch,diendt/elasticsearch,glefloch/elasticsearch,zeroctu/elasticsearch,wangtuo/elasticsearch,jeteve/elasticsearch,njlawton/elasticsearch,likaiwalkman/elasticsearch,truemped/elasticsearch,masaruh/elasticsearch,skearns64/elasticsearch,strapdata/elassandra-test,abhijitiitr/es,kubum/elasticsearch,dongjoon-hyun/elasticsearch,Rygbee/elasticsearch,btiernay/elasticsearch,EasonYi/elasticsearch,adrianbk/elasticsearch,HarishAtGitHub/elasticsearch,mortonsykes/elasticsearch,scorpionvicky/elasticsearch,kcompher/elasticsearch,wayeast/elasticsearch,nrkkalyan/elasticsearch,wimvds/elasticsearch,NBSW/elasticsearch,fforbeck/elasticsearch,nknize/elasticsearch,dylan8902/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,knight1128/elasticsearch,likaiwalkman/el
asticsearch,pozhidaevak/elasticsearch,Microsoft/elasticsearch,skearns64/elasticsearch,onegambler/elasticsearch,ydsakyclguozi/elasticsearch,mbrukman/elasticsearch,markharwood/elasticsearch,queirozfcom/elasticsearch,Ansh90/elasticsearch,nknize/elasticsearch,zkidkid/elasticsearch,qwerty4030/elasticsearch,adrianbk/elasticsearch,huanzhong/elasticsearch,chrismwendt/elasticsearch,ESamir/elasticsearch,sc0ttkclark/elasticsearch,jimhooker2002/elasticsearch,MichaelLiZhou/elasticsearch,strapdata/elassandra,HarishAtGitHub/elasticsearch,KimTaehee/elasticsearch,areek/elasticsearch,episerver/elasticsearch,alexkuk/elasticsearch,alexshadow007/elasticsearch,fred84/elasticsearch,zhiqinghuang/elasticsearch,gmarz/elasticsearch,abibell/elasticsearch,markwalkom/elasticsearch,micpalmia/elasticsearch,amaliujia/elasticsearch,lydonchandra/elasticsearch,ricardocerq/elasticsearch,SergVro/elasticsearch,raishiv/elasticsearch,pranavraman/elasticsearch,hirdesh2008/elasticsearch,sauravmondallive/elasticsearch,onegambler/elasticsearch,awislowski/elasticsearch,SergVro/elasticsearch,milodky/elasticsearch,tkssharma/elasticsearch,jaynblue/elasticsearch,xpandan/elasticsearch,combinatorist/elasticsearch,Rygbee/elasticsearch,Widen/elasticsearch,phani546/elasticsearch,elasticdog/elasticsearch,Shekharrajak/elasticsearch,nilabhsagar/elasticsearch,iamjakob/elasticsearch,jimhooker2002/elasticsearch,KimTaehee/elasticsearch,amaliujia/elasticsearch,marcuswr/elasticsearch-dateline,springning/elasticsearch,rento19962/elasticsearch,IanvsPoplicola/elasticsearch,Shekharrajak/elasticsearch,sauravmondallive/elasticsearch,18098924759/elasticsearch,anti-social/elasticsearch,lmtwga/elasticsearch,jprante/elasticsearch,vietlq/elasticsearch,wangtuo/elasticsearch,lydonchandra/elasticsearch,mmaracic/elasticsearch,IanvsPoplicola/elasticsearch,mnylen/elasticsearch,iantruslove/elasticsearch,wittyameta/elasticsearch,Fsero/elasticsearch,wbowling/elasticsearch,kenshin233/elasticsearch,beiske/elasticsearch,salyh/elasticsearch,glefloch/el
asticsearch,Stacey-Gammon/elasticsearch,raishiv/elasticsearch,rento19962/elasticsearch,spiegela/elasticsearch,rmuir/elasticsearch,yanjunh/elasticsearch,vrkansagara/elasticsearch,Shekharrajak/elasticsearch,KimTaehee/elasticsearch,Chhunlong/elasticsearch,avikurapati/elasticsearch,linglaiyao1314/elasticsearch,xuzha/elasticsearch,iantruslove/elasticsearch,queirozfcom/elasticsearch,lmtwga/elasticsearch,mbrukman/elasticsearch,weipinghe/elasticsearch,caengcjd/elasticsearch,hechunwen/elasticsearch,wayeast/elasticsearch,kimimj/elasticsearch,feiqitian/elasticsearch,xingguang2013/elasticsearch,s1monw/elasticsearch,weipinghe/elasticsearch,andrejserafim/elasticsearch,henakamaMSFT/elasticsearch,spiegela/elasticsearch,AshishThakur/elasticsearch,wangyuxue/elasticsearch,codebunt/elasticsearch,lzo/elasticsearch-1,easonC/elasticsearch,infusionsoft/elasticsearch,szroland/elasticsearch,VukDukic/elasticsearch,onegambler/elasticsearch,mm0/elasticsearch,i-am-Nathan/elasticsearch,clintongormley/elasticsearch,artnowo/elasticsearch,ImpressTV/elasticsearch,abhijitiitr/es,humandb/elasticsearch,diendt/elasticsearch,fred84/elasticsearch,scorpionvicky/elasticsearch,dantuffery/elasticsearch,jw0201/elastic,wbowling/elasticsearch,tcucchietti/elasticsearch,markwalkom/elasticsearch,tebriel/elasticsearch,yynil/elasticsearch,btiernay/elasticsearch,iamjakob/elasticsearch,markllama/elasticsearch,alexbrasetvik/elasticsearch,golubev/elasticsearch,sneivandt/elasticsearch,gingerwizard/elasticsearch,NBSW/elasticsearch,mohsinh/elasticsearch,sposam/elasticsearch,lightslife/elasticsearch,cnfire/elasticsearch-1,beiske/elasticsearch,diendt/elasticsearch,alexksikes/elasticsearch,markwalkom/elasticsearch,karthikjaps/elasticsearch,karthikjaps/elasticsearch,ouyangkongtong/elasticsearch,strapdata/elassandra5-rc,areek/elasticsearch,VukDukic/elasticsearch,nilabhsagar/elasticsearch,GlenRSmith/elasticsearch,fernandozhu/elasticsearch,markllama/elasticsearch,yanjunh/elasticsearch,mortonsykes/elasticsearch,javachengwc/elasticse
arch,cwurm/elasticsearch,wbowling/elasticsearch,KimTaehee/elasticsearch,vorce/es-metrics,henakamaMSFT/elasticsearch,lydonchandra/elasticsearch,mmaracic/elasticsearch,szroland/elasticsearch,dataduke/elasticsearch,Clairebi/ElasticsearchClone,opendatasoft/elasticsearch,girirajsharma/elasticsearch,petabytedata/elasticsearch,slavau/elasticsearch,StefanGor/elasticsearch,episerver/elasticsearch,masterweb121/elasticsearch,weipinghe/elasticsearch,iacdingping/elasticsearch,ThalaivaStars/OrgRepo1,rento19962/elasticsearch,ImpressTV/elasticsearch,LeoYao/elasticsearch,Clairebi/ElasticsearchClone,gfyoung/elasticsearch,tahaemin/elasticsearch,jbertouch/elasticsearch,liweinan0423/elasticsearch,myelin/elasticsearch,milodky/elasticsearch,peschlowp/elasticsearch
|
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.store;
import com.carrotsearch.randomizedtesting.SeedUtils;
import org.apache.lucene.store.*;
import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
import org.apache.lucene.util.Constants;
import org.elasticsearch.cache.memory.ByteBufferCache;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.DirectoryService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.fs.FsDirectoryService;
import org.elasticsearch.index.store.fs.MmapFsDirectoryService;
import org.elasticsearch.index.store.fs.NioFsDirectoryService;
import org.elasticsearch.index.store.fs.SimpleFsDirectoryService;
import org.elasticsearch.index.store.memory.ByteBufferDirectoryService;
import org.elasticsearch.index.store.ram.RamDirectoryService;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import java.io.IOException;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Test-support helper that wraps Lucene {@link Directory} instances in a
 * {@link MockDirectoryWrapper} configured from index settings, so that tests can
 * randomly inject I/O failures, throttling, and close-time index checks.
 * The randomness is seeded from the per-index test seed setting, making failures
 * reproducible for a given seed.
 */
public class MockDirectoryHelper {
// Settings keys controlling the mock directory behavior (rates are 0.0 - 1.0).
public static final String RANDOM_IO_EXCEPTION_RATE = "index.store.mock.random.io_exception_rate";
public static final String RANDOM_IO_EXCEPTION_RATE_ON_OPEN = "index.store.mock.random.io_exception_rate_on_open";
public static final String RANDOM_THROTTLE = "index.store.mock.random.throttle";
public static final String CHECK_INDEX_ON_CLOSE = "index.store.mock.check_index_on_close";
public static final String RANDOM_PREVENT_DOUBLE_WRITE = "index.store.mock.random.prevent_double_write";
public static final String RANDOM_NO_DELETE_OPEN_FILE = "index.store.mock.random.no_delete_open_file";
public static final String RANDOM_FAIL_ON_CLOSE= "index.store.mock.random.fail_on_close";
// Every wrapper handed out by wrap() is registered here so the test framework
// can later force-close them (and fail on unclosed files) via closeWithRuntimeException().
public static final Set<ElasticsearchMockDirectoryWrapper> wrappers = ConcurrentCollections.newConcurrentSet();
private final Random random;
private final double randomIOExceptionRate;
private final double randomIOExceptionRateOnOpen;
private final Throttling throttle;
private final boolean checkIndexOnClose;
private final Settings indexSettings;
private final ShardId shardId;
private final boolean preventDoubleWrite;
private final boolean noDeleteOpenFile;
private final ESLogger logger;
private final boolean failOnClose;
/**
 * Reads all mock-store settings and derives per-shard randomized defaults from
 * the index-level test seed.
 *
 * NOTE: the order of the random.nextX() calls below is significant - changing it
 * changes which values each option receives for a given seed.
 */
public MockDirectoryHelper(ShardId shardId, Settings indexSettings, ESLogger logger) {
final long seed = indexSettings.getAsLong(ElasticsearchIntegrationTest.INDEX_SEED_SETTING, 0l);
random = new Random(seed);
randomIOExceptionRate = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE, 0.0d);
randomIOExceptionRateOnOpen = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE_ON_OPEN, 0.0d);
preventDoubleWrite = indexSettings.getAsBoolean(RANDOM_PREVENT_DOUBLE_WRITE, true); // true is default in MDW
noDeleteOpenFile = indexSettings.getAsBoolean(RANDOM_NO_DELETE_OPEN_FILE, random.nextBoolean()); // true is default in MDW
random.nextInt(shardId.getId() + 1); // some randomness per shard
throttle = Throttling.valueOf(indexSettings.get(RANDOM_THROTTLE, random.nextDouble() < 0.1 ? "SOMETIMES" : "NEVER"));
checkIndexOnClose = indexSettings.getAsBoolean(CHECK_INDEX_ON_CLOSE, random.nextDouble() < 0.1);
failOnClose = indexSettings.getAsBoolean(RANDOM_FAIL_ON_CLOSE, false);
if (logger.isDebugEnabled()) {
logger.debug("Using MockDirWrapper with seed [{}] throttle: [{}] checkIndexOnClose: [{}]", SeedUtils.formatSeed(seed),
throttle, checkIndexOnClose);
}
this.indexSettings = indexSettings;
this.shardId = shardId;
this.logger = logger;
}
/**
 * Wraps the given directory in an {@link ElasticsearchMockDirectoryWrapper}
 * configured with this helper's settings, and registers it in {@link #wrappers}.
 */
public Directory wrap(Directory dir) {
final ElasticsearchMockDirectoryWrapper w = new ElasticsearchMockDirectoryWrapper(random, dir, logger, failOnClose);
w.setRandomIOExceptionRate(randomIOExceptionRate);
w.setRandomIOExceptionRateOnOpen(randomIOExceptionRateOnOpen);
w.setThrottling(throttle);
w.setCheckIndexOnClose(checkIndexOnClose);
w.setPreventDoubleWrite(preventDoubleWrite);
w.setNoDeleteOpenFile(noDeleteOpenFile);
wrappers.add(w);
return w;
}
/** Replaces every element of {@code dirs} with its wrapped counterpart, in place. */
public Directory[] wrapAllInplace(Directory[] dirs) {
for (int i = 0; i < dirs.length; i++) {
dirs[i] = wrap(dirs[i]);
}
return dirs;
}
/**
 * Picks an FS-backed directory service. Platform-specific branches come first
 * (mmap on 64-bit Windows/Solaris when unmapping is supported, simple FS on
 * other Windows); otherwise one of mmap/simple/nio is chosen at random.
 */
public FsDirectoryService randomDirectorService(IndexStore indexStore) {
if ((Constants.WINDOWS || Constants.SUN_OS) && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) {
return new MmapFsDirectoryService(shardId, indexSettings, indexStore);
} else if (Constants.WINDOWS) {
return new SimpleFsDirectoryService(shardId, indexSettings, indexStore);
}
switch (random.nextInt(3)) {
case 1:
return new MmapFsDirectoryService(shardId, indexSettings, indexStore);
case 0:
return new SimpleFsDirectoryService(shardId, indexSettings, indexStore);
default:
return new NioFsDirectoryService(shardId, indexSettings, indexStore);
}
}
/** Randomly picks one of the two in-memory directory service implementations. */
public DirectoryService randomRamDirecoryService(ByteBufferCache byteBufferCache) {
switch (random.nextInt(2)) {
case 0:
return new RamDirectoryService(shardId, indexSettings);
default:
return new ByteBufferDirectoryService(shardId, indexSettings, byteBufferCache);
}
}
/**
 * MockDirectoryWrapper variant that can swallow close-time failures (unless
 * {@code failOnClose} is set) so shards shut down cleanly during a test, while
 * still allowing the framework to surface unclosed-file failures afterwards via
 * {@link #closeWithRuntimeException()}. It also wraps every opened
 * {@link IndexInput} to detect use-after-close.
 */
public static final class ElasticsearchMockDirectoryWrapper extends MockDirectoryWrapper {
private final ESLogger logger;
private final boolean failOnClose;
public ElasticsearchMockDirectoryWrapper(Random random, Directory delegate, ESLogger logger, boolean failOnClose) {
super(random, delegate);
this.logger = logger;
this.failOnClose = failOnClose;
}
@Override
public void close() throws IOException {
try {
super.close();
} catch (RuntimeException ex) {
if (failOnClose) {
throw ex;
}
// we catch the exception on close to properly close shards even if there are open files
// the test framework will call closeWithRuntimeException after the test exits to fail
// on unclosed files.
logger.debug("MockDirectoryWrapper#close() threw exception", ex);
}
}
// Unlike close(), this does NOT swallow failures - used in test tear-down to
// fail the test when files are still open.
public void closeWithRuntimeException() throws IOException {
super.close(); // force fail if open files etc. called in tear down of ElasticsearchIntegrationTest
}
@Override
public synchronized IndexInput openInput(String name, IOContext context) throws IOException {
return new CloseTrackingMockIndexInputWrapper(name, super.openInput(name, context), logger);
}
}
/**
 * IndexInput decorator that records where it was closed and throws (with that
 * close-site stack attached to the log) if any method is invoked afterwards,
 * giving actionable diagnostics for "abusing closed IndexInput" failures.
 */
private static class CloseTrackingMockIndexInputWrapper extends IndexInput {
// Guards against double-close and marks the input unusable once set.
private final AtomicBoolean closed = new AtomicBoolean(false);
private final String name;
private IndexInput delegate;
private final ESLogger logger;
// Captures the stack trace of the close() call for later diagnostics.
private volatile RuntimeException closingStack;
public CloseTrackingMockIndexInputWrapper(String name, IndexInput delegate, ESLogger logger) {
super(name);
this.delegate = delegate;
this.logger = logger;
this.name = name;
}
@Override
public void close() throws IOException {
if (closed.compareAndSet(false, true)) {
closingStack = new RuntimeException("IndexInput closed");
delegate.close();
}
}
@Override
public IndexInput clone() {
ensureOpen();
return delegate.clone();
}
// Fails fast on use-after-close, logging the stack captured at close time.
private void ensureOpen() {
if (closed.get()) {
logger.debug("Abusing IndexInput for: [" + name + "] - already closed", closingStack);
throw new RuntimeException("Abusing closed IndexInput!");
}
}
// All methods below verify the input is still open, then delegate.
@Override
public long getFilePointer() {
ensureOpen();
return delegate.getFilePointer();
}
@Override
public void seek(long pos) throws IOException {
ensureOpen();
delegate.seek(pos);
}
@Override
public long length() {
ensureOpen();
return delegate.length();
}
@Override
public byte readByte() throws IOException {
ensureOpen();
return delegate.readByte();
}
@Override
public void readBytes(byte[] b, int offset, int len) throws IOException {
ensureOpen();
delegate.readBytes(b, offset, len);
}
@Override
public void readBytes(byte[] b, int offset, int len, boolean useBuffer)
throws IOException {
ensureOpen();
delegate.readBytes(b, offset, len, useBuffer);
}
@Override
public short readShort() throws IOException {
ensureOpen();
return delegate.readShort();
}
@Override
public int readInt() throws IOException {
ensureOpen();
return delegate.readInt();
}
@Override
public long readLong() throws IOException {
ensureOpen();
return delegate.readLong();
}
@Override
public String readString() throws IOException {
ensureOpen();
return delegate.readString();
}
@Override
public Map<String,String> readStringStringMap() throws IOException {
ensureOpen();
return delegate.readStringStringMap();
}
@Override
public int readVInt() throws IOException {
ensureOpen();
return delegate.readVInt();
}
@Override
public long readVLong() throws IOException {
ensureOpen();
return delegate.readVLong();
}
@Override
public String toString() {
return "CloseTrackingMockIndexInputWrapper(" + delegate + ")";
}
}
}
|
src/test/java/org/elasticsearch/test/store/MockDirectoryHelper.java
|
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.store;
import com.carrotsearch.randomizedtesting.SeedUtils;
import org.apache.lucene.store.*;
import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
import org.apache.lucene.util.Constants;
import org.elasticsearch.cache.memory.ByteBufferCache;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.DirectoryService;
import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.fs.FsDirectoryService;
import org.elasticsearch.index.store.fs.MmapFsDirectoryService;
import org.elasticsearch.index.store.fs.NioFsDirectoryService;
import org.elasticsearch.index.store.fs.SimpleFsDirectoryService;
import org.elasticsearch.index.store.memory.ByteBufferDirectoryService;
import org.elasticsearch.index.store.ram.RamDirectoryService;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import java.io.IOException;
import java.util.Random;
import java.util.Set;
/**
 * Test-support helper that wraps Lucene {@link Directory} instances in a
 * {@link MockDirectoryWrapper} configured from index settings, so that tests can
 * randomly inject I/O failures, throttling, and close-time index checks.
 * The randomness is seeded from the per-index test seed setting, making failures
 * reproducible for a given seed.
 */
public class MockDirectoryHelper {
// Settings keys controlling the mock directory behavior (rates are 0.0 - 1.0).
public static final String RANDOM_IO_EXCEPTION_RATE = "index.store.mock.random.io_exception_rate";
public static final String RANDOM_IO_EXCEPTION_RATE_ON_OPEN = "index.store.mock.random.io_exception_rate_on_open";
public static final String RANDOM_THROTTLE = "index.store.mock.random.throttle";
public static final String CHECK_INDEX_ON_CLOSE = "index.store.mock.check_index_on_close";
public static final String RANDOM_PREVENT_DOUBLE_WRITE = "index.store.mock.random.prevent_double_write";
public static final String RANDOM_NO_DELETE_OPEN_FILE = "index.store.mock.random.no_delete_open_file";
public static final String RANDOM_FAIL_ON_CLOSE= "index.store.mock.random.fail_on_close";
// Every wrapper handed out by wrap() is registered here so the test framework
// can later force-close them (and fail on unclosed files) via closeWithRuntimeException().
public static final Set<ElasticsearchMockDirectoryWrapper> wrappers = ConcurrentCollections.newConcurrentSet();
private final Random random;
private final double randomIOExceptionRate;
private final double randomIOExceptionRateOnOpen;
private final Throttling throttle;
private final boolean checkIndexOnClose;
private final Settings indexSettings;
private final ShardId shardId;
private final boolean preventDoubleWrite;
private final boolean noDeleteOpenFile;
private final ESLogger logger;
private final boolean failOnClose;
/**
 * Reads all mock-store settings and derives per-shard randomized defaults from
 * the index-level test seed.
 *
 * NOTE: the order of the random.nextX() calls below is significant - changing it
 * changes which values each option receives for a given seed.
 */
public MockDirectoryHelper(ShardId shardId, Settings indexSettings, ESLogger logger) {
final long seed = indexSettings.getAsLong(ElasticsearchIntegrationTest.INDEX_SEED_SETTING, 0l);
random = new Random(seed);
randomIOExceptionRate = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE, 0.0d);
randomIOExceptionRateOnOpen = indexSettings.getAsDouble(RANDOM_IO_EXCEPTION_RATE_ON_OPEN, 0.0d);
preventDoubleWrite = indexSettings.getAsBoolean(RANDOM_PREVENT_DOUBLE_WRITE, true); // true is default in MDW
noDeleteOpenFile = indexSettings.getAsBoolean(RANDOM_NO_DELETE_OPEN_FILE, random.nextBoolean()); // true is default in MDW
random.nextInt(shardId.getId() + 1); // some randomness per shard
throttle = Throttling.valueOf(indexSettings.get(RANDOM_THROTTLE, random.nextDouble() < 0.1 ? "SOMETIMES" : "NEVER"));
checkIndexOnClose = indexSettings.getAsBoolean(CHECK_INDEX_ON_CLOSE, random.nextDouble() < 0.1);
failOnClose = indexSettings.getAsBoolean(RANDOM_FAIL_ON_CLOSE, false);
if (logger.isDebugEnabled()) {
logger.debug("Using MockDirWrapper with seed [{}] throttle: [{}] checkIndexOnClose: [{}]", SeedUtils.formatSeed(seed),
throttle, checkIndexOnClose);
}
this.indexSettings = indexSettings;
this.shardId = shardId;
this.logger = logger;
}
/**
 * Wraps the given directory in an {@link ElasticsearchMockDirectoryWrapper}
 * configured with this helper's settings, and registers it in {@link #wrappers}.
 */
public Directory wrap(Directory dir) {
final ElasticsearchMockDirectoryWrapper w = new ElasticsearchMockDirectoryWrapper(random, dir, logger, failOnClose);
w.setRandomIOExceptionRate(randomIOExceptionRate);
w.setRandomIOExceptionRateOnOpen(randomIOExceptionRateOnOpen);
w.setThrottling(throttle);
w.setCheckIndexOnClose(checkIndexOnClose);
w.setPreventDoubleWrite(preventDoubleWrite);
w.setNoDeleteOpenFile(noDeleteOpenFile);
wrappers.add(w);
return w;
}
/** Replaces every element of {@code dirs} with its wrapped counterpart, in place. */
public Directory[] wrapAllInplace(Directory[] dirs) {
for (int i = 0; i < dirs.length; i++) {
dirs[i] = wrap(dirs[i]);
}
return dirs;
}
/**
 * Picks an FS-backed directory service. Platform-specific branches come first
 * (mmap on 64-bit Windows/Solaris when unmapping is supported, simple FS on
 * other Windows); otherwise one of mmap/simple/nio is chosen at random.
 */
public FsDirectoryService randomDirectorService(IndexStore indexStore) {
if ((Constants.WINDOWS || Constants.SUN_OS) && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) {
return new MmapFsDirectoryService(shardId, indexSettings, indexStore);
} else if (Constants.WINDOWS) {
return new SimpleFsDirectoryService(shardId, indexSettings, indexStore);
}
switch (random.nextInt(3)) {
case 1:
return new MmapFsDirectoryService(shardId, indexSettings, indexStore);
case 0:
return new SimpleFsDirectoryService(shardId, indexSettings, indexStore);
default:
return new NioFsDirectoryService(shardId, indexSettings, indexStore);
}
}
/** Randomly picks one of the two in-memory directory service implementations. */
public DirectoryService randomRamDirecoryService(ByteBufferCache byteBufferCache) {
switch (random.nextInt(2)) {
case 0:
return new RamDirectoryService(shardId, indexSettings);
default:
return new ByteBufferDirectoryService(shardId, indexSettings, byteBufferCache);
}
}
/**
 * MockDirectoryWrapper variant that can swallow close-time failures (unless
 * {@code failOnClose} is set) so shards shut down cleanly during a test, while
 * still allowing the framework to surface unclosed-file failures afterwards via
 * {@link #closeWithRuntimeException()}.
 */
public static final class ElasticsearchMockDirectoryWrapper extends MockDirectoryWrapper {
private final ESLogger logger;
private final boolean failOnClose;
public ElasticsearchMockDirectoryWrapper(Random random, Directory delegate, ESLogger logger, boolean failOnClose) {
super(random, delegate);
this.logger = logger;
this.failOnClose = failOnClose;
}
@Override
public void close() throws IOException {
try {
super.close();
} catch (RuntimeException ex) {
if (failOnClose) {
throw ex;
}
// we catch the exception on close to properly close shards even if there are open files
// the test framework will call closeWithRuntimeException after the test exits to fail
// on unclosed files.
logger.debug("MockDirectoryWrapper#close() threw exception", ex);
}
}
// Unlike close(), this does NOT swallow failures - used in test tear-down to
// fail the test when files are still open.
public void closeWithRuntimeException() throws IOException {
super.close(); // force fail if open files etc. called in tear down of ElasticsearchIntegrationTest
}
}
}
|
Add more information to IndexInput failures related to abusing closed input
|
src/test/java/org/elasticsearch/test/store/MockDirectoryHelper.java
|
Add more information to IndexInput failures related to abusing closed input
|
|
Java
|
apache-2.0
|
cd0858f9915918010e061e7060595cd5278cc6c2
| 0
|
lisaglendenning/zookeeper-lite,lisaglendenning/zookeeper-lite
|
package edu.uw.zookeeper.util;
import com.google.common.base.Objects;
/**
 * Pairs an arbitrary task object with a {@link Promise} for its eventual result.
 * All promise operations are forwarded to the wrapped delegate.
 *
 * @param <T> type of the associated task
 * @param <V> type of the promised value
 */
public class PromiseTask<T,V> extends ForwardingPromise<V> {

    /** Creates a fresh, unfulfilled promise backed by a settable future. */
    public static <V> Promise<V> newPromise() {
        return SettableFuturePromise.<V>create();
    }

    /** Wraps {@code task} together with a newly created promise. */
    public static <T,V> PromiseTask<T,V> of(T task) {
        return of(task, PromiseTask.<V>newPromise());
    }

    /** Wraps {@code task} together with the caller-supplied {@code promise}. */
    public static <T,V> PromiseTask<T,V> of(T task, Promise<V> promise) {
        return new PromiseTask<T,V>(task, promise);
    }

    protected final T task;
    protected final Promise<V> delegate;

    protected PromiseTask(T theTask, Promise<V> thePromise) {
        super();
        this.task = theTask;
        this.delegate = thePromise;
    }

    /** @return the task object associated with this promise */
    public T task() {
        return task;
    }

    @Override
    public String toString() {
        return Objects.toStringHelper(this)
                .add("task", task())
                .add("future", delegate())
                .toString();
    }

    @Override
    protected Promise<V> delegate() {
        return delegate;
    }
}
|
src/main/java/edu/uw/zookeeper/util/PromiseTask.java
|
package edu.uw.zookeeper.util;
import com.google.common.base.Objects;
/**
 * Pairs an arbitrary task object with a {@link Promise} for its eventual result.
 * All promise operations are forwarded to the wrapped delegate; toString renders
 * the underlying future as a compact "isDone" summary.
 *
 * @param <T> type of the associated task
 * @param <V> type of the promised value
 */
public class PromiseTask<T,V> extends ForwardingPromise<V> {

    /** Creates a fresh, unfulfilled promise backed by a settable future. */
    public static <V> Promise<V> newPromise() {
        return SettableFuturePromise.<V>create();
    }

    /** Wraps {@code task} together with a newly created promise. */
    public static <T,V> PromiseTask<T,V> of(T task) {
        return of(task, PromiseTask.<V>newPromise());
    }

    /** Wraps {@code task} together with the caller-supplied {@code promise}. */
    public static <T,V> PromiseTask<T,V> of(T task, Promise<V> promise) {
        return new PromiseTask<T,V>(task, promise);
    }

    protected final T task;
    protected final Promise<V> delegate;

    protected PromiseTask(T theTask, Promise<V> thePromise) {
        super();
        this.task = theTask;
        this.delegate = thePromise;
    }

    /** @return the task object associated with this promise */
    public T task() {
        return task;
    }

    @Override
    public String toString() {
        // Summarize the delegate rather than embedding its full representation.
        Promise<V> future = delegate();
        String futureRepr = Objects.toStringHelper(future)
                .add("isDone", future.isDone())
                .toString();
        return Objects.toStringHelper(this)
                .add("task", task())
                .add("future", futureRepr)
                .toString();
    }

    @Override
    protected Promise<V> delegate() {
        return delegate;
    }
}
|
minor
|
src/main/java/edu/uw/zookeeper/util/PromiseTask.java
|
minor
|
|
Java
|
apache-2.0
|
f4b318368882e1b637b42ac964c4862e3a0ecaae
| 0
|
gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa
|
package ae3.service;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multiset;
import com.google.common.collect.Ordering;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import uk.ac.ebi.gxa.efo.Efo;
import uk.ac.ebi.gxa.index.StatisticsStorageFactory;
import uk.ac.ebi.gxa.index.builder.IndexBuilder;
import uk.ac.ebi.gxa.index.builder.IndexBuilderEventHandler;
import uk.ac.ebi.gxa.statistics.*;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* Created by IntelliJ IDEA.
* User: rpetry
* Date: Nov 2, 2010
* Time: 5:27:03 PM
* This class provides gene expression statistics query service:
* - manages index storage and interaction with the IndexBuilder service
* - delegates statistics queries to StatisticsQueryUtils
*/
public class AtlasStatisticsQueryService implements IndexBuilderEventHandler, DisposableBean {

    final private Logger log = LoggerFactory.getLogger(getClass());

    // Handler for the BitIndex builder
    private IndexBuilder indexBuilder;
    // Bitindex Object which is de-serialized from indexFileName in atlasIndexDir
    private StatisticsStorage<Long> statisticsStorage;
    private File atlasIndexDir;
    private String indexFileName;
    // Used for finding children for query efo's
    private Efo efo;

    public AtlasStatisticsQueryService(String indexFileName) {
        this.indexFileName = indexFileName;
    }

    // Setter injection: directory holding the serialized bit index
    public void setAtlasIndex(File atlasIndexDir) {
        this.atlasIndexDir = atlasIndexDir;
    }

    // Setter injection: also registers this service for rebuild notifications
    public void setIndexBuilder(IndexBuilder indexBuilder) {
        this.indexBuilder = indexBuilder;
        indexBuilder.registerIndexBuildEventHandler(this);
    }

    public void setStatisticsStorage(StatisticsStorage<Long> statisticsStorage) {
        this.statisticsStorage = statisticsStorage;
    }

    public void setEfo(Efo efo) {
        this.efo = efo;
    }

    /**
     * Index rebuild notification handler - after bit index is re-built, de-serialize it into statisticsStorage.
     *
     * @throws RuntimeException if the re-built index cannot be de-serialized;
     *                          without it this service cannot answer queries.
     */
    public void onIndexBuildFinish() {
        StatisticsStorageFactory statisticsStorageFactory = new StatisticsStorageFactory(indexFileName);
        statisticsStorageFactory.setAtlasIndex(atlasIndexDir);
        try {
            statisticsStorage = statisticsStorageFactory.createStatisticsStorage();
        } catch (IOException ioe) {
            String errMsg = "Failed to create statisticsStorage from " + atlasIndexDir.getAbsolutePath() + File.separator + indexFileName;
            log.error(errMsg, ioe);
            throw new RuntimeException(errMsg, ioe);
        }
    }

    public void onIndexBuildStart() {
        // Nothing to do here
    }

    /**
     * Destructor called by Spring
     *
     * @throws Exception
     */
    public void destroy() throws Exception {
        if (indexBuilder != null)
            indexBuilder.unregisterIndexBuildEventHandler(this);
    }

    /**
     * Convenience overload restricting the count query to a single gene with no score caching.
     *
     * @param efvOrEfo
     * @param statisticsType
     * @param isEfo
     * @param geneId
     * @return Experiment count for statisticsType, attributes and geneId
     */
    public Integer getExperimentCountsForGene(String efvOrEfo, StatisticsType statisticsType, boolean isEfo, Long geneId) {
        return getExperimentCountsForGene(efvOrEfo, statisticsType, isEfo, geneId, null, null);
    }

    /**
     * @param efvOrEfo               efv or efo term to count experiments for
     * @param statisticsType         statistic type (e.g. up/down) to count
     * @param isEfo                  true if efvOrEfo is an efo term
     * @param geneId                 gene whose count is returned
     * @param geneRestrictionSet     genes the query is restricted to; when null, defaults to just geneId
     * @param scoresCacheForStatType optional cache to populate: efvOrEfo -> (gene index -> experiment count)
     * @return Experiment count for statisticsType, attributes and geneId
     */
    public Integer getExperimentCountsForGene(
            String efvOrEfo,
            StatisticsType statisticsType,
            boolean isEfo,
            Long geneId,
            Set<Long> geneRestrictionSet,
            HashMap<String, Multiset<Integer>> scoresCacheForStatType) {

        Attribute attr = new Attribute(efvOrEfo, isEfo, statisticsType);
        if (!isEfo && statisticsStorage.getIndexForAttribute(attr) == null) {
            // TODO NB. This is currently possible as sample properties are not currently stored in statisticsStorage
            log.debug("Attribute " + attr + " was not found in Attribute Index");
            // return experiment count == 0 if an efv attribute could not be found in AttributeIndex (note
            // that efo attributes are not explicitly stored in AttributeIndex)
            return 0;
        }
        if (geneRestrictionSet == null) { // By default restrict the experiment count query to geneId
            geneRestrictionSet = new HashSet<Long>(Collections.singletonList(geneId));
        }
        StatisticsQueryCondition statsQuery = new StatisticsQueryCondition(geneRestrictionSet);
        statsQuery.and(getStatisticsOrQuery(Collections.singletonList(attr)));
        Multiset<Integer> scores = StatisticsQueryUtils.getExperimentCounts(statsQuery, statisticsStorage, null);

        // Cache geneRestrictionSet's scores for efvOrEfo - this cache will be re-used in heatmaps for rows other than the first one
        if (scoresCacheForStatType != null) {
            scoresCacheForStatType.put(efvOrEfo, scores);
        }

        Integer geneIndex = statisticsStorage.getIndexForGeneId(geneId);
        if (scores != null) {
            long time = System.currentTimeMillis();
            int expCountForGene = scores.count(geneIndex);
            if (expCountForGene > 0) {
                log.debug(statisticsType + " " + efvOrEfo + " expCountForGene: " + geneId + " (" + geneIndex + ") = " + expCountForGene + " got in: " + (System.currentTimeMillis() - time) + " ms");
            }
            return expCountForGene;
        }
        return 0;
    }

    /**
     * @param orAttributes
     * @return StatisticsQueryOrConditions, including children of all efo's in orAttributes
     */
    public StatisticsQueryOrConditions<StatisticsQueryCondition> getStatisticsOrQuery(List<Attribute> orAttributes) {
        List<Attribute> efoPlusChildren = includeEfoChildren(orAttributes);
        return StatisticsQueryUtils.getStatisticsOrQuery(efoPlusChildren, statisticsStorage);
    }

    /**
     * @param orAttributes
     * @return List containing all (afv and efo) attributes in orAttributes, plus the children of all efo's in orAttributes
     */
    private List<Attribute> includeEfoChildren(List<Attribute> orAttributes) {
        // Set de-duplicates attributes shared between an efo and its children
        Set<Attribute> attrsPlusChildren = new HashSet<Attribute>();
        for (Attribute attr : orAttributes) {
            if (attr.isEfo() == StatisticsQueryUtils.EFO) {
                Collection<String> efoPlusChildren = efo.getTermAndAllChildrenIds(attr.getEfv());
                log.debug("Expanded efo: " + attr + " into: " + efoPlusChildren);
                for (String efoTerm : efoPlusChildren) {
                    attrsPlusChildren.add(new Attribute(efoTerm, StatisticsQueryUtils.EFO, attr.getStatType()));
                }
            } else {
                attrsPlusChildren.add(attr);
            }
        }
        return new ArrayList<Attribute>(attrsPlusChildren);
    }

    public Integer getIndexForGene(Long geneId) {
        return statisticsStorage.getIndexForGeneId(geneId);
    }

    /**
     * Sorts a Multiset's entries in descending order of count.
     * http://stackoverflow.com/questions/3029151/find-top-n-elements-in-a-multiset-from-google-collections
     *
     * @param multiset
     * @param <T>
     * @return entries of multiset, highest count first
     */
    private static <T> ImmutableList<Multiset.Entry<T>> sortedByCount(Multiset<T> multiset) {
        Ordering<Multiset.Entry<T>> countComp = new Ordering<Multiset.Entry<T>>() {
            public int compare(Multiset.Entry<T> e1, Multiset.Entry<T> e2) {
                // Explicit comparison instead of subtraction: (e2 - e1) could
                // overflow for counts near Integer.MAX_VALUE
                int c1 = e1.getCount();
                int c2 = e2.getCount();
                return (c2 > c1) ? 1 : ((c2 < c1) ? -1 : 0);
            }
        };
        return countComp.immutableSortedCopy(multiset.entrySet());
    }

    /**
     * Returns the [min, max) window of multiset's entries sorted by descending count.
     * http://stackoverflow.com/questions/3029151/find-top-n-elements-in-a-multiset-from-google-collections
     *
     * @param multiset
     * @param min index (inclusive) of the first entry to return
     * @param max index (exclusive) of the last entry to return
     * @param <T>
     * @return entries [min, max) of the count-sorted list, clamped to its size
     */
    private static <T> ImmutableList<Multiset.Entry<T>> getEntriesBetweenMinMaxFromListSortedByCount(Multiset<T> multiset,
                                                                                                     int min, int max) {
        ImmutableList<Multiset.Entry<T>> sortedByCount = sortedByCount(multiset);
        // Clamp the [min, max) window to the list size. Previously the window was
        // applied only when size() > max, so for sizes in (min, max] the 'min'
        // offset was ignored and entries before 'min' leaked into later pages
        // (contradicting getSortedGenes()'s "starting from 'minPos'" contract).
        int from = Math.min(min, sortedByCount.size());
        int to = Math.min(max, sortedByCount.size());
        return sortedByCount.subList(from, to);
    }

    /**
     * @param statsQuery
     * @param minPos
     * @param rows
     * @param sortedGenesChunk - a chunk of the overall sorted (by experiment counts - in desc order) list of genes,
     *                         starting from 'minPos' and containing maximums 'rows' genes
     * @return The overall number of genes for which counts exist in statsQuery
     */
    public Integer getSortedGenes(final StatisticsQueryCondition statsQuery, final int minPos, final int rows, List<Long> sortedGenesChunk) {
        long timeStart = System.currentTimeMillis();
        Multiset<Integer> countsForConditions = StatisticsQueryUtils.getExperimentCounts(statsQuery, statisticsStorage, null);
        log.info("conditions bit index query for " + statsQuery.prettyPrint() + " (genes with counts present: " + countsForConditions.elementSet().size() + ") retrieved in : " + (System.currentTimeMillis() - timeStart) + " ms");
        List<Multiset.Entry<Integer>> sortedCounts = getEntriesBetweenMinMaxFromListSortedByCount(countsForConditions, minPos, minPos + rows);
        for (Multiset.Entry<Integer> entry : sortedCounts) {
            Long geneId = statisticsStorage.getGeneIdForIndex(entry.getElement());
            if (geneId != null) {
                sortedGenesChunk.add(geneId);
            } else {
                log.error("Failed to retrieve gene id for index: " + entry.getElement());
            }
        }
        return countsForConditions.elementSet().size();
    }

    /**
     * @param geneIds
     * @param statType
     * @return Set of efo and efv attributes that have non-zero experiment counts for statType in bit index
     */
    public Set<Attribute> getScoringAttributesForGenes(Set<Long> geneIds, StatisticsType statType) {
        long timeStart = System.currentTimeMillis();

        Set<Attribute> result = new HashSet<Attribute>();
        Set<Attribute> allEfvAttributesForStat = statisticsStorage.getAllAttributes(statType);
        for (Attribute attr : allEfvAttributesForStat) {
            attr.setStatType(statType);
            StatisticsQueryCondition statsQuery = new StatisticsQueryCondition(geneIds);
            statsQuery.and(getStatisticsOrQuery(Collections.singletonList(attr)));
            Set<Experiment> scoringExps = new HashSet<Experiment>();
            Multiset<Integer> scores = StatisticsQueryUtils.getExperimentCounts(statsQuery, statisticsStorage, scoringExps);
            if (scores.size() > 0) { // at least one gene in geneIds had an experiment count > 0 for attr
                result.add(attr);
                // Map each scoring experiment back to an efo term, if one exists
                for (Experiment exp : scoringExps) {
                    String efoTerm = statisticsStorage.getEfoTerm(attr, exp);
                    if (efoTerm != null) {
                        result.add(new Attribute(efoTerm, StatisticsQueryUtils.EFO, statType));
                        log.debug("Adding efo: " + efoTerm + " for attr: " + attr + " and exp: " + exp);
                    }
                }
            }
        }
        log.info("Retrieved " + result.size() + " scoring attributes for statType: " + statType + " and gene ids: (" + geneIds + ") in " + (System.currentTimeMillis() - timeStart) + "ms");
        return result;
    }
}
|
atlas-web/src/main/java/ae3/service/AtlasStatisticsQueryService.java
|
package ae3.service;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multiset;
import com.google.common.collect.Ordering;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import uk.ac.ebi.gxa.efo.Efo;
import uk.ac.ebi.gxa.index.StatisticsStorageFactory;
import uk.ac.ebi.gxa.index.builder.IndexBuilder;
import uk.ac.ebi.gxa.index.builder.IndexBuilderEventHandler;
import uk.ac.ebi.gxa.statistics.*;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* Created by IntelliJ IDEA.
* User: rpetry
* Date: Nov 2, 2010
* Time: 5:27:03 PM
* This class provides gene expression statistics query service:
* - manages index storage and interaction with the IndexBuilder service
* - delegates statistics queries to StatisticsQueryUtils
*/
public class AtlasStatisticsQueryService implements IndexBuilderEventHandler, DisposableBean {
final private Logger log = LoggerFactory.getLogger(getClass());
// Handler for the BitIndex builder
private IndexBuilder indexBuilder;
// Bitindex Object which is de-serialized from indexFileName in atlasIndexDir
private StatisticsStorage<Long> statisticsStorage;
private File atlasIndexDir;
private String indexFileName;
// Used for finding children for query efo's
private Efo efo;
public AtlasStatisticsQueryService(String indexFileName) {
this.indexFileName = indexFileName;
}
// Setter injection for the index directory
public void setAtlasIndex(File atlasIndexDir) {
this.atlasIndexDir = atlasIndexDir;
}
// Setter injection; also subscribes this service to index rebuild events
public void setIndexBuilder(IndexBuilder indexBuilder) {
this.indexBuilder = indexBuilder;
indexBuilder.registerIndexBuildEventHandler(this);
}
public void setStatisticsStorage(StatisticsStorage<Long> statisticsStorage) {
this.statisticsStorage = statisticsStorage;
}
public void setEfo(Efo efo) {
this.efo = efo;
}
/**
* Index rebuild notification handler - after bit index is re-built, de-serialize it into statisticsStorage and re-populate statTypeToEfoToScores cache
*/
public void onIndexBuildFinish() {
StatisticsStorageFactory statisticsStorageFactory = new StatisticsStorageFactory(indexFileName);
statisticsStorageFactory.setAtlasIndex(atlasIndexDir);
try {
statisticsStorage = statisticsStorageFactory.createStatisticsStorage();
} catch (IOException ioe) {
String errMsg = "Failed to create statisticsStorage from " + atlasIndexDir.getAbsolutePath() + File.separator + indexFileName;
log.error(errMsg, ioe);
// Propagated as unchecked: the service cannot operate without the index
throw new RuntimeException(errMsg, ioe);
}
}
public void onIndexBuildStart() {
// Nothing to do here
}
/**
* Destructor called by Spring
*
* @throws Exception
*/
public void destroy() throws Exception {
if (indexBuilder != null)
indexBuilder.unregisterIndexBuildEventHandler(this);
}
/**
* Convenience overload: restricts the query to geneId only, no score caching.
*
* @param efvOrEfo
* @param statisticsType
* @param isEfo
* @param geneId
* @return Experiment count for statisticsType, attributes and geneId
*/
public Integer getExperimentCountsForGene(String efvOrEfo, StatisticsType statisticsType, boolean isEfo, Long geneId) {
return getExperimentCountsForGene(efvOrEfo, statisticsType, isEfo, geneId, null, null);
}
/**
* @param efvOrEfo
* @param statisticsType
* @param isEfo
* @param geneId
* @return Experiment count for statisticsType, attributes and geneId
*/
public Integer getExperimentCountsForGene(
String efvOrEfo,
StatisticsType statisticsType,
boolean isEfo,
Long geneId,
Set<Long> geneRestrictionSet,
HashMap<String, Multiset<Integer>> scoresCacheForStatType) {
Attribute attr = new Attribute(efvOrEfo, isEfo, statisticsType);
if (!isEfo && statisticsStorage.getIndexForAttribute(attr) == null) {
// TODO NB. This is currently possible as sample properties are not currently stored in statisticsStorage
log.debug("Attribute " + attr + " was not found in Attribute Index");
// return experiment count == 0 if an efv attribute could not be found in AttributeIndex (note
// that efo attributes are not explicitly stored in AttributeIndex)
return 0;
}
if (geneRestrictionSet == null) { // By default restrict the experiment count query to geneId
geneRestrictionSet = new HashSet<Long>(Collections.singletonList(geneId));
}
StatisticsQueryCondition statsQuery = new StatisticsQueryCondition(geneRestrictionSet);
statsQuery.and(getStatisticsOrQuery(Collections.singletonList(attr)));
Multiset<Integer> scores = StatisticsQueryUtils.getExperimentCounts(statsQuery, statisticsStorage);
// Cache geneRestrictionSet's scores for efvOrEfo - this cache will be re-used in heatmaps for rows other than the first one
if (scoresCacheForStatType != null) {
scoresCacheForStatType.put(efvOrEfo, scores);
}
Integer geneIndex = statisticsStorage.getIndexForGeneId(geneId);
if (scores != null) {
long time = System.currentTimeMillis();
int expCountForGene = scores.count(geneIndex);
if (expCountForGene > 0) {
log.debug(statisticsType + " " + efvOrEfo + " expCountForGene: " + geneId + " (" + geneIndex + ") = " + expCountForGene + " got in: " + (System.currentTimeMillis() - time) + " ms");
}
return expCountForGene;
}
return 0;
}
/**
* @param orAttributes
* @return StatisticsQueryOrConditions, including children of all efo's in orAttributes
*/
public StatisticsQueryOrConditions<StatisticsQueryCondition> getStatisticsOrQuery(List<Attribute> orAttributes) {
List<Attribute> efoPlusChildren = includeEfoChildren(orAttributes);
return StatisticsQueryUtils.getStatisticsOrQuery(efoPlusChildren, statisticsStorage);
}
/**
* @param orAttributes
* @return List containing all (afv and efo) attributes in orAttributes, plus the children of all efo's in orAttributes
*/
private List<Attribute> includeEfoChildren(List<Attribute> orAttributes) {
// Set de-duplicates attributes shared between an efo and its children
Set<Attribute> attrsPlusChildren = new HashSet<Attribute>();
for (Attribute attr : orAttributes) {
if (attr.isEfo() == StatisticsQueryUtils.EFO_QUERY) {
Collection<String> efoPlusChildren = efo.getTermAndAllChildrenIds(attr.getEfv());
log.debug("Expanded efo: " + attr + " into: " + efoPlusChildren);
for (String efoTerm : efoPlusChildren) {
attrsPlusChildren.add(new Attribute(efoTerm, StatisticsQueryUtils.EFO_QUERY, attr.getStatType()));
}
} else {
attrsPlusChildren.add(attr);
}
}
return new ArrayList<Attribute>(attrsPlusChildren);
}
public Integer getIndexForGene(Long geneId) {
return statisticsStorage.getIndexForGeneId(geneId);
}
/**
* http://stackoverflow.com/questions/3029151/find-top-n-elements-in-a-multiset-from-google-collections
*
* @param multiset
* @param <T>
* @return entries of multiset sorted by descending count
*/
private static <T> ImmutableList<Multiset.Entry<T>> sortedByCount(Multiset<T> multiset) {
Ordering<Multiset.Entry<T>> countComp = new Ordering<Multiset.Entry<T>>() {
public int compare(Multiset.Entry<T> e1, Multiset.Entry<T> e2) {
// NOTE(review): subtraction-based compare can overflow for counts
// near Integer.MAX_VALUE; Multiset counts are non-negative so this
// is unlikely here, but an explicit comparison would be safer.
return e2.getCount() - e1.getCount();
}
};
return countComp.immutableSortedCopy(multiset.entrySet());
}
/**
* http://stackoverflow.com/questions/3029151/find-top-n-elements-in-a-multiset-from-google-collections
*
* @param multiset
* @param min
* @param max
* @param <T>
* @return
*/
private static <T> ImmutableList<Multiset.Entry<T>> getEntriesBetweenMinMaxFromListSortedByCount(Multiset<T> multiset,
int min, int max) {
ImmutableList<Multiset.Entry<T>> sortedByCount = sortedByCount(multiset);
// NOTE(review): the 'min' offset is applied only when size() > max; for
// sizes in (min, max] the whole list (including entries before 'min') is
// returned, which looks inconsistent with getSortedGenes()'s claim of
// starting at 'minPos' - confirm the intended pagination behaviour.
if (sortedByCount.size() > max) {
sortedByCount = sortedByCount.subList(min, max);
}
return sortedByCount;
}
/**
* @param statsQuery
* @param minPos
* @param rows
* @param sortedGenesChunk - a chunk of the overall sorted (by experiment counts - in desc order) list of genes,
*                         starting from 'minPos' and containing maximums 'rows' genes
* @return The overall number of genes for which counts exist in statsQuery
*/
public Integer getSortedGenes(final StatisticsQueryCondition statsQuery, final int minPos, final int rows, List<Long> sortedGenesChunk) {
long timeStart = System.currentTimeMillis();
Multiset<Integer> countsForConditions = StatisticsQueryUtils.getExperimentCounts(statsQuery, statisticsStorage);
log.info("conditions bit index query for " + statsQuery.prettyPrint() + " (genes with counts present: " + countsForConditions.elementSet().size() + ") retrieved in : " + (System.currentTimeMillis() - timeStart) + " ms");
List<Multiset.Entry<Integer>> sortedCounts = getEntriesBetweenMinMaxFromListSortedByCount(countsForConditions, minPos, minPos + rows);
for (Multiset.Entry<Integer> entry : sortedCounts) {
Long geneId = statisticsStorage.getGeneIdForIndex(entry.getElement());
if (geneId != null) {
sortedGenesChunk.add(geneId);
} else {
log.error("Failed to retrieve gene id for index: " + entry.getElement());
}
}
return countsForConditions.elementSet().size();
}
}
|
Added getScoringAttributesForGenes()
git-svn-id: cf994790c380884b68018c4cd7b01b4c2e3bb7f4@16276 2913f559-6b04-0410-9a09-c530ee9f5186
|
atlas-web/src/main/java/ae3/service/AtlasStatisticsQueryService.java
|
Added getScoringAttributesForGenes()
|
|
Java
|
bsd-2-clause
|
3e5105c6e22c6feb4c1b1d5707a27eae2be5f5e6
| 0
|
malensek/sing
|
package io.sigpipe.sing.dataset;
import java.util.ArrayList;
import java.util.List;
import java.util.NavigableSet;
import java.util.TreeSet;
import io.sigpipe.sing.dataset.feature.Feature;
public class Quantizer {

    private NavigableSet<Feature> ticks = new TreeSet<>();

    /**
     * Creates a Quantizer from an explicit set of tick marks.
     *
     * @param ticks tick mark Features defining the bucket boundaries
     */
    public Quantizer(Feature... ticks) {
        for (Feature tick : ticks) {
            addTick(tick);
        }
    }

    /**
     * Creates a Quantizer over [start, end) with the given step, converting
     * each primitive value to a {@link Feature} first.
     */
    public Quantizer(Object start, Object end, Object step) {
        this(
                Feature.fromPrimitiveType(start),
                Feature.fromPrimitiveType(end),
                Feature.fromPrimitiveType(step));
    }

    /**
     * Creates a Quantizer with tick marks at [start, end), spaced 'step' apart.
     *
     * @throws IllegalArgumentException if the three Features are not all of the
     *                                  same type
     */
    public Quantizer(Feature start, Feature end, Feature step) {
        if (start.sameType(end) == false || start.sameType(step) == false) {
            throw new IllegalArgumentException(
                    "All feature types must be the same");
        }
        // NOTE(review): assumes step moves tick towards end; a zero or negative
        // step would make this loop never terminate - confirm callers validate.
        Feature tick = new Feature(start);
        while (tick.less(end)) {
            addTick(tick);
            tick = tick.add(step);
        }
    }

    private void addTick(Feature tick) {
        ticks.add(tick);
    }

    /** Number of tick marks (buckets) in this Quantizer. */
    public int numTicks() {
        return ticks.size();
    }

    /**
     * Quantizes a given Feature based on this Quantizer's tick mark
     * configuration. When quantizing a Feature, a bucket will be retrieved that
     * represents the Feature in question in the tick mark range. Note that the
     * bucket returned is not necessarily closest in value to the Feature, but
     * simply represents its range of values.
     *
     * @param feature The Feature to quantize
     * @return A quantized representation of the Feature
     */
    public Feature quantize(Feature feature) {
        Feature result = ticks.floor(feature);
        if (result == null) {
            // Feature is below the lowest tick: clamp to the first bucket.
            // NOTE(review): throws NoSuchElementException if there are no ticks.
            return ticks.first();
        }
        return result;
    }

    /**
     * Retrieves the next tick mark value after the given Feature. In other
     * words, this method will return the bucket after the given Feature's
     * bucket. If there is no next tick mark (the specified Feature's bucket is
     * at the end of the range) then this method returns null.
     *
     * @param feature Feature to use to locate the next tick mark bucket in the
     * range.
     * @return Next tick mark, or null if the end of the range has been reached.
     */
    public Feature nextTick(Feature feature) {
        return ticks.higher(feature);
    }

    /**
     * Retrieves the tick mark value preceding the given Feature. In other
     * words, this method will return the bucket before the given Feature's
     * bucket. If there is no previous tick mark (the specified Feature's bucket
     * is at the beginning of the range) then this method returns null.
     *
     * @param feature Feature to use to locate the previous tick mark bucket in
     * the range.
     * @return Previous tick mark, or null if the start of the range has been
     * reached.
     */
    public Feature prevTick(Feature feature) {
        return ticks.lower(feature);
    }

    @Override
    public String toString() {
        // StringBuilder avoids the O(n^2) copying of += concatenation in a loop
        StringBuilder output = new StringBuilder();
        for (Feature f : ticks) {
            output.append(f.getString()).append(System.lineSeparator());
        }
        return output.toString();
    }

    /**
     * Builder that allows incremental creation of a {@link Quantizer}.
     */
    public static class QuantizerBuilder {

        List<Feature> ticks = new ArrayList<>();

        public void addTick(Feature tick) {
            this.ticks.add(tick);
        }

        public void addTicks(Feature... ticks) {
            for (Feature tick : ticks) {
                addTick(tick);
            }
        }

        public void removeTick(Feature tick) {
            this.ticks.remove(tick);
        }

        /** Returns a defensive copy of the ticks added so far. */
        public List<Feature> getTicks() {
            return new ArrayList<Feature>(ticks);
        }

        /** Materializes a Quantizer from the accumulated tick marks. */
        public Quantizer build() {
            Quantizer q = new Quantizer();
            for (Feature tick : ticks) {
                q.addTick(tick);
            }
            return q;
        }
    }
}
|
src/main/java/io/sigpipe/sing/dataset/Quantizer.java
|
package io.sigpipe.sing.dataset;
import java.util.NavigableSet;
import java.util.TreeSet;
import io.sigpipe.sing.dataset.feature.Feature;
// NOTE(review): this revision references List and ArrayList below, but the
// file's visible imports are only NavigableSet, TreeSet and Feature - it
// would not compile without java.util.List/ArrayList imports; confirm.
public class Quantizer {
private NavigableSet<Feature> ticks = new TreeSet<>();
// Creates a Quantizer from an explicit set of tick marks.
public Quantizer(Feature... ticks) {
for (Feature tick : ticks) {
addTick(tick);
}
}
// Converts the primitive start/end/step values to Features and delegates.
public Quantizer(Object start, Object end, Object step) {
this(
Feature.fromPrimitiveType(start),
Feature.fromPrimitiveType(end),
Feature.fromPrimitiveType(step));
}
// Creates tick marks at [start, end), spaced 'step' apart.
// NOTE(review): a zero or negative step would make the loop below never
// terminate - confirm callers validate step.
public Quantizer(Feature start, Feature end, Feature step) {
if (start.sameType(end) == false || start.sameType(step) == false) {
throw new IllegalArgumentException(
"All feature types must be the same");
}
Feature tick = new Feature(start);
while (tick.less(end)) {
addTick(tick);
tick = tick.add(step);
}
}
private void addTick(Feature tick) {
ticks.add(tick);
}
// Number of tick marks (buckets) in this Quantizer.
public int numTicks() {
return ticks.size();
}
/**
* Quantizes a given Feature based on this Quantizer's tick mark
* configuration. When quantizing a Feature, a bucket will be retrieved that
* represents the Feature in question in the tick mark range. Note that the
* bucket returned is not necessarily closest in value to the Feature, but
* simply represents its range of values.
*
* @param feature The Feature to quantize
* @return A quantized representation of the Feature
*/
public Feature quantize(Feature feature) {
Feature result = ticks.floor(feature);
if (result == null) {
// Below the lowest tick: clamp to the first bucket.
// NOTE(review): throws NoSuchElementException when there are no ticks.
return ticks.first();
}
return result;
}
/**
* Retrieves the next tick mark value after the given Feature. In other
* words, this method will return the bucket after the given Feature's
* bucket. If there is no next tick mark (the specified Feature's bucket is
* at the end of the range) then this method returns null.
*
* @param feature Feature to use to locate the next tick mark bucket in the
* range.
* @return Next tick mark, or null if the end of the range has been reached.
*/
public Feature nextTick(Feature feature) {
return ticks.higher(feature);
}
/**
* Retrieves the tick mark value preceding the given Feature. In other
* words, this method will return the bucket before the given Feature's
* bucket. If there is no previous tick mark (the specified Feature's bucket
* is at the beginning of the range) then this method returns null.
*
* @param feature Feature to use to locate the previous tick mark bucket in
* the range.
* @return Previous tick mark, or null if the start of the range has been
* reached.
*/
public Feature prevTick(Feature feature) {
return ticks.lower(feature);
}
@Override
public String toString() {
// NOTE: string concatenation in a loop; a StringBuilder would avoid
// quadratic copying for large tick sets.
String output = "";
for (Feature f : ticks) {
output += f.getString() + System.lineSeparator();
}
return output;
}
/**
* Builder that allows incremental creation of a {@link Quantizer}.
*/
public static class QuantizerBuilder {
List<Feature> ticks = new ArrayList<>();
public void addTick(Feature tick) {
this.ticks.add(tick);
}
public void addTicks(Feature... ticks) {
for (Feature tick : ticks) {
addTick(tick);
}
}
public void removeTick(Feature tick) {
this.ticks.remove(tick);
}
// Returns a defensive copy of the ticks added so far.
public List<Feature> getTicks() {
return new ArrayList<Feature>(ticks);
}
// Materializes a Quantizer from the accumulated tick marks.
public Quantizer build() {
Quantizer q = new Quantizer();
for (Feature tick : ticks) {
q.addTick(tick);
}
return q;
}
}
}
|
Update imports
|
src/main/java/io/sigpipe/sing/dataset/Quantizer.java
|
Update imports
|
|
Java
|
bsd-3-clause
|
cab619fd7790f65e92cd422aef30ac6353022ec4
| 0
|
scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown,scristopher/paintown
|
package com.rafkind.paintown.level;
import java.awt.*;
import java.awt.image.*;
import java.io.*;
import javax.imageio.*;
import java.util.Iterator;
import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import com.rafkind.paintown.exception.LoadException;
import com.rafkind.paintown.TokenReader;
import com.rafkind.paintown.Token;
import com.rafkind.paintown.MaskedImage;
import com.rafkind.paintown.Editor;
public class Level{
private String name;
private Image background;
private String backgroundFile;
private int width;
private List frontPanels;
private HashMap backPanels;
private List panelOrder;
private int minZ;
private int maxZ;
private double backgroundParallax;
private double foregroundParallax;
private double scale;
private File path;
private String atmosphere;
private String description;
private Token trigger;
private List blocks;
private static int nextId = 0;
private static int nextBlockId = 0;
public static int nextId(){
nextId += 1;
return nextId;
}
public static void checkId(int i){
if (i > nextId){
nextId = i;
}
}
public static void resetId(){
nextId = 0;
}
public static int nextBlockId(){
nextBlockId += 1;
return nextBlockId;
}
public static void checkBlockId(int i){
if (i > nextBlockId){
nextBlockId = i;
}
}
public static void resetBlockId(){
nextBlockId = 0;
}
private class Panel{
public String name;
public Image image;
public Panel( String name, Image i ){
this.name = name;
this.image = i;
}
}
public Level(){
initAll();
this.path = null;
}
public File getPath(){
return path;
}
public void setPath( File path ){
this.path = path;
}
public Level( File path ) throws LoadException {
this.path = path;
load( path );
}
public void setAtmosphere( String s ){
this.atmosphere = s;
}
public String getAtmosphere(){
return this.atmosphere;
}
public void setBackgroundParallax( double d ){
this.backgroundParallax = d;
}
public double getBackgroundParallax(){
return this.backgroundParallax;
}
public void setForegroundParallax( double d ){
this.foregroundParallax = d;
}
public double getForegroundParallax(){
return this.foregroundParallax;
}
public String getDescription(){
return this.description;
}
public void setDescription(String description){
this.description = description;
}
public List getBlocks(){
return blocks;
}
public int getMinZ(){
return minZ;
}
public int getMaxZ(){
return maxZ;
}
public void setMinZ( int z ){
minZ = z;
}
public void setMaxZ( int z ){
maxZ = z;
}
public void addFrontPanel( String s ) throws LoadException {
frontPanels.add( new Panel( s, loadImage( s ) ) );
}
public List getBackPanelOrder(){
return new ArrayList( panelOrder );
}
public void addBackPanelOrder( String path ){
for ( Iterator it = this.backPanels.entrySet().iterator(); it.hasNext(); ){
Map.Entry entry = (Map.Entry) it.next();
Integer key = (Integer) entry.getKey();
Panel panel = (Panel) entry.getValue();
if ( panel.name.equals( path ) ){
panelOrder.add( key );
return;
}
}
}
public void removeLastOrder(){
if ( ! panelOrder.isEmpty() ){
panelOrder.remove( panelOrder.size() - 1 );
}
}
private Integer findFreeKey( HashMap map ){
int i = 0;
for ( Iterator it = map.keySet().iterator(); it.hasNext(); ){
Integer x = (Integer) it.next();
if ( i <= x.intValue() ){
i = x.intValue() + 1;
}
}
return new Integer( i );
}
public void addBackPanel( String path ) throws LoadException {
this.backPanels.put( findFreeKey( backPanels ), new Panel( path, loadImage( path ) ) );
}
public String getBackPanelName( int i ){
Panel panel = (Panel) backPanels.get( new Integer( i ) );
return panel.name;
}
public void removeBackPanel( String path ){
for ( Iterator it = this.backPanels.entrySet().iterator(); it.hasNext(); ){
Map.Entry entry = (Map.Entry) it.next();
Integer key = (Integer) entry.getKey();
Panel panel = (Panel) entry.getValue();
if ( panel.name.equals( path ) ){
this.backPanels.remove( key );
for ( Iterator orders = this.panelOrder.iterator(); orders.hasNext(); ){
Integer o = (Integer) orders.next();
if ( o.equals( key ) ){
orders.remove();
}
}
return;
}
}
}
public List getFrontPanelNames(){
List names = new ArrayList();
for ( Iterator it = frontPanels.iterator(); it.hasNext(); ){
Panel panel = (Panel) it.next();
names.add( panel.name );
}
return names;
}
public List getBackPanelNames(){
List names = new ArrayList();
for ( Iterator it = backPanels.values().iterator(); it.hasNext(); ){
Panel panel = (Panel) it.next();
names.add( panel.name );
}
return names;
}
public void removeFrontPanel( String s ){
for ( Iterator it = frontPanels.iterator(); it.hasNext(); ){
Panel panel = (Panel) it.next();
if ( panel.name.equals( s ) ){
frontPanels.remove( panel );
break;
}
}
}
private void drawFrontPanels( Graphics2D g ){
int w = 0;
if ( ! frontPanels.isEmpty() ){
while ( w < getWidth() / getScale() ){
int ow = w;
for ( Iterator it = frontPanels.iterator(); it.hasNext(); ){
Panel p = (Panel) it.next();
Image panel = (Image) p.image;
g.drawImage( panel, w, 0, null );
w += panel.getWidth( null );
}
/* make sure not to get into an infinite loop due to lack
* of width on the images
*/
if ( w == ow ){
w += 1;
}
}
}
}
private void drawBackground( Graphics2D g ){
if ( background != null ){
int w = 0;
while ( w < getWidth() / getScale() ){
g.drawImage( background, w, 0, null );
w += background.getWidth( null );
}
}
}
private void drawBackPanels( Graphics2D g ){
int w = 0;
for ( Iterator it = this.panelOrder.iterator(); it.hasNext(); ){
Integer i = (Integer) it.next();
Image image = ((Panel) this.backPanels.get( i )).image;
g.drawImage( image, w, 0, null );
w += image.getWidth( null );
}
}
private void drawBlocks( Graphics2D g, int height ){
int w = 0;
int num = 1;
for ( Iterator it = this.blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
b.render( g, w, height, minZ, maxZ, num );
g.drawString( "Block " + num, w + 5, 15 );
w += b.getLength();
}
num += 1;
}
}
public void render( Graphics2D g, int x, int y, int width, int height ){
// g.clearRect( 0, 0, (int) getWidth(), (int) getHeight() );
g.scale( getScale(), getScale() );
drawBackground( g );
drawBackPanels( g );
g.setColor( new Color( 255, 0, 0 ) );
g.drawLine( 0, getMinZ(), (int)(getWidth() / getScale()), getMinZ() );
g.drawLine( 0, getMaxZ(), (int)(getWidth() / getScale()), getMaxZ() );
drawBlocks( g, height );
drawFrontPanels( g );
}
public Block findBlock( Thing t ){
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.hasThing( t ) ){
return b;
}
}
return null;
}
/* find the block that contains point x */
public Block findBlock( int x ){
int total = 0;
for ( Iterator it = getBlocks().iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
if ( x >= total && x <= total + b.getLength() ){
return b;
}
total += b.getLength();
}
}
return null;
}
public Thing findThing( int x, int y ){
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
Thing t = b.findThing( x, y - getMinZ() );
if ( t != null ){
return t;
}
x -= b.getLength();
}
}
return null;
}
public void moveThing( Thing thing, int x, int y ){
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
if ( b.hasThing( thing ) ){
int my = y - getMinZ();
if ( my > getMaxZ() - getMinZ() ){
my = getMaxZ() - getMinZ();
}
if ( my < 0 ){
my = 0;
}
// System.out.println( b + ". Move " + thing + " to " + x + " " + my );
thing.setX( x );
thing.setY( my );
} else {
// System.out.println( b + " does not have " + thing );
}
// x -= b.getLength();
}
}
}
public void initAll(){
this.name = null;
this.background = null;
this.backgroundFile = null;
this.minZ = 100;
this.maxZ = 200;
this.scale = 2;
this.backgroundParallax = 5;
this.foregroundParallax = 1.2;
this.atmosphere = null;
this.width = 640;
this.frontPanels = new ArrayList();
this.backPanels = new HashMap();
this.panelOrder = new ArrayList();
this.blocks = new ArrayList();
resetId();
resetBlockId();
}
private void load( File f ) throws LoadException {
initAll();
TokenReader reader = new TokenReader( f );
Token head = reader.nextToken();
if ( ! head.getName().equals( "level" ) ){
throw new LoadException( "Starting token is not 'level'" );
}
Token z = head.findToken( "z" );
if ( z != null ){
Token min = z.findToken( "minimum" );
if ( min != null ){
minZ = min.readInt( 0 );
}
Token max = z.findToken( "maximum" );
if ( max != null ){
maxZ = max.readInt( 0 );
}
}
Token desc = head.findToken("description");
if (desc != null){
setDescription(desc.readString(0));
}
trigger = head.findToken("trigger");
Token atm = head.findToken( "atmosphere" );
if ( atm != null ){
setAtmosphere( atm.readString( 0 ) );
}
Token parallax = head.findToken( "background-parallax" );
if ( parallax != null ){
setBackgroundParallax( parallax.readDouble( 0 ) );
}
parallax = head.findToken( "foreground-parallax" );
if ( parallax != null ){
setForegroundParallax( parallax.readDouble( 0 ) );
}
loadBackground( head.findToken( "background" ) );
// setWidth( calculateWidth( head.findTokens( "block/length" ) ) );
for ( Iterator it = head.findTokens( "frontpanel" ).iterator(); it.hasNext(); ){
Token t = (Token) it.next();
String file = t.readString( 0 );
frontPanels.add( new Panel( file, loadImage( file ) ) );
}
for ( Iterator it = head.findTokens( "panel" ).iterator(); it.hasNext(); ){
Token t = (Token) it.next();
int index = t.readInt( 0 );
String file = t.readString( 1 );
this.backPanels.put( new Integer( index ), new Panel( file, loadImage( file ) ) );
}
Token order = head.findToken( "order" );
if ( order != null ){
for ( Iterator it = order.iterator(); it.hasNext(); ){
panelOrder.add( Integer.valueOf( it.next().toString() ) );
}
} else {
System.out.println( "No 'order' token given" );
}
for ( Iterator it = head.findTokens( "block" ).iterator(); it.hasNext(); ){
Token t = (Token) it.next();
this.blocks.add( new Block( t ) );
}
System.out.println( "Loaded " + f );
}
private Image loadImage( String s ) throws LoadException {
try{
return MaskedImage.load( Editor.dataPath( s ) );
} catch ( IOException ie ){
throw new LoadException( "Could not load " + s );
}
}
private void loadBackground( Token t ) throws LoadException {
if ( t != null ){
String s = String.valueOf( t.iterator().next() );
setBackground( loadImage( s ) );
backgroundFile = s;
}
}
public void loadBackground( String s ){
try{
setBackground( loadImage( s ) );
backgroundFile = s;
} catch ( LoadException e ){
e.printStackTrace();
}
}
public String getBackgroundFile(){
return backgroundFile;
}
private void setWidth( int w ){
width = w;
}
private int calculateWidth( List lengths ){
int w = 0;
for ( Iterator it = lengths.iterator(); it.hasNext(); ){
Token t = (Token) it.next();
w += t.readInt( 0 ) * getScale();
}
return w;
}
private void setBackground( Image i ){
background = i;
}
public Dimension getSize(){
return new Dimension( (int) getWidth(), (int) getHeight() );
}
public double getWidth(){
double w = 0;
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
w += b.getLength() * getScale();
}
}
return w;
}
public Token toToken(){
Token level = new Token();
level.addToken( new Token( level, "level" ) );
Token z = new Token( level );
level.addToken( z );
z.addToken( new Token( z, "z" ) );
z.addToken( new String[]{ "minimum", String.valueOf( getMinZ() ) } );
z.addToken( new String[]{ "maximum", String.valueOf( getMaxZ() ) } );
if ( getAtmosphere() != null ){
level.addToken( new String[]{ "atmosphere", getAtmosphere() } );
}
level.addToken( new String[]{ "background-parallax", String.valueOf( getBackgroundParallax() ) } );
level.addToken( new String[]{ "foreground-parallax", String.valueOf( getForegroundParallax() ) } );
if ( backgroundFile != null ){
level.addToken( new String[]{ "background", "\"" + backgroundFile.replaceAll( "\\\\", "/" ) + "\"" } );
}
for ( Iterator it = frontPanels.iterator(); it.hasNext(); ){
/*
Token f = new Token( level );
level.addToken( f );
Panel p = (Panel) it.next();
f.addToken( new Token( f, "frontpanel" ) );
f.addToken( new Token( f, p.name ) );
*/
Panel p = (Panel) it.next();
level.addToken( new String[]{ "frontpanel", "\"" + p.name.replaceAll( "\\\\", "/" ) + "\"" } );
}
for ( Iterator it = backPanels.entrySet().iterator(); it.hasNext(); ){
Map.Entry entry = (Map.Entry) it.next();
Token f = new Token( level );
level.addToken( f );
f.addToken( new Token( f, "panel" ) );
Panel p = (Panel) entry.getValue();
f.addToken( new Token( f, entry.getKey().toString() ) );
f.addToken( new Token( f, "\"" + p.name.replaceAll( "\\\\", "/" ) + "\"" ) );
f.addToken( new Token( f, "junk" ) );
f.addToken( new Token( f, "junk" ) );
}
if (getDescription() != null){
level.addToken(new String[]{ "description", "\"" + getDescription() + "\""});
}
if (trigger != null){
level.addToken(trigger);
}
Token order = new Token( level );
level.addToken( order );
order.addToken( new Token( order, "order" ) );
for ( Iterator it = panelOrder.iterator(); it.hasNext(); ){
order.addToken( new Token( order, it.next().toString() ) );
}
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
level.addToken( b.toToken() );
}
return level;
}
public double getScale(){
return scale;
}
public void setScale( double s ){
scale = s;
}
public double getHeight(){
return 240 * getScale();
}
}
|
editor/src/com/rafkind/paintown/level/Level.java
|
package com.rafkind.paintown.level;
import java.awt.*;
import java.awt.image.*;
import java.io.*;
import javax.imageio.*;
import java.util.Iterator;
import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import com.rafkind.paintown.exception.LoadException;
import com.rafkind.paintown.TokenReader;
import com.rafkind.paintown.Token;
import com.rafkind.paintown.MaskedImage;
import com.rafkind.paintown.Editor;
public class Level{
private String name;
private Image background;
private String backgroundFile;
private int width;
private List frontPanels;
private HashMap backPanels;
private List panelOrder;
private int minZ;
private int maxZ;
private double backgroundParallax;
private double foregroundParallax;
private double scale;
private File path;
private String atmosphere;
private String description;
private List blocks;
private static int nextId = 0;
private static int nextBlockId = 0;
public static int nextId(){
nextId += 1;
return nextId;
}
public static void checkId(int i){
if (i > nextId){
nextId = i;
}
}
public static void resetId(){
nextId = 0;
}
public static int nextBlockId(){
nextBlockId += 1;
return nextBlockId;
}
public static void checkBlockId(int i){
if (i > nextBlockId){
nextBlockId = i;
}
}
public static void resetBlockId(){
nextBlockId = 0;
}
private class Panel{
public String name;
public Image image;
public Panel( String name, Image i ){
this.name = name;
this.image = i;
}
}
public Level(){
initAll();
this.path = null;
}
public File getPath(){
return path;
}
public void setPath( File path ){
this.path = path;
}
public Level( File path ) throws LoadException {
this.path = path;
load( path );
}
public void setAtmosphere( String s ){
this.atmosphere = s;
}
public String getAtmosphere(){
return this.atmosphere;
}
public void setBackgroundParallax( double d ){
this.backgroundParallax = d;
}
public double getBackgroundParallax(){
return this.backgroundParallax;
}
public void setForegroundParallax( double d ){
this.foregroundParallax = d;
}
public double getForegroundParallax(){
return this.foregroundParallax;
}
public String getDescription(){
return this.description;
}
public void setDescription(String description){
this.description = description;
}
public List getBlocks(){
return blocks;
}
public int getMinZ(){
return minZ;
}
public int getMaxZ(){
return maxZ;
}
public void setMinZ( int z ){
minZ = z;
}
public void setMaxZ( int z ){
maxZ = z;
}
public void addFrontPanel( String s ) throws LoadException {
frontPanels.add( new Panel( s, loadImage( s ) ) );
}
public List getBackPanelOrder(){
return new ArrayList( panelOrder );
}
public void addBackPanelOrder( String path ){
for ( Iterator it = this.backPanels.entrySet().iterator(); it.hasNext(); ){
Map.Entry entry = (Map.Entry) it.next();
Integer key = (Integer) entry.getKey();
Panel panel = (Panel) entry.getValue();
if ( panel.name.equals( path ) ){
panelOrder.add( key );
return;
}
}
}
public void removeLastOrder(){
if ( ! panelOrder.isEmpty() ){
panelOrder.remove( panelOrder.size() - 1 );
}
}
private Integer findFreeKey( HashMap map ){
int i = 0;
for ( Iterator it = map.keySet().iterator(); it.hasNext(); ){
Integer x = (Integer) it.next();
if ( i <= x.intValue() ){
i = x.intValue() + 1;
}
}
return new Integer( i );
}
public void addBackPanel( String path ) throws LoadException {
this.backPanels.put( findFreeKey( backPanels ), new Panel( path, loadImage( path ) ) );
}
public String getBackPanelName( int i ){
Panel panel = (Panel) backPanels.get( new Integer( i ) );
return panel.name;
}
public void removeBackPanel( String path ){
for ( Iterator it = this.backPanels.entrySet().iterator(); it.hasNext(); ){
Map.Entry entry = (Map.Entry) it.next();
Integer key = (Integer) entry.getKey();
Panel panel = (Panel) entry.getValue();
if ( panel.name.equals( path ) ){
this.backPanels.remove( key );
for ( Iterator orders = this.panelOrder.iterator(); orders.hasNext(); ){
Integer o = (Integer) orders.next();
if ( o.equals( key ) ){
orders.remove();
}
}
return;
}
}
}
public List getFrontPanelNames(){
List names = new ArrayList();
for ( Iterator it = frontPanels.iterator(); it.hasNext(); ){
Panel panel = (Panel) it.next();
names.add( panel.name );
}
return names;
}
public List getBackPanelNames(){
List names = new ArrayList();
for ( Iterator it = backPanels.values().iterator(); it.hasNext(); ){
Panel panel = (Panel) it.next();
names.add( panel.name );
}
return names;
}
public void removeFrontPanel( String s ){
for ( Iterator it = frontPanels.iterator(); it.hasNext(); ){
Panel panel = (Panel) it.next();
if ( panel.name.equals( s ) ){
frontPanels.remove( panel );
break;
}
}
}
private void drawFrontPanels( Graphics2D g ){
int w = 0;
if ( ! frontPanels.isEmpty() ){
while ( w < getWidth() / getScale() ){
int ow = w;
for ( Iterator it = frontPanels.iterator(); it.hasNext(); ){
Panel p = (Panel) it.next();
Image panel = (Image) p.image;
g.drawImage( panel, w, 0, null );
w += panel.getWidth( null );
}
/* make sure not to get into an infinite loop due to lack
* of width on the images
*/
if ( w == ow ){
w += 1;
}
}
}
}
private void drawBackground( Graphics2D g ){
if ( background != null ){
int w = 0;
while ( w < getWidth() / getScale() ){
g.drawImage( background, w, 0, null );
w += background.getWidth( null );
}
}
}
private void drawBackPanels( Graphics2D g ){
int w = 0;
for ( Iterator it = this.panelOrder.iterator(); it.hasNext(); ){
Integer i = (Integer) it.next();
Image image = ((Panel) this.backPanels.get( i )).image;
g.drawImage( image, w, 0, null );
w += image.getWidth( null );
}
}
private void drawBlocks( Graphics2D g, int height ){
int w = 0;
int num = 1;
for ( Iterator it = this.blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
b.render( g, w, height, minZ, maxZ, num );
g.drawString( "Block " + num, w + 5, 15 );
w += b.getLength();
}
num += 1;
}
}
public void render( Graphics2D g, int x, int y, int width, int height ){
// g.clearRect( 0, 0, (int) getWidth(), (int) getHeight() );
g.scale( getScale(), getScale() );
drawBackground( g );
drawBackPanels( g );
g.setColor( new Color( 255, 0, 0 ) );
g.drawLine( 0, getMinZ(), (int)(getWidth() / getScale()), getMinZ() );
g.drawLine( 0, getMaxZ(), (int)(getWidth() / getScale()), getMaxZ() );
drawBlocks( g, height );
drawFrontPanels( g );
}
public Block findBlock( Thing t ){
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.hasThing( t ) ){
return b;
}
}
return null;
}
/* find the block that contains point x */
public Block findBlock( int x ){
int total = 0;
for ( Iterator it = getBlocks().iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
if ( x >= total && x <= total + b.getLength() ){
return b;
}
total += b.getLength();
}
}
return null;
}
public Thing findThing( int x, int y ){
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
Thing t = b.findThing( x, y - getMinZ() );
if ( t != null ){
return t;
}
x -= b.getLength();
}
}
return null;
}
public void moveThing( Thing thing, int x, int y ){
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
if ( b.hasThing( thing ) ){
int my = y - getMinZ();
if ( my > getMaxZ() - getMinZ() ){
my = getMaxZ() - getMinZ();
}
if ( my < 0 ){
my = 0;
}
// System.out.println( b + ". Move " + thing + " to " + x + " " + my );
thing.setX( x );
thing.setY( my );
} else {
// System.out.println( b + " does not have " + thing );
}
// x -= b.getLength();
}
}
}
public void initAll(){
this.name = null;
this.background = null;
this.backgroundFile = null;
this.minZ = 100;
this.maxZ = 200;
this.scale = 2;
this.backgroundParallax = 5;
this.foregroundParallax = 1.2;
this.atmosphere = null;
this.width = 640;
this.frontPanels = new ArrayList();
this.backPanels = new HashMap();
this.panelOrder = new ArrayList();
this.blocks = new ArrayList();
resetId();
resetBlockId();
}
private void load( File f ) throws LoadException {
initAll();
TokenReader reader = new TokenReader( f );
Token head = reader.nextToken();
if ( ! head.getName().equals( "level" ) ){
throw new LoadException( "Starting token is not 'level'" );
}
Token z = head.findToken( "z" );
if ( z != null ){
Token min = z.findToken( "minimum" );
if ( min != null ){
minZ = min.readInt( 0 );
}
Token max = z.findToken( "maximum" );
if ( max != null ){
maxZ = max.readInt( 0 );
}
}
Token desc = head.findToken("description");
if (desc != null){
setDescription(desc.readString(0));
}
Token atm = head.findToken( "atmosphere" );
if ( atm != null ){
setAtmosphere( atm.readString( 0 ) );
}
Token parallax = head.findToken( "background-parallax" );
if ( parallax != null ){
setBackgroundParallax( parallax.readDouble( 0 ) );
}
parallax = head.findToken( "foreground-parallax" );
if ( parallax != null ){
setForegroundParallax( parallax.readDouble( 0 ) );
}
loadBackground( head.findToken( "background" ) );
// setWidth( calculateWidth( head.findTokens( "block/length" ) ) );
for ( Iterator it = head.findTokens( "frontpanel" ).iterator(); it.hasNext(); ){
Token t = (Token) it.next();
String file = t.readString( 0 );
frontPanels.add( new Panel( file, loadImage( file ) ) );
}
for ( Iterator it = head.findTokens( "panel" ).iterator(); it.hasNext(); ){
Token t = (Token) it.next();
int index = t.readInt( 0 );
String file = t.readString( 1 );
this.backPanels.put( new Integer( index ), new Panel( file, loadImage( file ) ) );
}
Token order = head.findToken( "order" );
if ( order != null ){
for ( Iterator it = order.iterator(); it.hasNext(); ){
panelOrder.add( Integer.valueOf( it.next().toString() ) );
}
} else {
System.out.println( "No 'order' token given" );
}
for ( Iterator it = head.findTokens( "block" ).iterator(); it.hasNext(); ){
Token t = (Token) it.next();
this.blocks.add( new Block( t ) );
}
System.out.println( "Loaded " + f );
}
private Image loadImage( String s ) throws LoadException {
try{
return MaskedImage.load( Editor.dataPath( s ) );
} catch ( IOException ie ){
throw new LoadException( "Could not load " + s );
}
}
private void loadBackground( Token t ) throws LoadException {
if ( t != null ){
String s = String.valueOf( t.iterator().next() );
setBackground( loadImage( s ) );
backgroundFile = s;
}
}
public void loadBackground( String s ){
try{
setBackground( loadImage( s ) );
backgroundFile = s;
} catch ( LoadException e ){
e.printStackTrace();
}
}
public String getBackgroundFile(){
return backgroundFile;
}
private void setWidth( int w ){
width = w;
}
private int calculateWidth( List lengths ){
int w = 0;
for ( Iterator it = lengths.iterator(); it.hasNext(); ){
Token t = (Token) it.next();
w += t.readInt( 0 ) * getScale();
}
return w;
}
private void setBackground( Image i ){
background = i;
}
public Dimension getSize(){
return new Dimension( (int) getWidth(), (int) getHeight() );
}
public double getWidth(){
double w = 0;
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
if ( b.isEnabled() ){
w += b.getLength() * getScale();
}
}
return w;
}
public Token toToken(){
Token level = new Token();
level.addToken( new Token( level, "level" ) );
Token z = new Token( level );
level.addToken( z );
z.addToken( new Token( z, "z" ) );
z.addToken( new String[]{ "minimum", String.valueOf( getMinZ() ) } );
z.addToken( new String[]{ "maximum", String.valueOf( getMaxZ() ) } );
if ( getAtmosphere() != null ){
level.addToken( new String[]{ "atmosphere", getAtmosphere() } );
}
level.addToken( new String[]{ "background-parallax", String.valueOf( getBackgroundParallax() ) } );
level.addToken( new String[]{ "foreground-parallax", String.valueOf( getForegroundParallax() ) } );
if ( backgroundFile != null ){
level.addToken( new String[]{ "background", "\"" + backgroundFile.replaceAll( "\\\\", "/" ) + "\"" } );
}
for ( Iterator it = frontPanels.iterator(); it.hasNext(); ){
/*
Token f = new Token( level );
level.addToken( f );
Panel p = (Panel) it.next();
f.addToken( new Token( f, "frontpanel" ) );
f.addToken( new Token( f, p.name ) );
*/
Panel p = (Panel) it.next();
level.addToken( new String[]{ "frontpanel", "\"" + p.name.replaceAll( "\\\\", "/" ) + "\"" } );
}
for ( Iterator it = backPanels.entrySet().iterator(); it.hasNext(); ){
Map.Entry entry = (Map.Entry) it.next();
Token f = new Token( level );
level.addToken( f );
f.addToken( new Token( f, "panel" ) );
Panel p = (Panel) entry.getValue();
f.addToken( new Token( f, entry.getKey().toString() ) );
f.addToken( new Token( f, "\"" + p.name.replaceAll( "\\\\", "/" ) + "\"" ) );
f.addToken( new Token( f, "junk" ) );
f.addToken( new Token( f, "junk" ) );
}
if (getDescription() != null){
level.addToken(new String[]{ "description", "\"" + getDescription() + "\""});
}
Token order = new Token( level );
level.addToken( order );
order.addToken( new Token( order, "order" ) );
for ( Iterator it = panelOrder.iterator(); it.hasNext(); ){
order.addToken( new Token( order, it.next().toString() ) );
}
for ( Iterator it = blocks.iterator(); it.hasNext(); ){
Block b = (Block) it.next();
level.addToken( b.toToken() );
}
return level;
}
public double getScale(){
return scale;
}
public void setScale( double s ){
scale = s;
}
public double getHeight(){
return 240 * getScale();
}
}
|
parse triggers
|
editor/src/com/rafkind/paintown/level/Level.java
|
parse triggers
|
|
Java
|
bsd-3-clause
|
cf0f830aa1b783a3a449d6d27965db62e98b20cc
| 0
|
ontodev/robot,ontodev/robot,ontodev/robot
|
package org.obolibrary.robot;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.AddOntologyAnnotation;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom;
import org.semanticweb.owlapi.model.OWLAnnotationProperty;
import org.semanticweb.owlapi.model.OWLAnnotationSubject;
import org.semanticweb.owlapi.model.OWLAnnotationValue;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLDatatype;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLLiteral;
import org.semanticweb.owlapi.model.OWLNamedObject;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLOntologyID;
import org.semanticweb.owlapi.model.OWLSubClassOfAxiom;
import org.semanticweb.owlapi.model.RemoveOntologyAnnotation;
import org.semanticweb.owlapi.model.SetOntologyID;
import org.semanticweb.owlapi.util.ReferencedEntitySetProvider;
/**
* Provides convenience methods for working with OWL ontologies.
*
* @author <a href="mailto:james@overton.ca">James A. Overton</a>
*/
public class OntologyHelper {
/**
* Logger.
*/
private static final Logger logger =
LoggerFactory.getLogger(OntologyHelper.class);
/**
* Set the ontology IRI and version IRI using strings.
*
* @param ontology the ontology to change
* @param ontologyIRIString the ontology IRI string, or null for no change
* @param versionIRIString the version IRI string, or null for no change
*/
public static void setOntologyIRI(OWLOntology ontology,
String ontologyIRIString, String versionIRIString) {
IRI ontologyIRI = null;
if (ontologyIRIString != null) {
ontologyIRI = IRI.create(ontologyIRIString);
}
IRI versionIRI = null;
if (versionIRIString != null) {
versionIRI = IRI.create(versionIRIString);
}
setOntologyIRI(ontology, ontologyIRI, versionIRI);
}
/**
* Set the ontology IRI and version IRI.
*
* @param ontology the ontology to change
* @param ontologyIRI the new ontology IRI, or null for no change
* @param versionIRI the new version IRI, or null for no change
*/
public static void setOntologyIRI(OWLOntology ontology, IRI ontologyIRI,
IRI versionIRI) {
OWLOntologyID currentID = ontology.getOntologyID();
if (ontologyIRI == null && versionIRI == null) {
// don't change anything
return;
} else if (ontologyIRI == null) {
ontologyIRI = currentID.getOntologyIRI().orNull();
} else if (versionIRI == null) {
versionIRI = currentID.getVersionIRI().orNull();
}
OWLOntologyID newID;
if (versionIRI == null) {
newID = new OWLOntologyID(ontologyIRI);
} else {
newID = new OWLOntologyID(ontologyIRI, versionIRI);
}
SetOntologyID setID = new SetOntologyID(ontology, newID);
ontology.getOWLOntologyManager().applyChange(setID);
}
/**
* Given an OWLAnnotationValue, return its value as a string.
*
* @param value the OWLAnnotationValue to get the string value of
* @return the string value
*/
public static String getValue(OWLAnnotationValue value) {
String result = null;
if (value instanceof OWLLiteral) {
result = ((OWLLiteral) value).getLiteral();
}
return result;
}
/**
* Given an OWLAnnotation, return its value as a string.
*
* @param annotation the OWLAnnotation to get the string value of
* @return the string value
*/
public static String getValue(OWLAnnotation annotation) {
return getValue(annotation.getValue());
}
/**
* Given an OWLAnnotationAssertionAxiom, return its value as a string.
*
* @param axiom the OWLAnnotationAssertionAxiom to get the string value of
* @return the string value
*/
public static String getValue(OWLAnnotationAssertionAxiom axiom) {
return getValue(axiom.getValue());
}
/**
* Given a set of OWLAnnotations, return the first string value
* as determined by natural string sorting.
*
* @param annotations a set of OWLAnnotations to get the value of
* @return the first string value
*/
public static String getValue(Set<OWLAnnotation> annotations) {
Set<String> valueSet = getValues(annotations);
List<String> valueList = new ArrayList<String>(valueSet);
Collections.sort(valueList);
String value = null;
if (valueList.size() > 0) {
value = valueList.get(0);
}
return value;
}
/**
* Given a set of OWLAnnotations, return a set of their value strings.
*
* @param annotations a set of OWLAnnotations to get the value of
* @return a set of the value strings
*/
public static Set<String> getValues(Set<OWLAnnotation> annotations) {
Set<String> results = new HashSet<String>();
for (OWLAnnotation annotation: annotations) {
String value = getValue(annotation);
if (value != null) {
results.add(value);
}
}
return results;
}
/**
* Given an annotation value, return its datatype, or null.
*
* @param value the value to check
* @return the datatype, or null if the value has none
*/
public static OWLDatatype getType(OWLAnnotationValue value) {
if (value instanceof OWLLiteral) {
return ((OWLLiteral) value).getDatatype();
}
return null;
}
/**
* Given an annotation value, return the IRI of its datatype, or null.
*
* @param value the value to check
* @return the IRI of the datatype, or null if the value has none
*/
public static IRI getTypeIRI(OWLAnnotationValue value) {
OWLDatatype datatype = getType(value);
if (datatype == null) {
return null;
} else {
return datatype.getIRI();
}
}
/**
* Given an ontology, and an optional set of annotation properties,
* return a set of annotation assertion axioms for those properties,
* for all subjects.
*
* @param ontology the ontology to search (including imports closure)
* @param property an annotation property
* @return a filtered set of annotation assertion axioms
*/
public static Set<OWLAnnotationAssertionAxiom> getAnnotationAxioms(
OWLOntology ontology, OWLAnnotationProperty property) {
Set<OWLAnnotationProperty> properties =
new HashSet<OWLAnnotationProperty>();
properties.add(property);
Set<IRI> subjects = null;
return getAnnotationAxioms(ontology, properties, subjects);
}
/**
* Given an ontology, an optional set of annotation properties,
* and an optional set of subject,
* return a set of annotation assertion axioms for those
* subjects and those properties.
*
* @param ontology the ontology to search (including imports closure)
* @param property an annotation property
* @param subject an annotation subject IRIs
* @return a filtered set of annotation assertion axioms
*/
public static Set<OWLAnnotationAssertionAxiom> getAnnotationAxioms(
OWLOntology ontology, OWLAnnotationProperty property,
IRI subject) {
Set<OWLAnnotationProperty> properties =
new HashSet<OWLAnnotationProperty>();
properties.add(property);
Set<IRI> subjects = null;
if (subject != null) {
subjects = new HashSet<IRI>();
subjects.add(subject);
}
return getAnnotationAxioms(ontology, properties, subjects);
}
/**
* Given an ontology, an optional set of annotation properties,
* and an optional set of subject,
* return a set of annotation assertion axioms for those
* subjects and those properties.
*
* @param ontology the ontology to search (including imports closure)
* @param properties a set of annotation properties,
* or null if all properties should be included
* @param subjects a set of annotation subject IRIs,
* or null if all subjects should be included
* @return a filtered set of annotation assertion axioms
*/
public static Set<OWLAnnotationAssertionAxiom> getAnnotationAxioms(
OWLOntology ontology, Set<OWLAnnotationProperty> properties,
Set<IRI> subjects) {
Set<OWLAnnotationAssertionAxiom> results =
new HashSet<OWLAnnotationAssertionAxiom>();
for (OWLAxiom axiom: ontology.getAxioms()) {
if (!(axiom instanceof OWLAnnotationAssertionAxiom)) {
continue;
}
OWLAnnotationAssertionAxiom aaa =
(OWLAnnotationAssertionAxiom) axiom;
if (properties != null && !properties.contains(aaa.getProperty())) {
continue;
}
OWLAnnotationSubject subject = aaa.getSubject();
if (subjects == null) {
results.add(aaa);
} else if (subject instanceof IRI
&& subjects.contains((IRI) subject)) {
results.add(aaa);
}
}
return results;
}
/**
* Given an ontology, an optional set of annotation properties,
* and an optional set of subject,
* return a set of annotation values for those
* subjects and those properties.
*
* @param ontology the ontology to search (including imports closure)
* @param property an annotation property
* @param subject an annotation subject IRIs
* @return a filtered set of annotation values
*/
public static Set<OWLAnnotationValue> getAnnotationValues(
OWLOntology ontology, OWLAnnotationProperty property,
IRI subject) {
Set<OWLAnnotationProperty> properties =
new HashSet<OWLAnnotationProperty>();
properties.add(property);
Set<IRI> subjects = new HashSet<IRI>();
subjects.add(subject);
return getAnnotationValues(ontology, properties, subjects);
}
/**
* Given an ontology, an optional set of annotation properties,
* and an optional set of subject,
* return a set of annotation values for those
* subjects and those properties.
*
* @param ontology the ontology to search (including imports closure)
* @param properties a set of annotation properties,
* or null if all properties should be included
* @param subjects a set of annotation subject IRIs,
* or null if all subjects should be included
* @return a filtered set of annotation values
*/
public static Set<OWLAnnotationValue> getAnnotationValues(
OWLOntology ontology, Set<OWLAnnotationProperty> properties,
Set<IRI> subjects) {
Set<OWLAnnotationValue> results = new HashSet<OWLAnnotationValue>();
Set<OWLAnnotationAssertionAxiom> axioms =
getAnnotationAxioms(ontology, properties, subjects);
for (OWLAnnotationAssertionAxiom axiom: axioms) {
results.add(axiom.getValue());
}
return results;
}
/**
* Given an ontology, an optional set of annotation properties,
* and an optional set of subject,
* return a set of strings for those subjects and those properties.
*
* @param ontology the ontology to search (including imports closure)
* @param property an annotation property
* @param subject an annotation subject IRIs
* @return a filtered set of annotation strings
*/
public static Set<String> getAnnotationStrings(
OWLOntology ontology, OWLAnnotationProperty property,
IRI subject) {
Set<OWLAnnotationProperty> properties =
new HashSet<OWLAnnotationProperty>();
properties.add(property);
Set<IRI> subjects = new HashSet<IRI>();
subjects.add(subject);
return getAnnotationStrings(ontology, properties, subjects);
}
/**
* Given an ontology, an optional set of annotation properties,
* and an optional set of subject,
* return a set of strings for those subjects and those properties.
*
* @param ontology the ontology to search (including imports closure)
* @param properties a set of annotation properties,
* or null if all properties should be included
* @param subjects a set of annotation subject IRIs,
* or null if all subjects should be included
* @return a filtered set of annotation strings
*/
public static Set<String> getAnnotationStrings(
OWLOntology ontology, Set<OWLAnnotationProperty> properties,
Set<IRI> subjects) {
Set<String> results = new HashSet<String>();
Set<OWLAnnotationValue> values =
getAnnotationValues(ontology, properties, subjects);
for (OWLAnnotationValue value: values) {
results.add(getValue(value));
}
return results;
}
/**
 * Given an ontology, an annotation property, and an annotation subject IRI,
 * return the alphanumerically first annotation value string
 * for that subject and property.
 *
 * @param ontology the ontology to search (including imports closure)
 * @param property an annotation property
 * @param subject an annotation subject IRI
 * @return the first annotation string
 */
public static String getAnnotationString(
        OWLOntology ontology, OWLAnnotationProperty property,
        IRI subject) {
    // Delegate to the set-based overload with read-only singleton filters.
    return getAnnotationString(ontology,
            Collections.singleton(property),
            Collections.singleton(subject));
}
/**
 * Given an ontology, an optional set of annotation properties,
 * and an optional set of subjects,
 * return the alphanumerically first annotation value string
 * for those subjects and those properties.
 *
 * @param ontology the ontology to search (including imports closure)
 * @param properties a set of annotation properties,
 *     or null if all properties should be included
 * @param subjects a set of annotation subject IRIs,
 *     or null if all subjects should be included
 * @return the first annotation string, or null if there are none
 */
public static String getAnnotationString(
        OWLOntology ontology, Set<OWLAnnotationProperty> properties,
        Set<IRI> subjects) {
    // Sort the matching strings and take the first, if any.
    List<String> sorted = new ArrayList<String>(
        getAnnotationStrings(ontology, properties, subjects));
    Collections.sort(sorted);
    return sorted.isEmpty() ? null : sorted.get(0);
}
/**
 * Given an ontology, return a map from IRIs to rdfs:labels.
 * Includes labels asserted in for all imported ontologies.
 * If there are multiple labels, use the alphanumerically first.
 *
 * @param ontology the ontology to use
 * @return a map from IRIs to label strings
 */
public static Map<IRI, String> getLabels(OWLOntology ontology) {
    logger.info("Fetching labels for "+ontology.getOntologyID());
    OWLAnnotationProperty rdfsLabel = ontology.getOWLOntologyManager()
        .getOWLDataFactory().getRDFSLabel();
    Map<IRI, String> labels = new HashMap<IRI, String>();
    Set<OWLOntology> ontologySet = new HashSet<OWLOntology>();
    ontologySet.add(ontology);
    logger.info("iterating through entities...");
    for (OWLEntity entity:
            new ReferencedEntitySetProvider(ontologySet).getEntities()) {
        IRI iri = entity.getIRI();
        String label = getAnnotationString(ontology, rdfsLabel, iri);
        if (label != null) {
            labels.put(iri, label);
        }
    }
    logger.info("Results: "+labels.size());
    return labels;
}
/**
 * Given an ontology, return a map from rdfs:label to IRIs.
 * Includes labels asserted in for all imported ontologies.
 * Duplicates overwrite existing with a warning.
 *
 * @param ontology the ontology to use
 * @return a map from label strings to IRIs
 */
public static Map<String, IRI> getLabelIRIs(OWLOntology ontology) {
    Map<String, IRI> results = new HashMap<String, IRI>();
    OWLOntologyManager manager = ontology.getOWLOntologyManager();
    OWLAnnotationProperty rdfsLabel =
        manager.getOWLDataFactory().getRDFSLabel();
    Set<OWLAnnotationAssertionAxiom> axioms =
        getAnnotationAxioms(ontology, rdfsLabel);
    for (OWLAnnotationAssertionAxiom axiom: axioms) {
        String value = getValue(axiom);
        if (value == null) {
            // Skip non-literal values (e.g. IRI-valued annotations).
            continue;
        }
        OWLAnnotationSubject subject = axiom.getSubject();
        // instanceof is false for null, so no separate null check is needed.
        if (!(subject instanceof IRI)) {
            continue;
        }
        if (results.containsKey(value)) {
            // Parameterized logging: the message is only built
            // when WARN is enabled.
            logger.warn("Duplicate rdfs:label \"{}\" for subject {}",
                value, subject);
        }
        results.put(value, (IRI) subject);
    }
    return results;
}
/**
 * Generates a function that returns a label string for any named
 * object in the ontology.
 *
 * @param ontology the ontology whose rdfs:labels should be used
 * @param useIriAsDefault if true, fall back to the object's IRI string
 *     when no label is found; otherwise the function returns null
 * @return function mapping an object to its label (or the default)
 */
public static Function<OWLNamedObject, String> getLabelFunction(
        OWLOntology ontology, boolean useIriAsDefault) {
    // Snapshot the labels once; the returned function does not observe
    // later changes to the ontology.
    Map<IRI, String> labelMap = getLabels(ontology);
    return (obj) -> {
        String label;
        if (labelMap.containsKey(obj.getIRI())) {
            label = labelMap.get(obj.getIRI());
        } else if (useIriAsDefault) {
            label = obj.getIRI().toString();
        } else {
            label = null;
        }
        return label;
    };
}
/**
 * Given an ontology and a set of term IRIs,
 * return a set of entities for those IRIs.
 * The input ontology is not changed.
 *
 * @param ontology the ontology to search
 * @param iris the IRIs of the entities to find, or null for none
 * @return a set of OWLEntities with the given IRIs
 */
public static Set<OWLEntity> getEntities(OWLOntology ontology,
        Set<IRI> iris) {
    Set<OWLEntity> found = new HashSet<OWLEntity>();
    if (iris != null) {
        for (IRI iri: iris) {
            // The boolean argument includes the imports closure.
            found.addAll(ontology.getEntitiesInSignature(iri, true));
        }
    }
    return found;
}
/**
 * Given an ontology, return a set of all the entities in its signature.
 *
 * @param ontology the ontology to search
 * @return a set of all entities in the ontology
 */
public static Set<OWLEntity> getEntities(OWLOntology ontology) {
    Set<OWLOntology> ontologySet = new HashSet<OWLOntology>();
    ontologySet.add(ontology);
    return new ReferencedEntitySetProvider(ontologySet).getEntities();
}
/**
 * Given an ontology, return a set of IRIs for all the entities
 * in its signature.
 *
 * @param ontology the ontology to search
 * @return a set of IRIs for all entities in the ontology
 */
public static Set<IRI> getIRIs(OWLOntology ontology) {
    Set<IRI> results = new HashSet<IRI>();
    for (OWLEntity e: getEntities(ontology)) {
        results.add(e.getIRI());
    }
    return results;
}
/**
 * Remove all annotations on this ontology.
 * Just annotations on the ontology itself,
 * not annotations on its classes, etc.
 *
 * @param ontology the ontology to modify
 */
public static void removeOntologyAnnotations(OWLOntology ontology) {
    OWLOntologyManager manager = ontology.getOWLOntologyManager();
    // Copy the annotation set before applying changes.
    for (OWLAnnotation annotation:
            new HashSet<OWLAnnotation>(ontology.getAnnotations())) {
        manager.applyChange(
            new RemoveOntologyAnnotation(ontology, annotation));
    }
}
/**
 * Given an ontology, a property IRI, and a value,
 * add an annotation to this ontology with that property and value.
 *
 * @param ontology the ontology to modify
 * @param propertyIRI the IRI of the property to add
 * @param value the IRI or literal value to add
 */
public static void addOntologyAnnotation(OWLOntology ontology,
        IRI propertyIRI, OWLAnnotationValue value) {
    OWLDataFactory factory =
        ontology.getOWLOntologyManager().getOWLDataFactory();
    // Build the annotation, then reuse the annotation-based overload.
    addOntologyAnnotation(ontology, factory.getOWLAnnotation(
        factory.getOWLAnnotationProperty(propertyIRI), value));
}
/**
 * Annotate the ontology with the annotation.
 *
 * @param ontology the ontology to modify
 * @param annotation the annotation to add
 */
public static void addOntologyAnnotation(OWLOntology ontology,
        OWLAnnotation annotation) {
    ontology.getOWLOntologyManager().applyChange(
        new AddOntologyAnnotation(ontology, annotation));
}
/**
 * Given an ontology, an axiom, a property IRI, and a value,
 * add an annotation to that axiom with that property and value.
 *
 * Note that as axioms are immutable,
 * the axiom is removed and replaced with a new one.
 *
 * @param ontology the ontology to modify
 * @param axiom the axiom to annotate
 * @param propertyIRI the IRI of the property to add
 * @param value the IRI or literal value to add
 */
public static void addAxiomAnnotation(OWLOntology ontology, OWLAxiom axiom,
        IRI propertyIRI, OWLAnnotationValue value) {
    OWLDataFactory factory =
        ontology.getOWLOntologyManager().getOWLDataFactory();
    OWLAnnotation annotation = factory.getOWLAnnotation(
        factory.getOWLAnnotationProperty(propertyIRI), value);
    // Delegate to the set-based overload with a singleton set.
    addAxiomAnnotation(ontology, axiom, Collections.singleton(annotation));
}
/**
 * Given an ontology, an axiom, and a set of annotations,
 * annotate the axiom with the annotations in the ontology.
 *
 * Note that as axioms are immutable,
 * the axiom is removed and replaced with a new one.
 *
 * Only OWLSubClassOfAxiom is currently supported; any other axiom type
 * causes an UnsupportedOperationException.
 *
 * @param ontology the ontology to modify
 * @param axiom the axiom to annotate
 * @param annotations the set of annotation to add to the axiom
 */
public static void addAxiomAnnotation(OWLOntology ontology, OWLAxiom axiom,
        Set<OWLAnnotation> annotations) {
    OWLOntologyManager manager = ontology.getOWLOntologyManager();
    OWLDataFactory factory = manager.getOWLDataFactory();
    OWLAxiom newAxiom;
    if (axiom instanceof OWLSubClassOfAxiom) {
        OWLSubClassOfAxiom x = ((OWLSubClassOfAxiom) axiom);
        // Rebuild the axiom with the same sub/superclass pair
        // plus the requested annotations.
        newAxiom = factory.getOWLSubClassOfAxiom(x.getSubClass(),
                x.getSuperClass(), annotations);
        logger.debug("ANNOTATED: " + newAxiom);
    } else {
        // TODO - See https://github.com/ontodev/robot/issues/67
        throw new UnsupportedOperationException(
                "Cannot annotate axioms of type: " + axiom.getClass());
    }
    // Swap the original axiom for the annotated copy.
    manager.removeAxiom(ontology, axiom);
    manager.addAxiom(ontology, newAxiom);
}
/**
 * Given an ontology, an annotation property IRI, and an annotation value,
 * annotate all axioms in the ontology with that property and value.
 *
 * NOTE(review): addAxiomAnnotation only supports OWLSubClassOfAxiom, so
 * this will throw UnsupportedOperationException on the first axiom of any
 * other type — confirm intended usage against callers.
 *
 * @param ontology the ontology to modify
 * @param propertyIRI the IRI of the property to add
 * @param value the IRI or literal value to add
 */
public static void addAxiomAnnotations(OWLOntology ontology,
        IRI propertyIRI, OWLAnnotationValue value) {
    for (OWLAxiom a : ontology.getAxioms()) {
        addAxiomAnnotation(ontology, a, propertyIRI, value);
    }
}
}
|
robot-core/src/main/java/org/obolibrary/robot/OntologyHelper.java
|
package org.obolibrary.robot;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.AddOntologyAnnotation;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom;
import org.semanticweb.owlapi.model.OWLAnnotationProperty;
import org.semanticweb.owlapi.model.OWLAnnotationSubject;
import org.semanticweb.owlapi.model.OWLAnnotationValue;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLDatatype;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLLiteral;
import org.semanticweb.owlapi.model.OWLNamedObject;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLOntologyID;
import org.semanticweb.owlapi.model.OWLSubClassOfAxiom;
import org.semanticweb.owlapi.model.RemoveOntologyAnnotation;
import org.semanticweb.owlapi.model.SetOntologyID;
import org.semanticweb.owlapi.util.ReferencedEntitySetProvider;
/**
 * Provides convenience methods for working with OWL ontologies.
 *
 * All methods are static and stateless; thread-safety is that of the
 * underlying OWLAPI objects passed in.
 *
 * @author <a href="mailto:james@overton.ca">James A. Overton</a>
 */
public class OntologyHelper {
    /**
     * Logger.
     */
    private static final Logger logger =
        LoggerFactory.getLogger(OntologyHelper.class);

    /**
     * Set the ontology IRI and version IRI using strings.
     *
     * @param ontology the ontology to change
     * @param ontologyIRIString the ontology IRI string, or null for no change
     * @param versionIRIString the version IRI string, or null for no change
     */
    public static void setOntologyIRI(OWLOntology ontology,
            String ontologyIRIString, String versionIRIString) {
        IRI ontologyIRI = null;
        if (ontologyIRIString != null) {
            ontologyIRI = IRI.create(ontologyIRIString);
        }
        IRI versionIRI = null;
        if (versionIRIString != null) {
            versionIRI = IRI.create(versionIRIString);
        }
        setOntologyIRI(ontology, ontologyIRI, versionIRI);
    }

    /**
     * Set the ontology IRI and version IRI.
     *
     * @param ontology the ontology to change
     * @param ontologyIRI the new ontology IRI, or null for no change
     * @param versionIRI the new version IRI, or null for no change
     */
    public static void setOntologyIRI(OWLOntology ontology, IRI ontologyIRI,
            IRI versionIRI) {
        OWLOntologyID currentID = ontology.getOntologyID();
        if (ontologyIRI == null && versionIRI == null) {
            // don't change anything
            return;
        } else if (ontologyIRI == null) {
            ontologyIRI = currentID.getOntologyIRI().orNull();
        } else if (versionIRI == null) {
            versionIRI = currentID.getVersionIRI().orNull();
        }
        OWLOntologyID newID;
        if (versionIRI == null) {
            newID = new OWLOntologyID(ontologyIRI);
        } else {
            newID = new OWLOntologyID(ontologyIRI, versionIRI);
        }
        SetOntologyID setID = new SetOntologyID(ontology, newID);
        ontology.getOWLOntologyManager().applyChange(setID);
    }

    /**
     * Given an OWLAnnotationValue, return its value as a string.
     *
     * @param value the OWLAnnotationValue to get the string value of
     * @return the string value, or null for non-literal values
     */
    public static String getValue(OWLAnnotationValue value) {
        String result = null;
        if (value instanceof OWLLiteral) {
            result = ((OWLLiteral) value).getLiteral();
        }
        return result;
    }

    /**
     * Given an OWLAnnotation, return its value as a string.
     *
     * @param annotation the OWLAnnotation to get the string value of
     * @return the string value
     */
    public static String getValue(OWLAnnotation annotation) {
        return getValue(annotation.getValue());
    }

    /**
     * Given an OWLAnnotationAssertionAxiom, return its value as a string.
     *
     * @param axiom the OWLAnnotationAssertionAxiom to get the string value of
     * @return the string value
     */
    public static String getValue(OWLAnnotationAssertionAxiom axiom) {
        return getValue(axiom.getValue());
    }

    /**
     * Given a set of OWLAnnotations, return the first string value
     * as determined by natural string sorting.
     *
     * @param annotations a set of OWLAnnotations to get the value of
     * @return the first string value, or null if there are none
     */
    public static String getValue(Set<OWLAnnotation> annotations) {
        Set<String> valueSet = getValues(annotations);
        List<String> valueList = new ArrayList<String>(valueSet);
        Collections.sort(valueList);
        String value = null;
        if (valueList.size() > 0) {
            value = valueList.get(0);
        }
        return value;
    }

    /**
     * Given a set of OWLAnnotations, return a set of their value strings.
     *
     * @param annotations a set of OWLAnnotations to get the value of
     * @return a set of the value strings
     */
    public static Set<String> getValues(Set<OWLAnnotation> annotations) {
        Set<String> results = new HashSet<String>();
        for (OWLAnnotation annotation: annotations) {
            String value = getValue(annotation);
            if (value != null) {
                results.add(value);
            }
        }
        return results;
    }

    /**
     * Given an annotation value, return its datatype, or null.
     *
     * @param value the value to check
     * @return the datatype, or null if the value has none
     */
    public static OWLDatatype getType(OWLAnnotationValue value) {
        if (value instanceof OWLLiteral) {
            return ((OWLLiteral) value).getDatatype();
        }
        return null;
    }

    /**
     * Given an annotation value, return the IRI of its datatype, or null.
     *
     * @param value the value to check
     * @return the IRI of the datatype, or null if the value has none
     */
    public static IRI getTypeIRI(OWLAnnotationValue value) {
        OWLDatatype datatype = getType(value);
        if (datatype == null) {
            return null;
        } else {
            return datatype.getIRI();
        }
    }

    /**
     * Given an ontology, and an optional set of annotation properties,
     * return a set of annotation assertion axioms for those properties,
     * for all subjects.
     *
     * @param ontology the ontology to search (including imports closure)
     * @param property an annotation property
     * @return a filtered set of annotation assertion axioms
     */
    public static Set<OWLAnnotationAssertionAxiom> getAnnotationAxioms(
            OWLOntology ontology, OWLAnnotationProperty property) {
        Set<OWLAnnotationProperty> properties =
            new HashSet<OWLAnnotationProperty>();
        properties.add(property);
        Set<IRI> subjects = null;
        return getAnnotationAxioms(ontology, properties, subjects);
    }

    /**
     * Given an ontology, an optional set of annotation properties,
     * and an optional set of subject,
     * return a set of annotation assertion axioms for those
     * subjects and those properties.
     *
     * @param ontology the ontology to search (including imports closure)
     * @param property an annotation property
     * @param subject an annotation subject IRI, or null for all subjects
     * @return a filtered set of annotation assertion axioms
     */
    public static Set<OWLAnnotationAssertionAxiom> getAnnotationAxioms(
            OWLOntology ontology, OWLAnnotationProperty property,
            IRI subject) {
        Set<OWLAnnotationProperty> properties =
            new HashSet<OWLAnnotationProperty>();
        properties.add(property);
        Set<IRI> subjects = null;
        if (subject != null) {
            subjects = new HashSet<IRI>();
            subjects.add(subject);
        }
        return getAnnotationAxioms(ontology, properties, subjects);
    }

    /**
     * Given an ontology, an optional set of annotation properties,
     * and an optional set of subject,
     * return a set of annotation assertion axioms for those
     * subjects and those properties.
     *
     * @param ontology the ontology to search (including imports closure)
     * @param properties a set of annotation properties,
     *     or null if all properties should be included
     * @param subjects a set of annotation subject IRIs,
     *     or null if all subjects should be included
     * @return a filtered set of annotation assertion axioms
     */
    public static Set<OWLAnnotationAssertionAxiom> getAnnotationAxioms(
            OWLOntology ontology, Set<OWLAnnotationProperty> properties,
            Set<IRI> subjects) {
        Set<OWLAnnotationAssertionAxiom> results =
            new HashSet<OWLAnnotationAssertionAxiom>();
        for (OWLAxiom axiom: ontology.getAxioms()) {
            if (!(axiom instanceof OWLAnnotationAssertionAxiom)) {
                continue;
            }
            OWLAnnotationAssertionAxiom aaa =
                (OWLAnnotationAssertionAxiom) axiom;
            if (properties != null && !properties.contains(aaa.getProperty())) {
                continue;
            }
            OWLAnnotationSubject subject = aaa.getSubject();
            if (subjects == null) {
                results.add(aaa);
            } else if (subject instanceof IRI
                       && subjects.contains((IRI) subject)) {
                results.add(aaa);
            }
        }
        return results;
    }

    /**
     * Given an ontology, an optional set of annotation properties,
     * and an optional set of subject,
     * return a set of annotation values for those
     * subjects and those properties.
     *
     * @param ontology the ontology to search (including imports closure)
     * @param property an annotation property
     * @param subject an annotation subject IRI
     * @return a filtered set of annotation values
     */
    public static Set<OWLAnnotationValue> getAnnotationValues(
            OWLOntology ontology, OWLAnnotationProperty property,
            IRI subject) {
        Set<OWLAnnotationProperty> properties =
            new HashSet<OWLAnnotationProperty>();
        properties.add(property);
        Set<IRI> subjects = new HashSet<IRI>();
        subjects.add(subject);
        return getAnnotationValues(ontology, properties, subjects);
    }

    /**
     * Given an ontology, an optional set of annotation properties,
     * and an optional set of subject,
     * return a set of annotation values for those
     * subjects and those properties.
     *
     * @param ontology the ontology to search (including imports closure)
     * @param properties a set of annotation properties,
     *     or null if all properties should be included
     * @param subjects a set of annotation subject IRIs,
     *     or null if all subjects should be included
     * @return a filtered set of annotation values
     */
    public static Set<OWLAnnotationValue> getAnnotationValues(
            OWLOntology ontology, Set<OWLAnnotationProperty> properties,
            Set<IRI> subjects) {
        Set<OWLAnnotationValue> results = new HashSet<OWLAnnotationValue>();
        Set<OWLAnnotationAssertionAxiom> axioms =
            getAnnotationAxioms(ontology, properties, subjects);
        for (OWLAnnotationAssertionAxiom axiom: axioms) {
            results.add(axiom.getValue());
        }
        return results;
    }

    /**
     * Given an ontology, an optional set of annotation properties,
     * and an optional set of subject,
     * return a set of strings for those subjects and those properties.
     *
     * @param ontology the ontology to search (including imports closure)
     * @param property an annotation property
     * @param subject an annotation subject IRI
     * @return a filtered set of annotation strings
     */
    public static Set<String> getAnnotationStrings(
            OWLOntology ontology, OWLAnnotationProperty property,
            IRI subject) {
        Set<OWLAnnotationProperty> properties =
            new HashSet<OWLAnnotationProperty>();
        properties.add(property);
        Set<IRI> subjects = new HashSet<IRI>();
        subjects.add(subject);
        return getAnnotationStrings(ontology, properties, subjects);
    }

    /**
     * Given an ontology, an optional set of annotation properties,
     * and an optional set of subject,
     * return a set of strings for those subjects and those properties.
     *
     * @param ontology the ontology to search (including imports closure)
     * @param properties a set of annotation properties,
     *     or null if all properties should be included
     * @param subjects a set of annotation subject IRIs,
     *     or null if all subjects should be included
     * @return a filtered set of annotation strings
     */
    public static Set<String> getAnnotationStrings(
            OWLOntology ontology, Set<OWLAnnotationProperty> properties,
            Set<IRI> subjects) {
        Set<String> results = new HashSet<String>();
        Set<OWLAnnotationValue> values =
            getAnnotationValues(ontology, properties, subjects);
        for (OWLAnnotationValue value: values) {
            // getValue returns null for non-literal values,
            // so the result set may contain null.
            results.add(getValue(value));
        }
        return results;
    }

    /**
     * Given an ontology, an optional set of annotation properties,
     * and an optional set of subject,
     * return the alphanumerically first annotation value string
     * for those subjects and those properties.
     *
     * @param ontology the ontology to search (including imports closure)
     * @param property an annotation property
     * @param subject an annotation subject IRI
     * @return the first annotation string
     */
    public static String getAnnotationString(
            OWLOntology ontology, OWLAnnotationProperty property,
            IRI subject) {
        Set<OWLAnnotationProperty> properties =
            new HashSet<OWLAnnotationProperty>();
        properties.add(property);
        Set<IRI> subjects = new HashSet<IRI>();
        subjects.add(subject);
        return getAnnotationString(ontology, properties, subjects);
    }

    /**
     * Given an ontology, an optional set of annotation properties,
     * and an optional set of subject,
     * return the alphanumerically first annotation value string
     * for those subjects and those properties.
     *
     * @param ontology the ontology to search (including imports closure)
     * @param properties a set of annotation properties,
     *     or null if all properties should be included
     * @param subjects a set of annotation subject IRIs,
     *     or null if all subjects should be included
     * @return the first annotation string, or null if there are none
     */
    public static String getAnnotationString(
            OWLOntology ontology, Set<OWLAnnotationProperty> properties,
            Set<IRI> subjects) {
        Set<String> valueSet =
            getAnnotationStrings(ontology, properties, subjects);
        List<String> valueList = new ArrayList<String>(valueSet);
        Collections.sort(valueList);
        String value = null;
        if (valueList.size() > 0) {
            value = valueList.get(0);
        }
        return value;
    }

    /**
     * Given an ontology, return a map from IRIs to rdfs:labels.
     * Includes labels asserted in for all imported ontologies.
     * If there are multiple labels, use the alphanumerically first.
     *
     * @param ontology the ontology to use
     * @return a map from IRIs to label strings
     */
    public static Map<IRI, String> getLabels(OWLOntology ontology) {
        // Log the compact ontology ID rather than the ontology itself,
        // whose toString can be very large.
        logger.info("Fetching labels for "+ontology.getOntologyID());
        Map<IRI, String> results = new HashMap<IRI, String>();
        OWLOntologyManager manager = ontology.getOWLOntologyManager();
        OWLAnnotationProperty rdfsLabel =
            manager.getOWLDataFactory().getRDFSLabel();
        Set<OWLOntology> ontologies = new HashSet<OWLOntology>();
        ontologies.add(ontology);
        ReferencedEntitySetProvider resp =
            new ReferencedEntitySetProvider(ontologies);
        logger.info("iterating through entities...");
        for (OWLEntity entity: resp.getEntities()) {
            String value = getAnnotationString(
                ontology, rdfsLabel, entity.getIRI());
            if (value != null) {
                results.put(entity.getIRI(), value);
            }
        }
        logger.info("Results: "+results.size());
        return results;
    }

    /**
     * Given an ontology, return a map from rdfs:label to IRIs.
     * Includes labels asserted in for all imported ontologies.
     * Duplicates overwrite existing with a warning.
     *
     * @param ontology the ontology to use
     * @return a map from label strings to IRIs
     */
    public static Map<String, IRI> getLabelIRIs(OWLOntology ontology) {
        Map<String, IRI> results = new HashMap<String, IRI>();
        OWLOntologyManager manager = ontology.getOWLOntologyManager();
        OWLAnnotationProperty rdfsLabel =
            manager.getOWLDataFactory().getRDFSLabel();
        Set<OWLAnnotationAssertionAxiom> axioms =
            getAnnotationAxioms(ontology, rdfsLabel);
        for (OWLAnnotationAssertionAxiom axiom: axioms) {
            String value = getValue(axiom);
            if (value == null) {
                continue;
            }
            OWLAnnotationSubject subject = axiom.getSubject();
            // instanceof is false for null, so no separate null check needed.
            if (!(subject instanceof IRI)) {
                continue;
            }
            if (results.containsKey(value)) {
                logger.warn("Duplicate rdfs:label \""
                            + value
                            + "\" for subject "
                            + subject);
            }
            results.put(value, (IRI) subject);
        }
        return results;
    }

    /**
     * Generates a function that returns a label string for any named
     * object in the ontology.
     *
     * @param ontology the ontology whose rdfs:labels should be used
     * @param useIriAsDefault if true, fall back to the object's IRI string
     *     when no label is found; otherwise the function returns null
     * @return function mapping object to label
     */
    public static Function<OWLNamedObject, String> getLabelFunction(
            OWLOntology ontology, boolean useIriAsDefault) {
        // Snapshot the labels once; the returned function does not observe
        // later changes to the ontology.
        Map<IRI, String> labelMap = getLabels(ontology);
        return (obj) -> {
            String label;
            if (labelMap.containsKey(obj.getIRI())) {
                label = labelMap.get(obj.getIRI());
            } else if (useIriAsDefault) {
                label = obj.getIRI().toString();
            } else {
                label = null;
            }
            return label;
        };
    }

    /**
     * Given an ontology and a set of term IRIs,
     * return a set of entities for those IRIs.
     * The input ontology is not changed.
     *
     * @param ontology the ontology to search
     * @param iris the IRIs of the entities to find
     * @return a set of OWLEntities with the given IRIs
     */
    public static Set<OWLEntity> getEntities(OWLOntology ontology,
            Set<IRI> iris) {
        Set<OWLEntity> entities = new HashSet<OWLEntity>();
        if (iris == null) {
            return entities;
        }
        for (IRI iri: iris) {
            // The boolean argument includes the imports closure.
            entities.addAll(ontology.getEntitiesInSignature(iri, true));
        }
        return entities;
    }

    /**
     * Given an ontology, return a set of all the entities in its signature.
     *
     * @param ontology the ontology to search
     * @return a set of all entities in the ontology
     */
    public static Set<OWLEntity> getEntities(OWLOntology ontology) {
        Set<OWLOntology> ontologies = new HashSet<OWLOntology>();
        ontologies.add(ontology);
        ReferencedEntitySetProvider resp =
            new ReferencedEntitySetProvider(ontologies);
        return resp.getEntities();
    }

    /**
     * Given an ontology, return a set of IRIs for all the entities
     * in its signature.
     *
     * @param ontology the ontology to search
     * @return a set of IRIs for all entities in the ontology
     */
    public static Set<IRI> getIRIs(OWLOntology ontology) {
        Set<IRI> iris = new HashSet<IRI>();
        for (OWLEntity entity: getEntities(ontology)) {
            iris.add(entity.getIRI());
        }
        return iris;
    }

    /**
     * Remove all annotations on this ontology.
     * Just annotations on the ontology itself,
     * not annotations on its classes, etc.
     *
     * @param ontology the ontology to modify
     */
    public static void removeOntologyAnnotations(OWLOntology ontology) {
        OWLOntologyManager manager = ontology.getOWLOntologyManager();
        for (OWLAnnotation annotation: ontology.getAnnotations()) {
            RemoveOntologyAnnotation remove =
                new RemoveOntologyAnnotation(ontology, annotation);
            manager.applyChange(remove);
        }
    }

    /**
     * Given an ontology, a property IRI, and a value,
     * add an annotation to this ontology with that property and value.
     *
     * @param ontology the ontology to modify
     * @param propertyIRI the IRI of the property to add
     * @param value the IRI or literal value to add
     */
    public static void addOntologyAnnotation(OWLOntology ontology,
            IRI propertyIRI, OWLAnnotationValue value) {
        OWLOntologyManager manager = ontology.getOWLOntologyManager();
        OWLDataFactory df = manager.getOWLDataFactory();
        OWLAnnotationProperty property =
            df.getOWLAnnotationProperty(propertyIRI);
        OWLAnnotation annotation = df.getOWLAnnotation(property, value);
        addOntologyAnnotation(ontology, annotation);
    }

    /**
     * Annotate the ontology with the annotation.
     *
     * @param ontology the ontology to modify
     * @param annotation the annotation to add
     */
    public static void addOntologyAnnotation(OWLOntology ontology,
            OWLAnnotation annotation) {
        OWLOntologyManager manager = ontology.getOWLOntologyManager();
        AddOntologyAnnotation addition =
            new AddOntologyAnnotation(ontology, annotation);
        manager.applyChange(addition);
    }

    /**
     * Given an ontology, an axiom, a property IRI, and a value,
     * add an annotation to that axiom with that property and value.
     *
     * Note that as axioms are immutable,
     * the axiom is removed and replaced with a new one.
     *
     * @param ontology the ontology to modify
     * @param axiom the axiom to annotate
     * @param propertyIRI the IRI of the property to add
     * @param value the IRI or literal value to add
     */
    public static void addAxiomAnnotation(OWLOntology ontology, OWLAxiom axiom,
            IRI propertyIRI, OWLAnnotationValue value) {
        OWLOntologyManager manager = ontology.getOWLOntologyManager();
        OWLDataFactory df = manager.getOWLDataFactory();
        OWLAnnotationProperty property =
            df.getOWLAnnotationProperty(propertyIRI);
        OWLAnnotation annotation = df.getOWLAnnotation(property, value);
        addAxiomAnnotation(ontology, axiom, Collections.singleton(annotation));
    }

    /**
     * Given an ontology, an axiom, and a set of annotations,
     * annotate the axiom with the annotations in the ontology.
     *
     * Note that as axioms are immutable,
     * the axiom is removed and replaced with a new one.
     *
     * Only OWLSubClassOfAxiom is currently supported; any other axiom type
     * causes an UnsupportedOperationException.
     *
     * @param ontology the ontology to modify
     * @param axiom the axiom to annotate
     * @param annotations the set of annotation to add to the axiom
     */
    public static void addAxiomAnnotation(OWLOntology ontology, OWLAxiom axiom,
            Set<OWLAnnotation> annotations) {
        OWLOntologyManager manager = ontology.getOWLOntologyManager();
        OWLDataFactory factory = manager.getOWLDataFactory();
        OWLAxiom newAxiom;
        if (axiom instanceof OWLSubClassOfAxiom) {
            OWLSubClassOfAxiom x = ((OWLSubClassOfAxiom) axiom);
            newAxiom = factory.getOWLSubClassOfAxiom(x.getSubClass(),
                    x.getSuperClass(), annotations);
            logger.debug("ANNOTATED: " + newAxiom);
        } else {
            // TODO - See https://github.com/ontodev/robot/issues/67
            throw new UnsupportedOperationException(
                    "Cannot annotate axioms of type: " + axiom.getClass());
        }
        manager.removeAxiom(ontology, axiom);
        manager.addAxiom(ontology, newAxiom);
    }

    /**
     * Given an ontology, an annotation property IRI, and an annotation value,
     * annotate all axioms in the ontology with that property and value.
     *
     * @param ontology the ontology to modify
     * @param propertyIRI the IRI of the property to add
     * @param value the IRI or literal value to add
     */
    public static void addAxiomAnnotations(OWLOntology ontology,
            IRI propertyIRI, OWLAnnotationValue value) {
        for (OWLAxiom a : ontology.getAxioms()) {
            addAxiomAnnotation(ontology, a, propertyIRI, value);
        }
    }
}
|
added more logging for reasoner command
|
robot-core/src/main/java/org/obolibrary/robot/OntologyHelper.java
|
added more logging for reasoner command
|
|
Java
|
mit
|
4cf1c10d5349477faf1347407dd0e8218e4f3672
| 0
|
CoreNetwork/Cornel
|
src/test/java/us/core_network/cornel/strings/TimeFormatterTest.java
|
package us.core_network.cornel.strings;
import java.sql.Time;
import org.junit.Test;
import static org.junit.Assert.*;
public class TimeFormatterTest
{
    @Test
    public void testFormatting() throws Exception
    {
        // Template with <Unit> placeholders and <S>/<s> pluralization markers.
        String string = "<Months> month<S> <Weeks> week<s> <Days> day<S> <Hours> hour<S> <Minutes> Minutes<S> <Seconds> second<S>";
        // Smoke test: constructing the formatter with a zero duration
        // must not throw.
        // NOTE(review): no assertions are made and the instance is unused —
        // presumably a placeholder to be expanded; confirm intent.
        TimeFormatter timeFormatter = new TimeFormatter(string, 0);
    }
}
|
Deleting TimeFormatterTest for now.
|
src/test/java/us/core_network/cornel/strings/TimeFormatterTest.java
|
Deleting TimeFormatterTest for now.
|
||
Java
|
mit
|
d13809271b3715de8230d0cfb3a911958bf49a6b
| 0
|
andreytim/jafar
|
package com.andreytim.jafar.problems.sortsearch;
import java.util.Arrays;
/**
 * Given an array of integers, write a method to find indices m and n such that
 * if you sort the elements m through n, the entire array would be sorted.
 * Minimize n - m (that is, find the smallest such sequence).
 * CtCI, 17.6
 *
 * Created by shpolsky on 23.11.14.
 */
public class P710_MinSubarrayToSort {

    /**
     * Returns {m, n} such that sorting arr[m..n] sorts the whole array,
     * or {0, 0} if the array is already sorted.
     * O(n log n): compare against a sorted copy and scan in from both ends.
     * Check the book for the linear but longer solution.
     *
     * @param arr the input array; must be non-null and non-empty
     * @return a two-element array {m, n}
     * @throws IllegalArgumentException if arr is null or empty
     */
    public int[] minSubarrayToSort(int[] arr) {
        if (arr == null || arr.length == 0) {
            throw new IllegalArgumentException();
        }
        int[] sorted = Arrays.copyOf(arr, arr.length);
        Arrays.sort(sorted);
        // First index that disagrees with the sorted order.
        int i = 0;
        while (i < arr.length && arr[i] == sorted[i]) {
            i++;
        }
        if (i == arr.length) {
            return new int[]{0, 0}; // sorted already
        }
        // Last index that disagrees with the sorted order.
        // BUG FIX: the guard previously re-tested `i < arr.length` (a
        // copy-paste from the loop above) instead of bounding `j`. It could
        // not underflow here only because a mismatch is known to exist at
        // index i; guard the decremented variable explicitly.
        int j = arr.length - 1;
        while (j >= 0 && arr[j] == sorted[j]) {
            j--;
        }
        return new int[]{i, j};
    }

    // Prints the result for one input, for manual inspection.
    private static void test(int[] arr) {
        System.out.printf("Input: %s; Result: %s\n", Arrays.toString(arr),
                Arrays.toString(new P710_MinSubarrayToSort().minSubarrayToSort(arr)));
    }

    public static void main(String[] args) {
        test(new int[]{ 1 });
        test(new int[]{ 1, 2 });
        test(new int[]{ 2, 1 });
        test(new int[]{ 1, 2, 3 });
        test(new int[]{ 1, 3, 2 });
        test(new int[]{ 2, 1, 3 });
        test(new int[]{ 1, 3, 2, 4 });
        test(new int[]{ 1, 2, 4, 7, 10, 11, 7, 12, 6, 7, 16, 18, 19 });
    }
}
|
problems/src/main/java/com/andreytim/jafar/problems/sortsearch/P710_MinSubarrayToSort.java
|
package com.andreytim.jafar.problems.sortsearch;
import java.util.Arrays;
/**
* Given an array of integers, write a method to find indices m and n such that
* if you sort the elements m through n, the entire array would be sorted.
* Minimize n - m (that is, find the smallest such sequence).
* CtCI, 17.6
*
* Created by shpolsky on 23.11.14.
*/
public class P710_MinSubarrayToSort {
    /**
     * Finds the smallest index pair {lo, hi} such that sorting the slice
     * lo..hi leaves the entire array sorted; {0, 0} means already sorted.
     * O(n log n) via comparison with a sorted reference copy (the book
     * describes a linear but much longer variant).
     */
    public int[] minSubarrayToSort(int[] arr) {
        if (arr == null || arr.length == 0)
            throw new IllegalArgumentException();
        int[] reference = Arrays.copyOf(arr, arr.length);
        Arrays.sort(reference);
        int lo = 0;
        while (lo < arr.length && arr[lo] == reference[lo]) {
            lo++;
        }
        if (lo == arr.length) {
            return new int[]{0, 0}; // nothing out of place
        }
        int hi = arr.length - 1;
        while (lo < arr.length && arr[hi] == reference[hi]) {
            hi--;
        }
        return new int[]{lo, hi};
    }

    // Prints one input together with the computed index pair.
    private static void test(int[] input) {
        int[] result = new P710_MinSubarrayToSort().minSubarrayToSort(input);
        System.out.printf("Input: %s; Result: %s\n", Arrays.toString(input), Arrays.toString(result));
    }

    public static void main(String[] args) {
        test(new int[]{ 1 });
        test(new int[]{ 1, 2 });
        test(new int[]{ 2, 1 });
        test(new int[]{ 1, 2, 3 });
        test(new int[]{ 1, 3, 2 });
        test(new int[]{ 2, 1, 3 });
        test(new int[]{ 1, 3, 2, 4 });
        test(new int[]{ 1, 2, 4, 7, 10, 11, 7, 12, 6, 7, 16, 18, 19 });
    }
}
|
Problems
|
problems/src/main/java/com/andreytim/jafar/problems/sortsearch/P710_MinSubarrayToSort.java
|
Problems
|
|
Java
|
mit
|
891eeba8fce8ab7047d2f5ce8573d7c961312a41
| 0
|
jurporan/Teaching-HEIGVD-AMT-2015-Project,jurporan/Teaching-HEIGVD-AMT-2015-Project,jurporan/Teaching-HEIGVD-AMT-2015-Project
|
package ch.heigvd.amt.gary.models.entities;
import java.io.Serializable;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
/**
* Entity representing a rule from an application.
*
*/
@Entity
public class Rule implements Serializable
{
    // Kind of reward the rule grants.
    // NOTE(review): the type name "rewardType" breaks UpperCamelCase;
    // renaming would touch every caller, so it is only flagged here.
    public enum rewardType {POINT_EVENT, BADGE_EVENT};

    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;                 // surrogate key assigned by the provider

    private String typeOfEvent;      // application-defined event identifier
    private long ruleParameter;      // numeric parameter (amount/threshold) for the rule
    private boolean isPenalty;       // true when the rule subtracts rather than awards
    private rewardType type;

    /**
     * No-argument constructor required by JPA: the persistence provider
     * instantiates entities reflectively when loading them from the
     * database. FIX: the original class declared only the parameterized
     * constructor, which makes the entity unusable for the provider.
     */
    protected Rule()
    {
    }

    public Rule(String typeOfEvent, long ruleParameter, boolean isPenalty, rewardType type)
    {
        this.typeOfEvent = typeOfEvent;
        this.ruleParameter = ruleParameter;
        this.isPenalty = isPenalty;
        this.type = type;
    }

    public Long getId()
    {
        return id;
    }

    public void setId(Long id)
    {
        this.id = id;
    }

    public String getTypeOfEvent()
    {
        return typeOfEvent;
    }

    public void setTypeOfEvent(String typeOfEvent)
    {
        this.typeOfEvent = typeOfEvent;
    }

    public long getRuleParameter()
    {
        return ruleParameter;
    }

    public void setRuleParameter(long ruleParameter)
    {
        this.ruleParameter = ruleParameter;
    }

    public boolean isPenalty()
    {
        return isPenalty;
    }

    // Setter counterpart of isPenalty(); keeps the original non-bean name
    // because existing callers use it.
    public void isPenalty(boolean isPenalty)
    {
        this.isPenalty = isPenalty;
    }

    public rewardType getEventType()
    {
        return type;
    }

    public void setEventType(rewardType type)
    {
        this.type = type;
    }

    // Identity is based solely on the database id; unsaved instances
    // (id == null) all hash to 0 and compare equal only to themselves
    // via the null checks in equals().
    @Override
    public int hashCode()
    {
        int hash = 0;
        hash += (id != null ? id.hashCode() : 0);
        return hash;
    }

    @Override
    public boolean equals(Object object)
    {
        if (!(object instanceof Rule))
        {
            return false;
        }
        Rule other = (Rule) object;
        return !((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id)));
    }

    @Override
    public String toString()
    {
        return "ch.heigvd.amt.gary.models.entities.Rules[ id=" + id + " ]";
    }
}
|
src/Gary/src/main/java/ch/heigvd/amt/gary/models/entities/Rule.java
|
package ch.heigvd.amt.gary.models.entities;
import java.io.Serializable;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
/**
* Entity representing a rule from an application.
*
*/
@Entity
public class Rule implements Serializable
{
    // Kind of reward the rule grants.
    // NOTE(review): type name "eventType" breaks UpperCamelCase; renaming
    // would touch every caller, so it is only flagged here.
    public enum eventType {POINT_EVENT, BADGE_EVENT};
    private static final long serialVersionUID = 1L;
    // Surrogate key assigned by the persistence provider.
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;
    // Application-defined identifier of the event this rule reacts to.
    private String typeOfEvent;
    // Numeric parameter (amount/threshold) attached to the rule.
    private long ruleParameter;
    // True when the rule subtracts rather than awards.
    private boolean isPenalty;
    private eventType type;
    // NOTE(review): JPA requires a no-argument constructor and none is
    // declared here, so the provider cannot instantiate this entity
    // reflectively — confirm against the persistence provider in use.
    public Rule(String typeOfEvent, long ruleParameter, boolean isPenalty, eventType type)
    {
        this.typeOfEvent = typeOfEvent;
        this.ruleParameter = ruleParameter;
        this.isPenalty = isPenalty;
        this.type = type;
    }
    public Long getId()
    {
        return id;
    }
    public void setId(Long id)
    {
        this.id = id;
    }
    public String getTypeOfEvent()
    {
        return typeOfEvent;
    }
    public void setTypeOfEvent(String typeOfEvent)
    {
        this.typeOfEvent = typeOfEvent;
    }
    public long getRuleParameter()
    {
        return ruleParameter;
    }
    public void setRuleParameter(long ruleParameter)
    {
        this.ruleParameter = ruleParameter;
    }
    public boolean isPenalty()
    {
        return isPenalty;
    }
    // Setter counterpart of isPenalty(); non-bean name kept for callers.
    public void isPenalty(boolean isPenalty)
    {
        this.isPenalty = isPenalty;
    }
    public eventType getEventType()
    {
        return type;
    }
    public void setEventType(eventType type)
    {
        this.type = type;
    }
    // Identity is based solely on the database id; unsaved instances
    // (id == null) hash to 0.
    @Override
    public int hashCode()
    {
        int hash = 0;
        hash += (id != null ? id.hashCode() : 0);
        return hash;
    }
    @Override
    public boolean equals(Object object)
    {
        if (!(object instanceof Rule))
        {
            return false;
        }
        Rule other = (Rule) object;
        return !((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id)));
    }
    @Override
    public String toString()
    {
        return "ch.heigvd.amt.gary.models.entities.Rules[ id=" + id + " ]";
    }
}
|
Changer le nom de l'enum pour qu'il soit plus explicite
|
src/Gary/src/main/java/ch/heigvd/amt/gary/models/entities/Rule.java
|
Changer le nom de l'enum pour qu'il soit plus explicite
|
|
Java
|
mit
|
9b7599429710d05883898d1f7946248bbbc9ab27
| 0
|
tobiatesan/serleena-android,tobiatesan/serleena-android
|
///////////////////////////////////////////////////////////////////////////////
//
// This file is part of Serleena.
//
// The MIT License (MIT)
//
// Copyright (C) 2015 Antonio Cavestro, Gabriele Pozzan, Matteo Lisotto,
// Nicola Mometto, Filippo Sestini, Tobia Tesan, Sebastiano Valle.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
///////////////////////////////////////////////////////////////////////////////
package com.kyloth.serleena;
import android.app.Activity;
import android.app.Fragment;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.BitmapDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import com.kyloth.serleena.common.GeoPoint;
import com.kyloth.serleena.common.IQuadrant;
import com.kyloth.serleena.common.UserPoint;
import com.kyloth.serleena.presentation.IMapPresenter;
/**
* A simple {@link Fragment} subclass.
* Activities that contain this fragment must implement the
* {@link OnFragmentInteractionListener} interface
* to handle interaction events.
* Use the {@link MapFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class MapFragment extends Fragment implements com.kyloth.serleena.presentation.IMapView {
    // TODO: Rename parameter arguments, choose names that match
    // the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
    private static final String ARG_PARAM1 = "param1";
    private static final String ARG_PARAM2 = "param2";

    // TODO: Rename and change types of parameters
    private String mParam1;
    private String mParam2;

    private GeoPoint userPosition;      // last position passed to setUserLocation()
    private BitmapDrawable mapRaster;   // raster currently used as the map background
    private IMapPresenter presenter;    // presenter attached via attachPresenter()
    private com.kyloth.serleena.OnFragmentInteractionListener mListener;

    /**
     * Use this factory method to create a new instance of
     * this fragment using the provided parameters.
     *
     * @param param1 Parameter 1.
     * @param param2 Parameter 2.
     * @return A new instance of fragment MapFragment.
     */
    // TODO: Rename and change types and number of parameters
    public static MapFragment newInstance(String param1, String param2) {
        MapFragment fragment = new MapFragment();
        Bundle args = new Bundle();
        args.putString(ARG_PARAM1, param1);
        args.putString(ARG_PARAM2, param2);
        fragment.setArguments(args);
        return fragment;
    }

    public MapFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            mParam1 = getArguments().getString(ARG_PARAM1);
            mParam2 = getArguments().getString(ARG_PARAM2);
        }
    }

    /**
     * Inflates the map layout and installs the default background raster.
     * FIX: the original looked R.id.map_image up on the Activity *before*
     * the fragment layout existed, which returns null and crashes with an
     * NPE; the view must be searched in the freshly inflated hierarchy.
     * Also uses the non-deprecated BitmapDrawable(Resources, Bitmap) ctor.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View root = inflater.inflate(R.layout.fragment_map, container, false);
        Bitmap temp = BitmapFactory.decodeResource(getResources(), R.drawable.background);
        mapRaster = new BitmapDrawable(getResources(), temp);
        ImageView map = (ImageView) root.findViewById(R.id.map_image);
        if (map != null) {
            map.setBackground(mapRaster);
        }
        return root;
    }

    // TODO: Rename method, update argument and hook method into UI event
    public void onButtonPressed(Uri uri) {
        if (mListener != null) {
            mListener.onFragmentInteraction(uri);
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        /*try {
            mListener = (OnFragmentInteractionListener) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException(activity.toString()
                    + " must implement OnFragmentInteractionListener");
        }*/
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mListener = null;
    }

    // Records the user's position; rendering of the marker is not yet
    // implemented here — only the field is updated.
    @Override
    public void setUserLocation(GeoPoint point) {
        userPosition = point;
    }

    /**
     * Replaces the map background with the raster of the given quadrant.
     * FIX: searches this fragment's own view hierarchy (getView()) instead
     * of the Activity, and tolerates being called before the view exists.
     */
    @Override
    public void displayQuadrant(IQuadrant q) {
        mapRaster = new BitmapDrawable(getResources(), q.getRaster());
        View root = getView();
        if (root == null) {
            return;
        }
        ImageView map = (ImageView) root.findViewById(R.id.map_image);
        if (map != null) {
            map.setBackground(mapRaster);
        }
    }

    // Not implemented yet: user points are accepted and ignored.
    @Override
    public void displayUP(Iterable<UserPoint> points) {
    }

    @Override
    public void attachPresenter(IMapPresenter presenter) {
        this.presenter = presenter;
    }
}
|
serleena/app/src/main/java/com/kyloth/serleena/MapFragment.java
|
///////////////////////////////////////////////////////////////////////////////
//
// This file is part of Serleena.
//
// The MIT License (MIT)
//
// Copyright (C) 2015 Antonio Cavestro, Gabriele Pozzan, Matteo Lisotto,
// Nicola Mometto, Filippo Sestini, Tobia Tesan, Sebastiano Valle.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
///////////////////////////////////////////////////////////////////////////////
package com.kyloth.serleena;
import android.app.Activity;
import android.net.Uri;
import android.os.Bundle;
import android.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
/**
* A simple {@link Fragment} subclass.
* Activities that contain this fragment must implement the
* {@link MapFragment.OnFragmentInteractionListener} interface
* to handle interaction events.
* Use the {@link MapFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class MapFragment extends Fragment {
    // NOTE(review): this is the unmodified Android Studio fragment template;
    // the TODOs below are the template's own placeholders.
    // TODO: Rename parameter arguments, choose names that match
    // the fragment initialization parameters, e.g. ARG_ITEM_NUMBER
    private static final String ARG_PARAM1 = "param1";
    private static final String ARG_PARAM2 = "param2";
    // TODO: Rename and change types of parameters
    private String mParam1;
    private String mParam2;
    // Host Activity cast to the interaction callback in onAttach().
    private OnFragmentInteractionListener mListener;
    /**
     * Use this factory method to create a new instance of
     * this fragment using the provided parameters.
     *
     * @param param1 Parameter 1.
     * @param param2 Parameter 2.
     * @return A new instance of fragment MapFragment.
     */
    // TODO: Rename and change types and number of parameters
    public static MapFragment newInstance(String param1, String param2) {
        MapFragment fragment = new MapFragment();
        Bundle args = new Bundle();
        args.putString(ARG_PARAM1, param1);
        args.putString(ARG_PARAM2, param2);
        fragment.setArguments(args);
        return fragment;
    }
    public MapFragment() {
        // Required empty public constructor
    }
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            mParam1 = getArguments().getString(ARG_PARAM1);
            mParam2 = getArguments().getString(ARG_PARAM2);
        }
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        return inflater.inflate(R.layout.fragment_map, container, false);
    }
    // TODO: Rename method, update argument and hook method into UI event
    public void onButtonPressed(Uri uri) {
        if (mListener != null) {
            mListener.onFragmentInteraction(uri);
        }
    }
    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        // Fails fast if the host Activity does not implement the callback.
        try {
            mListener = (OnFragmentInteractionListener) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException(activity.toString()
                    + " must implement OnFragmentInteractionListener");
        }
    }
    @Override
    public void onDetach() {
        super.onDetach();
        mListener = null;
    }
    /**
     * This interface must be implemented by activities that contain this
     * fragment to allow an interaction in this fragment to be communicated
     * to the activity and potentially other fragments contained in that
     * activity.
     * <p/>
     * See the Android Training lesson <a href=
     * "http://developer.android.com/training/basics/fragments/communicating.html"
     * >Communicating with Other Fragments</a> for more information.
     */
    public interface OnFragmentInteractionListener {
        // TODO: Update argument type and name
        public void onFragmentInteraction(Uri uri);
    }
}
|
SH: Mappa - Implementazione stupida di metodi di IMapView
|
serleena/app/src/main/java/com/kyloth/serleena/MapFragment.java
|
SH: Mappa - Implementazione stupida di metodi di IMapView
|
|
Java
|
mit
|
5cd621f21f116e40fbccd80c97f434559d231689
| 0
|
VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs,VerkhovtsovPavel/BSUIR_Labs
|
package main;
import java.util.Scanner;
public class Main {

    /*
     * Two-class linear separator trained on four 2-D points with an
     * error-correction scheme over the feature vector [1, 4x, 4y, 16xy].
     * Ported from a Delphi GUI program: the PaintBox/Edit/Memo calls
     * survive only as comments, and input now comes from stdin.
     */
    double[] ErmithQuot = { 1, 4, 4, 16 };      // Hermite expansion coefficients
    double[] WeightFactors = new double[4];      // weight vector being trained
    double[] CurW = new double[4];               // weighted feature terms of one sample
    double[] tmp = new double[4];                // feature vector of the current sample
    double[][] StudySample = new double[4][2];   // four training points (x, y)
    double[] TestSample = new double[2];         // point to classify
    int IterCount;                               // training epochs performed
    // Plotting state inherited from the Delphi UI (mostly unused here).
    double minX, maxX, minY, maxY, ComX, ComY, shx, shy, it, jt, previt,
            prevjt;
    private Scanner in = new Scanner(System.in);

    /**
     * Reads the four training points from stdin, runs the error-correction
     * loop, and (in commented-out Delphi code) would plot the separating
     * curve and show its textual formula.
     */
    private void Button1Click() {
        int i;
        boolean fnd;
        double cks;
        String func;
        for (i = 0; i < 4; i++) {
            WeightFactors[i] = 0;
        }
        System.out.println("Enter study points");
        StudySample[0][0] = in.nextInt();
        StudySample[0][1] = in.nextInt();
        StudySample[1][0] = in.nextInt();
        StudySample[1][1] = in.nextInt();
        StudySample[2][0] = in.nextInt();
        StudySample[2][1] = in.nextInt();
        StudySample[3][0] = in.nextInt();
        StudySample[3][1] = in.nextInt();
        i = 0;
        IterCount = 0;
        // Initialise the weights from the first training point's features.
        WeightFactors[0] = 1;
        WeightFactors[1] = 4 * StudySample[i][0];
        WeightFactors[2] = 4 * StudySample[i][1];
        WeightFactors[3] = 16 * StudySample[i][0] * StudySample[i][1];
        do {
            i = 0;
            // if IterCount=0 i=0 else i=1;
            fnd = false;
            do {
                CurW[0] = WeightFactors[0];
                CurW[1] = WeightFactors[1] * StudySample[i + 1][0];
                CurW[2] = WeightFactors[2] * StudySample[i + 1][1];
                CurW[3] = WeightFactors[3] * StudySample[i + 1][0]
                        * StudySample[i + 1][1];
                cks = 0;
                for (int k = 0; k < 4; k++)
                    cks = cks + CurW[k];
                // FIX: the original assigned all four feature terms to
                // tmp[0], leaving tmp[1..3] permanently zero, so the weight
                // update below only ever changed WeightFactors[0].
                tmp[0] = 1;
                tmp[1] = 4 * StudySample[i + 1][0];
                tmp[2] = 4 * StudySample[i + 1][1];
                tmp[3] = 16 * StudySample[i + 1][0] * StudySample[i + 1][1];
                // The /* ??? */ markers below are the original author's own
                // uncertainty about the thresholds; left in place on purpose.
                if ((i + 1 <= 3/* ??? */) && (cks < 0/* ??? */)) {
                    fnd = true;
                    for (int j = 0; j < 4; j++)
                        WeightFactors[j] = WeightFactors[j] + tmp[j];
                } else if ((i + 1 > 1/* ??? */) && (cks > 0/* ??? */)) {
                    fnd = true;
                    for (int j = 0; j < 4; j++)
                        WeightFactors[j] = WeightFactors[j] - tmp[j];
                }
                if (i < 3/* ??? */)
                    i++;
                else
                    i = 0;
            } while (i == 1);/* ??? */
            IterCount++;
        } while ((!fnd) || (IterCount == 50));
        if ((cks <= 0) || (IterCount > 49)) {/* ??? */
            System.out
                    .println("With this training set build a separating function impossible");
        }
        // else
        {
            // for i=1 to 4 ShowMessage(FloatToStr(WeightFactors[i]));
            minX = StudySample[0][0];
            maxX = minX;
            minY = StudySample[0][1];
            maxY = minY;
            for (i = 1; i < 4; i++) {
                if (minX > StudySample[i][0])
                    minX = StudySample[i][0];
                if (minY > StudySample[i][1])
                    minY = StudySample[i][1];
                if (maxX < StudySample[i][0])
                    maxX = StudySample[i][0];
                if (maxY < StudySample[i][1])
                    maxY = StudySample[i][1];
            }
            // Pad the bounding box and derive pixel-scaling factors for the
            // (commented-out) 545x369 Delphi canvas.
            maxX = maxX + 3;
            minX = minX - 3;
            maxY = maxY + 3;
            minY = minY - 3;
            ComX = 545 / (maxX - minX);
            ComY = 369 / (maxY - minY);
            // Draw some (axes maybe)
            /*
             * PaintBox1.Canvas.Pen.Color=clBlue;
             * PaintBox1.Canvas.MoveTo(0,370-Round(-comy*minY));
             * PaintBox1.Canvas.LineTo(545,370-Round(-comy*minY));
             * PaintBox1.Canvas.MoveTo(Round(-minX*comx),0);
             * PaintBox1.Canvas.LineTo(Round(-minX*comx),370);
             * PaintBox1.Canvas.Pen.Color=clBlack;
             * PaintBox1.Canvas.Brush.Color=clBlack;
             */
            it = minX;
            if ((WeightFactors[2] != 0) || (WeightFactors[3] != 0))
                jt = -(WeightFactors[0] + WeightFactors[1] * it)
                        / (WeightFactors[2] + WeightFactors[3] * it);
            else
                jt = -(WeightFactors[0] + WeightFactors[1] * it);
            /*
             * PaintBox1.Canvas.MoveTo(Round((it-minX)*ComX),370-Round((jt-minY)*
             * ComY));
             * Point lineStart = new Point((int)(it-minX)*ComX),370-(int)((jt-minY)*
             * ComY)));
             */
            // Sample the separating curve y(x) across the padded box.
            while (it <= maxX) {
                if (WeightFactors[3] != 0) {
                    // Skip the pole at x = -w2/w3 where the curve diverges.
                    if (it != -WeightFactors[2] / WeightFactors[3]) {
                        if ((WeightFactors[2] != 0) && (WeightFactors[3] != 0))
                            jt = -(WeightFactors[0] + WeightFactors[1] * it)
                                    / (WeightFactors[2] + WeightFactors[3] * it);
                        else
                            jt = -(WeightFactors[0] + WeightFactors[1] * it);
                    }
                } else if (WeightFactors[2] != 0)
                    jt = -(WeightFactors[0] + WeightFactors[1] * it)
                            / (WeightFactors[2] + WeightFactors[3] * it);
                else
                    jt = -(WeightFactors[0] + WeightFactors[1] * it);
                /*
                 * if ((jt<=maxY)&&(jt>=minY))
                 * PaintBox1.Canvas.LineTo(Round((it-
                 * minX)*ComX),370-Round((jt-minY)*ComY));
                 * drawLine(startPoint.x, startPoint.y,(int)(it-
                 * minX)*ComX), 370-(int)((jt-minY)*ComY))
                 * else
                 * PaintBox1.Canvas.
                 * MoveTo(Round((it-minX)*ComX),370-Round((jt-minY)*ComY));
                 * startPoint = new Point((int) (it-minX)*ComX), 370-(int)((jt-minY)*ComY))
                 */
                it = it + 0.001;
            }
            if (WeightFactors[0] / WeightFactors[2] == WeightFactors[1]
                    / WeightFactors[3]) {
                /*
                 * PaintBox1.Canvas.MoveTo(Round((-minX+WeightFactors[1]/
                 * WeightFactors[3])*ComX),0);
                 * PaintBox1.Canvas.LineTo(Round((-minX
                 * +WeightFactors[1]/WeightFactors[3])*ComX),370);
                 */
            }
            /*
             * PaintBox1.Canvas.Pen.Color=clGreen;
             * PaintBox1.Canvas.Brush.Color=clGreen;
             */
            for (i = 0; i < 4; i++) {
                if (i == 2) {
                    /*
                     * PaintBox1.Canvas.Pen.Color=clRed;
                     * PaintBox1.Canvas.Brush.Color=clRed;
                     */
                }
                /*
                 * PaintBox1.Canvas.Ellipse(Round((StudySample[i][0]-minX)*ComX)-
                 * 3
                 * ,370-Round((StudySample[i][1]-minY)*ComY)-3,Round((StudySample
                 * [
                 * i][0]-minX)*ComX)+3,370-Round((StudySample[i][1]-minY)*ComY)+
                 * 3);
                 */
            }
            // Build the human-readable formula y = -(w0 + w1*x) / (w2 + w3*x).
            func = "y=-(" + String.valueOf(WeightFactors[0]);
            if (WeightFactors[1] >= 0)
                func = func + '+' + String.valueOf(WeightFactors[1]);
            else
                func = func + String.valueOf(WeightFactors[1]);
            func = func + "x)";
            if (WeightFactors[2] != 0) {
                func = func + "/(" + String.valueOf(WeightFactors[2]);
                if (WeightFactors[3] < 0)
                    func = func + String.valueOf(WeightFactors[3]) + "x";
                else if (WeightFactors[3] > 0)
                    func = func + '+' + String.valueOf(WeightFactors[3]) + "x";
            } else if (WeightFactors[3] != 0)
                func = func + "/(" + String.valueOf(WeightFactors[3]) + "x";
            func = func + ')';
            /*
             * Memo1.Clear; Memo1.Lines.Add(func);
             */
        }
    }

    /**
     * Reads a test point from stdin and classifies it with the trained
     * weights: cks <= 0 means class 2 (red in the Delphi UI), otherwise
     * class 1 (green); cks == 0 is the decision boundary.
     */
    private void Button2Click() {
        double cks;
        int i;
        TestSample[0] = in.nextInt();
        TestSample[1] = in.nextInt();
        // FIX: the original overwrote CurW[0] four times, so the
        // classification sum reused stale CurW[1..3] left over from
        // training instead of the test point's own feature terms.
        CurW[0] = WeightFactors[0];
        CurW[1] = WeightFactors[1] * TestSample[0];
        CurW[2] = WeightFactors[2] * TestSample[1];
        CurW[3] = WeightFactors[3] * TestSample[0] * TestSample[1];
        cks = 0;
        for (i = 0; i < 4; i++)
            cks = cks + CurW[i];
        if (cks <= 0) {
            /*
             * if (cks==0) Edit11.Text='Boundary' (Russian "Граница" in the
             * original Delphi source);
             */
            /*
             * Edit11.Text='2'; PaintBox1.Canvas.Pen.Color=clRed;
             * PaintBox1.Canvas.Brush.Color=clRed;
             */
        } else {
            /*
             * Edit11.Text='1'; PaintBox1.Canvas.Pen.Color=clGreen;
             * PaintBox1.Canvas.Brush.Color=clGreen;
             */
        }
        /*
         * PaintBox1.Canvas.Ellipse(Round((TestSample[0]-minX)*ComX)-3,370-Round(
         * (
         * TestSample[1]-minY)*ComY)-3,Round((TestSample[0]-minX)*ComX)+3,370-Round
         * ((TestSample[1]-minY)*ComY)+3);
         */
    }
}
|
src/main/Main.java
|
package main;
import java.util.Scanner;
public class Main {
    // NOTE(review): port of a Delphi form program; the commented blocks
    // reference the original PaintBox/Edit/Memo controls.
    double[] ErmithQuot = { 1, 4, 4, 16 };
    double[] WeightFactors = new double[4];
    double[] CurW = new double[4];
    double[] tmp = new double[4];
    double[][] StudySample = new double[4][2];
    double[] TestSample = new double[2];
    int IterCount;
    double minX, maxX, minY, maxY, ComX, ComY, shx, shy, it, jt, previt, prevjt;
    private Scanner in = new Scanner(System.in);
    // Reads four training points from stdin and runs an error-correction
    // loop over the feature vector [1, 4x, 4y, 16xy].
    private void Button1Click() {
        int i;
        boolean fnd, prox;
        double cks;
        String func;
        for (i = 0; i < 4; i++) {
            WeightFactors[i] = 0;
        }
        System.out.println("Enter study points");
        StudySample[0][0] = in.nextInt();
        StudySample[0][1] = in.nextInt();
        StudySample[1][0] = in.nextInt();
        StudySample[1][1] = in.nextInt();
        StudySample[2][0] = in.nextInt();
        StudySample[2][1] = in.nextInt();
        StudySample[3][0] = in.nextInt();
        StudySample[3][1] = in.nextInt();
        i = 0;
        IterCount = 0;
        // Weights start from the first training point's feature vector.
        WeightFactors[0] = 1;
        WeightFactors[1] = 4 * StudySample[i][0];
        WeightFactors[2] = 4 * StudySample[i][1];
        WeightFactors[3] = 16 * StudySample[i][0] * StudySample[i][1];
        do {
            i = 0;
            // if IterCount=0 i=0 else i=1;
            fnd = false;
            do {
                CurW[0] = WeightFactors[0];
                CurW[1] = WeightFactors[1] * StudySample[i + 1][0];
                CurW[2] = WeightFactors[2] * StudySample[i + 1][1];
                CurW[3] = WeightFactors[3] * StudySample[i + 1][0] * StudySample[i + 1][1];
                cks = 0;
                for (int k = 0; k < 4; k++)
                    cks = cks + CurW[k];
                // NOTE(review): all four assignments target tmp[0], so
                // tmp[1..3] stay zero and the weight update below only ever
                // changes WeightFactors[0] — almost certainly meant
                // tmp[0], tmp[1], tmp[2], tmp[3]. Flagged, not changed.
                tmp[0] = 1;
                tmp[0] = 4 * StudySample[i + 1][0];
                tmp[0] = 4 * StudySample[i + 1][1];
                tmp[0] = 16 * StudySample[i + 1][0] * StudySample[i + 1][1];
                if ((i + 1 <= 3/*???*/) && (cks < 0/*???*/)) {
                    fnd = true;
                    for (int j = 0; j < 4; j++)
                        WeightFactors[j] = WeightFactors[j] + tmp[j];
                } else if ((i + 1 > 1/*???*/) && (cks > 0/*???*/)) {
                    fnd = true;
                    for (int j = 0; j < 4; j++)
                        WeightFactors[j] = WeightFactors[j] - tmp[j];
                }
                ;
                if (i < 3/*???*/)
                    i++;
                else
                    i = 0;
            } while (i == 1);/*???*/
            IterCount++;
        } while ((!fnd) || (IterCount == 50));
        // cks here holds the value from the last inner-loop iteration.
        if ((cks <= 0) || (IterCount > 49)) {/*???*/
            System.out.println("With this training set build a separating function impossible");
        }
        // else
        {
            // for i=1 to 4 ShowMessage(FloatToStr(WeightFactors[i]));
            // Bounding box of the training points, padded by 3 units.
            minX = StudySample[0][0];
            maxX = minX;
            minY = StudySample[0][1];
            maxY = minY;
            for (i = 1; i < 4; i++) {
                if (minX > StudySample[i][0])
                    minX = StudySample[i][0];
                if (minY > StudySample[i][1])
                    minY = StudySample[i][1];
                if (maxX < StudySample[i][0])
                    maxX = StudySample[i][0];
                if (maxY < StudySample[i][1])
                    maxY = StudySample[i][1];
            }
            maxX = maxX + 3;
            minX = minX - 3;
            maxY = maxY + 3;
            minY = minY - 3;
            // Pixel-scaling factors for the (commented-out) 545x369 canvas.
            ComX = 545 / (maxX - minX);
            ComY = 369 / (maxY - minY);
            // Draw some
            /*
             * PaintBox1.Canvas.Pen.Color=clBlue;
             * PaintBox1.Canvas.MoveTo(0,370-Round(-comy*minY));
             * PaintBox1.Canvas.LineTo(545,370-Round(-comy*minY));
             * PaintBox1.Canvas.MoveTo(Round(-minX*comx),0);
             * PaintBox1.Canvas.LineTo(Round(-minX*comx),370);
             * PaintBox1.Canvas.Pen.Color=clBlack;
             * PaintBox1.Canvas.Brush.Color=clBlack;
             */
            it = minX;
            if ((WeightFactors[2] != 0) || (WeightFactors[3] != 0))
                jt = -(WeightFactors[0] + WeightFactors[1] * it) / (WeightFactors[2] + WeightFactors[3] * it);
            else
                jt = -(WeightFactors[0] + WeightFactors[1] * it);
            /*
             * PaintBox1.Canvas.MoveTo(Round((it-minX)*ComX),370-Round((jt-minY)*
             * ComY));
             */
            // Sample the separating curve y(x) across the padded box.
            while (it <= maxX) {
                if (WeightFactors[3] != 0) {
                    // Skips the pole at x = -w2/w3.
                    if (it != -WeightFactors[2] / WeightFactors[3]) {
                        if ((WeightFactors[2] != 0) && (WeightFactors[3] != 0))
                            jt = -(WeightFactors[0] + WeightFactors[1] * it) / (WeightFactors[2] + WeightFactors[3] * it);
                        else
                            jt = -(WeightFactors[0] + WeightFactors[1] * it);
                    }
                } else if (WeightFactors[2] != 0)
                    jt = -(WeightFactors[0] + WeightFactors[1] * it) / (WeightFactors[2] + WeightFactors[3] * it);
                else
                    jt = -(WeightFactors[0] + WeightFactors[1] * it);
                /*
                 * if ((jt<=maxY)&&(jt>=minY))
                 * PaintBox1.Canvas.LineTo(Round((it-
                 * minX)*ComX),370-Round((jt-minY)*ComY)); else
                 * PaintBox1.Canvas.
                 * MoveTo(Round((it-minX)*ComX),370-Round((jt-minY)*ComY));
                 */
                it = it + 0.001;
            }
            ;
            if (WeightFactors[0] / WeightFactors[2] == WeightFactors[1] / WeightFactors[3]) {
                /*
                 * PaintBox1.Canvas.MoveTo(Round((-minX+WeightFactors[1]/
                 * WeightFactors[3])*ComX),0);
                 * PaintBox1.Canvas.LineTo(Round((-minX
                 * +WeightFactors[1]/WeightFactors[3])*ComX),370);
                 */
            }
            ;
            /*
             * PaintBox1.Canvas.Pen.Color=clGreen;
             * PaintBox1.Canvas.Brush.Color=clGreen;
             */
            for (i = 0; i < 4; i++) {
                if (i == 2) {
                    /*
                     * PaintBox1.Canvas.Pen.Color=clRed;
                     * PaintBox1.Canvas.Brush.Color=clRed;
                     */
                }
                /*
                 * PaintBox1.Canvas.Ellipse(Round((StudySample[i][0]-minX)*ComX)-
                 * 3
                 * ,370-Round((StudySample[i][1]-minY)*ComY)-3,Round((StudySample
                 * [
                 * i][0]-minX)*ComX)+3,370-Round((StudySample[i][1]-minY)*ComY)+
                 * 3);
                 */
            }
            // Builds the textual formula y = -(w0 + w1*x) / (w2 + w3*x).
            func = "y=-(" + String.valueOf(WeightFactors[0]);
            if (WeightFactors[1] >= 0)
                func = func + '+' + String.valueOf(WeightFactors[1]);
            else
                func = func + String.valueOf(WeightFactors[1]);
            func = func + "x)";
            if (WeightFactors[2] != 0) {
                func = func + "/(" + String.valueOf(WeightFactors[2]);
                if (WeightFactors[3] < 0)
                    func = func + String.valueOf(WeightFactors[3]) + "x";
                else if (WeightFactors[3] > 0)
                    func = func + '+' + String.valueOf(WeightFactors[3]) + "x";
            } else if (WeightFactors[3] != 0)
                func = func + "/(" + String.valueOf(WeightFactors[3]) + "x";
            func = func + ')';
            /*
             * Memo1.Clear; Memo1.Lines.Add(func);
             */
        }
    }
    // Classifies TestSample with the trained weights.
    // NOTE(review): TestSample is never read in this version (the Delphi
    // StrToFloat reads are commented out), so it classifies {0, 0}.
    private void Button2Click() {
        double cks;
        int i;
        /*
         * TestSample[1]=StrToFloat(Edit9.Text);
         * TestSample[2]=StrToFloat(Edit10.Text);
         */
        // NOTE(review): CurW[0] is overwritten four times; CurW[1..3] keep
        // stale values from training — almost certainly meant CurW[0..3].
        CurW[0] = WeightFactors[0];
        CurW[0] = WeightFactors[1] * TestSample[0];
        CurW[0] = WeightFactors[2] * TestSample[1];
        CurW[0] = WeightFactors[3] * TestSample[0] * TestSample[1];
        cks = 0;
        for (i = 0; i < 4; i++)
            cks = cks + CurW[i];
        if (cks <= 0) {
            /*
             * if (cks==0) Edit11.Text='Boundary' (Russian "Граница" in the
             * original Delphi source);
             */
            /*
             * Edit11.Text='2'; PaintBox1.Canvas.Pen.Color=clRed;
             * PaintBox1.Canvas.Brush.Color=clRed;
             */
        } else {
            /*
             * Edit11.Text='1'; PaintBox1.Canvas.Pen.Color=clGreen;
             * PaintBox1.Canvas.Brush.Color=clGreen;
             */
        }
        /*
         * PaintBox1.Canvas.Ellipse(Round((TestSample[0]-minX)*ComX)-3,370-Round(
         * (
         * TestSample[1]-minY)*ComY)-3,Round((TestSample[0]-minX)*ComX)+3,370-Round
         * ((TestSample[1]-minY)*ComY)+3);
         */
    }
}
|
Small comments
|
src/main/Main.java
|
Small comments
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.