gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (c) 2012-2017: Christopher J. Brody (aka Chris Brody)
* Copyright (c) 2005-2010, Nitobi Software Inc.
* Copyright (c) 2010, IBM Corporation
*/
package io.sqlc;
import android.annotation.SuppressLint;
import android.util.Log;
import java.io.File;
import java.lang.IllegalArgumentException;
import java.lang.Number;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
public class SQLitePlugin extends CordovaPlugin {
/**
* Multiple database runner map (static).
* NOTE: no public static accessor to db (runner) map since it would not work with db threading.
* FUTURE put DBRunner into a public class that can provide external accessor.
*/
static ConcurrentHashMap<String, DBRunner> dbrmap = new ConcurrentHashMap<String, DBRunner>();
/**
* NOTE: Using default constructor, no explicit constructor.
*/
/**
* Executes the request and returns PluginResult.
*
* @param actionAsString The action to execute.
* @param args JSONArry of arguments for the plugin.
* @param cbc Callback context from Cordova API
* @return Whether the action was valid.
*/
@Override
public boolean execute(String actionAsString, JSONArray args, CallbackContext cbc) {
Action action;
try {
action = Action.valueOf(actionAsString);
} catch (IllegalArgumentException e) {
// shouldn't ever happen
Log.e(SQLitePlugin.class.getSimpleName(), "unexpected error", e);
return false;
}
try {
return executeAndPossiblyThrow(action, args, cbc);
} catch (JSONException e) {
// TODO: signal JSON problem to JS
Log.e(SQLitePlugin.class.getSimpleName(), "unexpected error", e);
return false;
}
}
private boolean executeAndPossiblyThrow(Action action, JSONArray args, CallbackContext cbc)
throws JSONException {
boolean status = true;
JSONObject o;
String echo_value;
String dbname;
switch (action) {
case echoStringValue:
o = args.getJSONObject(0);
echo_value = o.getString("value");
cbc.success(echo_value);
break;
case open:
o = args.getJSONObject(0);
dbname = o.getString("name");
// open database and start reading its queue
this.startDatabase(dbname, o, cbc);
break;
case close:
o = args.getJSONObject(0);
dbname = o.getString("path");
// put request in the q to close the db
this.closeDatabase(dbname, cbc);
break;
case delete:
o = args.getJSONObject(0);
dbname = o.getString("path");
deleteDatabase(dbname, cbc);
break;
case executeSqlBatch:
case backgroundExecuteSqlBatch:
JSONObject allargs = args.getJSONObject(0);
JSONObject dbargs = allargs.getJSONObject("dbargs");
dbname = dbargs.getString("dbname");
JSONArray txargs = allargs.getJSONArray("executes");
if (txargs.isNull(0)) {
cbc.error("missing executes list");
} else {
int len = txargs.length();
String[] queries = new String[len];
JSONArray[] jsonparams = new JSONArray[len];
for (int i = 0; i < len; i++) {
JSONObject a = txargs.getJSONObject(i);
queries[i] = a.getString("sql");
jsonparams[i] = a.getJSONArray("params");
}
// put db query in the queue to be executed in the db thread:
DBQuery q = new DBQuery(queries, jsonparams, cbc);
DBRunner r = dbrmap.get(dbname);
if (r != null) {
try {
r.q.put(q);
} catch(Exception e) {
Log.e(SQLitePlugin.class.getSimpleName(), "couldn't add to queue", e);
cbc.error("couldn't add to queue");
}
} else {
cbc.error("database not open");
}
}
break;
}
return status;
}
/**
* Clean up and close all open databases.
*/
@Override
public void onDestroy() {
while (!dbrmap.isEmpty()) {
String dbname = dbrmap.keySet().iterator().next();
this.closeDatabaseNow(dbname);
DBRunner r = dbrmap.get(dbname);
try {
// stop the db runner thread:
r.q.put(new DBQuery());
} catch(Exception e) {
Log.e(SQLitePlugin.class.getSimpleName(), "couldn't stop db thread", e);
}
dbrmap.remove(dbname);
}
}
// --------------------------------------------------------------------------
// LOCAL METHODS
// --------------------------------------------------------------------------
private void startDatabase(String dbname, JSONObject options, CallbackContext cbc) {
// TODO: is it an issue that we can orphan an existing thread? What should we do here?
// If we re-use the existing DBRunner it might be in the process of closing...
DBRunner r = dbrmap.get(dbname);
// Brody TODO: It may be better to terminate the existing db thread here & start a new one, instead.
if (r != null) {
// don't orphan the existing thread; just re-open the existing database.
// In the worst case it might be in the process of closing, but even that's less serious
// than orphaning the old DBRunner.
cbc.success();
} else {
r = new DBRunner(dbname, options, cbc);
dbrmap.put(dbname, r);
this.cordova.getThreadPool().execute(r);
}
}
/**
* Open a database.
*
* @param dbName The name of the database file
*/
private SQLiteAndroidDatabase openDatabase(String dbname, CallbackContext cbc, boolean old_impl) throws Exception {
try {
// ASSUMPTION: no db (connection/handle) is already stored in the map
// [should be true according to the code in DBRunner.run()]
File dbfile = this.cordova.getActivity().getDatabasePath(dbname);
if (!dbfile.exists()) {
dbfile.getParentFile().mkdirs();
}
Log.v("info", "Open sqlite db: " + dbfile.getAbsolutePath());
SQLiteAndroidDatabase mydb = old_impl ? new SQLiteAndroidDatabase() : new SQLiteConnectorDatabase();
mydb.open(dbfile);
if (cbc != null) // XXX Android locking/closing BUG workaround
cbc.success();
return mydb;
} catch (Exception e) {
if (cbc != null) // XXX Android locking/closing BUG workaround
cbc.error("can't open database " + e);
throw e;
}
}
/**
* Close a database (in another thread).
*
* @param dbName The name of the database file
*/
private void closeDatabase(String dbname, CallbackContext cbc) {
DBRunner r = dbrmap.get(dbname);
if (r != null) {
try {
r.q.put(new DBQuery(false, cbc));
} catch(Exception e) {
if (cbc != null) {
cbc.error("couldn't close database" + e);
}
Log.e(SQLitePlugin.class.getSimpleName(), "couldn't close database", e);
}
} else {
if (cbc != null) {
cbc.success();
}
}
}
/**
* Close a database (in the current thread).
*
* @param dbname The name of the database file
*/
private void closeDatabaseNow(String dbname) {
DBRunner r = dbrmap.get(dbname);
if (r != null) {
SQLiteAndroidDatabase mydb = r.mydb;
if (mydb != null)
mydb.closeDatabaseNow();
}
}
private void deleteDatabase(String dbname, CallbackContext cbc) {
DBRunner r = dbrmap.get(dbname);
if (r != null) {
try {
r.q.put(new DBQuery(true, cbc));
} catch(Exception e) {
if (cbc != null) {
cbc.error("couldn't close database" + e);
}
Log.e(SQLitePlugin.class.getSimpleName(), "couldn't close database", e);
}
} else {
boolean deleteResult = this.deleteDatabaseNow(dbname);
if (deleteResult) {
cbc.success();
} else {
cbc.error("couldn't delete database");
}
}
}
/**
* Delete a database.
*
* @param dbName The name of the database file
*
* @return true if successful or false if an exception was encountered
*/
private boolean deleteDatabaseNow(String dbname) {
File dbfile = this.cordova.getActivity().getDatabasePath(dbname);
try {
return cordova.getActivity().deleteDatabase(dbfile.getAbsolutePath());
} catch (Exception e) {
Log.e(SQLitePlugin.class.getSimpleName(), "couldn't delete database", e);
return false;
}
}
private class DBRunner implements Runnable {
final String dbname;
private boolean oldImpl;
private boolean bugWorkaround;
final BlockingQueue<DBQuery> q;
final CallbackContext openCbc;
SQLiteAndroidDatabase mydb;
DBRunner(final String dbname, JSONObject options, CallbackContext cbc) {
this.dbname = dbname;
this.oldImpl = options.has("androidOldDatabaseImplementation");
Log.v(SQLitePlugin.class.getSimpleName(), "Android db implementation: built-in android.database.sqlite package");
this.bugWorkaround = this.oldImpl && options.has("androidBugWorkaround");
if (this.bugWorkaround)
Log.v(SQLitePlugin.class.getSimpleName(), "Android db closing/locking workaround applied");
this.q = new LinkedBlockingQueue<DBQuery>();
this.openCbc = cbc;
}
public void run() {
try {
this.mydb = openDatabase(dbname, this.openCbc, this.oldImpl);
} catch (Exception e) {
Log.e(SQLitePlugin.class.getSimpleName(), "unexpected error, stopping db thread", e);
dbrmap.remove(dbname);
return;
}
DBQuery dbq = null;
try {
dbq = q.take();
while (!dbq.stop) {
mydb.executeSqlBatch(dbq.queries, dbq.jsonparams, dbq.cbc);
if (this.bugWorkaround && dbq.queries.length == 1 && dbq.queries[0] == "COMMIT")
mydb.bugWorkaround();
dbq = q.take();
}
} catch (Exception e) {
Log.e(SQLitePlugin.class.getSimpleName(), "unexpected error", e);
}
if (dbq != null && dbq.close) {
try {
closeDatabaseNow(dbname);
dbrmap.remove(dbname); // (should) remove ourself
if (!dbq.delete) {
dbq.cbc.success();
} else {
try {
boolean deleteResult = deleteDatabaseNow(dbname);
if (deleteResult) {
dbq.cbc.success();
} else {
dbq.cbc.error("couldn't delete database");
}
} catch (Exception e) {
Log.e(SQLitePlugin.class.getSimpleName(), "couldn't delete database", e);
dbq.cbc.error("couldn't delete database: " + e);
}
}
} catch (Exception e) {
Log.e(SQLitePlugin.class.getSimpleName(), "couldn't close database", e);
if (dbq.cbc != null) {
dbq.cbc.error("couldn't close database: " + e);
}
}
}
}
}
private final class DBQuery {
// XXX TODO replace with DBRunner action enum:
final boolean stop;
final boolean close;
final boolean delete;
final String[] queries;
final JSONArray[] jsonparams;
final CallbackContext cbc;
DBQuery(String[] myqueries, JSONArray[] params, CallbackContext c) {
this.stop = false;
this.close = false;
this.delete = false;
this.queries = myqueries;
this.jsonparams = params;
this.cbc = c;
}
DBQuery(boolean delete, CallbackContext cbc) {
this.stop = true;
this.close = true;
this.delete = delete;
this.queries = null;
this.jsonparams = null;
this.cbc = cbc;
}
// signal the DBRunner thread to stop:
DBQuery() {
this.stop = true;
this.close = false;
this.delete = false;
this.queries = null;
this.jsonparams = null;
this.cbc = null;
}
}
private static enum Action {
echoStringValue,
open,
close,
delete,
executeSqlBatch,
backgroundExecuteSqlBatch,
}
}
/* vim: set expandtab : */
| |
/**
*
*/
package ur_rna.GUITester;
import ur_rna.GUITester.GuiTools.GuiAppManager;
import ur_rna.Utilities.*;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* The main entry-point for the test application.
*/
public class Startup {
    //package-private
    static final AppOptions options = new AppOptions();
    static final AppLog log = new AppLog();

    // Matches a suite section-header line of the form "<--title-->".
    // Compiled once instead of on every call to splitScriptSuite().
    private static final Pattern SUITE_HEADER = Pattern.compile("<--([^\\n]*)-->");

    //package-private
    static class AppOptions {
        public Verbosity logVerbosity = Verbosity.Info;
        public final ArrayList<Pair<String,ScriptSource>> scriptList = new ArrayList<>();
        public boolean parseOnly;
    }

    /**
     * Application entry point: parses command-line arguments, loads each requested
     * script (inline, single file, or suite file) and runs it against the GUI
     * application under test. Exits with a non-zero code on any script failure.
     *
     * @param args Command line arguments.
     */
    public static void main(String[] args) {
        // Parse the command line to set fields in the AppOptions object.
        // This includes adding script arguments to the AppOptions scriptList.
        if (!parseArgs(args)) return;
        log.setVerbosity(options.logVerbosity);

        if (options.scriptList.isEmpty()) {
            log.error("No test scripts were specified.");
            return;
        }

        int errorCode = 0;
        GuiAppManager app = new GuiAppManager(log);
        try {
            log.debug("UI Testing started.");
            for (Pair<String, ScriptSource> s : options.scriptList) {
                String fileName = s.getKey();
                ScriptSource srcType = s.getValue();
                ArrayList<Script> scripts = new ArrayList<>(1);
                if (srcType == ScriptSource.INLINE) {
                    // The script has been specified in-line. fileName is actually the full text.
                    scripts.add(new Script(null, new StringReader(fileName)));
                } else {
                    try {
                        String baseName = PathTools.getBaseName(fileName);
                        scripts.addAll(splitScriptSuite(new FileReader(fileName), fileName, baseName));
                    } catch (FileNotFoundException e) {
                        errorCode = 1;
                        log.error("Script file not found. Path: " + fileName, e);
                        continue;
                    }
                }
                for (Script script : scripts) {
                    try {
                        log.debug("Running UI test script: " + script.toString());
                        new ScriptRunner(script, app, log).run();
                    } catch (Exception ex) {
                        log.error("Exception in Script file: " + script.toString(), ex);
                        errorCode = 1;
                    } finally {
                        // always release the reader, even when the run fails
                        script.input.close();
                    }
                }
            }
        } catch (Throwable ex) {
            log.error("Unhandled error during test. Test aborted.", ex);
            errorCode = 10;
        } finally {
            app.close();
        }
        if (errorCode != 0)
            System.exit(errorCode);
    }

    /**
     * Parses command-line flags and arguments into {@link Startup#options}.
     *
     * @param commandArgs the raw command-line arguments
     * @return true to continue running; false if help was shown or a syntax error occurred
     */
    private static boolean parseArgs(String[] commandArgs) {
        AppOptions opts = Startup.options;
        CommandLineParser p = new CommandLineParser(true); //ignore the case of flags
        try {
            p.parseFlagDefs("help,h trace,t log,l:R suite,s:R inline,i:R noexec");
            CommandLineParser.ParseResults r = p.Parse(commandArgs);
            if (r.hasFlag("help")) {
                showAppHelp();
                return false;
            }
            // If the -L flag was specified, set the log-level to the flag's parameter.
            // (The value will be null if it was not found, which will result in the default value, LOG_INFO being used.)
            if (r.hasFlag("log"))
                opts.logVerbosity = Verbosity.fromImportance(Convert.ToInt(r.getFlagValue("log"), Verbosity.Info.importance));
            if (r.hasFlag("trace")) opts.logVerbosity = Verbosity.Trace;
            opts.parseOnly = r.hasFlag("noexec");

            ArrayList<Pair<String,ScriptSource>> scripts = opts.scriptList;
            for (CommandLineParser.Argument a : r.arguments) {
                String source = a.fullText;
                ScriptSource srcType;
                if (a instanceof CommandLineParser.UnnamedArg)
                    srcType = ScriptSource.FILE;
                else if (a instanceof CommandLineParser.FlagArg) {
                    CommandLineParser.FlagArg f = (CommandLineParser.FlagArg)a;
                    source = f.valueText;
                    switch (f.flag.name) {
                        case "suite": srcType = ScriptSource.SUITE_FILE; break;
                        case "inline": srcType = ScriptSource.INLINE; break;
                        default:
                            // flag is unrelated to script input (e.g. log/trace); skip it
                            continue;
                    }
                } else
                    continue; // There are only two types of Arguments, so we'd never get here.
                scripts.add(new Pair<String, ScriptSource>(source, srcType));
            }
            return true;
        } catch (SyntaxErrorException e) {
            log.error("Command-line error: " + e.getMessage());
            log.info(p.getUsageMessage());
            return false;
        }
    }

    /** How a script argument on the command line should be interpreted. */
    private enum ScriptSource {
        FILE,
        SUITE_FILE,
        INLINE
    }

    /** Prints a one-line usage summary to stdout. */
    private static void showAppHelp() {
        System.out.println(Startup.class.getPackage().getName() + " [-L <log-level>] [-h|-help] ( <file-path> | @\"<script-text>\" ...)*");
    }

    /**
     * Splits a suite file into individual scripts. Scripts are delimited by
     * header lines of the form {@code <--title-->}; text before the first header
     * (if any) becomes an untitled script named from {@code defaultTitle}.
     *
     * @param input        reader over the suite file contents
     * @param file         path of the suite file (recorded in each Script)
     * @param defaultTitle title stem used for scripts without an explicit header
     * @return the list of scripts found, in file order
     * @throws IOException if reading the input fails
     */
    private static List<Script> splitScriptSuite(Reader input, String file, String defaultTitle) throws IOException {
        LineNumberReader r = new LineNumberReader(input);
        StringBuilder sb = new StringBuilder();
        ArrayList<Script> scripts = new ArrayList<>();
        long startOffset = r.getLineNumber();
        long endOffset;
        boolean scriptStarted = false;
        String s, title = null;
        boolean atEOF = false;
        while (!atEOF) {
            // this gets the position BEFORE the line is read, because if the line matches
            // the pattern, the NEW line number indicates the start of the NEXT script, not the current one.
            endOffset = r.getLineNumber();
            s = r.readLine();
            if (s == null) {
                // synthesize a final header so the pending script (if any) is flushed
                s = "<--FINAL-->";
                atEOF = true;
            }
            Matcher m = SUITE_HEADER.matcher(s);
            if (m.matches()) {
                // This could either be on the first line (in which case there is NO previous script
                // or there could be a script above this, possibly with no header-line.
                if (scriptStarted) {
                    if (title == null) title = defaultTitle + " part #" + (scripts.size() + 1);
                    String header = "//Script: " + title + "\n";
                    Script scr = new Script(file, new StringReader(header + sb.toString()), startOffset, endOffset);
                    scr.title = title;
                    scripts.add(scr);
                    sb.setLength(0); // discard previous text
                    startOffset = endOffset + 1;
                }
                //get title for upcoming script
                title = m.group(1);
            } else {
                if (scriptStarted)
                    sb.append('\n');
                sb.append(s);
                scriptStarted = true;
            }
        }
        return scripts;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.leveldb.test;
import org.apache.activemq.Service;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.leveldb.CountDownFuture;
import org.apache.activemq.leveldb.LevelDBStore;
import org.apache.activemq.leveldb.replicated.ElectingLevelDBStore;
import org.apache.activemq.store.MessageStore;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.concurrent.TimeUnit;
import static org.apache.activemq.leveldb.test.ReplicationTestSupport.*;
import static org.junit.Assert.*;
/**
*/
public class ElectingLevelDBStoreTest extends ZooKeeperTestSupport {
    protected static final Logger LOG = LoggerFactory.getLogger(ElectingLevelDBStoreTest.class);

    // all stores created by a test; cleaned up in stop()
    ArrayList<ElectingLevelDBStore> stores = new ArrayList<ElectingLevelDBStore>();
    ElectingLevelDBStore master = null;

    /**
     * Starts a 3-node replicated store, verifies exactly one node becomes master,
     * kills it mid-stream, and verifies a slave takes over without message loss.
     */
    @Ignore("https://issues.apache.org/jira/browse/AMQ-5512")
    @Test(timeout = 1000*60*10)
    public void testElection() throws Exception {
        deleteDirectory("leveldb-node1");
        deleteDirectory("leveldb-node2");
        deleteDirectory("leveldb-node3");

        ArrayList<CountDownFuture> pending_starts = new ArrayList<CountDownFuture>();

        for (String dir : new String[]{"leveldb-node1", "leveldb-node2", "leveldb-node3"}) {
            ElectingLevelDBStore store = createStoreNode();
            store.setDirectory(new File(data_dir(), dir));
            stores.add(store);
            pending_starts.add(asyncStart(store));
        }

        // At least one of the stores should have started.
        CountDownFuture f = waitFor(30 * 1000, pending_starts.toArray(new CountDownFuture[pending_starts.size()]));
        assertNotNull(f);
        pending_starts.remove(f);

        // The other stores should not start..
        LOG.info("Making sure the other stores don't start");
        Thread.sleep(5000);
        for (CountDownFuture start : pending_starts) {
            assertFalse(start.completed());
        }

        // Make sure only one of the stores is reporting to be the master.
        for (ElectingLevelDBStore store : stores) {
            if (store.isMaster()) {
                assertNull(master);
                master = store;
            }
        }
        assertNotNull(master);

        // We can work out who the slaves are...
        HashSet<ElectingLevelDBStore> slaves = new HashSet<ElectingLevelDBStore>(stores);
        slaves.remove(master);

        // Start sending messages to the master.
        ArrayList<String> expected_list = new ArrayList<String>();
        MessageStore ms = master.createQueueMessageStore(new ActiveMQQueue("TEST"));
        final int TOTAL = 500;
        for (int i = 0; i < TOTAL; i++) {
            if (i % ((int) (TOTAL * 0.10)) == 0) {
                LOG.info("" + (100 * i / TOTAL) + "% done");
            }

            if (i == 250) {
                LOG.info("Checking master state");
                assertEquals(expected_list, getMessages(ms));

                // mid way, lets kill the master..
                LOG.info("Killing Master.");
                master.stop();

                // At least one of the remaining stores should complete starting.
                LOG.info("Waiting for slave takeover...");
                f = waitFor(60 * 1000, pending_starts.toArray(new CountDownFuture[pending_starts.size()]));
                assertNotNull(f);
                pending_starts.remove(f);

                // Make sure one and only one of the slaves becomes the master..
                master = null;
                for (ElectingLevelDBStore store : slaves) {
                    if (store.isMaster()) {
                        assertNull(master);
                        master = store;
                    }
                }
                assertNotNull(master);
                slaves.remove(master);

                ms = master.createQueueMessageStore(new ActiveMQQueue("TEST"));
            }

            String msgid = "m:" + i;
            addMessage(ms, msgid);
            expected_list.add(msgid);
        }

        LOG.info("Checking master state");
        ArrayList<String> messagesInStore = getMessages(ms);
        // log the first mismatch (if any) to make replication failures easier to diagnose:
        int index = 0;
        for (String id : expected_list) {
            if (!id.equals(messagesInStore.get(index))) {
                LOG.info("Mismatch for expected:" + id + ", got:" + messagesInStore.get(index));
                break;
            }
            index++;
        }
        assertEquals(expected_list, messagesInStore);
    }

    /**
     * Verifies that when the ZooKeeper server goes away, no store continues
     * to claim mastership.
     */
    @Test(timeout = 1000 * 60 * 10)
    public void testZooKeeperServerFailure() throws Exception {
        final ArrayList<ElectingLevelDBStore> stores = new ArrayList<ElectingLevelDBStore>();
        ArrayList<CountDownFuture> pending_starts = new ArrayList<CountDownFuture>();

        for (String dir : new String[]{"leveldb-node1", "leveldb-node2", "leveldb-node3"}) {
            ElectingLevelDBStore store = createStoreNode();
            store.setDirectory(new File(data_dir(), dir));
            stores.add(store);
            pending_starts.add(asyncStart(store));
        }

        // At least one of the stores should have started.
        CountDownFuture f = waitFor(30 * 1000, pending_starts.toArray(new CountDownFuture[pending_starts.size()]));
        assertNotNull(f);
        pending_starts.remove(f);

        // The other stores should not start..
        LOG.info("Making sure the other stores don't start");
        Thread.sleep(5000);
        for (CountDownFuture start : pending_starts) {
            assertFalse(start.completed());
        }

        // Stop ZooKeeper..
        LOG.info("SHUTTING DOWN ZooKeeper!");
        connector.shutdown();

        // None of the stores should be masters...
        within(30, TimeUnit.SECONDS, new Task() {
            public void run() throws Exception {
                for (ElectingLevelDBStore store : stores) {
                    assertFalse(store.isMaster());
                }
            }
        });
    }

    /*
     * testAMQ5082 tests the behavior of an ElectingLevelDBStore
     * pool when ZooKeeper I/O timeouts occur. See issue AMQ-5082.
     */
    @Test(timeout = 1000 * 60 * 5)
    public void testAMQ5082() throws Throwable {
        final ArrayList<ElectingLevelDBStore> stores = new ArrayList<ElectingLevelDBStore>();

        LOG.info("Launching 3 stores");
        for (String dir : new String[]{"leveldb-node1", "leveldb-node2", "leveldb-node3"}) {
            ElectingLevelDBStore store = createStoreNode();
            store.setDirectory(new File(data_dir(), dir));
            stores.add(store);
            asyncStart(store);
        }

        LOG.info("Waiting 30s for stores to start");
        Thread.sleep(30 * 1000);

        LOG.info("Checking for a single master");
        ElectingLevelDBStore master = null;
        for (ElectingLevelDBStore store : stores) {
            if (store.isMaster()) {
                assertNull(master);
                master = store;
            }
        }
        assertNotNull(master);

        LOG.info("Imposing 1s I/O wait on Zookeeper connections, waiting 30s to confirm that quorum is not lost");
        this.connector.testHandle.setIOWaitMillis(1 * 1000, 30 * 1000);

        LOG.info("Confirming that the quorum has not been lost");
        for (ElectingLevelDBStore store : stores) {
            if (store.isMaster()) {
                // a short I/O delay must not change which node is master
                assertTrue(master == store);
            }
        }

        LOG.info("Imposing 11s I/O wait on Zookeeper connections, waiting 30s for quorum to be lost");
        this.connector.testHandle.setIOWaitMillis(11 * 1000, 30 * 1000);

        LOG.info("Confirming that the quorum has been lost");
        for (ElectingLevelDBStore store : stores) {
            assertFalse(store.isMaster());
        }
        master = null;

        LOG.info("Lifting I/O wait on Zookeeper connections, waiting 30s for quorum to be re-established");
        this.connector.testHandle.setIOWaitMillis(0, 30 * 1000);

        LOG.info("Checking for a single master");
        for (ElectingLevelDBStore store : stores) {
            if (store.isMaster()) {
                assertNull(master);
                master = store;
            }
        }
        assertNotNull(master);
    }

    /**
     * Stops and removes every store created by the test.
     * NOTE: {@code master} is also an element of {@code stores}, so it may be
     * stopped twice; store.stop() must tolerate that (it did in the original).
     */
    @After
    public void stop() throws Exception {
        if (master != null) {
            master.stop();
            FileUtils.deleteDirectory(master.directory());
        }
        for (ElectingLevelDBStore store : stores) {
            store.stop();
            FileUtils.deleteDirectory(store.directory());
        }
        stores.clear();
    }

    /** Starts the service on a background thread; the future completes with null on success or the thrown error. */
    private CountDownFuture asyncStart(final Service service) {
        final CountDownFuture<Throwable> f = new CountDownFuture<Throwable>();
        LevelDBStore.BLOCKING_EXECUTOR().execute(new Runnable() {
            public void run() {
                try {
                    service.start();
                    f.set(null);
                } catch (Throwable e) {
                    e.printStackTrace();
                    f.set(e);
                }
            }
        });
        return f;
    }

    /** Stops the service on a background thread; currently unused but kept for symmetry with asyncStart. */
    private CountDownFuture asyncStop(final Service service) {
        final CountDownFuture<Throwable> f = new CountDownFuture<Throwable>();
        LevelDBStore.BLOCKING_EXECUTOR().execute(new Runnable() {
            public void run() {
                try {
                    service.stop();
                    f.set(null);
                } catch (Throwable e) {
                    e.printStackTrace();
                    f.set(e);
                }
            }
        });
        return f;
    }

    /** Builds a store node configured against the test ZooKeeper connector. */
    private ElectingLevelDBStore createStoreNode() {
        ElectingLevelDBStore store = new ElectingLevelDBStore();
        store.setSecurityToken("foo");
        store.setLogSize(1024 * 200);
        store.setReplicas(2);
        store.setSync("quorum_disk");
        store.setZkSessionTimeout("15s");
        store.setZkAddress("localhost:" + connector.getLocalPort());
        store.setZkPath("/broker-stores");
        store.setBrokerName("foo");
        store.setHostname("localhost");
        store.setBind("tcp://0.0.0.0:0");
        return store;
    }
}
| |
/*
* Copyright (c) 2007-2020 Texas A&M University System
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
* - Neither the name of the copyright holders nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.android.fogbox;
import java.io.IOException;
import java.io.InputStream;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnMultiChoiceClickListener;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
public class FileShareActivity extends Activity {
private final int DIALOG_PROGRESS_ID =1;
private Button mShareButton;
private Button mCancelButton;
private ImageView mImgview;
private TextView mTextView;
//private final String serverIP = "192.168.72.167";
//private final String portNo="4001";
private final String serverIP = "10.10.26.1";
private final String portNo="4001";
private String Filename = null;
private String TAG ="FileShareActivity";
private static final int SELECT_PICTURE = 1;
// private String selectedImagePath = "/mnt/sdcard/DCIM/Camera/";
private String selectedImagePath = "/mnt/sdcard/";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.image_view);
AssetManager assetManager = getBaseContext().getAssets();
InputStream istr;
Bitmap bitmap = null;
mShareButton = (Button) findViewById(R.id.buttonShare);
mCancelButton = (Button) findViewById(R.id.buttonCancel);
mImgview =(ImageView) findViewById(R.id.imageView);
mTextView =(TextView) findViewById(R.id.filenameTextView);
Bundle bundle = getIntent().getExtras();
final Handler handler=new Handler();
int groupID = bundle.getInt("groupID");
Filename = bundle.getString("file_name");
//Toast.makeText(this,"Group ID is "+groupID, Toast.LENGTH_SHORT).show();
/*Intent intent = new Intent();
intent.setType("image/*");
intent.setAction(Intent.ACTION_GET_CONTENT);
startActivityForResult(Intent.createChooser(intent,
"Select Picture"), SELECT_PICTURE);*/
selectedImagePath += Filename;
Log.d(TAG,"The image path is "+selectedImagePath);
try {
istr = assetManager.open(Filename);
bitmap = BitmapFactory.decodeFile(selectedImagePath);
} catch (IOException e) {
return ;
}
mTextView.setText(Filename);
mImgview.setImageBitmap(bitmap);
mShareButton.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
AlertDialog.Builder builder = new AlertDialog.Builder(FileShareActivity.this);
builder.setMessage(R.string.ConfirmFileShare)
.setCancelable(false)
.setPositiveButton("Yes", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
final String [] items = new String[]{"Group 1", "Group 2", "Group 3","Group 4",
"Group 5","Group 6","Group 7","Group 8","Group 9","Group 10"};
AlertDialog.Builder builder = new AlertDialog.Builder(FileShareActivity.this);
builder.setTitle(R.string.SelectGroups);
builder.setMultiChoiceItems(items, null, new OnMultiChoiceClickListener() {
public void onClick(DialogInterface dialog,
int which, boolean ischecked) {
// TODO Auto-generated method stub
}
});
builder.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
showDialog(DIALOG_PROGRESS_ID);
Toast.makeText(FileShareActivity.this,"File is being sent", Toast.LENGTH_SHORT).show();
new Thread(new Runnable() {
public void run() {
FileList.getNativeLibInstance().sendFile(serverIP,portNo,Filename);
}
}).start();
handler.postDelayed(new Runnable()
{
public void run()
{
removeDialog(DIALOG_PROGRESS_ID);
}
}, 5000);
}
});
AlertDialog alert = builder.show();
}
})
.setNegativeButton("No", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
}
});
AlertDialog alert = builder.show();
}
});
mCancelButton.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
//Toast.makeText(FileShareActivity.this,R.string.Exiting, Toast.LENGTH_SHORT).show();
finish();
return;
}
});
}
protected Dialog onCreateDialog(int id) {
Dialog dialog;
switch(id) {
case DIALOG_PROGRESS_ID:
dialog = ProgressDialog.show(this, "",
"Sharing the file...", true);
break;
default:
dialog = null;
}
return dialog;
}
/**
 * Surfaces the latest count to the user as a short toast.
 *
 * @param count the value to display
 */
public void appupdate(int count) {
    final String message = "Count is " + count;
    Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
}
/* public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == RESULT_OK) {
if (requestCode == SELECT_PICTURE) {
Uri selectedImageUri = data.getData();
selectedImagePath = getPath(selectedImageUri);
Log.d(TAG,"The image path is "+selectedImagePath);
}
}
}
public String getPath(Uri uri) {
String[] projection = { MediaStore.Images.Media.DATA };
Cursor cursor = managedQuery(uri, projection, null, null, null);
int column_index = cursor
.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
cursor.moveToFirst();
return cursor.getString(column_index);
}*/
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.blockmanagement;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.server.datanode.FsDatasetTestUtils.MaterializedReplica;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
import org.apache.hadoop.hdfs.server.namenode.ha.TestDNFencing.RandomDeleterPolicy;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Test;
import com.google.common.base.Supplier;
import com.google.common.collect.Lists;
/**
* Test when RBW block is removed. Invalidation of the corrupted block happens
* and then the under replicated block gets replicated to the datanode.
*/
public class TestRBWBlockInvalidation {
    private static final Log LOG = LogFactory.getLog(TestRBWBlockInvalidation.class);

    /**
     * Returns the replica counts (live/corrupt/etc.) the namenode currently
     * tracks for the given block.
     */
    private static NumberReplicas countReplicas(final FSNamesystem namesystem,
            ExtendedBlock block) {
        final BlockManager blockManager = namesystem.getBlockManager();
        return blockManager.countNodes(blockManager.getStoredBlock(
                block.getLocalBlock()));
    }

    /**
     * Test when a block's replica is removed from RBW folder in one of the
     * datanode, namenode should ask to invalidate that corrupted block and
     * schedule replication for one more replica for that under replicated block.
     */
    @Test(timeout=600000)
    public void testBlockInvalidationWhenRBWReplicaMissedInDN()
            throws IOException, InterruptedException {
        // This test cannot pass on Windows due to file locking enforcement. It will
        // reject the attempt to delete the block file from the RBW folder.
        assumeTrue(!Path.WINDOWS);

        Configuration conf = new HdfsConfiguration();
        conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2);
        // Short block-report/scan/heartbeat intervals so corruption is
        // detected quickly and the polling loops below terminate fast.
        conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 300);
        conf.setLong(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1);
        conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
                .build();
        FSDataOutputStream out = null;
        try {
            final FSNamesystem namesystem = cluster.getNamesystem();
            FileSystem fs = cluster.getFileSystem();
            Path testPath = new Path("/tmp/TestRBWBlockInvalidation", "foo1");
            // Keep the file open (hsync, no close) so the block stays in RBW state.
            out = fs.create(testPath, (short) 2);
            out.writeBytes("HDFS-3157: " + testPath);
            out.hsync();
            // Third datanode gives the NN somewhere to re-replicate to later.
            cluster.startDataNodes(conf, 1, true, null, null, null);
            ExtendedBlock blk = DFSTestUtil.getFirstBlock(fs, testPath);
            // Delete partial block and its meta information from the RBW folder
            // of first datanode.
            MaterializedReplica replica = cluster.getMaterializedReplica(0, blk);
            replica.deleteData();
            replica.deleteMeta();

            out.close();

            // Poll until the deleted replica is noticed (live count drops below 2).
            int liveReplicas = 0;
            while (true) {
                if ((liveReplicas = countReplicas(namesystem, blk).liveReplicas()) < 2) {
                    // This confirms we have a corrupt replica
                    LOG.info("Live Replicas after corruption: " + liveReplicas);
                    break;
                }
                Thread.sleep(100);
            }
            assertEquals("There should be less than 2 replicas in the "
                    + "liveReplicasMap", 1, liveReplicas);

            // Poll until re-replication restores the replication factor.
            while (true) {
                if ((liveReplicas =
                        countReplicas(namesystem, blk).liveReplicas()) > 1) {
                    //Wait till the live replica count becomes equal to Replication Factor
                    LOG.info("Live Replicas after Rereplication: " + liveReplicas);
                    break;
                }
                Thread.sleep(100);
            }
            assertEquals("There should be two live replicas", 2, liveReplicas);

            // Finally wait for the corrupt replica to be invalidated and removed.
            while (true) {
                Thread.sleep(100);
                if (countReplicas(namesystem, blk).corruptReplicas() == 0) {
                    LOG.info("Corrupt Replicas becomes 0");
                    break;
                }
            }
        } finally {
            if (out != null) {
                out.close();
            }
            cluster.shutdown();
        }
    }

    /**
     * Regression test for HDFS-4799, a case where, upon restart, if there
     * were RWR replicas with out-of-date genstamps, the NN could accidentally
     * delete good replicas instead of the bad replicas.
     */
    @Test(timeout=120000)
    public void testRWRInvalidation() throws Exception {
        Configuration conf = new HdfsConfiguration();

        // Set the deletion policy to be randomized rather than the default.
        // The default is based on disk space, which isn't controllable
        // in the context of the test, whereas a random one is more accurate
        // to what is seen in real clusters (nodes have random amounts of free
        // space)
        conf.setClass(DFSConfigKeys.DFS_BLOCK_REPLICATOR_CLASSNAME_KEY,
                RandomDeleterPolicy.class, BlockPlacementPolicy.class);

        // Speed up the test a bit with faster heartbeats.
        conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);

        int numFiles = 10;
        // Test with a bunch of separate files, since otherwise the test may
        // fail just due to "good luck", even if a bug is present.
        List<Path> testPaths = Lists.newArrayList();
        for (int i = 0; i < numFiles; i++) {
            testPaths.add(new Path("/test" + i));
        }

        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
                .build();
        try {
            List<FSDataOutputStream> streams = Lists.newArrayList();
            try {
                // Open the test files and write some data to each
                for (Path path : testPaths) {
                    FSDataOutputStream out = cluster.getFileSystem().create(path, (short)2);
                    streams.add(out);

                    out.writeBytes("old gs data\n");
                    out.hflush();
                }

                for (Path path : testPaths) {
                    DFSTestUtil.waitReplication(cluster.getFileSystem(), path, (short)2);
                }

                // Shutdown one of the nodes in the pipeline
                DataNodeProperties oldGenstampNode = cluster.stopDataNode(0);

                // Write some more data and flush again. This data will only
                // be in the latter genstamp copy of the blocks.
                for (int i = 0; i < streams.size(); i++) {
                    Path path = testPaths.get(i);
                    FSDataOutputStream out = streams.get(i);

                    out.writeBytes("new gs data\n");
                    out.hflush();

                    // Set replication so that only one node is necessary for this block,
                    // and close it.
                    cluster.getFileSystem().setReplication(path, (short)1);
                    out.close();
                }

                for (Path path : testPaths) {
                    DFSTestUtil.waitReplication(cluster.getFileSystem(), path, (short)1);
                }

                // Upon restart, there will be two replicas, one with an old genstamp
                // and one current copy. This test wants to ensure that the old genstamp
                // copy is the one that is deleted.
                LOG.info("=========================== restarting cluster");
                DataNodeProperties otherNode = cluster.stopDataNode(0);
                cluster.restartNameNode();

                // Restart the datanode with the corrupt replica first.
                cluster.restartDataNode(oldGenstampNode);
                cluster.waitActive();

                // Then the other node
                cluster.restartDataNode(otherNode);
                cluster.waitActive();

                // Compute and send invalidations, waiting until they're fully processed.
                cluster.getNameNode().getNamesystem().getBlockManager()
                        .computeInvalidateWork(2);
                cluster.triggerHeartbeats();
                HATestUtil.waitForDNDeletions(cluster);
                cluster.triggerDeletionReports();

                // One block must remain per file; extras would indicate the wrong
                // (current-genstamp) replica was kept or deleted.
                waitForNumTotalBlocks(cluster, numFiles);
                // Make sure we can still read the blocks.
                for (Path path : testPaths) {
                    String ret = DFSTestUtil.readFile(cluster.getFileSystem(), path);
                    assertEquals("old gs data\n" + "new gs data\n", ret);
                }
            } finally {
                IOUtils.cleanup(LOG, streams.toArray(new Closeable[0]));
            }
        } finally {
            cluster.shutdown();
        }
    }

    /**
     * Polls (up to 60s, every 1s) until the namesystem reports exactly
     * {@code numTotalBlocks} blocks, triggering block reports each round.
     */
    private void waitForNumTotalBlocks(final MiniDFSCluster cluster,
            final int numTotalBlocks) throws Exception {
        GenericTestUtils.waitFor(new Supplier<Boolean>() {

            @Override
            public Boolean get() {
                try {
                    cluster.triggerBlockReports();

                    // Wait total blocks
                    if (cluster.getNamesystem().getBlocksTotal() == numTotalBlocks) {
                        return true;
                    }
                } catch (Exception ignored) {
                    // Ignore the exception
                }

                return false;
            }
        }, 1000, 60000);
    }
}
| |
package com.eaw1805.www.client.views.infopanels.units.mini;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.eaw1805.data.constants.ArmyConstants;
import com.eaw1805.data.dto.web.army.BattalionDTO;
import com.eaw1805.data.dto.web.army.BrigadeDTO;
import com.eaw1805.www.client.events.units.UnitChangedEvent;
import com.eaw1805.www.client.events.units.UnitChangedHandler;
import com.eaw1805.www.client.events.units.UnitEventManager;
import com.eaw1805.www.client.views.extras.ArmyImage;
import com.eaw1805.www.client.views.infopanels.units.ArmyTypeInfoPanel;
import com.eaw1805.www.client.widgets.ClickAbsolutePanel;
import com.eaw1805.www.client.widgets.RenamingLabel;
import com.eaw1805.www.client.widgets.ToolTipPanel;
import com.eaw1805.www.shared.stores.GameStore;
import com.eaw1805.www.shared.stores.util.ArmyUnitInfoDTO;
import com.eaw1805.www.shared.stores.util.calculators.MiscCalculators;
/**
 * Compact (170x90 px) info panel rendering a single brigade: its name,
 * movement points, a dominant-arm icon and one image per battalion.
 * Re-renders itself whenever a {@link UnitChangedEvent} for this brigade
 * fires while the panel is attached.
 */
public class BrigadeInfoMini
        extends VerticalPanel
        implements ArmyConstants {

    private final ClickAbsolutePanel brigadePanel;
    private BrigadeDTO brigade;
    private final ArmyImage[] battImages = new ArmyImage[7];
    private Image typeImage;
    private final Label[] hdCountlbls = new Label[7];
    private final Label[] explbls = new Label[7];
    private final Label lblBrigade, lblMp;
    private int nationId;
    private Image selectedImg;
    private boolean isSelected = false;
    private final UnitChangedHandler unitChangedHandler;
    private final boolean enabled;

    /**
     * @param brigade the brigade to render.
     * @param enabled false greys the panel out (loaded brigade or conflicting orders).
     */
    public BrigadeInfoMini(final BrigadeDTO brigade, final boolean enabled) {
        this.enabled = enabled;
        setSize("170px", "90px");
        this.brigade = brigade;
        nationId = brigade.getNationId();
        brigadePanel = new ClickAbsolutePanel();
        brigadePanel.setStylePrimaryName("brigadeMiniInfoPanel");
        brigadePanel.setStyleName("clickArmyPanel", true);
        brigadePanel.setSize("170px", "87px");
        add(brigadePanel);
        typeImage = new Image("");
        typeImage.setSize("", "19px");
        // Highlight overlay, shown when selected or hovered.
        selectedImg = new Image("http://static.eaw1805.com/images/infopanels/selected.png");
        selectedImg.setSize("170px", "87px");
        deselect();
        lblBrigade = new RenamingLabel(brigade.getName(), BRIGADE, brigade.getBrigadeId());
        lblBrigade.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_LEFT);
        lblBrigade.setStyleName("clearFontSmall");
        lblBrigade.setSize("167px", "18px");
        lblMp = new Label("");
        lblMp.setStyleName("clearFontMini");
        lblMp.setTitle("Movement points");
        setUpImages(enabled);
        // Repaint when this particular brigade changes; registered in onAttach().
        unitChangedHandler = new UnitChangedHandler() {
            public void onUnitChanged(UnitChangedEvent event) {
                if (event.getInfoType() == BRIGADE && event.getInfoId() == brigade.getBrigadeId()) {
                    setUpImages(enabled);
                }
            }
        };
    }

    /**
     * Clears and fully rebuilds the panel contents from the current brigade DTO.
     * Own-nation brigades show real battalion images, headcounts and MPs;
     * foreign brigades show placeholders and per-arm totals only.
     */
    public void setUpImages(final boolean enabled) {
        brigadePanel.clear();
        // NOTE(review): x=318 lies outside the 170px-wide panel — looks like a
        // leftover from a wider layout; confirm before relying on typeImage position.
        brigadePanel.add(typeImage, 318, 38);
        this.brigadePanel.add(this.selectedImg, 0, 0);
        brigadePanel.add(lblBrigade, 3, 3);
        brigadePanel.add(lblMp, 131, 3);
        final ArmyUnitInfoDTO brigInfo = MiscCalculators.getBrigadeInfo(brigade);
        for (final BattalionDTO battalion : brigade.getBattalions()) {
            final int index = battalion.getOrder() - 1;
            battImages[index] = new ArmyImage();
            battImages[index].setArmyTypeDTO(battalion.getEmpireArmyType());
            battImages[index].setEmpireBattalionDTO(battalion);
            // NOTE(review): hdCountlbls are populated but never added to the
            // panel — presumably dead code; confirm before removing.
            hdCountlbls[index] = new Label();
            explbls[index] = new Label();
            explbls[index].setSize("10px", "20px");
            if (brigade.getNationId() == GameStore.getInstance().getNationId()) {
                battImages[index].setUrl("http://static.eaw1805.com/images/armies/" + nationId + "/" + battalion.getEmpireArmyType().getIntId() + ".jpg");
                addOverViewPanelToImage(battImages[index]);
                battImages[index].setTitle(battalion.getEmpireArmyType().getName());
                hdCountlbls[index].setText(String.valueOf(battalion.getHeadcount()));
                explbls[index].setText(battalion.getExperience() + " " + battalion.getHeadcount());
            } else {
                // Foreign brigade: hide real composition behind placeholders.
                battImages[index].setUrl("http://static.eaw1805.com/img/commanders/Generic_Naval_Commander.png");
                addOverViewPanelToImage(battImages[index]);
                battImages[index].setTitle("Unknown type");
                hdCountlbls[index].setText("N/A");
                explbls[index].setText("? ???");
            }
            battImages[index].setSize("26px", "26px");
            hdCountlbls[index].setStylePrimaryName("clearFontMini");
            explbls[index].setStylePrimaryName("clearFontMini");
            brigadePanel.add(battImages[index], 2 + (26 * index), 23);
            brigadePanel.add(explbls[index], 2 + (26 * index), 52);
        }
        if (brigade.getNationId() == GameStore.getInstance().getNationId()) {
            // Dominant arm icon: 1=infantry, 2=cavalry, 3=artillery.
            switch (brigInfo.getDominant()) {
                case 1:
                    getTypeImage().setUrl("http://static.eaw1805.com/images/armies/dominant/infantry.png");
                    break;
                case 2:
                    getTypeImage().setUrl("http://static.eaw1805.com/images/armies/dominant/cavalry.png");
                    break;
                case 3:
                    getTypeImage().setUrl("http://static.eaw1805.com/images/armies/dominant/artillery.png");
                    break;
                default:
                    break;
            }
        } else {
            getTypeImage().removeFromParent();
            final Image infImage = new Image("http://static.eaw1805.com/images/buttons/icons/formations/infantry.png");
            final Image cavImage = new Image("http://static.eaw1805.com/images/buttons/icons/formations/cavalry.png");
            final Image artImage = new Image("http://static.eaw1805.com/images/buttons/icons/formations/artillery.png");
            brigadePanel.add(infImage, 287, 26);
            brigadePanel.add(cavImage, 287, 44);
            brigadePanel.add(artImage, 287, 62);
            infImage.setSize("25px", "15px");
            cavImage.setSize("25px", "15px");
            artImage.setSize("25px", "15px");
            infImage.setTitle("Infantry");
            cavImage.setTitle("Cavalry");
            artImage.setTitle("Artillery");
            final Label lblInfantryNo = new Label(String.valueOf(brigInfo.getInfantry()));
            lblInfantryNo.setStyleName("clearFontSmall");
            brigadePanel.add(lblInfantryNo, 315, 26);
            final Label lblCavalryNo = new Label(String.valueOf(brigInfo.getCavalry()));
            lblCavalryNo.setStyleName("clearFontSmall");
            brigadePanel.add(lblCavalryNo, 315, 44);
            final Label lblArtilleryNo = new Label(String.valueOf(brigInfo.getArtillery()));
            lblArtilleryNo.setStyleName("clearFontSmall");
            brigadePanel.add(lblArtilleryNo, 315, 62);
        }
        if (brigade.getName().isEmpty()) {
            lblBrigade.setText("Brigade Name");
        } else {
            lblBrigade.setText(brigade.getName());
        }
        if (brigade.getNationId() == GameStore.getInstance().getNationId()) {
            lblMp.setText(brigInfo.getMps() + " MPs");
        } else {
            lblMp.removeFromParent();
        }
        if (!enabled) {
            final Image disabledImage = new Image("http://static.eaw1805.com/images/infopanels/disabledMini.png");
            if (brigade.getLoaded()) {
                disabledImage.setTitle("This panel is disabled because brigade is loaded");
            } else {
                disabledImage.setTitle("Further orders are disabled due to orders already issued that may be in conflict");
            }
            brigadePanel.add(disabledImage);
        }
    }

    /** Attaches a lazy tooltip showing the battalion's army-type details. */
    private void addOverViewPanelToImage(final ArmyImage armyTypeImg) {
        armyTypeImg.setStyleName("pointer", true);
        new ToolTipPanel(armyTypeImg) {
            @Override
            public void generateTip() {
                setTooltip(new ArmyTypeInfoPanel(armyTypeImg.getEmpireBattalionDTO().getEmpireArmyType(),
                        armyTypeImg.getEmpireBattalionDTO()));
            }
        };
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Unregister so a detached panel cannot leak via the event manager.
        UnitEventManager.removeUnitChangedHandler(unitChangedHandler);
    }

    @Override
    public void onAttach() {
        super.onAttach();
        setUpImages(enabled);
        UnitEventManager.addUnitChangedHandler(unitChangedHandler);
    }

    /** Marks the panel as selected and shows the highlight overlay. */
    public void select() {
        isSelected = true;
        this.selectedImg.setVisible(true);
    }

    /** Clears the selection and hides the highlight overlay. */
    public void deselect() {
        isSelected = false;
        this.selectedImg.setVisible(false);
    }

    /** Hover-in: show the highlight unless already selected. */
    public void MouseOver() {
        if (!isSelected) {
            selectedImg.setVisible(true);
        }
    }

    /** Hover-out: hide the highlight unless selected. */
    public void MouseOut() {
        if (!isSelected) {
            selectedImg.setVisible(false);
        }
    }

    public BrigadeDTO getBrigade() {
        return brigade;
    }

    public ClickAbsolutePanel getBrigadePanel() {
        return brigadePanel;
    }

    public Image getTypeImage() {
        return typeImage;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.util;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.apache.jorphan.util.JMeterError;
import org.apache.jorphan.util.JMeterException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* BeanShell setup function - encapsulates all the access to the BeanShell
* Interpreter in a single class.
*
* The class uses dynamic class loading to access BeanShell, which means that
* all the source files can be built without needing access to the bsh jar.
*
* If the beanshell jar is not present at run-time, an error will be logged
*
*/
public class BeanShellInterpreter {
    private static final Logger log = LoggerFactory.getLogger(BeanShellInterpreter.class);

    // Reflective handles onto bsh.Interpreter; null when beanshell is absent.
    private static final Method bshGet;

    private static final Method bshSet;

    private static final Method bshEval;

    private static final Method bshSource;

    private static final Class<?> bshClass;

    private static final String BSH_INTERPRETER = "bsh.Interpreter"; //$NON-NLS-1$

    static {
        // Temporary copies, so can set the final ones
        Method get = null, eval = null, set = null, source = null;
        Class<?> clazz = null;
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        try {
            clazz = loader.loadClass(BSH_INTERPRETER);
            Class<String> string = String.class;
            Class<Object> object = Object.class;

            get = clazz.getMethod("get", //$NON-NLS-1$
                    new Class[] { string });
            eval = clazz.getMethod("eval", //$NON-NLS-1$
                    new Class[] { string });
            set = clazz.getMethod("set", //$NON-NLS-1$
                    new Class[] { string, object });
            source = clazz.getMethod("source", //$NON-NLS-1$
                    new Class[] { string });
        } catch (ClassNotFoundException|SecurityException | NoSuchMethodException e) {
            log.error("Beanshell Interpreter not found", e);
        } finally {
            bshEval = eval;
            bshGet = get;
            bshSet = set;
            bshSource = source;
            bshClass = clazz;
        }
    }

    // This class is not serialised

    private Object bshInstance = null; // The interpreter instance for this class

    private final String initFile; // Script file to initialize the Interpreter with

    private final Logger logger; // Logger to use during initialization and script run

    public BeanShellInterpreter() throws ClassNotFoundException {
        this(null, null);
    }

    /**
     *
     * @param init initialisation file
     * @param _log logger to pass to interpreter
     * @throws ClassNotFoundException when beanshell can not be instantiated
     */
    public BeanShellInterpreter(String init, Logger _log) throws ClassNotFoundException {
        initFile = init;
        logger = _log;
        init();
    }

    // Called from ctor, so must be private (or final, but it does not seem useful elsewhere)
    private void init() throws ClassNotFoundException {
        if (bshClass == null) {
            throw new ClassNotFoundException(BSH_INTERPRETER);
        }
        try {
            // Class.newInstance() is deprecated (it wraps no exceptions);
            // use the canonical replacement via the no-arg constructor.
            bshInstance = bshClass.getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            log.error("Can't instantiate BeanShell", e);
            throw new ClassNotFoundException("Can't instantiate BeanShell", e);
        }
        if (logger != null) {// Do this before starting the script
            try {
                set("log", logger);//$NON-NLS-1$
            } catch (JMeterException e) {
                log.warn("Can't set logger variable", e);
            }
        }
        if (initFile != null && initFile.length() > 0) {
            String fileToUse=initFile;
            // Check file so we can distinguish file error from script error
            File in = new File(fileToUse);
            if (!in.exists()){// Cannot find the file locally, so try the bin directory
                fileToUse=JMeterUtils.getJMeterHome()
                        +File.separator+"bin" // $NON-NLS-1$
                        +File.separator+initFile;
                in = new File(fileToUse);
                if (!in.exists()) {
                    log.warn("Cannot find init file: "+initFile);
                }
            }
            if (!in.canRead()) {
                log.warn("Cannot read init file: "+fileToUse);
            }
            try {
                source(fileToUse);
            } catch (JMeterException e) {
                log.warn("Cannot source init file: "+fileToUse,e);
            }
        }
    }

    /**
     * Resets the BeanShell interpreter.
     *
     * @throws ClassNotFoundException if interpreter cannot be instantiated
     */
    public void reset() throws ClassNotFoundException {
       init();
    }

    /**
     * Invokes one of the cached bsh.Interpreter methods on this instance,
     * translating reflection failures into JMeter exceptions.
     *
     * @param m method handle (bshGet/bshSet/bshEval/bshSource)
     * @param o arguments to pass
     * @param shouldLog whether run-time (InvocationTarget) failures are logged
     * @return the method's result, possibly null
     * @throws JMeterException when the underlying script call fails
     */
    private Object bshInvoke(Method m, Object[] o, boolean shouldLog) throws JMeterException {
        Object r = null;
        final String errorString = "Error invoking bsh method: ";
        try {
            r = m.invoke(bshInstance, o);
        } catch (IllegalArgumentException | IllegalAccessException e) { // Programming error
            final String message = errorString + m.getName();
            // Log the throwable too so the programming error is diagnosable.
            log.error(message, e);
            throw new JMeterError(message, e);
        } catch (InvocationTargetException e) { // Can occur at run-time
            // could be caused by the bsh Exceptions:
            // EvalError, ParseException or TargetError
            String message = errorString + m.getName();
            Throwable cause = e.getCause();
            if (cause != null) {
                message += "\t" + cause.getLocalizedMessage();
            }

            if (shouldLog) {
                log.error(message);
            }

            throw new JMeterException(message, e);
        }
        return r;
    }

    public Object eval(String s) throws JMeterException {
        return bshInvoke(bshEval, new Object[] { s }, true);
    }

    public Object evalNoLog(String s) throws JMeterException {
        return bshInvoke(bshEval, new Object[] { s }, false);
    }

    public Object set(String s, Object o) throws JMeterException {
        return bshInvoke(bshSet, new Object[] { s, o }, true);
    }

    public Object set(String s, boolean b) throws JMeterException {
        return bshInvoke(bshSet, new Object[] { s, Boolean.valueOf(b) }, true);
    }

    public Object source(String s) throws JMeterException {
        return bshInvoke(bshSource, new Object[] { s }, true);
    }

    public Object get(String s) throws JMeterException {
        return bshInvoke(bshGet, new Object[] { s }, true);
    }

    // For use by Unit Tests
    public static boolean isInterpreterPresent(){
        return bshClass != null;
    }
}
| |
/*
Derby - Class org.apache.derby.impl.sql.execute.NoRowsResultSetImpl
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.sql.execute;
import java.sql.Timestamp;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.reference.SQLState;
import org.apache.derby.iapi.services.i18n.MessageService;
import org.apache.derby.iapi.services.loader.GeneratedMethod;
import org.apache.derby.iapi.services.monitor.Monitor;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.iapi.services.stream.HeaderPrintWriter;
import org.apache.derby.iapi.sql.Activation;
import org.apache.derby.iapi.sql.ResultColumnDescriptor;
import org.apache.derby.iapi.sql.ResultDescription;
import org.apache.derby.iapi.sql.ResultSet;
import org.apache.derby.iapi.sql.Row;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.sql.conn.StatementContext;
import org.apache.derby.iapi.sql.dictionary.ConglomerateDescriptor;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.sql.dictionary.TableDescriptor;
import org.apache.derby.iapi.sql.execute.ExecRow;
import org.apache.derby.iapi.sql.execute.NoPutResultSet;
import org.apache.derby.iapi.sql.execute.ResultSetStatisticsFactory;
import org.apache.derby.iapi.sql.execute.RunTimeStatistics;
import org.apache.derby.iapi.sql.execute.xplain.XPLAINVisitor;
import org.apache.derby.iapi.types.DataTypeDescriptor;
import org.apache.derby.iapi.types.DataValueDescriptor;
/**
* Abstract ResultSet for implementations that do not return rows.
* Examples are DDL statements, CALL statements and DML.
* <P>
* An implementation must provide a ResultSet.open() method
* that performs the required action.
* <P>
* ResultSet.returnsRows() returns false and any method
* that fetches a row will throw an exception.
*
*/
abstract class NoRowsResultSetImpl implements ResultSet
{
final Activation activation;
private NoPutResultSet[] subqueryTrackingArray;
private final boolean statisticsTimingOn;
/** True if the result set has been opened, and not yet closed. */
private boolean isOpen;
/* Run time statistics variables */
final LanguageConnectionContext lcc;
protected long beginTime;
protected long endTime;
protected long beginExecutionTime;
protected long endExecutionTime;
private int firstColumn = -1; // First column being stuffed. For UPDATES, this lies in the second half of the row.
private int[] generatedColumnPositions; // 1-based positions of generated columns in the target row
// One cell for each slot in generatedColumnPositions. These are temporary
// values which hold the result of running the generation clause before we
// stuff the result into the target row.
private DataValueDescriptor[] normalizedGeneratedValues;
/**
 * Constructs the result set, caching the language connection context and
 * statistics-timing flag from the supplied activation.
 *
 * @param activation the activation for this execution; must not be null
 *                   (asserted in SanityManager.DEBUG builds)
 */
NoRowsResultSetImpl(Activation activation)
{
    this.activation = activation;

    if (SanityManager.DEBUG) {
        if (activation == null)
            SanityManager.THROWASSERT("activation is null in result set " + getClass());
    }

    lcc = activation.getLanguageConnectionContext();
    statisticsTimingOn = lcc.getStatisticsTiming();

    /* NOTE - We can't get the current time until after setting up the
     * activation, as we end up using the activation to get the
     * LanguageConnectionContext.
     */
    beginTime = getCurrentTimeMillis();
    beginExecutionTime = beginTime;
}
/**
 * Set up the result set for use: marks it open, registers it as the top
 * result set on the current statement context, and picks up any
 * materialized subqueries tracked by that context. Should always be
 * called from <code>open()</code>.
 *
 * @exception StandardException thrown on error
 */
void setup() throws StandardException {
    isOpen = true;

    StatementContext sc = lcc.getStatementContext();
    sc.setTopResultSet(this, subqueryTrackingArray);

    // Pick up any materialized subqueries
    if (subqueryTrackingArray == null) {
        subqueryTrackingArray = sc.getSubqueryTrackingArray();
    }
}
/**
 * Returns FALSE — implementations of this class never return rows
 * (DDL, CALL, DML statements).
 */
public final boolean returnsRows() { return false; }
/**
 * Returns zero by default; subclasses that modify rows (DML) are
 * expected to override this.
 */
public int modifiedRowCount() { return 0; }
/**
 * Returns null: a result set that produces no rows has no column
 * metadata to describe.
 */
public ResultDescription getResultDescription()
{
    // Redundant (ResultDescription) cast on null removed; plain null suffices.
    return null;
}
/** Returns the activation this result set was constructed with. */
public final Activation getActivation()
{
    return activation;
}
/**
 * Always throws: absolute positioning is meaningless on a result set
 * that returns no rows.
 *
 * @param row The position.
 * @return never returns normally
 *
 * @exception StandardException always, LANG_DOES_NOT_RETURN_ROWS ("absolute")
 * @see Row
 */
public final ExecRow getAbsoluteRow(int row) throws StandardException
{
    /*
        The JDBC use of this class will never call here.
        Only the DB API used directly can get this exception.
    */
    throw StandardException.newException(SQLState.LANG_DOES_NOT_RETURN_ROWS, "absolute");
}
/**
 * Always throws: relative positioning is meaningless on a result set
 * that returns no rows.
 *
 * @param row The position.
 * @return never returns normally
 *
 * @exception StandardException always, LANG_DOES_NOT_RETURN_ROWS ("relative")
 * @see Row
 */
public final ExecRow getRelativeRow(int row) throws StandardException
{
    /*
        The JDBC use of this class will never call here.
        Only the DB API used directly can get this exception.
    */
    throw StandardException.newException(SQLState.LANG_DOES_NOT_RETURN_ROWS, "relative");
}
/**
 * Always throws: there is no cursor position on a result set that
 * returns no rows.
 *
 * @return never returns normally
 *
 * @exception StandardException always, LANG_DOES_NOT_RETURN_ROWS ("beforeFirst")
 * @see Row
 */
public final ExecRow setBeforeFirstRow()
    throws StandardException
{
    /*
        The JDBC use of this class will never call here.
        Only the DB API used directly can get this exception.
    */
    throw StandardException.newException(SQLState.LANG_DOES_NOT_RETURN_ROWS, "beforeFirst");
}
/**
 * Always throws: there is no first row on a result set that returns
 * no rows.
 *
 * @return never returns normally
 *
 * @exception StandardException always, LANG_DOES_NOT_RETURN_ROWS ("first")
 * @see Row
 */
public final ExecRow getFirstRow()
    throws StandardException
{
    /*
        The JDBC use of this class will never call here.
        Only the DB API used directly can get this exception.
    */
    throw StandardException.newException(SQLState.LANG_DOES_NOT_RETURN_ROWS, "first");
}
/**
 * No rows to return, so throw an exception.
 *
 * @exception StandardException Always throws a
 *                              StandardException to indicate
 *                              that this method is not intended to
 *                              be used.
 */
public final ExecRow getNextRow() throws StandardException
{
    /*
        The JDBC use of this class will never call here.
        Only the DB API used directly can get this exception.
    */
    throw StandardException.newException(SQLState.LANG_DOES_NOT_RETURN_ROWS, "next");
}
/**
 * Always throws: there is no previous row on a result set that returns
 * no rows.
 *
 * @return never returns normally
 *
 * @exception StandardException always, LANG_DOES_NOT_RETURN_ROWS ("previous")
 * @see Row
 */
public final ExecRow getPreviousRow()
    throws StandardException
{
    /*
        The JDBC use of this class will never call here.
        Only the DB API used directly can get this exception.
    */
    throw StandardException.newException(SQLState.LANG_DOES_NOT_RETURN_ROWS, "previous");
}
/**
 * Always throws: there is no last row on a result set that returns
 * no rows.
 *
 * @return never returns normally
 *
 * @exception StandardException always, LANG_DOES_NOT_RETURN_ROWS ("last")
 * @see Row
 */
public final ExecRow getLastRow()
    throws StandardException
{
    /*
        The JDBC use of this class will never call here.
        Only the DB API used directly can get this exception.
    */
    throw StandardException.newException(SQLState.LANG_DOES_NOT_RETURN_ROWS, "last");
}
/**
 * Positioning call to move after the last row; this result set
 * produces no rows, so the call always fails.
 *
 * @return never returns normally
 * @exception StandardException always thrown (LANG_DOES_NOT_RETURN_ROWS)
 * @see Row
 */
public final ExecRow setAfterLastRow()
    throws StandardException
{
    // Unreachable via JDBC; only the direct DB API can land here.
    throw StandardException.newException(
        SQLState.LANG_DOES_NOT_RETURN_ROWS, "afterLast");
}
/**
 * Clear the current row (invoked after a commit on holdable result
 * sets).  Result sets that supply no rows have nothing to clear,
 * so this is deliberately a no-op.
 */
public final void clearCurrentRow()
{
    // Intentionally empty: there is never a current row to discard.
}
/**
 * Probe whether the cursor is at the position identified by
 * {@code isType} (e.g. before-first, after-last).  A result set
 * without rows is never positioned anywhere, so every probe
 * answers false.
 *
 * @param isType the cursor position being tested; ignored here
 * @return false always, since this result set holds no rows
 */
public final boolean checkRowPosition(int isType)
{
    return false;
}
/**
 * Current row number using JDBC's 1-based numbering.
 *
 * @return 0, meaning "not positioned on any row"
 */
public final int getRowNumber()
{
    return 0;
}
/**
 * Close this (top-level) result set: publish run-time statistics
 * if they are being collected, close every open subquery tracked
 * in subqueryTrackingArray, and mark the result set closed.
 *
 * @exception StandardException thrown on error
 */
public void close() throws StandardException
{
    if (!isOpen)
        return;

    /*
    ** When run-time statistics tracing is on, this is the moment to
    ** dump them.  Two exceptions:
    **  - commit: with autocommit on, the autocommit's statistics are
    **    the only ones the user would ever see, so a commit must not
    **    overwrite the existing statistics object.
    **  - DERBY-2353: a closed activation has a null statistics
    **    object and nothing to print.  This can happen when a
    **    top-level result set closes the activation and close() is
    **    subsequently called on the child result sets; the parent
    **    already printed the children, so skipping is safe.
    */
    if (lcc.getRunTimeStatisticsMode() &&
        !doesCommit() && !activation.isClosed() &&
        !lcc.getStatementContext().getStatementWasInvalidated())
    {
        endExecutionTime = getCurrentTimeMillis();

        ResultSetStatisticsFactory statsFactory =
            lcc.getLanguageConnectionFactory().
                getExecutionFactory().getResultSetStatisticsFactory();

        // Wrap all gathered statistics and save the object in the lcc.
        RunTimeStatistics stats =
            statsFactory.getRunTimeStatistics(activation, this, subqueryTrackingArray);
        lcc.setRunTimeStatisticsObject(stats);

        // Hand the gathered statistics to the XPLAIN machinery.
        XPLAINVisitor xplain = lcc.getLanguageConnectionFactory().
            getExecutionFactory().getXPLAINFactory().getXPLAINVisitor();
        xplain.doXPLAIN(stats, activation);
    }

    // This is the top ResultSet: close any subqueries still open.
    if (subqueryTrackingArray != null)
    {
        for (NoPutResultSet subquery : subqueryTrackingArray)
        {
            if (subquery != null && !subquery.isClosed())
                subquery.close();
        }
    }

    isOpen = false;

    if (activation.isSingleExecution())
        activation.close();
}
/**
 * Report whether this result set has been closed.
 *
 * @return <code>true</code> when closed, <code>false</code> while open
 */
public boolean isClosed() {
    return !isOpen;
}
/**
 * Release any resources held beyond close().  Nothing is held at
 * this level, so the method is intentionally empty.
 *
 * @exception StandardException never thrown by this implementation
 */
public void finish() throws StandardException
{
    // no-op
}
/**
 * Elapsed execution time.
 *
 * @return the execution time in milliseconds (endTime - beginTime)
 */
public long getExecuteTime()
{
    final long elapsedMillis = endTime - beginTime;
    return elapsedMillis;
}
/**
 * Timestamp marking the start of execution.
 *
 * @return the begin-execution Timestamp, or null when no start
 *         time was recorded (beginExecutionTime == 0)
 */
public Timestamp getBeginExecutionTimestamp()
{
    return (beginExecutionTime == 0)
        ? null
        : new Timestamp(beginExecutionTime);
}
/**
 * Timestamp marking the end of execution.
 *
 * @return the end-execution Timestamp, or null when no end time
 *         was recorded (endExecutionTime == 0)
 */
public Timestamp getEndExecutionTimestamp()
{
    return (endExecutionTime == 0)
        ? null
        : new Timestamp(endExecutionTime);
}
/**
 * RESOLVE - goes away once every subclass overloads this method.
 * Render the query plan as text.
 *
 * @param depth indentation level (unused by this default form)
 * @return a "not supported" message naming the concrete class
 */
public String getQueryPlanText(int depth)
{
    final String className = getClass().getName();
    return MessageService.getTextMessage(
        SQLState.LANG_GQPT_NOT_SUPPORTED, className);
}
/**
 * Total amount of time spent in this ResultSet.
 *
 * @param type CURRENT_RESULTSET_ONLY for this node alone, or
 *             ENTIRE_RESULTSET_TREE for this node and below
 * @return 0 milliseconds; RESOLVE - subclasses should overload this
 */
public long getTimeSpent(int type)
{
    return 0;
}
/**
 * Lazily create (on first call) and return the array used to track
 * materialized subqueries.
 *
 * @param numSubqueries size to allocate when the array is absent
 * @return the subquery tracking array
 * @see ResultSet#getSubqueryTrackingArray
 */
public final NoPutResultSet[] getSubqueryTrackingArray(int numSubqueries)
{
    if (subqueryTrackingArray == null)
        subqueryTrackingArray = new NoPutResultSet[numSubqueries];

    return subqueryTrackingArray;
}
/**
 * Auto-generated-keys result set.  Only an INSERT statement ever
 * supplies one, so this default implementation returns null.
 *
 * @return null
 * @see ResultSet#getAutoGeneratedKeysResultset
 */
public ResultSet getAutoGeneratedKeysResultset()
{
    return null;
}
/**
 * Cursor name for this result set.
 *
 * @return null - this result set carries no cursor name
 * @see ResultSet#getCursorName
 */
public String getCursorName() {
    return null;
}
// class implementation
/**
 * Current wall-clock time, but only when statistics timing is on;
 * otherwise 0.  (Pays the price of the system call only when the
 * value will actually be used.)
 *
 * @return System.currentTimeMillis(), or 0 when timing is off
 */
protected final long getCurrentTimeMillis()
{
    return statisticsTimingOn ? System.currentTimeMillis() : 0;
}
/**
 * Evaluate one check constraint against the current row and raise
 * LANG_CHECK_CONSTRAINT_VIOLATED if it fails.
 *
 * @param checkGM     generated code that evaluates the constraint;
 *                    may be null (nothing to check)
 * @param checkName   name of the constraint, for the error message
 * @param heapConglom heap conglomerate number, used to resolve the
 *                    table name for the error message
 * @param activation  the Activation in which checkGM lives
 *
 * @exception StandardException thrown when the constraint is violated
 */
public static void evaluateACheckConstraint
(
    GeneratedMethod checkGM,
    String checkName,
    long heapConglom,
    Activation activation
)
    throws StandardException
{
    if (checkGM == null)
        return;

    DataValueDescriptor checkResult =
        (DataValueDescriptor) checkGM.invoke(activation);

    // Only an explicit FALSE is a violation; NULL and TRUE pass.
    boolean violated = (checkResult != null)
        && !checkResult.isNull()
        && !checkResult.getBoolean();

    if (violated)
    {
        // All we hold is the conglomerate number, so walk the data
        // dictionary to recover the table name for the error message.
        DataDictionary dd =
            activation.getLanguageConnectionContext().getDataDictionary();
        ConglomerateDescriptor cd = dd.getConglomerateDescriptor( heapConglom );
        TableDescriptor td = dd.getTableDescriptor(cd.getTableID());

        throw StandardException.newException(
            SQLState.LANG_CHECK_CONSTRAINT_VIOLATED,
            td.getQualifiedName(), checkName);
    }
}
/**
 * Compute the generation clauses on the current row in order to fill in
 * computed columns.
 *
 * @param generationClauses the generated method which evaluates generation
 *        clauses; may be null, in which case there is nothing to compute
 * @param activation the thread-specific instance of the generated class
 * @param source the tuple stream driving this INSERT/UPDATE
 * @param newRow the base row being stuffed
 * @param isUpdate true if this is an UPDATE. false otherwise.
 *
 * @exception StandardException thrown on failure
 */
public void evaluateGenerationClauses
(
    GeneratedMethod generationClauses,
    Activation activation,
    NoPutResultSet source,
    ExecRow newRow,
    boolean isUpdate
)
    throws StandardException
{
    if (generationClauses != null)
    {
        // Remember the row currently visible through the activation so
        // it can be restored in the finally block below.
        ExecRow oldRow = (ExecRow) activation.getCurrentRow( source.resultSetNumber() );

        //
        // We may need to poke the current row into the Activation so that
        // it is visible to the method which evaluates the generation
        // clause. This is because the generation clause may refer to other
        // columns in that row.
        //
        try {
            source.setCurrentRow( newRow );
            generationClauses.invoke(activation);

            //
            // Now apply NOT NULL checks and other coercions. For non-generated columns, these
            // are performed in the driving ResultSet.
            //
            // Lazily compute the first generated column position and the
            // support arrays the first time a row passes through here.
            if ( firstColumn < 0 ) { firstColumn = NormalizeResultSet.computeStartColumn( isUpdate, activation.getResultDescription() ); }
            if ( generatedColumnPositions == null ) { setupGeneratedColumns( activation, (ValueRow) newRow ); }

            ResultDescription resultDescription = activation.getResultDescription();
            int count = generatedColumnPositions.length;

            // Normalize each generated column in place in newRow.
            for ( int i = 0; i < count; i++ )
            {
                int position = generatedColumnPositions[ i ];

                DataValueDescriptor normalizedColumn = NormalizeResultSet.normalizeColumn
                    (
                    resultDescription.getColumnDescriptor( position ).getType(),
                    newRow,
                    position,
                    normalizedGeneratedValues[ i ],
                    resultDescription
                    );

                newRow.setColumn( position, normalizedColumn );
            }
        }
        finally
        {
            //
            // We restore the Activation to its state before we ran the generation
            // clause. This may not be necessary but I don't understand all of
            // the paths through the Insert and Update result sets. This
            // defensive coding seems prudent to me.
            //
            if ( oldRow == null ) { source.clearCurrentRow(); }
            else { source.setCurrentRow( oldRow ); }
        }
    }
}
/**
 * One-time setup for normalizing generated columns: record which
 * target-row positions (at or past firstColumn) carry generation
 * clauses, and capture scratch values used when normalizing them.
 */
private void setupGeneratedColumns( Activation activation, ValueRow newRow )
    throws StandardException
{
    ResultDescription rd = activation.getResultDescription();
    int columnCount = rd.getColumnCount();
    ExecRow templateRow = newRow.getNewNullRow();

    // Pass 1: count the generated columns.
    int generatedCount = 0;
    for ( int col = 1; col <= columnCount; col++ )
    {
        if ( col < firstColumn ) { continue; }
        if ( rd.getColumnDescriptor( col ).hasGenerationClause() )
            generatedCount++;
    }

    // Pass 2: allocate and populate the support structures.
    generatedColumnPositions = new int[ generatedCount ];
    normalizedGeneratedValues = new DataValueDescriptor[ generatedCount ];

    int next = 0;
    for ( int col = 1; col <= columnCount; col++ )
    {
        if ( col < firstColumn ) { continue; }
        if ( rd.getColumnDescriptor( col ).hasGenerationClause() )
        {
            generatedColumnPositions[ next ] = col;
            normalizedGeneratedValues[ next ] = templateRow.getColumn( col );
            next++;
        }
    }
}
/**
 * Run the statement's check constraints against the current row.
 *
 * @param checkGM    generated code holding every check constraint;
 *                   may be null when there is nothing to check
 * @param activation the Activation in which checkGM lives
 *
 * @exception StandardException raised by the generated expression
 *            itself when a constraint is violated
 */
public static void evaluateCheckConstraints
(
    GeneratedMethod checkGM,
    Activation activation
)
    throws StandardException
{
    if (checkGM == null)
        return;

    // The generated expression throws on violation, so its return
    // value is deliberately ignored.
    checkGM.invoke(activation);
}
/**
 * Whether executing this ResultSet causes a commit or rollback.
 *
 * @return false - plain result sets neither commit nor roll back
 */
public boolean doesCommit()
{
    return false;
}
/**
 * SQL warnings raised while producing this result set.
 *
 * @return null - no warnings are tracked at this level
 */
public java.sql.SQLWarning getWarnings() {
    return null;
}
}
| |
package nachos.userprog;
import nachos.machine.*;
import nachos.threads.*;
import nachos.userprog.*;
import java.io.EOFException;
/**
* Encapsulates the state of a user process that is not contained in its user
* thread (or threads). This includes its address translation state, a file
* table, and information about the program being executed.
*
* <p>
* This class is extended by other classes to support additional functionality
* (such as additional syscalls).
*
* @see nachos.vm.VMProcess
* @see nachos.network.NetProcess
*/
public class UserProcess {
/**
* Allocate a new process.
*/
public UserProcess() {
int numPhysPages = Machine.processor().getNumPhysPages();
pageTable = new TranslationEntry[numPhysPages];
fileDescriptors[0] = UserKernel.console.openForReading();
fileDescriptors[1] = UserKernel.console.openForWriting();
for (int i = 0; i < numPhysPages; i++)
pageTable[i] = new TranslationEntry(i, i, true, false, false, false);
}
/**
 * Allocate and return a new process of the class named by the
 * <tt>nachos.conf</tt> key <tt>Kernel.processClassName</tt>.
 *
 * @return a freshly constructed process of the configured class.
 */
public static UserProcess newUserProcess() {
    String className = Machine.getProcessClassName();
    return (UserProcess) Lib.constructObject(className);
}
/**
 * Load the named program with the given arguments and, on success,
 * fork a user thread to run it.
 *
 * @param name the name of the file containing the executable.
 * @param args the arguments to pass to the executable.
 * @return <tt>true</tt> if the program was successfully started.
 */
public boolean execute(String name, String[] args) {
    if (!load(name, args))
        return false;

    UThread thread = new UThread(this);
    thread.setName(name).fork();
    return true;
}
/**
 * Save process state before a context switch (called by
 * <tt>UThread.saveState()</tt>).  All state lives in this object
 * already, so nothing needs to be captured here.
 */
public void saveState() {
    // Intentionally empty.
}
/**
 * Restore process state after a context switch (called by
 * <tt>UThread.restoreState()</tt>): point the processor at this
 * process's page table.
 */
public void restoreState() {
    Machine.processor().setPageTable(pageTable);
}
/**
 * Read a null-terminated string out of this process's virtual
 * memory.  At most <tt>maxLength + 1</tt> bytes are fetched from
 * <tt>vaddr</tt>; the bytes before the first null byte become the
 * returned string (terminator excluded).
 *
 * @param vaddr the starting virtual address of the string.
 * @param maxLength the maximum number of characters, not including
 * the null terminator.
 * @return the decoded string, or <tt>null</tt> if no terminator
 * was found within the window.
 */
public String readVirtualMemoryString(int vaddr, int maxLength) {
    Lib.assertTrue(maxLength >= 0);

    byte[] buffer = new byte[maxLength + 1];
    int fetched = readVirtualMemory(vaddr, buffer);

    // Scan for the terminator within the bytes actually transferred.
    int end = 0;
    while (end < fetched && buffer[end] != 0)
        end++;

    // Only report a string when the terminator was actually seen.
    return (end < fetched) ? new String(buffer, 0, end) : null;
}
/**
 * Transfer data from this process's virtual memory into the whole
 * of <tt>data</tt>; shorthand for
 * <tt>readVirtualMemory(vaddr, data, 0, data.length)</tt>.
 *
 * @param vaddr the first byte of virtual memory to read.
 * @param data the array receiving the data.
 * @return the number of bytes successfully transferred.
 */
public int readVirtualMemory(int vaddr, byte[] data) {
    return readVirtualMemory(vaddr, data, 0, data.length);
}
/**
 * Copy bytes from this process's virtual memory into <tt>data</tt>.
 * Never destroys the process on error; instead reports how many
 * bytes were copied (possibly zero).
 *
 * <p>NOTE(review): translation is still the identity mapping -
 * virtual addresses are used directly as physical offsets and the
 * page table is not consulted.
 *
 * @param vaddr the first byte of virtual memory to read.
 * @param data the array receiving the data.
 * @param offset the first index of <tt>data</tt> to write.
 * @param length the number of bytes to transfer.
 * @return the number of bytes successfully transferred.
 */
public int readVirtualMemory(int vaddr, byte[] data, int offset, int length) {
    Lib.assertTrue(offset >= 0 && length >= 0
            && offset + length <= data.length);

    byte[] memory = Machine.processor().getMemory();

    // An out-of-range start address means nothing can be copied.
    if (vaddr < 0 || vaddr >= memory.length)
        return 0;

    // Clamp the copy so it cannot run past the end of physical memory.
    int amount = Math.min(length, memory.length - vaddr);
    System.arraycopy(memory, vaddr, data, offset, amount);

    return amount;
}
/**
 * Transfer all of <tt>data</tt> into this process's virtual
 * memory; shorthand for
 * <tt>writeVirtualMemory(vaddr, data, 0, data.length)</tt>.
 *
 * @param vaddr the first byte of virtual memory to write.
 * @param data the array containing the data to transfer.
 * @return the number of bytes successfully transferred.
 */
public int writeVirtualMemory(int vaddr, byte[] data) {
    return writeVirtualMemory(vaddr, data, 0, data.length);
}
/**
 * Copy bytes from <tt>data</tt> into this process's virtual
 * memory.  Never destroys the process on error; instead reports
 * how many bytes were copied (possibly zero).
 *
 * <p>NOTE(review): translation is still the identity mapping -
 * virtual addresses are used directly as physical offsets and the
 * page table (including read-only bits) is not consulted.
 *
 * @param vaddr the first byte of virtual memory to write.
 * @param data the array containing the data to transfer.
 * @param offset the first index of <tt>data</tt> to read.
 * @param length the number of bytes to transfer.
 * @return the number of bytes successfully transferred.
 */
public int writeVirtualMemory(int vaddr, byte[] data, int offset, int length) {
    Lib.assertTrue(offset >= 0 && length >= 0
            && offset + length <= data.length);

    byte[] memory = Machine.processor().getMemory();

    // An out-of-range start address means nothing can be copied.
    if (vaddr < 0 || vaddr >= memory.length)
        return 0;

    // Clamp the copy so it cannot run past the end of physical memory.
    int amount = Math.min(length, memory.length - vaddr);
    System.arraycopy(data, offset, memory, vaddr, amount);

    return amount;
}
/**
 * Load the executable with the specified name into this process, and
 * prepare to pass it the specified arguments. Opens the executable, reads
 * its header information, and copies sections and arguments into this
 * process's virtual memory.
 *
 * Memory layout established here: COFF sections starting at page 0,
 * then stackPages pages of stack, then one final page holding the
 * argv pointer array and argument strings.
 *
 * @param name the name of the file containing the executable.
 * @param args the arguments to pass to the executable.
 * @return <tt>true</tt> if the executable was successfully loaded.
 */
private boolean load(String name, String[] args) {
    Lib.debug(dbgProcess, "UserProcess.load(\"" + name + "\")");

    OpenFile executable = ThreadedKernel.fileSystem.open(name, false);
    if (executable == null) {
        Lib.debug(dbgProcess, "\topen failed");
        return false;
    }

    try {
        coff = new Coff(executable);
    }
    catch (EOFException e) {
        executable.close();
        Lib.debug(dbgProcess, "\tcoff load failed");
        return false;
    }

    // make sure the sections are contiguous and start at page 0
    numPages = 0;
    for (int s = 0; s < coff.getNumSections(); s++) {
        CoffSection section = coff.getSection(s);
        if (section.getFirstVPN() != numPages) {
            coff.close();
            Lib.debug(dbgProcess, "\tfragmented executable");
            return false;
        }
        numPages += section.getLength();
    }

    // make sure the argv array will fit in one page
    byte[][] argv = new byte[args.length][];
    int argsSize = 0;
    for (int i = 0; i < args.length; i++) {
        argv[i] = args[i].getBytes();
        // 4 bytes for argv[] pointer; then string plus one for null byte
        argsSize += 4 + argv[i].length + 1;
    }
    if (argsSize > pageSize) {
        coff.close();
        Lib.debug(dbgProcess, "\targuments too long");
        return false;
    }

    // program counter initially points at the program entry point
    initialPC = coff.getEntryPoint();

    // next comes the stack; stack pointer initially points to top of it
    numPages += stackPages;
    initialSP = numPages * pageSize;

    // and finally reserve 1 page for arguments
    numPages++;

    if (!loadSections())
        return false;

    // store arguments in last page: pointer table first, strings after it
    int entryOffset = (numPages - 1) * pageSize;
    int stringOffset = entryOffset + args.length * 4;

    this.argc = args.length;
    this.argv = entryOffset;

    // Write each argv[i] pointer, then the string bytes, then a null byte.
    for (int i = 0; i < argv.length; i++) {
        byte[] stringOffsetBytes = Lib.bytesFromInt(stringOffset);
        Lib.assertTrue(writeVirtualMemory(entryOffset, stringOffsetBytes) == 4);
        entryOffset += 4;
        Lib.assertTrue(writeVirtualMemory(stringOffset, argv[i]) == argv[i].length);
        stringOffset += argv[i].length;
        Lib.assertTrue(writeVirtualMemory(stringOffset, new byte[] { 0 }) == 1);
        stringOffset += 1;
    }

    return true;
}
/**
 * Allocate memory for this process and load the COFF sections into
 * it.  If this returns successfully, the process will definitely
 * run (it is the last fallible step of initialization).
 *
 * <p>NOTE(review): pages are loaded with vpn == ppn; real virtual
 * memory support would allocate physical pages here.
 *
 * @return <tt>true</tt> if the sections were successfully loaded.
 */
protected boolean loadSections() {
    if (numPages > Machine.processor().getNumPhysPages()) {
        coff.close();
        Lib.debug(dbgProcess, "\tinsufficient physical memory");
        return false;
    }

    // Copy every page of every section straight into memory.
    for (int s = 0; s < coff.getNumSections(); s++) {
        CoffSection section = coff.getSection(s);

        Lib.debug(dbgProcess, "\tinitializing " + section.getName()
                + " section (" + section.getLength() + " pages)");

        int firstVpn = section.getFirstVPN();
        for (int page = 0; page < section.getLength(); page++)
            section.loadPage(page, firstVpn + page);
    }

    return true;
}
/**
 * Release any resources allocated by <tt>loadSections()</tt>.
 * Nothing is tracked yet, so there is nothing to free.
 */
protected void unloadSections() {
    // Intentionally empty.
}
/**
 * Prepare the processor registers for running the loaded program:
 * zero every user register, then set PC to the entry point, SP to
 * the top of the stack, and pass argc/argv in A0/A1.
 */
public void initRegisters() {
    Processor processor = Machine.processor();

    // Start from a clean slate.
    for (int reg = 0; reg < processor.numUserRegisters; reg++)
        processor.writeRegister(reg, 0);

    // Entry point and top of stack.
    processor.writeRegister(Processor.regPC, initialPC);
    processor.writeRegister(Processor.regSP, initialSP);

    // main(argc, argv) calling convention.
    processor.writeRegister(Processor.regA0, argc);
    processor.writeRegister(Processor.regA1, argv);
}
/**
 * Handle the halt() system call by stopping the machine.
 *
 * <p>NOTE(review): any process may halt the machine here; the usual
 * syscall spec restricts halt() to the root process - confirm
 * against the assignment requirements.
 *
 * @return 0 (never actually reached).
 */
private int handleHalt() {
    Machine.halt();

    Lib.assertNotReached("Machine.halt() did not halt machine!");
    return 0;
}
/**
 * Handle the exit() system call.
 *
 * NOTE(review): this stub halts the entire machine instead of
 * terminating just this process, and the exit status argument is
 * never read (handleSyscall invokes this with no arguments).
 * Proper process-lifetime management would close open files, free
 * pages, and wake a joined parent - TODO confirm intended scope.
 */
private int handleExit()
{
Machine.halt();
return 0;
}
/**
 * Find the lowest free slot in the file-descriptor table, skipping
 * 0 and 1 which are reserved for console stdin/stdout.
 *
 * @return a free descriptor index, or -1 when all slots are in use.
 */
private int findFirstFreeFileDescriptor(){
    for (int fd = 2; fd < MAXOPEN; fd++) {
        if (fileDescriptors[fd] == null)
            return fd;
    }
    return -1;
}
/**
 * Handle the creat() system call: read the file name at virtual
 * address <tt>a0</tt>, create (or truncate-open) it through the
 * file system, and install it in the descriptor table.
 *
 * @param a0 user-space address of the null-terminated file name.
 * @return the new file descriptor, or -1 on any failure.
 */
private int handleCreate(int a0){
    // readVirtualMemoryString() already caps the name at 256 characters
    // and returns null when no terminator is found, so the old
    // "length() > 256" check was unreachable and has been removed.
    String f_name = readVirtualMemoryString(a0,256);
    if(f_name == null){
        return -1;
    }
    Lib.debug(dbgProcess, "about to create file: "+f_name);

    int avail_file_descriptor = findFirstFreeFileDescriptor();
    if(avail_file_descriptor == -1){
        Lib.debug(dbgProcess, "all 16 files are being opened!");
        return -1;
    }

    OpenFile o_f = UserKernel.fileSystem.open(f_name, true);
    if(o_f == null){
        Lib.debug(dbgProcess, "can't open file for some reason, wrong filename? no file in that directory");
        return -1;
    }

    fileDescriptors[avail_file_descriptor] = o_f;
    Lib.debug(dbgProcess, "created file "+f_name);
    return avail_file_descriptor;
}
/**
 * Handle the open() system call: read the file name at virtual
 * address <tt>a0</tt>, open the existing file through the file
 * system, and install it in the descriptor table.
 *
 * @param a0 user-space address of the null-terminated file name.
 * @return the new file descriptor, or -1 on any failure.
 */
private int handleOpen(int a0){
    // readVirtualMemoryString() already caps the name at 256 characters
    // and returns null when no terminator is found, so the old
    // "length() > 256" check was unreachable and has been removed.
    String f_name = readVirtualMemoryString(a0,256);
    if(f_name == null){
        return -1;
    }
    Lib.debug(dbgProcess, "about to open file: "+f_name);

    int avail_file_descriptor = findFirstFreeFileDescriptor();
    if(avail_file_descriptor == -1){
        Lib.debug(dbgProcess, "all 16 files are being opened!");
        return -1;
    }

    OpenFile o_f = UserKernel.fileSystem.open(f_name, false);
    if(o_f == null){
        Lib.debug(dbgProcess, "can't open file for some reason, wrong filename? no file in that directory");
        return -1;
    }

    fileDescriptors[avail_file_descriptor] = o_f;
    Lib.debug(dbgProcess, "opened file "+f_name);
    return avail_file_descriptor;
}
/**
 * Handle the read() system call: read up to <tt>count</tt> bytes
 * from the open file <tt>fd</tt> into user memory at
 * <tt>bufferAddr</tt>.
 *
 * @param fd open file descriptor (0 = console stdin is allowed).
 * @param bufferAddr user-space destination buffer address.
 * @param count maximum number of bytes to read.
 * @return the number of bytes placed in the user buffer, or -1 on error.
 */
private int handleRead(int fd, int bufferAddr, int count)
{
    Lib.debug(dbgProcess, "fd = " + fd + " bufferAddr = " + bufferAddr + " count = " + count);

    // Bounds-check fd - including the upper bound, which the old code
    // missed and which caused ArrayIndexOutOfBoundsException for
    // fd >= MAXOPEN - and refuse reads from stdout.
    if(fd < 0 || fd >= MAXOPEN || count < 0 || fd == 1)
    {
        Lib.debug(dbgProcess, "invalid filedescriptor: " + fd);
        return -1;
    }

    OpenFile of = fileDescriptors[fd];
    if(of == null)
    {
        Lib.debug(dbgProcess, "invalid fd = " + fd);
        return -1;
    }
    Lib.debug(dbgProcess, "reading from file: " + of.getName());

    byte[] data = new byte[count];
    int nBytes = of.read(data, 0, data.length);
    Lib.debug(dbgProcess, "nBytes = " + nBytes);

    // Check for a read failure BEFORE touching user memory; the old code
    // passed the negative length to writeVirtualMemory(), which tripped a
    // kernel assertion instead of returning an error to the user.
    if(nBytes < 0)
    {
        return -1;
    }

    if(nBytes < count)
    {
        Lib.debug(dbgProcess, "read " + nBytes + " bytes. Expected to read " + count + " bytes");
    }

    // Report only what actually landed in the user buffer; a partial copy
    // (bad user address) is a user error, not a kernel panic, so the old
    // Lib.assertTrue() on the transfer size is gone.
    return writeVirtualMemory(bufferAddr, data, 0, nBytes);
}
/**
 * Handle the write() system call: copy <tt>count</tt> bytes from
 * user memory at <tt>bufferAddr</tt> and write them to the open
 * file <tt>fd</tt>.
 *
 * @param fd open file descriptor (1 = console stdout is allowed).
 * @param bufferAddr user-space source buffer address.
 * @param count number of bytes to write.
 * @return the number of bytes actually written, or -1 on error
 *         (a short write signals disk trouble and reports -1).
 */
private int handleWrite(int fd, int bufferAddr, int count)
{
    Lib.debug(dbgProcess, "fd = " + fd + " bufferAddr = " + bufferAddr + " count = " + count);

    // Bounds-check fd (the old code omitted the upper bound, risking
    // ArrayIndexOutOfBoundsException) and refuse writes to stdin.
    if(fd < 0 || fd >= MAXOPEN || count < 0 || fd == 0)
    {
        Lib.debug(dbgProcess, "invalid filedescriptor: " + fd);
        return -1;
    }

    OpenFile of = fileDescriptors[fd];
    if(of == null)
    {
        Lib.debug(dbgProcess, "invalid fd = " + fd);
        return -1;
    }
    Lib.debug(dbgProcess, "writing to file: " + of.getName());

    byte[] data = new byte[count];
    int nBytes = readVirtualMemory(bufferAddr, data, 0, data.length);
    Lib.debug(dbgProcess, "nBytes = " + nBytes);

    // Write only what was fetched from user memory, and use the file
    // system's own count instead of asserting on it (a failed device
    // write should surface as -1, not as a kernel assertion).
    int written = of.write(data, 0, nBytes);

    if(written < count)
    {
        Lib.debug(dbgProcess, "wrote " + written + " bytes. Expected to write " + count + " bytes");
        return -1;
    }

    // Fixed off-by-one: the old code returned nBytes + 1, over-reporting
    // the number of bytes written by one.
    return written;
}
/**
 * Handle the close() system call: close the open file behind
 * <tt>fd</tt> and free its descriptor slot.
 *
 * @param fd the descriptor to close.
 * @return 0 on success, -1 if fd is out of range or not open.
 */
private int handleClose(int fd) {
    if (fd < 0 || fd >= MAXOPEN)
        return -1;

    OpenFile file = fileDescriptors[fd];
    if (file == null)
        return -1;

    // Free the slot and release the underlying file.
    fileDescriptors[fd] = null;
    file.close();
    return 0;
}
/**
 * Handle the unlink() system call: delete the named file from the
 * file system.
 *
 * @param a0 user-space address of the null-terminated file name.
 * @return 0 on success, -1 on failure.
 */
private int handleUnlink(int a0) {
    String f_name = readVirtualMemoryString(a0,256);
    if(f_name == null){
        return -1;
    }

    // Propagate failure to the caller; the old code ignored the result
    // of remove() and always reported success.
    return UserKernel.fileSystem.remove(f_name) ? 0 : -1;
}
// Syscall numbers, matching the numbering used by the user-side
// syscall stubs (see the handleSyscall() javadoc table below).
private static final int syscallHalt = 0, syscallExit = 1, syscallExec = 2,
syscallJoin = 3, syscallCreate = 4, syscallOpen = 5,
syscallRead = 6, syscallWrite = 7, syscallClose = 8,
syscallUnlink = 9;
/**
 * Dispatch a syscall, invoked by <tt>handleException()</tt>.  The
 * syscall numbers follow the standard Nachos convention:
 * 0 halt, 1 exit, 2 exec, 3 join, 4 creat, 5 open, 6 read,
 * 7 write, 8 close, 9 unlink.  exec and join are not yet handled
 * here, so they fall through to the unknown-syscall path.
 *
 * @param syscall the syscall number.
 * @param a0 the first syscall argument.
 * @param a1 the second syscall argument.
 * @param a2 the third syscall argument.
 * @param a3 the fourth syscall argument.
 * @return the value to be returned to the user.
 */
public int handleSyscall(int syscall, int a0, int a1, int a2, int a3) {
    switch (syscall) {
    case syscallHalt:
        return handleHalt();
    case syscallExit:
        return handleExit();
    case syscallCreate:
        return handleCreate(a0);
    case syscallOpen:
        return handleOpen(a0);
    case syscallRead:
        return handleRead(a0, a1, a2);
    case syscallWrite:
        return handleWrite(a0, a1, a2);
    case syscallClose:
        return handleClose(a0);
    case syscallUnlink:
        return handleUnlink(a0);
    default:
        Lib.debug(dbgProcess, "Unknown syscall " + syscall);
        Lib.assertNotReached("Unknown system call!");
    }
    return 0;
}
/**
 * Handle a user exception (called by
 * <tt>UserKernel.exceptionHandler()</tt>).  Syscall exceptions are
 * dispatched to <tt>handleSyscall()</tt>, the result placed in V0,
 * and the PC advanced past the syscall instruction; any other
 * exception is fatal for now.
 *
 * @param cause the exception that occurred; see the
 * <tt>Processor.exceptionZZZ</tt> constants.
 */
public void handleException(int cause) {
    Processor processor = Machine.processor();

    if (cause == Processor.exceptionSyscall) {
        int result = handleSyscall(processor.readRegister(Processor.regV0),
                processor.readRegister(Processor.regA0),
                processor.readRegister(Processor.regA1),
                processor.readRegister(Processor.regA2),
                processor.readRegister(Processor.regA3));
        processor.writeRegister(Processor.regV0, result);
        processor.advancePC();
    }
    else {
        Lib.debug(dbgProcess, "Unexpected exception: "
                + Processor.exceptionNames[cause]);
        Lib.assertNotReached("Unexpected exception");
    }
}
/** Maximum number of simultaneously open files per process. */
protected final int MAXOPEN = 16;
/** The program being run by this process. */
protected Coff coff;
/** This process's page table. */
protected TranslationEntry[] pageTable;
/** The number of contiguous pages occupied by the program. */
protected int numPages;
/** The number of pages in the program's stack. */
protected final int stackPages = 8;
// Initial program counter and stack pointer, filled in by load().
private int initialPC, initialSP;
// Argument count and the user-space address of the argv array.
private int argc, argv;
// Processor page size, cached for convenience.
private static final int pageSize = Processor.pageSize;
// Debug flag for process-related Lib.debug() output.
private static final char dbgProcess = 'a';
// Open-file table; slots 0 and 1 are the console streams.
protected OpenFile[] fileDescriptors = new OpenFile[MAXOPEN];
}
| |
/*
* Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.admindistribution.outbound;
import gov.hhs.fha.nhinc.admindistribution.AdminDistributionAuditLogger;
import gov.hhs.fha.nhinc.admindistribution.AdminDistributionPolicyChecker;
import gov.hhs.fha.nhinc.admindistribution.MessageGeneratorUtils;
import gov.hhs.fha.nhinc.admindistribution.aspect.ADRequestTransformingBuilder;
import gov.hhs.fha.nhinc.admindistribution.entity.OutboundAdminDistributionDelegate;
import gov.hhs.fha.nhinc.admindistribution.entity.OutboundAdminDistributionOrchestratable;
import gov.hhs.fha.nhinc.aspect.OutboundProcessingEvent;
import gov.hhs.fha.nhinc.common.nhinccommon.AcknowledgementType;
import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType;
import gov.hhs.fha.nhinc.common.nhinccommon.HomeCommunityType;
import gov.hhs.fha.nhinc.common.nhinccommon.NhinTargetCommunitiesType;
import gov.hhs.fha.nhinc.common.nhinccommon.NhinTargetSystemType;
import gov.hhs.fha.nhinc.common.nhinccommonentity.RespondingGatewaySendAlertMessageSecuredType;
import gov.hhs.fha.nhinc.common.nhinccommonentity.RespondingGatewaySendAlertMessageType;
import gov.hhs.fha.nhinc.connectmgr.ConnectionManagerCache;
import gov.hhs.fha.nhinc.connectmgr.ConnectionManagerException;
import gov.hhs.fha.nhinc.connectmgr.UrlInfo;
import gov.hhs.fha.nhinc.event.DefaultEventDescriptionBuilder;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants;
import java.util.List;
import org.apache.log4j.Logger;
/**
 * Standard (audited and policy-checked) outbound orchestration for the NHIN
 * Admin Distribution service. For each requested target community this class
 * resolves the gateway endpoint URL, evaluates the outgoing policy for that
 * HCID, and forwards permitted alerts to the NHIN proxy via the outbound
 * delegate.
 *
 * @author dunnek
 */
public class StandardOutboundAdminDistribution implements OutboundAdminDistribution {

    private static final Logger LOG = Logger.getLogger(StandardOutboundAdminDistribution.class);

    // Not final: the original declared this field "final ... = null", which made the
    // null check in getAuditLogger() dead code and prevented any injection (e.g. a
    // test assigning a mock reflectively). Dropping "final" keeps runtime behavior
    // identical while making the lazy fallback in getAuditLogger() meaningful.
    private AdminDistributionAuditLogger auditLogger = null;

    private final MessageGeneratorUtils msgUtils = MessageGeneratorUtils.getInstance();

    /**
     * Sends an AlertMessage to the target communities. The secured request is
     * first converted to its unsecured form and then routed through the
     * unsecured overload.
     *
     * @param message
     *            secured SendAlertMessage received.
     * @param assertion
     *            Assertion received.
     * @param target
     *            NhinTargetCommunity received.
     */
    @Override
    @OutboundProcessingEvent(beforeBuilder = ADRequestTransformingBuilder.class,
            afterReturningBuilder = ADRequestTransformingBuilder.class, serviceType = "Admin Distribution",
            version = "")
    public void sendAlertMessage(RespondingGatewaySendAlertMessageSecuredType message, AssertionType assertion,
            NhinTargetCommunitiesType target) {
        RespondingGatewaySendAlertMessageType unsecured = msgUtils.convertToUnsecured(message, assertion, target);
        this.sendAlertMessage(unsecured, assertion, target);
    }

    /**
     * Sends an AlertMessage to every resolvable target community: audits the
     * request, resolves the endpoint URLs, checks outgoing policy per HCID and
     * forwards to the NHIN proxy for each permitted target.
     *
     * @param message
     *            SendAlertMessage received.
     * @param assertion
     *            Assertion received.
     * @param target
     *            NhinTargetCommunity received.
     */
    @Override
    @OutboundProcessingEvent(beforeBuilder = ADRequestTransformingBuilder.class,
            afterReturningBuilder = ADRequestTransformingBuilder.class, serviceType = "Admin Distribution",
            version = "")
    public void sendAlertMessage(RespondingGatewaySendAlertMessageType message, AssertionType assertion,
            NhinTargetCommunitiesType target) {
        // NOTE(review): this outbound flow audits with the INBOUND direction constant —
        // confirm this matches the audit repository's expectations before changing it.
        auditMessage(message, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION);

        List<UrlInfo> urlInfoList = getEndpoints(target);

        if ((urlInfoList == null) || (urlInfoList.isEmpty())) {
            LOG.warn("No targets were found for the Admin Distribution Request");
        } else {
            for (UrlInfo urlInfo : urlInfoList) {
                // create a new request to send out to each target community
                LOG.debug("Target: " + urlInfo.getHcid());
                // check the policy for the outgoing request to the target community
                boolean bIsPolicyOk = checkPolicy(message, assertion, urlInfo.getHcid());
                if (bIsPolicyOk) {
                    NhinTargetSystemType targetSystem = buildTargetSystem(urlInfo);
                    sendToNhinProxy(message, assertion, targetSystem);
                } else {
                    LOG.error("The policy engine evaluated the request and denied the request.");
                }
            }
        }
    }

    /**
     * This method audits the AdminDist Entity Message.
     *
     * @param message
     *            SendAlertMessage received.
     * @param assertion
     *            Assertion received.
     * @param direction
     *            The direction can be either outbound or inbound.
     */
    protected void auditMessage(RespondingGatewaySendAlertMessageType message, AssertionType assertion, String direction) {
        AcknowledgementType ack = getAuditLogger().auditEntityAdminDist(message, assertion, direction);
        if (ack != null) {
            LOG.debug("ack: " + ack.getMessage());
        }
    }

    /**
     * @return the injected auditLogger if one was set, otherwise a fresh
     *         AdminDistributionAuditLogger per call.
     */
    protected AdminDistributionAuditLogger getAuditLogger() {
        return (auditLogger != null) ? auditLogger : new AdminDistributionAuditLogger();
    }

    /**
     * Builds the NhinTargetSystem (home community id + endpoint URL) for a
     * single resolved target.
     */
    private NhinTargetSystemType buildTargetSystem(UrlInfo urlInfo) {
        LOG.debug("Begin buildTargetSystem");
        NhinTargetSystemType result = new NhinTargetSystemType();
        HomeCommunityType hc = new HomeCommunityType();
        hc.setHomeCommunityId(urlInfo.getHcid());
        result.setHomeCommunity(hc);
        result.setUrl(urlInfo.getUrl());
        return result;
    }

    /**
     * This method returns the list of url's of targetCommunities.
     *
     * @param targetCommunities
     *            NhinTargetCommunities received.
     * @return list of urlInfo for target Communities; may be null when the
     *         connection manager lookup fails (callers must null-check).
     */
    protected List<UrlInfo> getEndpoints(NhinTargetCommunitiesType targetCommunities) {
        List<UrlInfo> urlInfoList = null;
        try {
            urlInfoList = ConnectionManagerCache.getInstance().getEndpointURLFromNhinTargetCommunities(
                    targetCommunities, NhincConstants.NHIN_ADMIN_DIST_SERVICE_NAME);
        } catch (ConnectionManagerException ex) {
            // Deliberately best-effort: log and fall through with a null list.
            LOG.error("Failed to obtain target URLs", ex);
        }
        return urlInfoList;
    }

    /**
     * This method returns boolean for the policyCheck for a specific HCID.
     *
     * @param request
     *            SendAlertMessage received. NOTE: the assertion is copied onto
     *            the request as a side effect so the policy engine can see it.
     * @param assertion
     *            Assertion received.
     * @param hcid
     *            homeCommunityId to check policy.
     * @return true if checkpolicy is permit; else false.
     */
    protected boolean checkPolicy(RespondingGatewaySendAlertMessageType request, AssertionType assertion, String hcid) {
        if (request != null) {
            request.setAssertion(assertion);
        }
        return new AdminDistributionPolicyChecker().checkOutgoingPolicy(request, hcid);
    }

    /**
     * This method send message to Nhin Proxy.
     *
     * @param newRequest
     *            SendAlertMessage received.
     * @param assertion
     *            Assertion received.
     * @param target
     *            NhinTargetSystem received.
     */
    protected void sendToNhinProxy(RespondingGatewaySendAlertMessageType newRequest, AssertionType assertion,
            NhinTargetSystemType target) {
        LOG.debug("begin sendToNhinProxy");
        OutboundAdminDistributionDelegate adDelegate = getNewOutboundAdminDistributionDelegate();
        OutboundAdminDistributionOrchestratable orchestratable = new OutboundAdminDistributionOrchestratable(adDelegate);
        orchestratable.setRequest(newRequest);
        orchestratable.setAssertion(assertion);
        orchestratable.setTarget(target);
        // false: this is the standard (policy-checked) flow, not a passthrough.
        orchestratable.setPassthru(false);
        adDelegate.process(orchestratable);
    }

    /**
     * @return an instance of OutboundAdminDistributionDelegate (overridable for tests)
     */
    protected OutboundAdminDistributionDelegate getNewOutboundAdminDistributionDelegate() {
        return new OutboundAdminDistributionDelegate();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.XFilteredDocIdSetIterator;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.search.NoopCollector;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.common.util.IntArray;
import org.elasticsearch.common.util.LongHash;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.Set;
/**
 * A query implementation that executes the wrapped child query and connects all the matching child docs to the related
 * parent documents using {@link ParentChildIndexFieldData}.
 * <p/>
 * This query is executed in two rounds. The first round resolves all the matching child documents and groups these
 * documents by parent uid value. Also the child scores are aggregated per parent uid value. During the second round
 * all parent documents having the same uid value that is collected in the first phase are emitted as hit including
 * a score based on the aggregated child scores and score type.
 */
public class ChildrenQuery extends Query {

    protected final ParentChildIndexFieldData ifd;
    protected final String parentType;
    protected final String childType;
    protected final Filter parentFilter;
    protected final ScoreType scoreType;
    protected Query originalChildQuery;
    protected final int minChildren;
    protected final int maxChildren;
    protected final int shortCircuitParentDocSet;
    protected final BitDocIdSetFilter nonNestedDocsFilter;

    protected Query rewrittenChildQuery;
    protected IndexReader rewriteIndexReader;

    public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) {
        this.ifd = ifd;
        this.parentType = parentType;
        this.childType = childType;
        this.parentFilter = parentFilter;
        this.originalChildQuery = childQuery;
        this.scoreType = scoreType;
        this.shortCircuitParentDocSet = shortCircuitParentDocSet;
        this.nonNestedDocsFilter = nonNestedDocsFilter;
        assert maxChildren == 0 || minChildren <= maxChildren;
        // minChildren <= 1 is the same as "no minimum", normalize it to 0 so the
        // cheaper non-counting collectors/scorers can be selected below.
        this.minChildren = minChildren > 1 ? minChildren : 0;
        this.maxChildren = maxChildren;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || obj.getClass() != this.getClass()) {
            return false;
        }

        ChildrenQuery that = (ChildrenQuery) obj;
        if (!originalChildQuery.equals(that.originalChildQuery)) {
            return false;
        }
        if (!childType.equals(that.childType)) {
            return false;
        }
        if (getBoost() != that.getBoost()) {
            return false;
        }
        if (minChildren != that.minChildren) {
            return false;
        }
        if (maxChildren != that.maxChildren) {
            return false;
        }
        // BUGFIX: scoreType determines the scores this query produces, so two
        // instances differing only in score mode must not compare equal — Lucene
        // uses Query equality as a cache key and equal-but-different queries can
        // surface wrongly-scored cached results.
        if (scoreType != that.scoreType) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = originalChildQuery.hashCode();
        result = 31 * result + childType.hashCode();
        result = 31 * result + Float.floatToIntBits(getBoost());
        result = 31 * result + minChildren;
        result = 31 * result + maxChildren;
        // Keep hashCode consistent with equals() now that scoreType participates.
        result = 31 * result + scoreType.hashCode();
        return result;
    }

    @Override
    public String toString(String field) {
        int max = maxChildren == 0 ? Integer.MAX_VALUE : maxChildren;
        return "ChildrenQuery[min(" + Integer.toString(minChildren) + ") max(" + Integer.toString(max) + ")of " + childType + "/"
                + parentType + "](" + originalChildQuery.toString(field) + ')' + ToStringUtils.boost(getBoost());
    }

    @Override
    // See TopChildrenQuery#rewrite
    public Query rewrite(IndexReader reader) throws IOException {
        if (rewrittenChildQuery == null) {
            rewriteIndexReader = reader;
            rewrittenChildQuery = originalChildQuery.rewrite(reader);
        }
        return this;
    }

    @Override
    public Query clone() {
        ChildrenQuery q = (ChildrenQuery) super.clone();
        q.originalChildQuery = originalChildQuery.clone();
        if (q.rewrittenChildQuery != null) {
            q.rewrittenChildQuery = rewrittenChildQuery.clone();
        }
        return q;
    }

    @Override
    public void extractTerms(Set<Term> terms) {
        rewrittenChildQuery.extractTerms(terms);
    }

    @Override
    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
        SearchContext sc = SearchContext.current();
        assert rewrittenChildQuery != null;
        assert rewriteIndexReader == searcher.getIndexReader() : "not equal, rewriteIndexReader=" + rewriteIndexReader
                + " searcher.getIndexReader()=" + searcher.getIndexReader();
        final Query childQuery = rewrittenChildQuery;

        IndexParentChildFieldData globalIfd = ifd.loadGlobal(searcher.getIndexReader());
        if (globalIfd == null) {
            // No docs of the specified type exist on this shard
            return Queries.newMatchNoDocsQuery().createWeight(searcher, needsScores);
        }

        IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
        indexSearcher.setSimilarity(searcher.getSimilarity());

        // First round: run the child query over the whole shard and aggregate
        // matching children (scores and/or counts) per unique parent ordinal.
        boolean abort = true;
        long numFoundParents;
        ParentCollector collector = null;
        try {
            // Without min/max constraints, score-only collectors suffice (no
            // per-parent occurrence counting needed). AVG still needs counts and
            // therefore always falls through to the counting collectors below.
            if (minChildren == 0 && maxChildren == 0 && scoreType != ScoreType.NONE) {
                switch (scoreType) {
                    case MIN:
                        collector = new MinCollector(globalIfd, sc, parentType);
                        break;
                    case MAX:
                        collector = new MaxCollector(globalIfd, sc, parentType);
                        break;
                    case SUM:
                        collector = new SumCollector(globalIfd, sc, parentType);
                        break;
                }
            }
            if (collector == null) {
                switch (scoreType) {
                    case MIN:
                        collector = new MinCountCollector(globalIfd, sc, parentType);
                        break;
                    case MAX:
                        collector = new MaxCountCollector(globalIfd, sc, parentType);
                        break;
                    case SUM:
                    case AVG:
                        collector = new SumCountAndAvgCollector(globalIfd, sc, parentType);
                        break;
                    case NONE:
                        collector = new CountCollector(globalIfd, sc, parentType);
                        break;
                    default:
                        throw new RuntimeException("Are we missing a score type here? -- " + scoreType);
                }
            }

            indexSearcher.search(childQuery, collector);
            numFoundParents = collector.foundParents();
            if (numFoundParents == 0) {
                return Queries.newMatchNoDocsQuery().createWeight(searcher, needsScores);
            }
            abort = false;
        } finally {
            if (abort) {
                // Release the collector's big arrays if anything above threw.
                Releasables.close(collector);
            }
        }
        // The collector's arrays back the scorers; keep them alive for the whole collection phase.
        sc.addReleasable(collector, Lifetime.COLLECTION);
        final Filter parentFilter;
        if (numFoundParents <= shortCircuitParentDocSet) {
            // Few enough parents: build an exact parent-ids filter instead of
            // scanning the (potentially much larger) generic parent filter.
            parentFilter = ParentIdsFilter.createShortCircuitFilter(nonNestedDocsFilter, sc, parentType, collector.values,
                    collector.parentIdxs, numFoundParents);
        } else {
            parentFilter = this.parentFilter;
        }
        return new ParentWeight(this, rewrittenChildQuery.createWeight(searcher, needsScores), parentFilter, numFoundParents, collector, minChildren,
                maxChildren);
    }

    /**
     * Second-round weight: iterates parent docs (restricted by the parent filter)
     * and emits those whose uid was collected in round one, scored per scoreType.
     */
    protected class ParentWeight extends Weight {

        protected final Weight childWeight;
        protected final Filter parentFilter;
        protected final ParentCollector collector;
        protected final int minChildren;
        protected final int maxChildren;

        // Count-down of parents still to emit; lets scorers terminate early.
        protected long remaining;
        protected float queryNorm;
        protected float queryWeight;

        protected ParentWeight(Query query, Weight childWeight, Filter parentFilter, long remaining, ParentCollector collector, int minChildren, int maxChildren) {
            super(query);
            this.childWeight = childWeight;
            this.parentFilter = parentFilter;
            this.remaining = remaining;
            this.collector = collector;
            this.minChildren = minChildren;
            this.maxChildren = maxChildren;
        }

        @Override
        public Explanation explain(LeafReaderContext context, int doc) throws IOException {
            return new Explanation(getBoost(), "not implemented yet...");
        }

        @Override
        public void normalize(float norm, float topLevelBoost) {
            this.queryNorm = norm * topLevelBoost;
            queryWeight *= this.queryNorm;
        }

        @Override
        public float getValueForNormalization() throws IOException {
            queryWeight = getBoost();
            if (scoreType == ScoreType.NONE) {
                return queryWeight * queryWeight;
            }
            float sum = childWeight.getValueForNormalization();
            sum *= queryWeight * queryWeight;
            return sum;
        }

        @Override
        public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
            DocIdSet parentsSet = parentFilter.getDocIdSet(context, acceptDocs);
            if (DocIdSets.isEmpty(parentsSet) || remaining == 0) {
                return null;
            }

            // We can't be sure of the fact that liveDocs have been applied, so we apply it here. The "remaining"
            // count down (short circuit) logic will then work as expected.
            DocIdSetIterator parents = BitsFilteredDocIdSet.wrap(parentsSet, context.reader().getLiveDocs()).iterator();

            if (parents != null) {
                SortedDocValues bytesValues = collector.globalIfd.load(context).getOrdinalsValues(parentType);
                if (bytesValues == null) {
                    return null;
                }

                // Counting variants when min/max constraints apply (or NONE, which
                // only ever counted); otherwise the plain score-lookup scorers.
                if (minChildren > 0 || maxChildren != 0 || scoreType == ScoreType.NONE) {
                    switch (scoreType) {
                        case NONE:
                            DocIdSetIterator parentIdIterator = new CountParentOrdIterator(this, parents, collector, bytesValues,
                                    minChildren, maxChildren);
                            return ConstantScorer.create(parentIdIterator, this, queryWeight);
                        case AVG:
                            return new AvgParentCountScorer(this, parents, collector, bytesValues, minChildren, maxChildren);
                        default:
                            return new ParentCountScorer(this, parents, collector, bytesValues, minChildren, maxChildren);
                    }
                }
                switch (scoreType) {
                    case AVG:
                        return new AvgParentScorer(this, parents, collector, bytesValues);
                    default:
                        return new ParentScorer(this, parents, collector, bytesValues);
                }
            }
            return null;
        }
    }

    /**
     * Base first-round collector: maps each matching child doc to its parent's
     * global ordinal and records unique parents in a {@link LongHash}.
     */
    protected abstract static class ParentCollector extends NoopCollector implements Releasable {

        protected final IndexParentChildFieldData globalIfd;
        protected final LongHash parentIdxs;
        protected final BigArrays bigArrays;
        protected final SearchContext searchContext;
        protected final String parentType;

        protected SortedDocValues values;
        protected Scorer scorer;

        protected ParentCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            this.globalIfd = globalIfd;
            this.searchContext = searchContext;
            this.bigArrays = searchContext.bigArrays();
            this.parentIdxs = new LongHash(512, bigArrays);
            this.parentType = parentType;
        }

        @Override
        public final void collect(int doc) throws IOException {
            if (values != null) {
                final long globalOrdinal = values.getOrd(doc);
                if (globalOrdinal >= 0) {
                    long parentIdx = parentIdxs.add(globalOrdinal);
                    if (parentIdx >= 0) {
                        newParent(parentIdx);
                    } else {
                        // LongHash.add returns -1-index for an existing key.
                        parentIdx = -1 - parentIdx;
                        existingParent(parentIdx);
                    }
                }
            }
        }

        // Hook: first child seen for this parent.
        protected void newParent(long parentIdx) throws IOException {
        }

        // Hook: another child for an already-seen parent.
        protected void existingParent(long parentIdx) throws IOException {
        }

        public long foundParents() {
            return parentIdxs.size();
        }

        @Override
        protected void doSetNextReader(LeafReaderContext context) throws IOException {
            values = globalIfd.load(context).getOrdinalsValues(parentType);
        }

        @Override
        public void setScorer(Scorer scorer) throws IOException {
            this.scorer = scorer;
        }

        @Override
        public void close() throws ElasticsearchException {
            Releasables.close(parentIdxs);
        }
    }

    /** Collector that additionally keeps one aggregated score per parent. */
    protected abstract static class ParentScoreCollector extends ParentCollector implements Releasable {

        protected FloatArray scores;

        protected ParentScoreCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            super(globalIfd, searchContext, parentType);
            this.scores = this.bigArrays.newFloatArray(512, false);
        }

        @Override
        public boolean needsScores() {
            return true;
        }

        @Override
        protected void newParent(long parentIdx) throws IOException {
            scores = bigArrays.grow(scores, parentIdx + 1);
            scores.set(parentIdx, scorer.score());
        }

        @Override
        public void close() throws ElasticsearchException {
            Releasables.close(parentIdxs, scores);
        }
    }

    /** Collector keeping both an aggregated score and a child count per parent. */
    protected abstract static class ParentScoreCountCollector extends ParentScoreCollector implements Releasable {

        protected IntArray occurrences;

        protected ParentScoreCountCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            super(globalIfd, searchContext, parentType);
            this.occurrences = bigArrays.newIntArray(512, false);
        }

        @Override
        protected void newParent(long parentIdx) throws IOException {
            scores = bigArrays.grow(scores, parentIdx + 1);
            scores.set(parentIdx, scorer.score());
            occurrences = bigArrays.grow(occurrences, parentIdx + 1);
            occurrences.set(parentIdx, 1);
        }

        @Override
        public void close() throws ElasticsearchException {
            Releasables.close(parentIdxs, scores, occurrences);
        }
    }

    /** ScoreType.NONE with min/max constraints: counts children only. */
    private final static class CountCollector extends ParentCollector implements Releasable {

        protected IntArray occurrences;

        protected CountCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            super(globalIfd, searchContext, parentType);
            this.occurrences = bigArrays.newIntArray(512, false);
        }

        @Override
        protected void newParent(long parentIdx) throws IOException {
            occurrences = bigArrays.grow(occurrences, parentIdx + 1);
            occurrences.set(parentIdx, 1);
        }

        @Override
        protected void existingParent(long parentIdx) throws IOException {
            occurrences.increment(parentIdx, 1);
        }

        @Override
        public void close() throws ElasticsearchException {
            Releasables.close(parentIdxs, occurrences);
        }
    }

    private final static class SumCollector extends ParentScoreCollector {

        private SumCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            super(globalIfd, searchContext, parentType);
        }

        @Override
        protected void existingParent(long parentIdx) throws IOException {
            scores.increment(parentIdx, scorer.score());
        }
    }

    private final static class MaxCollector extends ParentScoreCollector {

        private MaxCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            super(globalIfd, searchContext, parentType);
        }

        @Override
        protected void existingParent(long parentIdx) throws IOException {
            float currentScore = scorer.score();
            if (currentScore > scores.get(parentIdx)) {
                scores.set(parentIdx, currentScore);
            }
        }
    }

    private final static class MinCollector extends ParentScoreCollector {

        private MinCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            super(globalIfd, searchContext, parentType);
        }

        @Override
        protected void existingParent(long parentIdx) throws IOException {
            float currentScore = scorer.score();
            if (currentScore < scores.get(parentIdx)) {
                scores.set(parentIdx, currentScore);
            }
        }
    }

    private final static class MaxCountCollector extends ParentScoreCountCollector {

        private MaxCountCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            super(globalIfd, searchContext, parentType);
        }

        @Override
        protected void existingParent(long parentIdx) throws IOException {
            float currentScore = scorer.score();
            if (currentScore > scores.get(parentIdx)) {
                scores.set(parentIdx, currentScore);
            }
            occurrences.increment(parentIdx, 1);
        }
    }

    private final static class MinCountCollector extends ParentScoreCountCollector {

        private MinCountCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            super(globalIfd, searchContext, parentType);
        }

        @Override
        protected void existingParent(long parentIdx) throws IOException {
            float currentScore = scorer.score();
            if (currentScore < scores.get(parentIdx)) {
                scores.set(parentIdx, currentScore);
            }
            occurrences.increment(parentIdx, 1);
        }
    }

    /** Sums scores and counts children; AVG divides the sum by the count at score time. */
    private final static class SumCountAndAvgCollector extends ParentScoreCountCollector {

        SumCountAndAvgCollector(IndexParentChildFieldData globalIfd, SearchContext searchContext, String parentType) {
            super(globalIfd, searchContext, parentType);
        }

        @Override
        protected void existingParent(long parentIdx) throws IOException {
            scores.increment(parentIdx, scorer.score());
            occurrences.increment(parentIdx, 1);
        }
    }

    /**
     * Second-round scorer: walks the parent filter's docs and emits only those
     * whose global ordinal was collected in round one, with the aggregated score.
     */
    private static class ParentScorer extends Scorer {

        final ParentWeight parentWeight;
        final LongHash parentIds;
        final FloatArray scores;

        final SortedDocValues globalOrdinals;
        final DocIdSetIterator parentsIterator;

        int currentDocId = -1;
        float currentScore;

        ParentScorer(ParentWeight parentWeight, DocIdSetIterator parentsIterator, ParentCollector collector, SortedDocValues globalOrdinals) {
            super(parentWeight);
            this.parentWeight = parentWeight;
            this.globalOrdinals = globalOrdinals;
            this.parentsIterator = parentsIterator;
            this.parentIds = collector.parentIdxs;
            this.scores = ((ParentScoreCollector) collector).scores;
        }

        @Override
        public float score() throws IOException {
            return currentScore;
        }

        // Returns true when the candidate parent should be emitted; subclasses
        // apply min/max child-count constraints and/or AVG normalization here.
        protected boolean acceptAndScore(long parentIdx) {
            currentScore = scores.get(parentIdx);
            return true;
        }

        @Override
        public int freq() throws IOException {
            // We don't have the original child query hit info here...
            // But the freq of the children could be collector and returned here, but makes this Scorer more expensive.
            return 1;
        }

        @Override
        public int docID() {
            return currentDocId;
        }

        @Override
        public int nextDoc() throws IOException {
            if (parentWeight.remaining == 0) {
                return currentDocId = NO_MORE_DOCS;
            }

            while (true) {
                currentDocId = parentsIterator.nextDoc();
                if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
                    return currentDocId;
                }

                final int globalOrdinal = globalOrdinals.getOrd(currentDocId);
                if (globalOrdinal < 0) {
                    continue;
                }

                final long parentIdx = parentIds.find(globalOrdinal);
                if (parentIdx != -1) {
                    parentWeight.remaining--;
                    if (acceptAndScore(parentIdx)) {
                        return currentDocId;
                    }
                }
            }
        }

        @Override
        public int advance(int target) throws IOException {
            if (parentWeight.remaining == 0) {
                return currentDocId = NO_MORE_DOCS;
            }

            currentDocId = parentsIterator.advance(target);
            if (currentDocId == DocIdSetIterator.NO_MORE_DOCS) {
                return currentDocId;
            }

            final long globalOrdinal = globalOrdinals.getOrd(currentDocId);
            if (globalOrdinal < 0) {
                return nextDoc();
            }

            final long parentIdx = parentIds.find(globalOrdinal);
            if (parentIdx != -1) {
                parentWeight.remaining--;
                if (acceptAndScore(parentIdx)) {
                    return currentDocId;
                }
            }
            return nextDoc();
        }

        @Override
        public long cost() {
            return parentsIterator.cost();
        }
    }

    /** Scorer that also enforces the min/max child-count constraints. */
    private static class ParentCountScorer extends ParentScorer {

        protected final IntArray occurrences;
        protected final int minChildren;
        protected final int maxChildren;

        ParentCountScorer(ParentWeight parentWeight, DocIdSetIterator parentsIterator, ParentCollector collector, SortedDocValues globalOrdinals, int minChildren, int maxChildren) {
            super(parentWeight, parentsIterator, (ParentScoreCollector) collector, globalOrdinals);
            this.minChildren = minChildren;
            // maxChildren == 0 means "unbounded".
            this.maxChildren = maxChildren == 0 ? Integer.MAX_VALUE : maxChildren;
            this.occurrences = ((ParentScoreCountCollector) collector).occurrences;
        }

        @Override
        protected boolean acceptAndScore(long parentIdx) {
            int count = occurrences.get(parentIdx);
            if (count < minChildren || count > maxChildren) {
                return false;
            }
            return super.acceptAndScore(parentIdx);
        }
    }

    /** AVG without min/max constraints: divide summed score by child count. */
    private static final class AvgParentScorer extends ParentCountScorer {

        AvgParentScorer(ParentWeight weight, DocIdSetIterator parentsIterator, ParentCollector collector, SortedDocValues globalOrdinals) {
            super(weight, parentsIterator, collector, globalOrdinals, 0, 0);
        }

        @Override
        protected boolean acceptAndScore(long parentIdx) {
            currentScore = scores.get(parentIdx);
            currentScore /= occurrences.get(parentIdx);
            return true;
        }
    }

    /** AVG with min/max constraints: filter by count, then average. */
    private static final class AvgParentCountScorer extends ParentCountScorer {

        AvgParentCountScorer(ParentWeight weight, DocIdSetIterator parentsIterator, ParentCollector collector, SortedDocValues globalOrdinals, int minChildren, int maxChildren) {
            super(weight, parentsIterator, collector, globalOrdinals, minChildren, maxChildren);
        }

        @Override
        protected boolean acceptAndScore(long parentIdx) {
            int count = occurrences.get(parentIdx);
            if (count < minChildren || count > maxChildren) {
                return false;
            }
            currentScore = scores.get(parentIdx);
            currentScore /= occurrences.get(parentIdx);
            return true;
        }
    }

    /**
     * ScoreType.NONE iterator: matches parents whose collected child count is
     * within [minChildren, maxChildren]; wrapped in a constant-score scorer.
     */
    private final static class CountParentOrdIterator extends XFilteredDocIdSetIterator {

        private final LongHash parentIds;
        protected final IntArray occurrences;
        private final int minChildren;
        private final int maxChildren;
        private final SortedDocValues ordinals;
        private final ParentWeight parentWeight;

        private CountParentOrdIterator(ParentWeight parentWeight, DocIdSetIterator innerIterator, ParentCollector collector, SortedDocValues ordinals, int minChildren, int maxChildren) {
            super(innerIterator);
            this.parentIds = ((CountCollector) collector).parentIdxs;
            this.occurrences = ((CountCollector) collector).occurrences;
            this.ordinals = ordinals;
            this.parentWeight = parentWeight;
            this.minChildren = minChildren;
            this.maxChildren = maxChildren == 0 ? Integer.MAX_VALUE : maxChildren;
        }

        @Override
        protected boolean match(int doc) {
            if (parentWeight.remaining == 0) {
                // All collected parents emitted; abort collection early.
                throw new CollectionTerminatedException();
            }

            final long parentOrd = ordinals.getOrd(doc);
            if (parentOrd >= 0) {
                final long parentIdx = parentIds.find(parentOrd);
                if (parentIdx != -1) {
                    parentWeight.remaining--;
                    int count = occurrences.get(parentIdx);
                    if (count >= minChildren && count <= maxChildren) {
                        return true;
                    }
                }
            }
            return false;
        }
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
/**
* <p>
* Describes the status of a moving Elastic IP address.
* </p>
*/
public class MovingAddressStatus implements Serializable, Cloneable {
    /**
     * The Elastic IP address whose move is being described.
     */
    private String publicIp;

    /**
     * The status of the Elastic IP address that's being moved to the EC2-VPC
     * platform, or restored to the EC2-Classic platform.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Allowed Values: </b>movingToVpc, restoringToClassic
     */
    private String moveStatus;
/**
* The Elastic IP address.
*
* @return The Elastic IP address.
*/
public String getPublicIp() {
return publicIp;
}
/**
* The Elastic IP address.
*
* @param publicIp The Elastic IP address.
*/
public void setPublicIp(String publicIp) {
this.publicIp = publicIp;
}
/**
* The Elastic IP address.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param publicIp The Elastic IP address.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public MovingAddressStatus withPublicIp(String publicIp) {
this.publicIp = publicIp;
return this;
}
/**
* The status of the Elastic IP address that's being moved to the EC2-VPC
* platform, or restored to the EC2-Classic platform.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>movingToVpc, restoringToClassic
*
* @return The status of the Elastic IP address that's being moved to the EC2-VPC
* platform, or restored to the EC2-Classic platform.
*
* @see MoveStatus
*/
public String getMoveStatus() {
return moveStatus;
}
/**
* The status of the Elastic IP address that's being moved to the EC2-VPC
* platform, or restored to the EC2-Classic platform.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>movingToVpc, restoringToClassic
*
* @param moveStatus The status of the Elastic IP address that's being moved to the EC2-VPC
* platform, or restored to the EC2-Classic platform.
*
* @see MoveStatus
*/
public void setMoveStatus(String moveStatus) {
this.moveStatus = moveStatus;
}
/**
 * Sets the status of the Elastic IP address that's being moved to the
 * EC2-VPC platform, or restored to the EC2-Classic platform, and returns
 * this object so that calls can be chained in the fluent style.
 * <p>
 * <b>Allowed Values: </b>movingToVpc, restoringToClassic
 *
 * @param moveStatus the move status string.
 *
 * @return this object, for method chaining.
 *
 * @see MoveStatus
 */
public MovingAddressStatus withMoveStatus(String moveStatus) {
    this.moveStatus = moveStatus;
    return this;
}
/**
 * Sets the move status from the {@link MoveStatus} enum; the enum's string
 * form is what gets stored.
 * <p>
 * <b>Allowed Values: </b>movingToVpc, restoringToClassic
 *
 * @param moveStatus the move status enum value.
 *
 * @see MoveStatus
 */
public void setMoveStatus(MoveStatus moveStatus) {
    this.moveStatus = moveStatus.toString();
}
/**
 * Sets the move status from the {@link MoveStatus} enum (the enum's string
 * form is stored) and returns this object so that calls can be chained in
 * the fluent style.
 * <p>
 * <b>Allowed Values: </b>movingToVpc, restoringToClassic
 *
 * @param moveStatus the move status enum value.
 *
 * @return this object, for method chaining.
 *
 * @see MoveStatus
 */
public MovingAddressStatus withMoveStatus(MoveStatus moveStatus) {
    this.moveStatus = moveStatus.toString();
    return this;
}
/**
 * Returns a string representation of this object; useful for testing and
 * debugging. Output is identical to the previous implementation:
 * {@code "{PublicIp: <ip>,MoveStatus: <status>}"} with null fields omitted.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Chained appends instead of '+' concatenation inside append(), which
    // would otherwise allocate a hidden StringBuilder per field.
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    if (getPublicIp() != null) {
        sb.append("PublicIp: ").append(getPublicIp()).append(",");
    }
    if (getMoveStatus() != null) {
        sb.append("MoveStatus: ").append(getMoveStatus());
    }
    sb.append("}");
    return sb.toString();
}
/**
 * Computes a hash code over the same fields, in the same order, that
 * {@link #equals(Object)} compares: public IP, then move status.
 *
 * @see java.lang.Object#hashCode()
 */
@Override
public int hashCode() {
    final int prime = 31;
    int hash = 1;
    hash = prime * hash + ((getPublicIp() == null) ? 0 : getPublicIp().hashCode());
    hash = prime * hash + ((getMoveStatus() == null) ? 0 : getMoveStatus().hashCode());
    return hash;
}
/**
 * Compares this status to another object for equality. Two instances are
 * equal when their public IPs and move statuses are both equal (null-safe).
 * Semantics are unchanged from the previous implementation; the
 * {@code instanceof == false} and XOR null-mismatch anti-idioms are
 * replaced with the conventional forms.
 *
 * @param obj the object to compare against; null returns false.
 * @return true if both fields match.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) return true;
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof MovingAddressStatus)) return false;
    MovingAddressStatus other = (MovingAddressStatus) obj;
    if (getPublicIp() == null ? other.getPublicIp() != null
            : !getPublicIp().equals(other.getPublicIp())) {
        return false;
    }
    if (getMoveStatus() == null ? other.getMoveStatus() != null
            : !getMoveStatus().equals(other.getMoveStatus())) {
        return false;
    }
    return true;
}
/**
 * Creates a shallow copy of this object via {@link Object#clone()}.
 *
 * @return a clone of this instance.
 * @throws IllegalStateException if the JVM reports CloneNotSupportedException
 *         despite this class being Cloneable (should never happen).
 */
@Override
public MovingAddressStatus clone() {
    try {
        return (MovingAddressStatus) super.clone();
    } catch (CloneNotSupportedException e) {
        throw new IllegalStateException(
            "Got a CloneNotSupportedException from Object.clone() "
            + "even though we're Cloneable!",
            e);
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ha;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.Socket;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.hadoop.net.ServerSocketUtil;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;
import org.apache.zookeeper.TestableZooKeeper;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import org.apache.zookeeper.ZKTestCase;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.server.ServerCnxnFactory;
import org.apache.zookeeper.server.ServerCnxnFactoryAccessor;
import org.apache.zookeeper.server.ZKDatabase;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.persistence.FileTxnLog;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
/**
* Copy-paste of ClientBase from ZooKeeper, but without any of the
* JMXEnv verification. There seems to be a bug ZOOKEEPER-1438
* which causes spurious failures in the JMXEnv verification when
* we run these tests with the upstream ClientBase.
*/
public abstract class ClientBaseWithFixes extends ZKTestCase {
protected static final Logger LOG = LoggerFactory.getLogger(ClientBaseWithFixes.class);
// Milliseconds to wait for client connect/disconnect and server up/down.
public static int CONNECTION_TIMEOUT = 30000;
// Parent directory under which per-test temp dirs are created.
static final File BASETEST = GenericTestUtils.getTestDir();
static {
// The 4-letter-words commands are simple diagnostics telnet commands in
// ZooKeeper. Since ZooKeeper 3.5, these are disabled by default due to
// security concerns: https://issues.apache.org/jira/browse/ZOOKEEPER-2693
// We are enabling them for the tests here, as some tests in hadoop or in
// other projects might still use them
System.setProperty("zookeeper.4lw.commands.whitelist", "*");
}
// "127.0.0.1:port" the embedded server listens on; picked once per instance.
protected final String hostPort = initHostPort();
// Max client connections, passed through to the server connection factory.
protected int maxCnxns = 0;
protected ServerCnxnFactory serverFactory = null;
protected File tmpDir = null;
// NOTE(review): not referenced anywhere in this class; possibly used by
// subclasses or left over from the upstream ClientBase copy - confirm
// before removing.
long initialFdCount;
/**
* In general don't use this. Only use in the special case that you
* want to ignore results (for whatever reason) in your test. Don't
* use empty watchers in real code!
*
*/
protected class NullWatcher implements Watcher {
@Override
public void process(WatchedEvent event) { /* nada */ }
}
/**
* Watcher that tracks connection state and lets tests block until the
* client is connected or disconnected.
*/
protected static class CountdownWatcher implements Watcher {
// XXX this doesn't need to be volatile! (Should probably be final)
volatile CountDownLatch clientConnected;
volatile boolean connected;
protected ZooKeeper client;
/**
* Associates the watched client. May only be called once; a second
* call indicates a test bug.
*
* @param zk the client this watcher observes
*/
public void initializeWatchedClient(ZooKeeper zk) {
if (client != null) {
throw new RuntimeException("Watched Client was already set");
}
client = zk;
}
public CountdownWatcher() {
reset();
}
/** Re-arms the latch and clears the connected flag so the watcher can be reused. */
synchronized public void reset() {
clientConnected = new CountDownLatch(1);
connected = false;
}
@Override
synchronized public void process(WatchedEvent event) {
// Read-only connections count as connected; any other state change
// (Disconnected, Expired, ...) flips the flag back off.
if (event.getState() == KeeperState.SyncConnected ||
event.getState() == KeeperState.ConnectedReadOnly) {
connected = true;
notifyAll();
clientConnected.countDown();
} else {
connected = false;
notifyAll();
}
}
synchronized boolean isConnected() {
return connected;
}
/**
* Blocks until connected or the timeout elapses.
*
* @param timeout maximum wait in milliseconds
* @throws TimeoutException if still disconnected after the timeout
*/
@VisibleForTesting
public synchronized void waitForConnected(long timeout)
throws InterruptedException, TimeoutException {
long expire = Time.now() + timeout;
long left = timeout;
while(!connected && left > 0) {
wait(left);
left = expire - Time.now();
}
if (!connected) {
throw new TimeoutException("Did not connect");
}
}
/**
* Blocks until disconnected or the timeout elapses.
*
* @param timeout maximum wait in milliseconds
* @throws TimeoutException if still connected after the timeout
*/
@VisibleForTesting
public synchronized void waitForDisconnected(long timeout)
throws InterruptedException, TimeoutException {
long expire = Time.now() + timeout;
long left = timeout;
while(connected && left > 0) {
wait(left);
left = expire - Time.now();
}
if (connected) {
throw new TimeoutException("Did not disconnect");
}
}
}
/** Creates a client connected to this fixture's default host:port. */
protected TestableZooKeeper createClient()
throws IOException, InterruptedException
{
return createClient(hostPort);
}
/** Creates a client for the given host:port using a fresh watcher. */
protected TestableZooKeeper createClient(String hp)
throws IOException, InterruptedException
{
CountdownWatcher watcher = new CountdownWatcher();
return createClient(watcher, hp);
}
// Clients opened during the current test; closed in tearDownAll().
private LinkedList<ZooKeeper> allClients;
private boolean allClientsSetup = false;
protected TestableZooKeeper createClient(CountdownWatcher watcher, String hp)
throws IOException, InterruptedException
{
return createClient(watcher, hp, CONNECTION_TIMEOUT);
}
/**
* Creates a client, waits for it to connect, and registers it for
* automatic close at teardown.
*
* @param watcher watcher used to observe the connection
* @param hp host:port to connect to
* @param timeout connect timeout in milliseconds
* @return the connected client
*/
protected TestableZooKeeper createClient(CountdownWatcher watcher,
String hp, int timeout)
throws IOException, InterruptedException
{
watcher.reset();
TestableZooKeeper zk = new TestableZooKeeper(hp, timeout, watcher);
if (!watcher.clientConnected.await(timeout, TimeUnit.MILLISECONDS))
{
Assert.fail("Unable to connect to server");
}
// Guard against clients created before setUpAll() or after teardown.
synchronized(this) {
if (!allClientsSetup) {
LOG.error("allClients never setup");
Assert.fail("allClients never setup");
}
if (allClients != null) {
allClients.add(zk);
} else {
// test done - close the zk, not needed
zk.close();
}
}
watcher.initializeWatchedClient(zk);
return zk;
}
/** Simple host/port pair parsed from a "host:port" string. */
public static class HostPort {
String host;
int port;
public HostPort(String host, int port) {
this.host = host;
this.port = port;
}
}
/**
* Parses a comma-separated list of "host:port" entries.
*
* @param hplist e.g. "h1:2181,h2:2181"
* @return the parsed pairs, in input order
*/
public static List<HostPort> parseHostPortList(String hplist) {
ArrayList<HostPort> alist = new ArrayList<HostPort>();
for (String hp: hplist.split(",")) {
// lastIndexOf so hosts that themselves contain ':' still yield the
// final segment as the port.
int idx = hp.lastIndexOf(':');
String host = hp.substring(0, idx);
int port;
try {
port = Integer.parseInt(hp.substring(idx + 1));
} catch(RuntimeException e) {
throw new RuntimeException("Problem parsing " + hp + e.toString());
}
alist.add(new HostPort(host,port));
}
return alist;
}
/**
* Send the 4letterword
* @param host the destination host
* @param port the destination port
* @param cmd the 4letterword
* @return the server's complete response, newline-terminated per line
* @throws IOException
*/
public static String send4LetterWord(String host, int port, String cmd)
throws IOException
{
LOG.info("connecting to " + host + " " + port);
Socket sock = new Socket(host, port);
BufferedReader reader = null;
try {
OutputStream outstream = sock.getOutputStream();
outstream.write(cmd.getBytes());
outstream.flush();
// this replicates NC - close the output stream before reading
sock.shutdownOutput();
reader =
new BufferedReader(
new InputStreamReader(sock.getInputStream()));
StringBuilder sb = new StringBuilder();
String line;
while((line = reader.readLine()) != null) {
sb.append(line + "\n");
}
return sb.toString();
} finally {
sock.close();
if (reader != null) {
reader.close();
}
}
}
/**
* Polls the server with "stat" until it answers as a writable server or
* the timeout expires.
*
* @param hp host:port list; only the first entry is probed
* @param timeout total time budget in milliseconds
* @return true if the server came up within the timeout
*/
public static boolean waitForServerUp(String hp, long timeout) {
long start = Time.now();
while (true) {
try {
// if there are multiple hostports, just take the first one
HostPort hpobj = parseHostPortList(hp).get(0);
String result = send4LetterWord(hpobj.host, hpobj.port, "stat");
// A READ-ONLY answer is not treated as "up".
if (result.startsWith("Zookeeper version:") &&
!result.contains("READ-ONLY")) {
return true;
}
} catch (IOException e) {
// ignore as this is expected
LOG.info("server " + hp + " not up " + e);
}
if (Time.now() > start + timeout) {
break;
}
try {
Thread.sleep(250);
} catch (InterruptedException e) {
// ignore
}
}
return false;
}
/**
* Polls the server with "stat" until the connection fails (server gone)
* or the timeout expires.
*
* @param hp host:port list; only the first entry is probed
* @param timeout total time budget in milliseconds
* @return true if the server went down within the timeout
*/
public static boolean waitForServerDown(String hp, long timeout) {
long start = Time.now();
while (true) {
try {
HostPort hpobj = parseHostPortList(hp).get(0);
send4LetterWord(hpobj.host, hpobj.port, "stat");
} catch (IOException e) {
// A failed connection means the server is down.
return true;
}
if (Time.now() > start + timeout) {
break;
}
try {
Thread.sleep(250);
} catch (InterruptedException e) {
// ignore
}
}
return false;
}
/** Creates a unique temp directory under {@link #BASETEST}. */
public static File createTmpDir() throws IOException {
return createTmpDir(BASETEST);
}
/**
* Creates a unique temp directory under parentDir, using a temp FILE as a
* uniqueness reservation for the "<file>.dir" directory name.
*/
static File createTmpDir(File parentDir) throws IOException {
File tmpFile = File.createTempFile("test", ".junit", parentDir);
// don't delete tmpFile - this ensures we don't attempt to create
// a tmpDir with a duplicate name
File tmpDir = new File(tmpFile + ".dir");
Assert.assertFalse(tmpDir.exists()); // never true if tmpfile does it's job
Assert.assertTrue(tmpDir.mkdirs());
return tmpDir;
}
/** Extracts the port from "host:port", tolerating a "/chroot" suffix. */
private static int getPort(String hostPort) {
String[] split = hostPort.split(":");
String portstr = split[split.length-1];
String[] pc = portstr.split("/");
if (pc.length > 1) {
portstr = pc[0];
}
return Integer.parseInt(portstr);
}
/**
* Starts a standalone ZooKeeper server on the port embedded in hostPort,
* reusing the given connection factory if non-null, and blocks until the
* server answers "stat".
*/
static ServerCnxnFactory createNewServerInstance(File dataDir,
ServerCnxnFactory factory, String hostPort, int maxCnxns)
throws IOException, InterruptedException
{
ZooKeeperServer zks = new ZooKeeperServer(dataDir, dataDir, 3000);
final int PORT = getPort(hostPort);
if (factory == null) {
factory = ServerCnxnFactory.createFactory(PORT, maxCnxns);
}
factory.startup(zks);
Assert.assertTrue("waiting for server up",
ClientBaseWithFixes.waitForServerUp("127.0.0.1:" + PORT,
CONNECTION_TIMEOUT));
return factory;
}
/**
* Shuts the server down, closes its database (releasing txn log files),
* and blocks until the port stops answering.
*/
static void shutdownServerInstance(ServerCnxnFactory factory,
String hostPort)
{
if (factory != null) {
ZKDatabase zkDb;
{
// Grab the DB before shutdown so its logs can be closed afterwards.
ZooKeeperServer zs = getServer(factory);
zkDb = zs.getZKDatabase();
}
factory.shutdown();
try {
zkDb.close();
} catch (IOException ie) {
LOG.warn("Error closing logs ", ie);
}
final int PORT = getPort(hostPort);
Assert.assertTrue("waiting for server down",
ClientBaseWithFixes.waitForServerDown("127.0.0.1:" + PORT,
CONNECTION_TIMEOUT));
}
}
/**
* Test specific setup
*/
public static void setupTestEnv() {
// during the tests we run with 100K prealloc in the logs.
// on windows systems prealloc of 64M was seen to take ~15seconds
// resulting in test Assert.failure (client timeout on first session).
// set env and directly in order to handle static init/gc issues
System.setProperty("zookeeper.preAllocSize", "100");
FileTxnLog.setPreallocSize(100 * 1024);
}
/** Initializes client bookkeeping; must run before createClient() is used. */
protected void setUpAll() throws Exception {
allClients = new LinkedList<ZooKeeper>();
allClientsSetup = true;
}
/** Per-test setup: env, temp dir, then an embedded ZooKeeper server. */
@Before
public void setUp() throws Exception {
BASETEST.mkdirs();
setupTestEnv();
setUpAll();
tmpDir = createTmpDir(BASETEST);
startServer();
LOG.info("Client test setup finished");
}
/** Picks a free local port and returns the "127.0.0.1:port" to bind to. */
private String initHostPort() {
BASETEST.mkdirs();
int port = 0;
try {
port = ServerSocketUtil.getPort(port, 100);
} catch (IOException e) {
throw new RuntimeException(e);
}
return "127.0.0.1:" + port;
}
protected void startServer() throws Exception {
LOG.info("STARTING server");
serverFactory = createNewServerInstance(tmpDir, serverFactory, hostPort, maxCnxns);
}
protected void stopServer() throws Exception {
LOG.info("STOPPING server");
shutdownServerInstance(serverFactory, hostPort);
serverFactory = null;
}
protected static ZooKeeperServer getServer(ServerCnxnFactory fac) {
ZooKeeperServer zs = ServerCnxnFactoryAccessor.getZkServer(fac);
return zs;
}
/** Closes every client opened via createClient() during the test. */
protected void tearDownAll() throws Exception {
synchronized (this) {
if (allClients != null) for (ZooKeeper zk : allClients) {
try {
if (zk != null)
zk.close();
} catch (InterruptedException e) {
LOG.warn("ignoring interrupt", e);
}
}
allClients = null;
}
}
/** Per-test teardown: clients, server, then the temp directory. */
@After
public void tearDown() throws Exception {
LOG.info("tearDown starting");
tearDownAll();
stopServer();
if (tmpDir != null) {
Assert.assertTrue("delete " + tmpDir.toString(), recursiveDelete(tmpDir));
}
// This has to be set to null when the same instance of this class is reused between test cases
serverFactory = null;
}
/**
* Recursively deletes a file or directory tree, asserting each deletion.
* NOTE(review): listFiles() returns null if d vanishes between isDirectory()
* and the listing, which would NPE here - confirm that cannot happen in
* these tests.
*/
public static boolean recursiveDelete(File d) {
if (d.isDirectory()) {
File children[] = d.listFiles();
for (File f : children) {
Assert.assertTrue("delete " + f.toString(), recursiveDelete(f));
}
}
return d.delete();
}
}
| |
/*
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.cas.authentication.handler.support;
import org.jasig.cas.authentication.HandlerResult;
import org.jasig.cas.authentication.PreventedException;
import org.jasig.cas.authentication.UsernamePasswordCredential;
import org.jasig.cas.authentication.principal.Principal;
import org.springframework.util.Assert;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.validation.constraints.NotNull;
import java.security.GeneralSecurityException;
import java.util.Set;
/**
* JAAS Authentication Handler for CAAS. This is a simple bridge from CAS'
* authentication to JAAS.
*
* <p>
* Using the JAAS Authentication Handler requires you to configure the
* appropriate JAAS modules. You can specify the location of a jass.conf file
* using the following VM parameter:
* <pre>
* -Djava.security.auth.login.config=$PATH_TO_JAAS_CONF/jaas.conf
* </pre>
*
* <p>
* This example jaas.conf would try Kerberos based authentication, then try LDAP
* authentication:
* <pre>
* CAS {
* com.sun.security.auth.module.Krb5LoginModule sufficient
* client=TRUE
* debug=FALSE
* useTicketCache=FALSE;
* edu.uconn.netid.jaas.LDAPLoginModule sufficient
* java.naming.provider.url="ldap://ldapserver.my.edu:389/dc=my,dc=edu"
* java.naming.security.principal="uid=jaasauth,dc=my,dc=edu"
* java.naming.security.credentials="password"
* Attribute="uid"
* startTLS="true";
* };
* </pre>
*
* @author <a href="mailto:dotmatt@uconn.edu">Matthew J. Smith</a>
* @author Marvin S. Addison
* @author Misagh Moayyed
*
* @see javax.security.auth.callback.CallbackHandler
* @see javax.security.auth.callback.PasswordCallback
* @see javax.security.auth.callback.NameCallback
* @since 3.0.0.5
*/
public class JaasAuthenticationHandler extends AbstractUsernamePasswordAuthenticationHandler {
/** If no realm is specified, we default to CAS. */
private static final String DEFAULT_REALM = "CAS";
/**
* System property key to specify kerb5 realm.
*/
private static final String SYS_PROP_KRB5_REALM = "java.security.krb5.realm";
/**
* System property key to specify kerb5 kdc.
*/
private static final String SYS_PROP_KERB5_KDC = "java.security.krb5.kdc";
/** The realm that contains the login module information. */
@NotNull
private String realm = DEFAULT_REALM;
/** System property value to overwrite the realm in krb5 config. */
private String kerberosRealmSystemProperty;
/** System property value to overwrite the kdc in krb5 config. */
private String kerberosKdcSystemProperty;
/**
* Instantiates a new Jaas authentication handler,
* and attempts to load/verify the configuration.
*/
public JaasAuthenticationHandler() {
// Fail fast at construction time if no JAAS configuration is available.
Assert.notNull(Configuration.getConfiguration(),
"Static Configuration cannot be null. Did you remember to specify \"java.security.auth.login.config\"?");
}
/**
* {@inheritDoc}
*
* Authenticates the credential against the configured JAAS realm. A
* failed login surfaces as a LoginException (a GeneralSecurityException)
* thrown by {@code lc.login()}.
*/
@Override
protected final HandlerResult authenticateUsernamePasswordInternal(final UsernamePasswordCredential credential)
throws GeneralSecurityException, PreventedException {
// Optionally override the KDC/realm normally read from krb5.conf.
// These are process-wide system properties, so once set they affect
// every subsequent Kerberos login in this JVM.
if (this.kerberosKdcSystemProperty != null) {
logger.debug("Setting kerberos system property {} to {}", SYS_PROP_KERB5_KDC, this.kerberosKdcSystemProperty);
System.setProperty(SYS_PROP_KERB5_KDC, this.kerberosKdcSystemProperty);
}
if (this.kerberosRealmSystemProperty != null) {
logger.debug("Setting kerberos system property {} to {}", SYS_PROP_KRB5_REALM, this.kerberosRealmSystemProperty);
System.setProperty(SYS_PROP_KRB5_REALM, this.kerberosRealmSystemProperty);
}
final String username = credential.getUsername();
// The password is run through the configured encoder before being
// handed to the JAAS login modules.
final String password = getPasswordEncoder().encode(credential.getPassword());
final LoginContext lc = new LoginContext(
this.realm,
new UsernamePasswordCallbackHandler(username, password));
try {
logger.debug("Attempting authentication for: {}", username);
lc.login();
} finally {
// Always log out so the JAAS subject is not left authenticated,
// even when login() throws.
lc.logout();
}
// Use the first principal attached to the JAAS subject, if any, as
// the CAS principal.
Principal principal = null;
final Set<java.security.Principal> principals = lc.getSubject().getPrincipals();
if (principals != null && principals.size() > 0) {
principal = this.principalFactory.createPrincipal(principals.iterator().next().getName());
}
return createHandlerResult(credential, principal, null);
}
/**
* Sets the JAAS realm (login context entry name) to authenticate against.
*
* @param realm the realm name; defaults to {@link #DEFAULT_REALM}.
*/
public void setRealm(final String realm) {
this.realm = realm;
}
/**
* Typically, the default realm and the KDC for that realm are indicated in the Kerberos <code>krb5.conf</code> configuration file.
* However, if you like, you can instead specify the realm value by setting this following system property value.
* <p>If you set the realm property, you SHOULD also configure the {@link #setKerberosKdcSystemProperty(String)}.
* <p>Also note that if you set these properties, then no cross-realm authentication is possible unless
* a <code>krb5.conf</code> file is also provided from which the additional information required for cross-realm authentication
* may be obtained.
* <p>If you set values for these properties, then they override the default realm and KDC values specified
* in <code>krb5.conf</code> (if such a file is found). The <code>krb5.conf</code> file is still consulted if values for items
* other than the default realm and KDC are needed. If no <code>krb5.conf</code> file is found,
* then the default values used for these items are implementation-specific.
* @param kerberosRealmSystemProperty system property to indicate realm.
* @see <a href="http://docs.oracle.com/javase/7/docs/technotes/guides/security/jgss/tutorials/KerberosReq.html">
* Oracle documentation</a>
* @since 4.1.0
*/
public final void setKerberosRealmSystemProperty(final String kerberosRealmSystemProperty) {
this.kerberosRealmSystemProperty = kerberosRealmSystemProperty;
}
/**
* Typically, the default realm and the KDC for that realm are indicated in the Kerberos <code>krb5.conf</code> configuration file.
* However, if you like, you can instead specify the kdc value by setting this system property value.
* <p>If you set the realm property, you SHOULD also configure the {@link #setKerberosRealmSystemProperty(String)}.
* <p>Also note that if you set these properties, then no cross-realm authentication is possible unless
* a <code>krb5.conf</code> file is also provided from which the additional information required for cross-realm authentication
* may be obtained.
* <p>If you set values for these properties, then they override the default realm and KDC values specified
* in <code>krb5.conf</code> (if such a file is found). The <code>krb5.conf</code> file is still consulted if values for items
* other than the default realm and KDC are needed. If no <code>krb5.conf</code> file is found,
* then the default values used for these items are implementation-specific.
* @param kerberosKdcSystemProperty system property to indicate kdc
* @see <a href="http://docs.oracle.com/javase/7/docs/technotes/guides/security/jgss/tutorials/KerberosReq.html">
* Oracle documentation</a>
* @since 4.1.0
*/
public final void setKerberosKdcSystemProperty(final String kerberosKdcSystemProperty) {
this.kerberosKdcSystemProperty = kerberosKdcSystemProperty;
}
/**
* A simple JAAS CallbackHandler which accepts a Name String and Password
* String in the constructor. Only NameCallbacks and PasswordCallbacks are
* accepted in the callback array. This code based loosely on example given
* in Sun's javadoc for CallbackHandler interface.
*/
protected static final class UsernamePasswordCallbackHandler implements CallbackHandler {
/** The username of the principal we are trying to authenticate. */
private final String userName;
/** The password of the principal we are trying to authenticate. */
private final String password;
/**
* Constructor accepts name and password to be used for authentication.
*
* @param userName name to be used for authentication
* @param password Password to be used for authentication
*/
protected UsernamePasswordCallbackHandler(final String userName,
final String password) {
this.userName = userName;
this.password = password;
}
/**
* Supplies the stored credentials to the login module's callbacks.
* Any callback other than NameCallback/PasswordCallback is rejected.
*
* @throws UnsupportedCallbackException for unrecognized callback types.
*/
@Override
public void handle(final Callback[] callbacks)
throws UnsupportedCallbackException {
for (final Callback callback : callbacks) {
if (callback.getClass().equals(NameCallback.class)) {
((NameCallback) callback).setName(this.userName);
} else if (callback.getClass().equals(PasswordCallback.class)) {
((PasswordCallback) callback).setPassword(this.password
.toCharArray());
} else {
throw new UnsupportedCallbackException(callback,
"Unrecognized Callback");
}
}
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ngrinder.model;
import javax.persistence.*;
import com.google.gson.annotations.Expose;
import net.grinder.common.processidentity.AgentIdentity;
import net.grinder.message.console.AgentControllerState;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.hibernate.annotations.Type;
import static org.ngrinder.common.util.AccessUtils.getSafe;
/**
* Agent model.
*
* @author Tobi
* @author JunHo Yoon
* @since 3.0
*/
@SuppressWarnings({"deprecation", "UnusedDeclaration", "JpaDataSourceORMInspection"})
@Entity
@Table(name = "AGENT")
public class AgentInfo extends BaseEntity<AgentInfo> {

    /** UUID. */
    private static final long serialVersionUID = 677610999461391813L;

    /** Agent IP. This is the identity field used by equals()/hashCode(). */
    @Expose
    private String ip;

    /**
     * Agent application port. It's only available when the connection is
     * re-established, so it may be null.
     */
    @Expose
    private Integer port;

    /** Runtime identity from the agent controller; not persisted. */
    @Transient
    private AgentIdentity agentIdentity;

    /** Host name of the agent machine. */
    @Expose
    private String hostName;

    /** Current controller-side state of this agent. */
    @Expose
    @Enumerated(EnumType.STRING)
    private AgentControllerState state;

    /** Serialized system statistics reported by the agent. */
    @Expose
    @Column(name = "system_stat", length = 2000)
    private String systemStat;

    /** Region this agent belongs to. */
    @Expose
    private String region;

    /** Transient agent number; not persisted. */
    @Transient
    private Integer number;

    /** Whether this agent is approved for use. */
    @Expose
    @Type(type = "true_false")
    @Column(columnDefinition = "char(1) default 'F'")
    private Boolean approved;

    /** Agent version string. */
    @Expose
    private String version;

    /**
     * Fills in safe defaults for nullable columns just before the entity
     * is first persisted.
     */
    @PrePersist
    public void init() {
        this.approved = getSafe(this.approved, false);
        this.version = getSafe(this.version, "");
        this.region = getSafe(this.region, "");
    }

    public String getIp() {
        return ip;
    }

    public void setIp(String ip) {
        this.ip = ip;
    }

    public AgentControllerState getState() {
        return state;
    }

    public void setState(AgentControllerState status) {
        this.state = status;
    }

    public String getRegion() {
        return region;
    }

    public void setRegion(String region) {
        this.region = region;
    }

    /**
     * Hash code consistent with {@link #equals(Object)}, which compares
     * only the agent IP.
     *
     * <p>BUG FIX: the previous implementation also folded {@code port}
     * into the hash, which (a) violated the equals/hashCode contract —
     * two agents with the same IP but different ports were equal yet
     * hashed differently — and (b) threw a NullPointerException by
     * unboxing {@code port} when it had not been set yet.
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((ip == null) ? 0 : ip.hashCode());
        return result;
    }

    /**
     * Two AgentInfo instances are equal when they have the same exact
     * class and the same IP (null-safe).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        AgentInfo other = (AgentInfo) obj;
        if (ip == null) {
            if (other.ip != null) {
                return false;
            }
        } else if (!ip.equals(other.ip)) {
            return false;
        }
        return true;
    }

    public Integer getPort() {
        return port;
    }

    public void setPort(Integer port) {
        this.port = port;
    }

    /**
     * Get host name.
     *
     * @return host name
     * @deprecated use {@link #getName()} instead.
     */
    @Deprecated
    public String getHostName() {
        return hostName;
    }

    /**
     * Set host name.
     *
     * @param hostName host name
     * @deprecated use {@link #setName(String)} instead
     */
    @Deprecated
    public void setHostName(String hostName) {
        this.hostName = hostName;
    }

    /** @return the agent name (currently backed by the host name). */
    public String getName() {
        return getHostName();
    }

    /**
     * Set name.
     *
     * @param name name
     */
    public void setName(String name) {
        setHostName(name);
    }

    public AgentIdentity getAgentIdentity() {
        return agentIdentity;
    }

    public void setAgentIdentity(AgentIdentity agentIdentity) {
        this.agentIdentity = agentIdentity;
    }

    /** @return true only when approval has been explicitly granted. */
    public boolean isApproved() {
        return approved == null ? false : approved;
    }

    public Boolean getApproved() {
        return approved;
    }

    public void setApproved(Boolean approved) {
        this.approved = approved;
    }

    /**
     * @return the number
     * @deprecated unused now.
     */
    @Deprecated
    public Integer getNumber() {
        return number;
    }

    public void setNumber(Integer number) {
        this.number = number;
    }

    @Override
    public String toString() {
        // Reflection-based dump of every field; adequate for logging.
        return ToStringBuilder.reflectionToString(this);
    }

    public String getSystemStat() {
        return systemStat;
    }

    public void setSystemStat(String systemStat) {
        this.systemStat = systemStat;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    public String getVersion() {
        return this.version;
    }
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.developh.misland;
import javax.microedition.khronos.opengles.GL10;
import javax.microedition.khronos.opengles.GL11Ext;
/**
* Draws a screen-aligned bitmap to the screen.
*/
public class DrawableBitmap extends DrawableObject {
private Texture mTexture;
private int mWidth;
private int mHeight;
private int mCrop[];
private int mViewWidth;
private int mViewHeight;
private float mOpacity;
/**
* Creates a drawable that renders the given texture at width x height pixels,
* fully opaque, with the crop window covering the whole texture.
*
* @param texture the texture to render.
* @param width bitmap width in pixels.
* @param height bitmap height in pixels.
*/
DrawableBitmap(Texture texture, int width, int height) {
super();
mTexture = texture;
mWidth = width;
mHeight = height;
mCrop = new int[4];
mViewWidth = 0;
mViewHeight = 0;
mOpacity = 1.0f;
// Full-texture crop rect; presumably (left, bottom, width, height) in the
// OES_draw_texture convention - confirm against setCrop().
setCrop(0, height, width, height);
}
/** Drops the texture reference and returns view size and opacity to defaults. */
public void reset() {
    mOpacity = 1.0f;
    mViewHeight = 0;
    mViewWidth = 0;
    mTexture = null;
}
/**
 * Records the dimensions of the view this bitmap is drawn into; used by
 * draw() for off-screen culling.
 *
 * @param width view width in pixels.
 * @param height view height in pixels.
 */
public void setViewSize(int width, int height) {
    mViewWidth = width;
    mViewHeight = height;
}
/**
 * Sets the opacity applied when this bitmap is drawn.
 *
 * @param opacity opacity value; 1.0f is fully opaque.
 */
public void setOpacity(float opacity) {
    this.mOpacity = opacity;
}
/**
* Begins drawing bitmaps. Sets the OpenGL state for rapid drawing.
*
* @param gl A pointer to the OpenGL context.
* @param viewWidth The width of the screen.
* @param viewHeight The height of the screen.
*/
public static void beginDrawing(GL10 gl, float viewWidth, float viewHeight) {
gl.glShadeModel(GL10.GL_FLAT);
// ONE / ONE_MINUS_SRC_ALPHA blending assumes premultiplied-alpha textures.
gl.glEnable(GL10.GL_BLEND);
gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA);
gl.glColor4x(0x10000, 0x10000, 0x10000, 0x10000);
// Install a pixel-aligned orthographic projection, pushing the current
// projection and modelview matrices so endDrawing() can restore them.
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glPushMatrix();
gl.glLoadIdentity();
gl.glOrthof(0.0f, viewWidth, 0.0f, viewHeight, 0.0f, 1.0f);
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glPushMatrix();
gl.glLoadIdentity();
gl.glEnable(GL10.GL_TEXTURE_2D);
}
/**
* Draw the bitmap at a given x,y position, expressed in pixels, with the
* lower-left-hand-corner of the view being (0,0). The GL context is taken
* from OpenGLSystem rather than passed as a parameter; drawing is skipped
* entirely when no context or texture is available.
*
* @param x The number of pixels to offset this drawable's origin in the x-axis.
* @param y The number of pixels to offset this drawable's origin in the y-axis
* @param scaleX The horizontal scale factor between the bitmap resolution and the display resolution.
* @param scaleY The vertical scale factor between the bitmap resolution and the display resolution.
*/
@Override
public void draw(float x, float y, float scaleX, float scaleY) {
GL10 gl = OpenGLSystem.getGL();
final Texture texture = mTexture;
if (gl != null && texture != null) {
assert texture.loaded;
// Snap to whole pixels before drawing.
final float snappedX = (int) x;
final float snappedY = (int) y;
final float opacity = mOpacity;
final float width = mWidth;
final float height = mHeight;
final float viewWidth = mViewWidth;
final float viewHeight = mViewHeight;
boolean cull = false;
// Cull when fully off-screen, invisible, or the texture isn't loaded.
// Culling only applies when a view size has been set (viewWidth > 0).
if (viewWidth > 0) {
if (snappedX + width < 0.0f
|| snappedX > viewWidth
|| snappedY + height < 0.0f
|| snappedY > viewHeight
|| opacity == 0.0f
|| !texture.loaded) {
cull = true;
}
}
if (!cull) {
OpenGLSystem.bindTexture(GL10.GL_TEXTURE_2D, texture.name);
// This is necessary because we could be drawing the same texture with different
// crop (say, flipped horizontally) on the same frame.
OpenGLSystem.setTextureCrop(mCrop);
// Modulate all channels by opacity (premultiplied-alpha style), then
// restore full-white color afterwards so other draws are unaffected.
if (opacity < 1.0f) {
gl.glColor4f(opacity, opacity, opacity, opacity);
}
((GL11Ext) gl).glDrawTexfOES(snappedX * scaleX, snappedY * scaleY,
getPriority(), width * scaleX, height * scaleY);
if (opacity < 1.0f) {
gl.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
}
}
}
}
/**
* Ends the drawing and restores the OpenGL state.
*
* @param gl A pointer to the OpenGL context.
*/
public static void endDrawing(GL10 gl) {
gl.glDisable(GL10.GL_BLEND);
gl.glMatrixMode(GL10.GL_PROJECTION);
gl.glPopMatrix();
gl.glMatrixMode(GL10.GL_MODELVIEW);
gl.glPopMatrix();
}
public void resize(int width, int height) {
mWidth = width;
mHeight = height;
setCrop(0, height, width, height);
}
public int getWidth() {
return mWidth;
}
public void setWidth(int width) {
mWidth = width;
}
public int getHeight() {
return mHeight;
}
public void setHeight(int height) {
mHeight = height;
}
/**
* Changes the crop parameters of this bitmap. Note that the underlying OpenGL texture's
* parameters are not changed immediately The crop is updated on the
* next call to draw(). Note that the image may be flipped by providing a negative width or
* height.
*
* @param left
* @param bottom
* @param width
* @param height
*/
public void setCrop(int left, int bottom, int width, int height) {
// Negative width and height values will flip the image.
mCrop[0] = left;
mCrop[1] = bottom;
mCrop[2] = width;
mCrop[3] = -height;
}
public int[] getCrop() {
return mCrop;
}
public void setTexture(Texture texture) {
mTexture = texture;
}
@Override
public Texture getTexture() {
return mTexture;
}
@Override
public boolean visibleAtPosition(Vector2 position) {
boolean cull = false;
if (mViewWidth > 0) {
if (position.x + mWidth < 0 || position.x > mViewWidth
|| position.y + mHeight < 0 || position.y > mViewHeight) {
cull = true;
}
}
return !cull;
}
protected final void setFlip(boolean horzFlip, boolean vertFlip) {
setCrop(horzFlip ? mWidth : 0,
vertFlip ? 0 : mHeight,
horzFlip ? -mWidth : mWidth,
vertFlip ? -mHeight : mHeight);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.ipojo.test;
import java.util.Dictionary;
import java.util.Hashtable;
import org.apache.felix.ipojo.ComponentFactory;
import org.apache.felix.ipojo.ComponentInstance;
import org.apache.felix.ipojo.ConfigurationException;
import org.apache.felix.ipojo.MissingHandlerException;
import org.apache.felix.ipojo.UnacceptableConfiguration;
import org.apache.felix.ipojo.junit4osgi.OSGiTestCase;
import org.apache.felix.ipojo.metadata.Attribute;
import org.apache.felix.ipojo.metadata.Element;
import org.apache.felix.ipojo.parser.ManifestMetadataParser;
import org.apache.felix.ipojo.parser.ParseException;
import org.apache.felix.ipojo.test.donut.Donut;
import org.apache.felix.ipojo.test.donut.DonutConsumer;
import org.apache.felix.ipojo.test.donut.DonutProvider;
import org.apache.felix.ipojo.test.util.EahTestUtils;
import org.apache.felix.ipojo.test.util.IPojoTestUtils;
import org.osgi.framework.ServiceReference;
/**
 * Test the good behaviour of the EventAdminHandler when faced with invalid
 * component descriptions and instance configurations: each "bad" test mutates
 * the publisher/subscriber metadata, verifies that the factory or instance
 * creation fails, and then restores the metadata for subsequent tests.
 *
 * @author <a href="mailto:dev@felix.apache.org">Felix Project Team</a>
 */
public class BadTests extends OSGiTestCase {
    /**
     * The utility class instance.
     */
    public EahTestUtils m_utils;
    /**
     * The namespace of the Event admin handler.
     */
    private static final String NAMESPACE = "org.apache.felix.ipojo.handlers.event";
    /**
     * The available components.
     */
    private Element[] m_components;
    /**
     * The description of a component that uses an event publisher.
     */
    private Element m_provider;
    /**
     * The event publisher description.
     */
    private Element m_publisher;
    /**
     * The name attribute of the event publisher.
     */
    private Attribute m_publisherName;
    /**
     * The field attribute of the event publisher.
     */
    private Attribute m_publisherField;
    /**
     * The topics attribute of the event publisher.
     */
    private Attribute m_publisherTopics;
    /**
     * The data-key attribute of the event publisher.
     */
    private Attribute m_publisherDataKey;
    /**
     * The synchronous attribute of the event publisher.
     */
    private Attribute m_publisherSynchronous;
    /**
     * The description of a component that uses an event subscriber.
     */
    private Element m_consumer;
    /**
     * The event subscriber description.
     */
    private Element m_subscriber;
    /**
     * The name attribute of the event subscriber.
     */
    private Attribute m_subscriberName;
    /**
     * The callback attribute of the event subscriber.
     */
    private Attribute m_subscriberCallback;
    /**
     * The topics attribute of the event subscriber.
     */
    private Attribute m_subscriberTopics;
    /**
     * The data-key attribute of the event subscriber.
     */
    private Attribute m_subscriberDataKey;
    /**
     * The data-type attribute of the event subscriber.
     */
    private Attribute m_subscriberDataType;

    /**
     * Looks up the manipulation metadata of the named component in the
     * bundle's parsed component list.
     *
     * @param compName the component name to search for
     * @return the first "manipulation" child element of the matching
     *         component, or null if no component with that name exists
     */
    private Element getManipulationForComponent(String compName) {
        for (int i = 0; i < m_components.length; i++) {
            if (m_components[i].containsAttribute("name")
                    && m_components[i].getAttribute("name").equals(compName)) {
                return m_components[i].getElements("manipulation")[0];
            }
        }
        return null;
    }

    /**
     * Initialization before test cases.
     *
     * Builds the provider and consumer component descriptions (with their
     * publisher/subscriber metadata) that the bad tests mutate.
     */
    public void setUp() {
        m_utils = new EahTestUtils(getContext());
        /**
         * Get the list of available components.
         */
        try {
            String header = (String) getContext().getBundle().getHeaders().get(
                    "iPOJO-Components");
            m_components = ManifestMetadataParser.parseHeaderMetadata(header)
                    .getElements("component");
        } catch (ParseException e) {
            fail("Parse Exception when parsing iPOJO-Component");
        }
        /**
         * Initialize the standard publishing component (based on the
         * asynchronous donut provider).
         */
        m_provider = new Element("component", "");
        m_provider.addAttribute(new Attribute("className",
                "org.apache.felix.ipojo.test.donut.DonutProviderImpl"));
        m_provider.addAttribute(new Attribute("name",
                "standard donut provider for bad tests"));
        // The provided service of the publisher
        Element providesDonutProvider = new Element("provides", "");
        providesDonutProvider.addAttribute(new Attribute("interface",
                "org.apache.felix.ipojo.test.donut.DonutProvider"));
        Element providesDonutProviderProperty = new Element("property", "");
        providesDonutProviderProperty
                .addAttribute(new Attribute("name", "name"));
        providesDonutProviderProperty.addAttribute(new Attribute("field",
                "m_name"));
        providesDonutProviderProperty.addAttribute(new Attribute("value",
                "Unknown donut vendor"));
        providesDonutProvider.addElement(providesDonutProviderProperty);
        m_provider.addElement(providesDonutProvider);
        // The event publisher, corresponding to the following description :
        // <ev:publisher name="donut-publisher" field="m_publisher"
        // topics="food/donuts" data-key="food" synchronous="false"/>
        m_publisher = new Element("publisher", NAMESPACE);
        m_publisherName = new Attribute("name", "donut-publisher");
        m_publisherField = new Attribute("field", "m_publisher");
        m_publisherTopics = new Attribute("topics", "food/donuts");
        m_publisherDataKey = new Attribute("data-key", "food");
        m_publisherSynchronous = new Attribute("synchronous", "false");
        m_publisher.addAttribute(m_publisherName);
        m_publisher.addAttribute(m_publisherField);
        m_publisher.addAttribute(m_publisherTopics);
        m_publisher.addAttribute(m_publisherDataKey);
        m_publisher.addAttribute(m_publisherSynchronous);
        m_provider.addElement(m_publisher);
        m_provider.addElement(getManipulationForComponent("donut-provider"));
        /**
         * Initialize the standard subscribing component (based on the donut
         * consumer).
         */
        m_consumer = new Element("component", "");
        m_consumer.addAttribute(new Attribute("className",
                "org.apache.felix.ipojo.test.donut.DonutConsumerImpl"));
        m_consumer.addAttribute(new Attribute("name",
                "standard donut consumer for bad tests"));
        // The provided service of the consumer
        Element providesDonutConsumer = new Element("provides", "");
        providesDonutConsumer.addAttribute(new Attribute("interface",
                "org.apache.felix.ipojo.test.donut.DonutConsumer"));
        Element providesDonutConsumerNameProperty = new Element("property", "");
        providesDonutConsumerNameProperty.addAttribute(new Attribute("name",
                "name"));
        providesDonutConsumerNameProperty.addAttribute(new Attribute("field",
                "m_name"));
        providesDonutConsumerNameProperty.addAttribute(new Attribute("value",
                "Unknown donut consumer"));
        providesDonutConsumer.addElement(providesDonutConsumerNameProperty);
        Element providesDonutConsumerSlowProperty = new Element("property", "");
        providesDonutConsumerSlowProperty.addAttribute(new Attribute("name",
                "slow"));
        providesDonutConsumerSlowProperty.addAttribute(new Attribute("field",
                "m_isSlow"));
        providesDonutConsumerSlowProperty.addAttribute(new Attribute("value",
                "false"));
        providesDonutConsumer.addElement(providesDonutConsumerSlowProperty);
        m_consumer.addElement(providesDonutConsumer);
        // The event subscriber, corresponding to the following description :
        // <ev:subscriber name="donut-subscriber" callback="receiveDonut"
        // topics="food/donuts" data-key="food"
        // data-type="org.apache.felix.ipojo.test.donut.Donut"/>
        m_subscriber = new Element("subscriber", NAMESPACE);
        m_subscriberName = new Attribute("name", "donut-subscriber");
        m_subscriberCallback = new Attribute("callback", "receiveDonut");
        m_subscriberTopics = new Attribute("topics", "food/donuts");
        m_subscriberDataKey = new Attribute("data-key", "food");
        m_subscriberDataType = new Attribute("data-type",
                "org.apache.felix.ipojo.test.donut.Donut");
        m_subscriber.addAttribute(m_subscriberName);
        m_subscriber.addAttribute(m_subscriberCallback);
        m_subscriber.addAttribute(m_subscriberTopics);
        m_subscriber.addAttribute(m_subscriberDataKey);
        m_subscriber.addAttribute(m_subscriberDataType);
        m_consumer.addElement(m_subscriber);
        m_consumer.addElement(getManipulationForComponent("donut-consumer"));
    }

    /**
     * Test that the base configuration is correct, to be sure the bad tests
     * will fail because they are really bad, and not because of another
     * application error.
     *
     * This test simply creates a provider and a consumer instance, sends one
     * event and checks it is received.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testGoodConfig()
            throws ConfigurationException, UnacceptableConfiguration,
            MissingHandlerException {
        /**
         * Create the provider and the consumer instances.
         */
        Dictionary properties = new Hashtable();
        // Provider
        ComponentFactory providerFactory = new ComponentFactory(getContext(),
                m_provider);
        providerFactory.start();
        properties.put("instance.name","Emperor of donuts");
        ComponentInstance providerInstance = providerFactory
                .createComponentInstance(properties);
        ServiceReference providerService = IPojoTestUtils
                .getServiceReferenceByName(getContext(), DonutProvider.class
                        .getName(), providerInstance.getInstanceName());
        DonutProvider provider = (DonutProvider) getContext()
                .getService(providerService);
        // The consumer
        properties = new Hashtable();
        ComponentFactory consumerFactory = new ComponentFactory(getContext(),
                m_consumer);
        consumerFactory.start();
        properties.put("instance.name","Homer Simpson");
        properties.put("slow", "false");
        ComponentInstance consumerInstance = consumerFactory
                .createComponentInstance(properties);
        ServiceReference consumerService = IPojoTestUtils
                .getServiceReferenceByName(getContext(), DonutConsumer.class
                        .getName(), consumerInstance.getInstanceName());
        DonutConsumer consumer = (DonutConsumer) getContext()
                .getService(consumerService);
        /**
         * Test the normal behaviour of the instances.
         */
        consumer.clearDonuts();
        Donut sentDonut = provider.sellDonut();
        Donut receivedDonut = consumer.waitForDonut();
        assertEquals("The received donut must be the same as the sent one.",
                sentDonut, receivedDonut);
        /**
         * Destroy component's instances.
         */
        getContext().ungetService(providerService);
        providerInstance.dispose();
        getContext().ungetService(consumerService);
        consumerInstance.dispose();
        providerFactory.stop();
        consumerFactory.stop();
    }

    /**
     * Try to create a publisher with no name.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testPublisherWithoutName()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the name attribute of the publisher
        m_publisher.removeAttribute(m_publisherName);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_provider);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when no name is specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the publisher
            m_publisher.addAttribute(m_publisherName);
        }
    }

    /**
     * Try to create a publisher with no field.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testPublisherWithoutField()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the field attribute of the publisher
        m_publisher.removeAttribute(m_publisherField);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_provider);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when no field is specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the publisher
            m_publisher.addAttribute(m_publisherField);
        }
    }

    /**
     * Try to create a publisher with an unexisting field.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testPublisherWithUnexistingField()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the field attribute of the publisher and replace with an
        // unexisting field name
        m_publisher.removeAttribute(m_publisherField);
        Attribute unexistingField = new Attribute("field", "m_unexistingField");
        m_publisher.addAttribute(unexistingField);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_provider);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when an unexisting field is specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the publisher
            m_publisher.removeAttribute(unexistingField);
            m_publisher.addAttribute(m_publisherField);
        }
    }

    /**
     * Try to create a publisher with a bad typed field.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testPublisherWithBadTypedField()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the field attribute of the publisher and replace with a
        // bad typed field name (m_name is a String, not a Publisher)
        m_publisher.removeAttribute(m_publisherField);
        Attribute badTypedField = new Attribute("field", "m_name");
        m_publisher.addAttribute(badTypedField);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_provider);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when an bad typed field is specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the publisher
            m_publisher.removeAttribute(badTypedField);
            m_publisher.addAttribute(m_publisherField);
        }
    }

    /**
     * Try to create a publisher instance without topics.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testPublisherWithoutTopics()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the topics attribute of the publisher
        m_publisher.removeAttribute(m_publisherTopics);
        ComponentFactory fact = new ComponentFactory(getContext(), m_provider);
        fact.start();
        // Try to create an instance without specified topics
        Dictionary conf = new Hashtable();
        conf.put("instance.name","provider without topics");
        ComponentInstance instance;
        try {
            instance = fact.createComponentInstance(conf);
            // Should not be executed
            instance.dispose();
            fail("The factory must not create instance without specified topics.");
        } catch (ConfigurationException e) {
            // OK
        } finally {
            fact.stop();
            // Restore the original state of the publisher
            m_publisher.addAttribute(m_publisherTopics);
        }
    }

    /**
     * Try to create a publisher with malformed topics.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testPublisherWithMalformedTopics()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the topics attribute of the publisher and replace with a
        // malformed one
        m_publisher.removeAttribute(m_publisherTopics);
        Attribute malformedTopics = new Attribute("topics",
                "| |\\| \\/ /-\\ |_ | |)");
        m_publisher.addAttribute(malformedTopics);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_provider);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when invalid topics are specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the publisher
            m_publisher.removeAttribute(malformedTopics);
            m_publisher.addAttribute(m_publisherTopics);
        }
    }

    /**
     * Try to create a publisher with a pattern topic (ending with '*') instead of a fixed topic.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testPublisherWithPatternTopic()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the topics attribute of the publisher and replace with a
        // pattern topic (publishers must use fixed topics only)
        m_publisher.removeAttribute(m_publisherTopics);
        Attribute malformedTopics = new Attribute("topics",
                "a/pattern/topic/*");
        m_publisher.addAttribute(malformedTopics);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_provider);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when invalid topics are specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the publisher
            m_publisher.removeAttribute(malformedTopics);
            m_publisher.addAttribute(m_publisherTopics);
        }
    }

    /**
     * Try to create a publisher with malformed instance topics.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testPublisherWithMalformedInstanceTopics()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the topics attribute of the publisher so the topics must be
        // supplied by the instance configuration instead
        m_publisher.removeAttribute(m_publisherTopics);
        ComponentFactory fact = new ComponentFactory(getContext(), m_provider);
        fact.start();
        // Try to create an instance with malformed specified topics
        Dictionary conf = new Hashtable();
        conf.put("instance.name","provider with malformed topics");
        Dictionary topics = new Hashtable();
        topics.put("donut-publisher", "| |\\| \\/ /-\\ |_ | |)");
        conf.put("event.topics", topics);
        ComponentInstance instance;
        try {
            instance = fact.createComponentInstance(conf);
            // Should not be executed
            instance.dispose();
            fail("The factory must not create instance with invalid specified topics.");
        } catch (ConfigurationException e) {
            // OK
        } finally {
            fact.stop();
            // Restore the original state of the publisher
            m_publisher.addAttribute(m_publisherTopics);
        }
    }

    /**
     * Try to create a subscriber with no name.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testSubscriberWithoutName()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the name attribute of the subscriber
        m_subscriber.removeAttribute(m_subscriberName);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_consumer);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when no name is specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the subscriber
            m_subscriber.addAttribute(m_subscriberName);
        }
    }

    /**
     * Try to create a subscriber with no callback.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testSubscriberWithoutCallback()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the callback attribute of the subscriber
        m_subscriber.removeAttribute(m_subscriberCallback);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_consumer);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when no callback is specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the subscriber
            m_subscriber.addAttribute(m_subscriberCallback);
        }
    }

    /**
     * Try to create a subscriber instance without topics.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testSubscriberWithoutTopics()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the topics attribute of the subscriber
        m_subscriber.removeAttribute(m_subscriberTopics);
        ComponentFactory fact = new ComponentFactory(getContext(), m_consumer);
        fact.start();
        // Try to create an instance without specified topics
        Dictionary conf = new Hashtable();
        conf.put("instance.name","consumer without topics");
        conf.put("slow", "false");
        ComponentInstance instance;
        try {
            instance = fact.createComponentInstance(conf);
            // Should not be executed
            instance.dispose();
            fail("The factory must not create instance without specified topics.");
        } catch (ConfigurationException e) {
            // OK
        } finally {
            fact.stop();
            // Restore the original state of the subscriber
            m_subscriber.addAttribute(m_subscriberTopics);
        }
    }

    /**
     * Try to create a subscriber with malformed topics.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testSubscriberWithMalformedTopics()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the topics attribute of the subscriber and replace with a
        // malformed one
        m_subscriber.removeAttribute(m_subscriberTopics);
        Attribute malformedTopics = new Attribute("topics",
                "| |\\| \\/ /-\\ |_ | |)");
        m_subscriber.addAttribute(malformedTopics);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_consumer);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when invalid topics are specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the subscriber
            m_subscriber.removeAttribute(malformedTopics);
            m_subscriber.addAttribute(m_subscriberTopics);
        }
    }

    /**
     * Try to create a subscriber with malformed instance topics.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testSubscriberWithMalformedInstanceTopics()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the topics attribute of the subscriber so the topics must be
        // supplied by the instance configuration instead
        m_subscriber.removeAttribute(m_subscriberTopics);
        ComponentFactory fact = new ComponentFactory(getContext(), m_consumer);
        fact.start();
        // Try to create an instance with malformed specified topics
        Dictionary conf = new Hashtable();
        conf.put("instance.name","consumer with malformed topics");
        Dictionary topics = new Hashtable();
        topics.put("donut-subscriber", "| |\\| \\/ /-\\ |_ | |)");
        conf.put("event.topics", topics);
        ComponentInstance instance;
        try {
            instance = fact.createComponentInstance(conf);
            // Should not be executed
            instance.dispose();
            fail("The factory must not create instance with invalid specified topics.");
        } catch (ConfigurationException e) {
            // OK
        } finally {
            fact.stop();
            // Restore the original state of the subscriber
            m_subscriber.addAttribute(m_subscriberTopics);
        }
    }

    /**
     * Try to create a subscriber with unknown data type.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testSubscriberWithUnknownDataType()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the data-type attribute of the subscriber and replace with a
        // type that cannot be loaded
        m_subscriber.removeAttribute(m_subscriberDataType);
        Attribute unknownType = new Attribute("data-type", "org.unknown.Clazz");
        m_subscriber.addAttribute(unknownType);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_consumer);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when unknown data type is specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the subscriber
            m_subscriber.removeAttribute(unknownType);
            m_subscriber.addAttribute(m_subscriberDataType);
        }
    }

    /**
     * Try to create a subscriber with a data type that does not match with the
     * callback parameter type.
     *
     * @throws ConfigurationException
     *             something bad happened
     * @throws MissingHandlerException
     *             something bad happened
     * @throws UnacceptableConfiguration
     *             something bad happened
     */
    public void testSubscriberWithUnappropriatedDataType()
            throws ConfigurationException, MissingHandlerException,
            UnacceptableConfiguration {
        // Remove the data-type attribute of the subscriber and replace with a
        // type incompatible with the callback parameter (expects a Donut)
        m_subscriber.removeAttribute(m_subscriberDataType);
        Attribute unknownType = new Attribute("data-type", "java.lang.String");
        m_subscriber.addAttribute(unknownType);
        // Create and try to start the factory
        ComponentFactory fact = new ComponentFactory(getContext(), m_consumer);
        try {
            fact.start();
            // Should not be executed
            fact.stop();
            fail("The factory must not start when unappropriated data type is specified.");
        } catch (IllegalStateException e) {
            // OK
        } finally {
            // Restore the original state of the subscriber
            m_subscriber.removeAttribute(unknownType);
            m_subscriber.addAttribute(m_subscriberDataType);
        }
    }

    /**
     * Finalization after test cases.
     *
     * Release all services references and destroy instances.
     */
    public void tearDown() {
    }

    // DEBUG: prints the given element tree to stderr, prefixed by message.
    public void dumpElement(String message, Element root) {
        System.err.println(message + "\n" + dumpElement(0, root));
    }

    // DEBUG: renders an element and its children as an indented XML-like
    // string; level controls the indentation depth.
    private String dumpElement(int level, Element element) {
        StringBuilder sb = new StringBuilder();
        // Enter tag
        for (int i = 0; i < level; i++) {
            sb.append(" ");
        }
        sb.append('<');
        sb.append(element.getName());
        Attribute[] attributes = element.getAttributes();
        for (int i = 0; i < attributes.length; i++) {
            Attribute attribute = attributes[i];
            sb.append(' ');
            sb.append(attribute.getName());
            sb.append('=');
            sb.append(attribute.getValue());
        }
        sb.append(">\n");
        // Children
        Element[] elements = element.getElements();
        for (int i = 0; i < elements.length; i++) {
            sb.append(dumpElement(level + 1, elements[i]));
        }
        // Exit tag
        for (int i = 0; i < level; i++) {
            sb.append(" ");
        }
        sb.append("</" + element.getName() + ">\n");
        return sb.toString();
    }

    /**
     * Creates a subscriber listening on a pattern topic: the '*' wildcard is
     * only legal as the last token, so both configurations below must be
     * rejected.
     * @throws ConfigurationException something bad happened.
     * @throws MissingHandlerException something bad happened.
     * @throws UnacceptableConfiguration something bad happened.
     */
    public void testSubscriberWithPatternTopic() throws UnacceptableConfiguration, MissingHandlerException, ConfigurationException {
        Dictionary properties = new Hashtable();
        Dictionary topics = new Hashtable();
        // Create the donut consumer instance, listening on a pattern topic
        // whose '*' is not the final token
        properties.put("instance.name","subscriber with pattern topic");
        topics.put("donut-subscriber", "a/pattern/topic/*rf");
        properties.put("event.topics", topics);
        ComponentInstance instance = null;
        try {
            instance = m_utils.getDonutConsumerFactory()
                    .createComponentInstance(properties);
            // Should not be executed
            instance.dispose();
            fail("An invalid topic scope was accepted)");
        } catch (ConfigurationException e) {
            // Nothing to do
        }
        properties = new Hashtable();
        topics = new Hashtable();
        // Create the donut consumer instance, listening on a pattern topic
        // with '*' embedded in the middle of a token
        properties.put("instance.name","subscriber with pattern topic");
        topics.put("donut-subscriber", "a/pattern/*topic/rf");
        properties.put("event.topics", topics);
        try {
            instance = m_utils.getDonutConsumerFactory()
                    .createComponentInstance(properties);
            instance.dispose();
            fail("An invalid topic scope was accepted (2)");
        } catch (ConfigurationException e) {
            // Nothing to do
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.impl.builtin;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.pig.EvalFunc;
import org.apache.pig.ExecType;
import org.apache.pig.ExecTypeProvider;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.Launcher;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.io.BufferedPositionedInputStream;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.streaming.InputHandler;
import org.apache.pig.impl.streaming.OutputHandler;
import org.apache.pig.impl.streaming.PigStreamingUDF;
import org.apache.pig.impl.streaming.StreamingCommand;
import org.apache.pig.impl.streaming.StreamingUDFException;
import org.apache.pig.impl.streaming.StreamingUDFInputHandler;
import org.apache.pig.impl.streaming.StreamingUDFOutputHandler;
import org.apache.pig.impl.streaming.StreamingUDFOutputSchemaException;
import org.apache.pig.impl.streaming.StreamingUtil;
import org.apache.pig.impl.util.UDFContext;
import org.apache.pig.impl.util.Utils;
import org.apache.pig.parser.ParserException;
import org.apache.pig.scripting.ScriptingOutputCapturer;
import com.google.common.base.Charsets;
public class StreamingUDF extends EvalFunc<Object> {
private static final Log log = LogFactory.getLog(StreamingUDF.class);
private static final String PYTHON_CONTROLLER_JAR_PATH = "/python/streaming/controller.py"; //Relative to root of pig jar.
private static final String PYTHON_PIG_UTIL_PATH = "/python/streaming/pig_util.py"; //Relative to root of pig jar.
//Indexes for arguments being passed to external process
private static final int UDF_LANGUAGE = 0;
private static final int PATH_TO_CONTROLLER_FILE = 1;
private static final int UDF_FILE_NAME = 2; //Name of file where UDF function is defined
private static final int UDF_FILE_PATH = 3; //Path to directory containing file where UDF function is defined
private static final int UDF_NAME = 4; //Name of UDF function being called.
private static final int PATH_TO_FILE_CACHE = 5; //Directory where required files (like pig_util) are cached on cluster nodes.
private static final int STD_OUT_OUTPUT_PATH = 6; //File for output from when user writes to standard output.
private static final int STD_ERR_OUTPUT_PATH = 7; //File for output from when user writes to standard error.
private static final int CONTROLLER_LOG_FILE_PATH = 8; //Controller log file logs progress through the controller script not user code.
private static final int IS_ILLUSTRATE = 9; //Controller captures output differently in illustrate vs running.
private String language;
private String filePath;
private String funcName;
private Schema schema;
private ExecType execType;
private String isIllustrate;
private boolean initialized = false;
private ScriptingOutputCapturer soc;
private Process process; // Handle to the external process
private ProcessErrorThread stderrThread; // thread to get process stderr
private ProcessInputThread stdinThread; // thread to send input to process
private ProcessOutputThread stdoutThread; //thread to read output from process
private InputHandler inputHandler;
private OutputHandler outputHandler;
private BlockingQueue<Tuple> inputQueue;
private BlockingQueue<Object> outputQueue;
private DataOutputStream stdin; // stdin of the process
private InputStream stdout; // stdout of the process
private InputStream stderr; // stderr of the process
private static final Object ERROR_OUTPUT = new Object();
private static final Object NULL_OBJECT = new Object(); //BlockingQueue can't have null. Use place holder object instead.
private volatile StreamingUDFException outerrThreadsError;
public static final String TURN_ON_OUTPUT_CAPTURING = "TURN_ON_OUTPUT_CAPTURING";
public StreamingUDF(String language,
String filePath, String funcName,
String outputSchemaString, String schemaLineNumber,
String execType, String isIllustrate)
throws StreamingUDFOutputSchemaException, ExecException {
this.language = language;
this.filePath = filePath;
this.funcName = funcName;
try {
this.schema = Utils.getSchemaFromString(outputSchemaString);
//ExecTypeProvider.fromString doesn't seem to load the ExecTypes in
//mapreduce mode so we'll try to figure out the exec type ourselves.
if (execType.equals("local")) {
this.execType = ExecType.LOCAL;
} else if (execType.equals("mapreduce")) {
this.execType = ExecType.MAPREDUCE;
} else {
//Not sure what exec type - try to get it from the string.
this.execType = ExecTypeProvider.fromString(execType);
}
} catch (ParserException pe) {
throw new StreamingUDFOutputSchemaException(pe.getMessage(), Integer.valueOf(schemaLineNumber));
} catch (IOException ioe) {
String errorMessage = "Invalid exectype passed to StreamingUDF. Should be local or mapreduce";
log.error(errorMessage, ioe);
throw new ExecException(errorMessage, ioe);
}
this.isIllustrate = isIllustrate;
}
@Override
public Object exec(Tuple input) throws IOException {
if (!initialized) {
initialize();
initialized = true;
}
return getOutput(input);
}
private void initialize() throws ExecException, IOException {
inputQueue = new ArrayBlockingQueue<Tuple>(1);
outputQueue = new ArrayBlockingQueue<Object>(2);
soc = new ScriptingOutputCapturer(execType);
startUdfController();
createInputHandlers();
setStreams();
startThreads();
}
private StreamingCommand startUdfController() throws IOException {
StreamingCommand sc = new StreamingCommand(null, constructCommand());
ProcessBuilder processBuilder = StreamingUtil.createProcess(sc);
process = processBuilder.start();
Runtime.getRuntime().addShutdownHook(new Thread(new ProcessKiller() ) );
return sc;
}
private String[] constructCommand() throws IOException {
String[] command = new String[10];
Configuration conf = UDFContext.getUDFContext().getJobConf();
String jarPath = conf.get("mapred.jar");
String jobDir;
if (jarPath != null) {
jobDir = new File(jarPath).getParent();
} else {
jobDir = "";
}
String standardOutputRootWriteLocation = soc.getStandardOutputRootWriteLocation();
String controllerLogFileName, outFileName, errOutFileName;
if (execType.isLocal()) {
controllerLogFileName = standardOutputRootWriteLocation + funcName + "_python.log";
outFileName = standardOutputRootWriteLocation + "cpython_" + funcName + "_" + ScriptingOutputCapturer.getRunId() + ".out";
errOutFileName = standardOutputRootWriteLocation + "cpython_" + funcName + "_" + ScriptingOutputCapturer.getRunId() + ".err";
} else {
controllerLogFileName = standardOutputRootWriteLocation + funcName + "_python.log";
outFileName = standardOutputRootWriteLocation + funcName + ".out";
errOutFileName = standardOutputRootWriteLocation + funcName + ".err";
}
soc.registerOutputLocation(funcName, outFileName);
command[UDF_LANGUAGE] = language;
command[PATH_TO_CONTROLLER_FILE] = getControllerPath(jobDir);
int lastSeparator = filePath.lastIndexOf(File.separator) + 1;
command[UDF_FILE_NAME] = filePath.substring(lastSeparator);
command[UDF_FILE_PATH] = lastSeparator <= 0 ?
"." :
filePath.substring(0, lastSeparator - 1);
command[UDF_NAME] = funcName;
command[PATH_TO_FILE_CACHE] = "\"" + jobDir + filePath.substring(0, lastSeparator) + "\"";
command[STD_OUT_OUTPUT_PATH] = outFileName;
command[STD_ERR_OUTPUT_PATH] = errOutFileName;
command[CONTROLLER_LOG_FILE_PATH] = controllerLogFileName;
command[IS_ILLUSTRATE] = isIllustrate;
return command;
}
private void createInputHandlers() throws ExecException, FrontendException {
PigStreamingUDF serializer = new PigStreamingUDF();
this.inputHandler = new StreamingUDFInputHandler(serializer);
PigStreamingUDF deserializer = new PigStreamingUDF(schema.getField(0));
this.outputHandler = new StreamingUDFOutputHandler(deserializer);
}
private void setStreams() throws IOException {
stdout = new DataInputStream(new BufferedInputStream(process
.getInputStream()));
outputHandler.bindTo("", new BufferedPositionedInputStream(stdout),
0, Long.MAX_VALUE);
stdin = new DataOutputStream(new BufferedOutputStream(process
.getOutputStream()));
inputHandler.bindTo(stdin);
stderr = new DataInputStream(new BufferedInputStream(process
.getErrorStream()));
}
private void startThreads() {
stdinThread = new ProcessInputThread();
stdinThread.start();
stdoutThread = new ProcessOutputThread();
stdoutThread.start();
stderrThread = new ProcessErrorThread();
stderrThread.start();
}
/**
* Find the path to the controller file for the streaming language.
*
* First check path to job jar and if the file is not found (like in the
* case of running hadoop in standalone mode) write the necessary files
* to temporary files and return that path.
*
* @param language
* @param jarPath
* @return
* @throws IOException
*/
private String getControllerPath(String jarPath) throws IOException {
if (language.toLowerCase().equals("python")) {
String controllerPath = jarPath + PYTHON_CONTROLLER_JAR_PATH;
File controller = new File(controllerPath);
if (!controller.exists()) {
File controllerFile = File.createTempFile("controller", ".py");
InputStream pythonControllerStream = Launcher.class.getResourceAsStream(PYTHON_CONTROLLER_JAR_PATH);
try {
FileUtils.copyInputStreamToFile(pythonControllerStream, controllerFile);
} finally {
pythonControllerStream.close();
}
controllerFile.deleteOnExit();
File pigUtilFile = new File(controllerFile.getParent() + "/pig_util.py");
pigUtilFile.deleteOnExit();
InputStream pythonUtilStream = Launcher.class.getResourceAsStream(PYTHON_PIG_UTIL_PATH);
try {
FileUtils.copyInputStreamToFile(pythonUtilStream, pigUtilFile);
} finally {
pythonUtilStream.close();
}
controllerPath = controllerFile.getAbsolutePath();
}
return controllerPath;
} else {
throw new ExecException("Invalid language: " + language);
}
}
/**
* Returns a list of file names (relative to root of pig jar) of files that need to be
* included in the jar shipped to the cluster.
*
* Will need to be smarter as more languages are added and the controller files are large.
*
* @return
*/
public static List<String> getResourcesForJar() {
List<String> files = new ArrayList<String>();
files.add(PYTHON_CONTROLLER_JAR_PATH);
files.add(PYTHON_PIG_UTIL_PATH);
return files;
}
private Object getOutput(Tuple input) throws ExecException {
if (outputQueue == null) {
throw new ExecException("Process has already been shut down. No way to retrieve output for input: " + input);
}
if (ScriptingOutputCapturer.isClassCapturingOutput() &&
!soc.isInstanceCapturingOutput()) {
Tuple t = TupleFactory.getInstance().newTuple(TURN_ON_OUTPUT_CAPTURING);
try {
inputQueue.put(t);
} catch (InterruptedException e) {
throw new ExecException("Failed adding capture input flag to inputQueue");
}
soc.setInstanceCapturingOutput(true);
}
try {
if (this.getInputSchema() == null || this.getInputSchema().size() == 0) {
//When nothing is passed into the UDF the tuple
//being sent is the full tuple for the relation.
//We want it to be nothing (since that's what the user wrote).
input = TupleFactory.getInstance().newTuple(0);
}
inputQueue.put(input);
} catch (Exception e) {
throw new ExecException("Failed adding input to inputQueue", e);
}
Object o = null;
try {
if (outputQueue != null) {
o = outputQueue.take();
if (o == NULL_OBJECT) {
o = null;
}
}
} catch (Exception e) {
throw new ExecException("Problem getting output", e);
}
if (o == ERROR_OUTPUT) {
outputQueue = null;
if (outerrThreadsError == null) {
outerrThreadsError = new StreamingUDFException(this.language, "Problem with streaming udf. Can't recreate exception.");
}
throw outerrThreadsError;
}
return o;
}
@Override
public Schema outputSchema(Schema input) {
return this.schema;
}
/**
* The thread which consumes input and feeds it to the the Process
*/
class ProcessInputThread extends Thread {
ProcessInputThread() {
setDaemon(true);
}
public void run() {
try {
log.debug("Starting PIT");
while (true) {
Tuple inputTuple = inputQueue.take();
inputHandler.putNext(inputTuple);
try {
stdin.flush();
} catch(Exception e) {
return;
}
}
} catch (Exception e) {
log.error(e);
}
}
}
private static final int WAIT_FOR_ERROR_LENGTH = 500;
private static final int MAX_WAIT_FOR_ERROR_ATTEMPTS = 5;
/**
* The thread which consumes output from process
*/
class ProcessOutputThread extends Thread {
ProcessOutputThread() {
setDaemon(true);
}
public void run() {
Object o = null;
try{
log.debug("Starting POT");
//StreamUDFToPig wraps object in single element tuple
o = outputHandler.getNext().get(0);
while (o != OutputHandler.END_OF_OUTPUT) {
if (o != null)
outputQueue.put(o);
else
outputQueue.put(NULL_OBJECT);
o = outputHandler.getNext().get(0);
}
} catch(Exception e) {
if (outputQueue != null) {
try {
//Give error thread a chance to check the standard error output
//for an exception message.
int attempt = 0;
while (stderrThread.isAlive() && attempt < MAX_WAIT_FOR_ERROR_ATTEMPTS) {
Thread.sleep(WAIT_FOR_ERROR_LENGTH);
attempt++;
}
//Only write this if no other error. Don't want to overwrite
//an error from the error thread.
if (outerrThreadsError == null) {
outerrThreadsError = new StreamingUDFException(language, "Error deserializing output. Please check that the declared outputSchema for function " +
funcName + " matches the data type being returned.", e);
}
outputQueue.put(ERROR_OUTPUT); //Need to wake main thread.
} catch(InterruptedException ie) {
log.error(ie);
}
}
}
}
}
class ProcessErrorThread extends Thread {
public ProcessErrorThread() {
setDaemon(true);
}
public void run() {
try {
log.debug("Starting PET");
Integer lineNumber = null;
StringBuffer error = new StringBuffer();
String errInput;
BufferedReader reader = new BufferedReader(
new InputStreamReader(stderr, Charsets.UTF_8));
while ((errInput = reader.readLine()) != null) {
//First line of error stream is usually the line number of error.
//If its not a number just treat it as first line of error message.
if (lineNumber == null) {
try {
lineNumber = Integer.valueOf(errInput);
} catch (NumberFormatException nfe) {
error.append(errInput + "\n");
}
} else {
error.append(errInput + "\n");
}
}
outerrThreadsError = new StreamingUDFException(language, error.toString(), lineNumber);
if (outputQueue != null) {
outputQueue.put(ERROR_OUTPUT); //Need to wake main thread.
}
if (stderr != null) {
stderr.close();
stderr = null;
}
} catch (IOException e) {
log.debug("Process Ended");
} catch (Exception e) {
log.error("standard error problem", e);
}
}
}
public class ProcessKiller implements Runnable {
public void run() {
process.destroy();
}
}
}
| |
/*
* Copyright (C) 2012, 2013 the diamond:dogs|group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.diamonddogs.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.lang.reflect.Array;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutionException;
import org.apache.commons.codec.binary.Hex;
import android.app.ActivityManager;
import android.app.ActivityManager.RunningAppProcessInfo;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Process;
import android.util.Base64;
import at.diamonddogs.data.adapter.database.DatabaseAdapter;
/**
* Collection of general util methods
*/
/**
 * Collection of general util methods
 */
public class Utils {
    private static final String TAG = Utils.class.getSimpleName();

    /**
     * Returns an array
     *
     * @param <T> generic type of the items to place into an array
     * @param clazz the class of generic type <T>
     * @param values an arbitrary number of values
     * @return an array containing all values passed to this method
     * @deprecated do not use this method, the implementation doesn't make a lot
     *             of sense and furthermore, the whole method is somewhat
     *             pointless.
     */
    @Deprecated
    public static <T> T[] asArray(Class<T> clazz, T... values) {
        @SuppressWarnings("unchecked")
        T[] array = (T[]) Array.newInstance(clazz, values.length);
        for (int i = 0; i < values.length; i++) {
            array[i] = values[i];
        }
        return array;
    }

    /**
     * Checks if a {@link Collection} is not <code>null</code> and not empty
     *
     * @param collection the collection to check
     * @return <code>true</code> if the collection wasn't null and is not empty,
     *         <code>false</code> otherwise
     */
    public static boolean isEmpty(Collection<?> collection) {
        return (collection == null || collection.isEmpty());
    }

    /**
     * Checks if an array is not <code>null</code> and not empty
     *
     * @param <T> generic type of the array
     * @param array the array to check
     * @return <code>true</code> if the array wasn't null and is not empty,
     *         <code>false</code> otherwise
     */
    public static <T> boolean isEmptyArray(T[] array) {
        return array == null || array.length == 0;
    }

    /**
     * Creates a {@link List} from a {@link Cursor}
     *
     * @param <T> the generic type of the {@link List}
     * @param cursor the {@link Cursor} to be converted to a {@link List}
     * @param databaseAdapter the {@link DatabaseAdapter} that will be used for conversion
     * @return a {@link List} containing objects created from the input
     *         {@link Cursor}, or <code>null</code> if the cursor is invalid/empty
     */
    public static <T> List<T> convertCursorToList(Cursor cursor, DatabaseAdapter<T> databaseAdapter) {
        // Validate the cursor before allocating anything.
        if (!checkCursor(cursor)) {
            return null;
        }
        List<T> list = new ArrayList<>();
        cursor.moveToFirst();
        do {
            list.add(databaseAdapter.deserialize(cursor));
        } while (cursor.moveToNext());
        return list;
    }

    /**
     * Creates a {@link String} {@link List} from a {@link Cursor}
     *
     * @param cursor the input {@link Cursor}
     * @param name the name of the column
     * @return a {@link String} {@link List}, or <code>null</code> if the
     *         cursor is invalid/empty
     */
    public static List<String> convertColumnToList(Cursor cursor, String name) {
        if (!checkCursor(cursor)) {
            return null;
        }
        List<String> list = new ArrayList<>();
        cursor.moveToFirst();
        do {
            list.add(cursor.getString(cursor.getColumnIndex(name)));
        } while (cursor.moveToNext());
        return list;
    }

    /**
     * Get available cache directory
     *
     * @param context a {@link Context}
     * @return a {@link File} pointing to the external cache directory or, if
     *         that is unavailable, the internal cache directory
     */
    public static File getCacheDir(Context context) {
        File path = context.getExternalCacheDir();
        if (path == null) {
            path = context.getCacheDir();
        }
        return path;
    }

    /**
     * Brings up the MAIN/LAUNCHER activity and clears the top
     *
     * @param context a {@link Context}
     */
    public static void returnToHome(Context context) {
        PackageManager pm = context.getPackageManager();
        Intent intent = new Intent(Intent.ACTION_MAIN);
        intent.addCategory(Intent.CATEGORY_LAUNCHER);
        intent.setPackage(context.getPackageName());
        List<ResolveInfo> activities = pm.queryIntentActivities(intent, 0);
        if (activities == null || activities.isEmpty()) {
            // Previously this fell through to activities.get(0) and threw an
            // IndexOutOfBoundsException when no launcher activity resolved.
            Log.w(TAG, "No MAIN/LAUNCHER activity found for " + context.getPackageName());
            return;
        }
        Intent homeIntent = new Intent("android.intent.action.MAIN");
        homeIntent.addCategory("android.intent.category.LAUNCHER");
        homeIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        homeIntent.setComponent(new ComponentName(context.getPackageName(), activities.get(0).activityInfo.name));
        context.startActivity(homeIntent);
    }

    /**
     * Checks if a timestamp (ms) is in the current month
     *
     * @param when a ms timestamp
     * @return <code>true</code> if it is <code>false</code> otherwise
     */
    public static boolean isSameMonth(long when) {
        Calendar tmp = Calendar.getInstance();
        tmp.setTimeInMillis(when);
        // cal.get(FIELD) does not work :( fields are not recomputed
        int thenYear = tmp.get(Calendar.YEAR);
        int thenMonth = tmp.get(Calendar.MONTH);
        tmp.setTimeInMillis(System.currentTimeMillis());
        int nowYear = tmp.get(Calendar.YEAR);
        int nowMonth = tmp.get(Calendar.MONTH);
        Log.d(TAG, "comparing: " + nowMonth + "." + nowYear + " / " + thenMonth + "." + thenYear);
        return (thenYear == nowYear) && (thenMonth == nowMonth);
    }

    /**
     * Computes a MD5 hash from an input string
     *
     * @param input the input string
     * @return the MD5 hash or <code>null</code> if an error occured
     */
    public static String getMD5Hash(String input) {
        MessageDigest md = null;
        try {
            // NOTE(review): input.getBytes() uses the platform default charset,
            // so hashes of non-ASCII input are device-dependent — confirm
            // whether callers rely on this before switching to UTF-8.
            md = MessageDigest.getInstance("MD5");
            return new String(Hex.encodeHex(md.digest(input.getBytes())));
        } catch (NoSuchAlgorithmException e) {
            Log.e(TAG, e.getMessage(), e);
        }
        return null;
    }

    /**
     * Creates and fills an {@link Integer} array with integer values
     *
     * @param to size of array / max number to be added to array; must be >= -1
     *        (a negative array size exception is thrown below that)
     * @return an {@link Integer} array containing 0..to
     */
    public static Integer[] getDigitArray(int to) {
        to++;
        Integer[] ret = new Integer[to];
        for (int i = 0; i < to; i++) {
            ret[i] = i;
        }
        return ret;
    }

    /**
     * Computes a MD5 hash from an byte array
     *
     * @param data the input data
     * @return the MD5 hash or <code>null</code> if an error occured
     */
    public static String getMD5Hash(byte[] data) {
        try {
            MessageDigest md = MessageDigest.getInstance("MD5");
            return new String(Hex.encodeHex(md.digest(data)));
        } catch (Throwable tr) {
            // Deliberately best-effort: any failure yields null.
            Log.w(TAG, "Could not md5 data");
            return null;
        }
    }

    /**
     * Checks a cursor for validity
     *
     * @param c the {@link Cursor} to check
     * @return <code>true</code> if the cursor is not <code>null</code>, not
     *         closed and not empty, <code>false</code> otherwise. Note that an
     *         empty cursor is closed as a side effect.
     */
    public static boolean checkCursor(Cursor c) {
        if (c == null || c.isClosed()) {
            return false;
        }
        if (c.getCount() <= 0) {
            c.close();
            return false;
        }
        return true;
    }

    /**
     * String to base64 (despite the name, this is encoding, not encryption)
     *
     * @param inMsg the message to be converted to base64
     * @return the base64 string
     */
    public static String encrypt(String inMsg) {
        return encrypt(inMsg.getBytes());
    }

    /**
     * Byte array to base64 (despite the name, this is encoding, not encryption)
     *
     * @param inMsg the message to be converted to base64
     * @return the base64 string
     */
    public static String encrypt(byte[] inMsg) {
        return Base64.encodeToString(inMsg, Base64.DEFAULT);
    }

    /**
     * Base64 string to string (despite the name, this is decoding, not decryption)
     *
     * @param inMsg the message to be converted from base64
     * @return the string
     */
    public static String decrypt(String inMsg) {
        return decrypt(inMsg.getBytes());
    }

    /**
     * Base64 byte array to string (despite the name, this is decoding, not decryption)
     *
     * @param encMsg the message to be converted from base64
     * @return the string
     */
    public static String decrypt(byte[] encMsg) {
        return new String(Base64.decode(encMsg, Base64.DEFAULT));
    }

    /**
     * Checks if the current process is a foreground process (visible by the
     * user)
     *
     * @param context a {@link Context}
     * @return <code>true</code> if the process is visible, <code>false</code>
     *         otherwise
     */
    public static boolean isInForground(Context context) {
        ActivityManager activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        List<RunningAppProcessInfo> appProcesses = activityManager.getRunningAppProcesses();
        if (appProcesses == null) {
            // getRunningAppProcesses() may return null; previously this NPE'd.
            Log.i(TAG, "Running in background");
            return false;
        }
        for (RunningAppProcessInfo appProcess : appProcesses) {
            if (appProcess.importance == RunningAppProcessInfo.IMPORTANCE_VISIBLE && appProcess.pid == Process.myPid()) {
                Log.i(TAG, "visible");
                return true;
            }
        }
        Log.i(TAG, "Running in background");
        return false;
    }

    /**
     * Converts input values to a {@link Calendar}
     *
     * @param dayOfWeek target day of the week ({@link Calendar} constants)
     * @param hourOfDay target hour of day (24h)
     * @param minute target minute
     * @param second target second
     * @return a {@link Calendar} with the next occurrence of the provided
     *         day/time (same day if dayOfWeek is today)
     */
    public static final Calendar getScheduledDate(int dayOfWeek, int hourOfDay, int minute, int second) {
        Calendar c = Calendar.getInstance();
        int weekDay = c.get(Calendar.DAY_OF_WEEK);
        int days = dayOfWeek - weekDay;
        if (days < 0) {
            days += 7;
        }
        c.add(Calendar.DAY_OF_YEAR, days);
        c.set(Calendar.HOUR_OF_DAY, hourOfDay);
        c.set(Calendar.MINUTE, minute);
        c.set(Calendar.SECOND, second);
        return c;
    }

    /**
     * Checks if the current process is a foreground process and kills it if it
     * is not
     *
     * @param c a {@link Context}
     */
    public static final void commitCarefulSuicide(Context c) {
        try {
            if (!new ForegroundCheckTask().execute(c).get()) {
                android.os.Process.killProcess(android.os.Process.myPid());
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }

    /**
     * Same as {@link Utils#commitCarefulSuicide(Context)} but threaded (non
     * blocking)
     *
     * @param context a {@link Context}
     */
    public static final void commitCarefulSuicideThreaded(final Context context) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                ActivityManager activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
                List<RunningAppProcessInfo> appProcesses = activityManager.getRunningAppProcesses();
                if (appProcesses == null) {
                    return;
                }
                Log.d(TAG, "app counter: " + appProcesses.size());
                final String packageName = context.getPackageName();
                for (RunningAppProcessInfo appProcess : appProcesses) {
                    Log.d(TAG, "process: " + appProcess.processName);
                    if (appProcess.importance == RunningAppProcessInfo.IMPORTANCE_FOREGROUND && appProcess.processName.equals(packageName)) {
                        Log.d(TAG, "isINF: " + true);
                        return;
                    }
                }
                Log.d(TAG, "isINF: " + false);
                commitSuicide();
            }
        }).start();
    }

    /**
     * Kills the process without asking questions
     */
    public static final void commitSuicide() {
        android.os.Process.killProcess(android.os.Process.myPid());
    }

    /**
     * Creates a bitmap from an input uri
     *
     * @param c a {@link Context}
     * @param uri an image {@link Uri}
     * @param inSampleSize the sample size to be used when creating the bitmap
     * @return a {@link Bitmap}
     * @throws FileNotFoundException if the image file could not be found
     */
    public static final Bitmap getBitmapFromUri(Context c, Uri uri, int inSampleSize) throws FileNotFoundException {
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inSampleSize = inSampleSize;
        return BitmapFactory.decodeStream(c.getContentResolver().openInputStream(uri), null, options);
    }

    /**
     * Creates a bitmap from an input uri
     *
     * @param c a {@link Context}
     * @param uri an image {@link Uri} as string
     * @return a {@link Bitmap}
     * @throws FileNotFoundException if the image file could not be found
     */
    public static final Bitmap getBitmapFromUri(Context c, String uri) throws FileNotFoundException {
        return BitmapFactory.decodeStream(c.getContentResolver().openInputStream(Uri.parse(uri)));
    }

    /**
     * Background foreground-check used by {@link #commitCarefulSuicide(Context)}.
     * NOTE(review): AsyncTask is deprecated on modern Android; kept for
     * compatibility with the existing API level.
     */
    private static class ForegroundCheckTask extends AsyncTask<Context, Void, Boolean> {
        private static final String TAG = ForegroundCheckTask.class.getSimpleName();

        @Override
        protected Boolean doInBackground(Context... params) {
            final Context context = params[0].getApplicationContext();
            boolean isInForeground = isAppOnForeground(context);
            Log.d(TAG, "isINF: " + isInForeground);
            return isInForeground;
        }

        private boolean isAppOnForeground(Context context) {
            ActivityManager activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
            List<RunningAppProcessInfo> appProcesses = activityManager.getRunningAppProcesses();
            if (appProcesses == null) {
                return false;
            }
            final String packageName = context.getPackageName();
            for (RunningAppProcessInfo appProcess : appProcesses) {
                Log.d(TAG, "process: " + appProcess.processName);
                if (appProcess.importance == RunningAppProcessInfo.IMPORTANCE_FOREGROUND && appProcess.processName.equals(packageName)) {
                    return true;
                }
            }
            return false;
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.grinder.scriptengine.groovy.junit;
import net.grinder.engine.process.JUnitThreadContextInitializer;
import net.grinder.engine.process.JUnitThreadContextUpdater;
import net.grinder.scriptengine.exception.AbstractExceptionProcessor;
import net.grinder.scriptengine.groovy.GroovyExceptionProcessor;
import net.grinder.scriptengine.groovy.junit.annotation.*;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.internal.AssumptionViolatedException;
import org.junit.internal.runners.model.EachTestNotifier;
import org.junit.internal.runners.model.MultipleFailureException;
import org.junit.internal.runners.statements.RunAfters;
import org.junit.internal.runners.statements.RunBefores;
import org.junit.rules.MethodRule;
import org.junit.runner.Description;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunListener;
import org.junit.runner.notification.RunNotifier;
import org.junit.runner.notification.StoppedByUserException;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
import org.junit.runners.model.TestClass;
import java.lang.annotation.Annotation;
import java.util.*;
/**
* Grinder JUnit Runner. Grinder JUnit Runner is the custom {@link Runner} which lets the user can
* run the Grinder script in the JUnit context.
*
* This runner has a little bit different characteristic from conventional JUnit test.
* <ul>
* <li>All Test annotated tests are executed with a single instance.</li>
* <li>{@link BeforeProcess} and {@link AfterProcess} annotated methods are executed per each
* process.</li>
* <li>{@link BeforeThread} and {@link AfterThread} annotated methods are executed per each thread.</li>
* <li>{@link Repeat} annotated
* </ul>
*
* In addition, it contains a little different behavior from generic grinder test script.
* <ul>
* <li>It only initiates only 1 process and 1 thread.</li>
* <li>Each <code>@test</code> annotated method are independent to run. So one failure from one
* method doesn't block the other methods' runs</li>
* </ul>
*
* @author JunHo Yoon
* @author Mavlarn
* @see BeforeProcess
* @see BeforeThread
* @see AfterThread
* @see AfterProcess
* @see Repeat
* @since 1.0
*/
public class GrinderRunner extends BlockJUnit4ClassRunner {
private final TestObjectFactory testTargetFactory;
private final AbstractExceptionProcessor exceptionProcessor = new GroovyExceptionProcessor();
private final Map<FrameworkMethod, Statement> frameworkMethodCache = new HashMap<>();
private JUnitThreadContextInitializer threadContextInitializer;
private JUnitThreadContextUpdater threadContextUpdater;
private PerThreadStatement finalPerThreadStatement;
private boolean enableRateRunner = true;
    /**
     * Constructor.
     *
     * A fresh test instance is created for each request via
     * {@link BlockJUnit4ClassRunner#createTest()} (delegated through the
     * factory below).
     *
     * @param klass klass
     * @throws InitializationError class initialization error.
     */
    public GrinderRunner(Class<?> klass) throws InitializationError {
        super(klass);
        // Factory indirection lets the rest of the runner obtain the test
        // class and a test instance without knowing how they are produced.
        this.testTargetFactory = new TestObjectFactory() {
            @Override
            public TestClass getTestClass() {
                return GrinderRunner.this.getTestClass();
            }
            @Override
            public Object createTest() throws Exception {
                return GrinderRunner.this.createTest();
            }
        };
        initializeGrinderContext();
    }
    /**
     * Constructor.
     *
     * Unlike {@link #GrinderRunner(Class)}, this variant reuses the single
     * supplied {@code runner} object as the test instance for every test.
     *
     * @param klass klass
     * @param runner runner class
     * @throws InitializationError class initialization error.
     */
    public GrinderRunner(Class<?> klass, final Object runner) throws InitializationError {
        super(klass);
        this.testTargetFactory = new TestObjectFactory() {
            @Override
            public TestClass getTestClass() {
                return GrinderRunner.this.getTestClass();
            }
            @Override
            public Object createTest() {
                // Always return the externally supplied instance.
                return runner;
            }
        };
        initializeGrinderContext();
    }
protected void initializeGrinderContext() {
this.threadContextInitializer = new JUnitThreadContextInitializer();
this.threadContextInitializer.initialize();
this.threadContextUpdater = threadContextInitializer.getThreadContextUpdater();
this.finalPerThreadStatement = new PerThreadStatement() {
@Override
void before() {
attachWorker();
}
@Override
void after() {
detachWorker();
}
};
}
@Override
protected List<FrameworkMethod> getChildren() {
return super.getChildren();
}
@Override
public void run(RunNotifier notifier) {
registerRunNotifierListener(notifier);
Description description = getDescription();
enableRateRunner = isRateRunnerEnabled();
EachTestNotifier testNotifier = new EachTestNotifier(notifier, description);
try {
Statement statement = classBlock(notifier);
statement.evaluate();
} catch (AssumptionViolatedException e) {
testNotifier.fireTestIgnored();
} catch (StoppedByUserException e) {
throw e;
} catch (Throwable e) {
testNotifier.addFailure(e);
}
}
/**
* Check if the rate runner should be enabled.
*
* @return true if enabled;
*/
protected boolean isRateRunnerEnabled() {
Description description = getDescription();
return description.testCount() > 1 && isRepeatRunnerEnabled();
}
private boolean isRepeatRunnerEnabled() {
Annotation[] annotations = getTestClass().getAnnotations();
boolean repeatAnnotation = false;
for (Annotation each : annotations) {
if (each.annotationType().equals(Repeat.class)) {
repeatAnnotation = true;
}
}
return repeatAnnotation;
}
@Override
protected Statement classBlock(RunNotifier notifier) {
Statement statement = childrenInvoker(notifier);
statement = withRepeat(statement);
statement = withBeforeThread(statement);
statement = withBeforeProcess(statement);
statement = withAfterThread(statement);
statement = withAfterProcess(statement);
return statement;
}
protected Statement withRepeat(Statement statement) {
Annotation[] annotations = getTestClass().getAnnotations();
int repetition = 1;
for (Annotation each : annotations) {
if (each.annotationType().equals(Repeat.class)) {
repetition = ((Repeat) each).value();
}
}
return new RepetitionStatement(statement, repetition, threadContextUpdater);
}
@SuppressWarnings("deprecation")
protected Statement methodBlock(FrameworkMethod method) {
Statement statement = frameworkMethodCache.get(method);
if (statement != null) {
return statement;
}
Object testObject = testTargetFactory.getTestObject();
statement = methodInvoker(method, testObject);
statement = possiblyExpectingExceptions(method, testObject, statement);
statement = withPotentialTimeout(method, testObject, statement);
statement = withBefores(method, testObject, statement);
statement = withAfters(method, testObject, statement);
statement = withRules(method, testObject, statement);
if (enableRateRunner) {
statement = withRunRate(method, testObject, statement);
}
frameworkMethodCache.put(method, statement);
return statement;
}
protected Statement withRunRate(FrameworkMethod method, @SuppressWarnings("UnusedParameters") Object target, Statement statement) {
RunRate runRate = method.getAnnotation(RunRate.class);
return runRate == null ? statement : new RunRateStatement(statement, runRate.value());
}
private Statement withRules(FrameworkMethod method, Object target, Statement statement) {
Statement result = statement;
for (MethodRule each : getTestClass().getAnnotatedFieldValues(target, Rule.class, MethodRule.class)) {
result = each.apply(result, method, target);
}
return result;
}
/**
* Returns a {@link Statement}: run all non-overridden {@code @BeforeClass} methods on this
* class and superclasses before executing {@code statement}; if any throws an Exception, stop
* execution and pass the exception on.
*
* @param statement statement
* @return wrapped statement
*/
protected Statement withBeforeProcess(Statement statement) {
TestClass testClass = getTestClass();
List<FrameworkMethod> befores = new ArrayList<>(testClass.getAnnotatedMethods(BeforeProcess.class));
befores.addAll(testClass.getAnnotatedMethods(BeforeClass.class));
return befores.isEmpty() ? statement : new RunBefores(statement, befores, null);
}
/**
* Returns a {@link Statement}: run all non-overridden {@code @AfterClass} methods on this class
* and superclasses before executing {@code statement}; all AfterClass methods are always
* executed: exceptions thrown by previous steps are combined, if necessary, with exceptions
* from AfterClass methods into a {@link MultipleFailureException}.
*
* @param statement statement
* @return wrapped statement
*/
protected Statement withAfterProcess(Statement statement) {
TestClass testClass = getTestClass();
List<FrameworkMethod> afters = new ArrayList<>(testClass.getAnnotatedMethods(AfterProcess.class));
afters.addAll(testClass.getAnnotatedMethods(AfterClass.class));
return afters.isEmpty() ? statement : new RunAfters(statement, afters, null);
}
protected Statement withAfterThread(Statement statement) {
List<FrameworkMethod> afterThreads = getTestClass().getAnnotatedMethods(AfterThread.class);
return new RunAfterThreads(statement, afterThreads, testTargetFactory, finalPerThreadStatement);
}
protected Statement withBeforeThread(Statement statement) {
List<FrameworkMethod> beforeThreads = getTestClass().getAnnotatedMethods(BeforeThread.class);
return new RunBeforeThreads(statement, beforeThreads, testTargetFactory, finalPerThreadStatement);
}
protected void registerRunNotifierListener(RunNotifier notifier) {
notifier.addFirstListener(new RunListener() {
@Override
public void testStarted(Description description) {
}
@Override
public void testRunStarted(Description description) {
attachWorker();
}
@Override
public void testRunFinished(Result result) {
detachWorker();
}
@Override
public void testFailure(Failure failure) {
Throwable exception = failure.getException();
Throwable filtered = exceptionProcessor.filterException(exception);
if (exception != filtered) {
exception.initCause(filtered);
}
}
});
}
void attachWorker() {
this.threadContextInitializer.attachWorkerThreadContext();
}
void detachWorker() {
this.threadContextInitializer.detachWorkerThreadContext();
}
}
| |
package org.mifos.framework.hibernate.helper;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Map;
import java.util.Properties;
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="SQL", justification="This is a wrapper class so the bugs found are wrong")
@SuppressWarnings("PMD")
/**
 * A {@link Connection} decorator for tests.
 *
 * <p>Every call is delegated to the wrapped connection with one deliberate
 * exception: {@link #commit()} is a no-op, so test code can never commit its
 * transaction and the surrounding harness can roll all changes back.</p>
 *
 * <p>Includes the JDBC 4.1 (Java 7) methods ({@code setSchema}, {@code getSchema},
 * {@code abort}, {@code setNetworkTimeout}, {@code getNetworkTimeout}) so the
 * class compiles against modern JDKs; these delegate like everything else.</p>
 */
public class TestDbConnection implements Connection {

    /** The real connection every call is delegated to. */
    private final Connection connection;

    public TestDbConnection(Connection connection) {
        this.connection = connection;
    }

    @Override
    public Statement createStatement() throws SQLException {
        return connection.createStatement();
    }

    @Override
    public PreparedStatement prepareStatement(final String s) throws SQLException {
        return connection.prepareStatement(s);
    }

    @Override
    public CallableStatement prepareCall(String s) throws SQLException {
        return connection.prepareCall(s);
    }

    @Override
    public String nativeSQL(String s) throws SQLException {
        return connection.nativeSQL(s);
    }

    @Override
    public void setAutoCommit(boolean b) throws SQLException {
        connection.setAutoCommit(b);
    }

    @Override
    public boolean getAutoCommit() throws SQLException {
        return connection.getAutoCommit();
    }

    @Override
    public void commit() throws SQLException {
        // Intentionally a no-op: tests must never commit, so the test harness
        // can roll back all database changes after each test.
    }

    @Override
    public void rollback() throws SQLException {
        connection.rollback();
    }

    @Override
    public void close() throws SQLException {
        connection.close();
    }

    @Override
    public boolean isClosed() throws SQLException {
        return connection.isClosed();
    }

    @Override
    public DatabaseMetaData getMetaData() throws SQLException {
        return connection.getMetaData();
    }

    @Override
    public void setReadOnly(boolean b) throws SQLException {
        connection.setReadOnly(b);
    }

    @Override
    public boolean isReadOnly() throws SQLException {
        return connection.isReadOnly();
    }

    @Override
    public void setCatalog(String s) throws SQLException {
        connection.setCatalog(s);
    }

    @Override
    public String getCatalog() throws SQLException {
        return connection.getCatalog();
    }

    @Override
    public void setTransactionIsolation(int i) throws SQLException {
        connection.setTransactionIsolation(i);
    }

    @Override
    public int getTransactionIsolation() throws SQLException {
        return connection.getTransactionIsolation();
    }

    @Override
    public SQLWarning getWarnings() throws SQLException {
        return connection.getWarnings();
    }

    @Override
    public void clearWarnings() throws SQLException {
        connection.clearWarnings();
    }

    @Override
    public Statement createStatement(int i, int i1) throws SQLException {
        return connection.createStatement(i, i1);
    }

    @Override
    public PreparedStatement prepareStatement(final String s, int i, int i1) throws SQLException {
        return connection.prepareStatement(s, i, i1);
    }

    @Override
    public CallableStatement prepareCall(String s, int i, int i1) throws SQLException {
        return connection.prepareCall(s, i, i1);
    }

    @Override
    public Map<String, Class<?>> getTypeMap() throws SQLException {
        return connection.getTypeMap();
    }

    @Override
    public void setTypeMap(Map<String, Class<?>> stringClassMap) throws SQLException {
        connection.setTypeMap(stringClassMap);
    }

    @Override
    public void setHoldability(int i) throws SQLException {
        connection.setHoldability(i);
    }

    @Override
    public int getHoldability() throws SQLException {
        return connection.getHoldability();
    }

    @Override
    public Savepoint setSavepoint() throws SQLException {
        return connection.setSavepoint();
    }

    @Override
    public Savepoint setSavepoint(String s) throws SQLException {
        return connection.setSavepoint(s);
    }

    @Override
    public void rollback(Savepoint savepoint) throws SQLException {
        connection.rollback(savepoint);
    }

    @Override
    public void releaseSavepoint(Savepoint savepoint) throws SQLException {
        connection.releaseSavepoint(savepoint);
    }

    @Override
    public Statement createStatement(int i, int i1, int i2) throws SQLException {
        return connection.createStatement(i, i1, i2);
    }

    @Override
    public PreparedStatement prepareStatement(final String s, int i, int i1, int i2) throws SQLException {
        return connection.prepareStatement(s, i, i1, i2);
    }

    @Override
    public CallableStatement prepareCall(String s, int i, int i1, int i2) throws SQLException {
        return connection.prepareCall(s, i, i1, i2);
    }

    @Override
    public PreparedStatement prepareStatement(final String s, int i) throws SQLException {
        return connection.prepareStatement(s, i);
    }

    @Override
    public PreparedStatement prepareStatement(final String s, int[] ints) throws SQLException {
        return connection.prepareStatement(s, ints);
    }

    @Override
    public PreparedStatement prepareStatement(final String s, String[] strings) throws SQLException {
        return connection.prepareStatement(s, strings);
    }

    @Override
    public Clob createClob() throws SQLException {
        return connection.createClob();
    }

    @Override
    public Blob createBlob() throws SQLException {
        return connection.createBlob();
    }

    @Override
    public NClob createNClob() throws SQLException {
        return connection.createNClob();
    }

    @Override
    public SQLXML createSQLXML() throws SQLException {
        return connection.createSQLXML();
    }

    @Override
    public boolean isValid(int i) throws SQLException {
        return connection.isValid(i);
    }

    @Override
    public void setClientInfo(String s, String s1) throws SQLClientInfoException {
        connection.setClientInfo(s, s1);
    }

    @Override
    public void setClientInfo(Properties properties) throws SQLClientInfoException {
        connection.setClientInfo(properties);
    }

    @Override
    public String getClientInfo(String s) throws SQLException {
        return connection.getClientInfo(s);
    }

    @Override
    public Properties getClientInfo() throws SQLException {
        return connection.getClientInfo();
    }

    @Override
    public Array createArrayOf(String s, Object[] objects) throws SQLException {
        return connection.createArrayOf(s, objects);
    }

    @Override
    public Struct createStruct(String s, Object[] objects) throws SQLException {
        return connection.createStruct(s, objects);
    }

    @Override
    public <T> T unwrap(Class<T> tClass) throws SQLException {
        return connection.unwrap(tClass);
    }

    @Override
    public boolean isWrapperFor(Class<?> aClass) throws SQLException {
        return connection.isWrapperFor(aClass);
    }

    // --- JDBC 4.1 (Java 7) methods: required for compilation on modern JDKs. ---

    @Override
    public void setSchema(String schema) throws SQLException {
        connection.setSchema(schema);
    }

    @Override
    public String getSchema() throws SQLException {
        return connection.getSchema();
    }

    @Override
    public void abort(java.util.concurrent.Executor executor) throws SQLException {
        connection.abort(executor);
    }

    @Override
    public void setNetworkTimeout(java.util.concurrent.Executor executor, int milliseconds) throws SQLException {
        connection.setNetworkTimeout(executor, milliseconds);
    }

    @Override
    public int getNetworkTimeout() throws SQLException {
        return connection.getNetworkTimeout();
    }
}
| |
/*
* The MIT License
*
* Copyright 2014 SBPrime.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.primesoft.mcpainter.commands;
import java.awt.image.BufferedImage;
import java.util.stream.Stream;
import org.primesoft.mcpainter.blocksplacer.BlockLoger;
import org.primesoft.mcpainter.configuration.ConfigProvider;
import org.primesoft.mcpainter.drawing.filters.CropFilter;
import org.primesoft.mcpainter.drawing.filters.FilterManager;
import org.primesoft.mcpainter.drawing.ImageHelper;
import org.primesoft.mcpainter.FoundManager;
import org.primesoft.mcpainter.Help;
import org.primesoft.mcpainter.mapdrawer.MapHelper;
import org.primesoft.mcpainter.PermissionManager;
import org.primesoft.mcpainter.MCPainterMain;
import org.primesoft.mcpainter.worldEdit.CuboidSelection;
import org.primesoft.mcpainter.worldEdit.IEditSession;
import org.primesoft.mcpainter.worldEdit.ILocalPlayer;
import org.primesoft.mcpainter.worldEdit.ILocalSession;
import org.primesoft.mcpainter.worldEdit.IWorldEdit;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.Rotation;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.ItemFrame;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.MapMeta;
import org.bukkit.map.MapView;
import org.primesoft.mcpainter.blocksplacer.IChange;
import org.primesoft.mcpainter.utils.Vector;
/**
* @author SBPrime
*/
/**
 * Command handler that draws a high-definition image onto a wall of item
 * frames, one 128x128 map tile per frame.
 *
 * @author SBPrime
 */
public class HdImageCommand {

    /** Helper used to persist map images and render them onto map views. */
    private final MapHelper m_mapHelper;

    public HdImageCommand(MapHelper mapHelper) {
        m_mapHelper = mapHelper;
    }

    /**
     * Execute the HD-image command: validate arguments, resolve the target
     * cuboid selection and schedule the (slow) download/draw work off the
     * main server thread.
     *
     * @param sender    plugin main class, used for scheduling and messaging
     * @param player    the invoking player
     * @param worldEdit WorldEdit integration used for selections and sessions
     * @param args      command arguments: image URL, optionally two corners
     */
    public void Execute(MCPainterMain sender, Player player, IWorldEdit worldEdit, String[] args) {
        // Usage: <cmd> <url> or <cmd> <url> <corner1> <corner2>
        if (args.length != 2 && args.length != 4) {
            Help.ShowHelp(player, Commands.COMMAND_IMAGEHD);
            return;
        }

        String url = args[1];
        final CuboidSelection selection;
        if (args.length == 2) {
            // No explicit corners: use the player's current WorldEdit selection.
            selection = worldEdit.getSelection(player);
        } else {
            final World w = player.getWorld();
            Vector v1 = Vector.parse(args[2]);
            Vector v2 = Vector.parse(args[3]);
            if (v1 == null || v2 == null) {
                selection = null;
            } else {
                // Normalize the two corners into the min/max points of a cuboid.
                selection = new CuboidSelection(w, new Vector(
                        Math.min(v1.getX(), v2.getX()),
                        Math.min(v1.getY(), v2.getY()),
                        Math.min(v1.getZ(), v2.getZ())),
                        new Vector(
                                Math.max(v1.getX(), v2.getX()),
                                Math.max(v1.getY(), v2.getY()),
                                Math.max(v1.getZ(), v2.getZ())));
            }
        }

        if (selection == null) {
            MCPainterMain.say(player, ChatColor.RED + "No selection.");
            return;
        }

        // Downloading/filtering the image is slow; never block the main thread.
        sender.getServer().getScheduler().runTaskAsynchronously(sender,
                new CommandThread(this, sender, player, url, worldEdit, selection));
    }

    /**
     * Check whether a material can serve as backing for an item frame.
     * Air, liquids and portals cannot; otherwise defer to the material's
     * own solidity flags.
     */
    private static boolean isSolid(Material m) {
        switch (m) {
            case AIR:
            case LAVA:
            case WATER:
            case END_PORTAL:
            case NETHER_PORTAL:
                return false;
            default:
                return m.isSolid() && m.isBlock();
        }
    }

    /**
     * A single loggable change that places (or reuses) an item frame holding a
     * 128x128 map tile cropped from the source image.
     */
    private class DrawMapCommand implements IChange {

        /** Block position backing the item frame. */
        private final Location m_location;

        /** The tile to draw: at most 128x128, smaller at image edges. */
        private final BufferedImage m_img;

        private final MapHelper m_mapHelper;

        /** Direction the frame faces (away from the backing block). */
        private final BlockFace m_rotation;

        // State recorded by redo(); currently unused because undo() is empty.
        private Material m_oldMaterial;
        private ItemFrame m_frame;
        private MapView m_mapView;

        private DrawMapCommand(Location location, BlockFace face,
                int offsetX, int offsetY, BufferedImage img,
                MapHelper mapHelper) {
            m_location = location;

            // Crop a tile of at most 128x128 pixels, clamped to the image bounds.
            int x = Math.min(offsetX + 127, img.getWidth() - 1);
            int y = Math.min(offsetY + 127, img.getHeight() - 1);
            m_img = CropFilter.crop(img, offsetX, offsetY, x, y, false);
            m_mapHelper = mapHelper;
            m_rotation = face;
        }

        @Override
        public void redo() {
            Chunk chunk = m_location.getChunk();
            if (!chunk.isLoaded()) {
                if (!chunk.load()) {
                    return;
                }
            }

            // Ensure the backing block is solid so the frame can attach to it.
            World w = m_location.getWorld();
            Block block = w.getBlockAt(m_location);
            Material material = block.getType();
            if (!isSolid(material)) {
                m_oldMaterial = material;
                block.setType(Material.BARRIER);
            } else {
                m_oldMaterial = null;
            }

            // Reuse an existing frame at the target position with the right facing, if any.
            Location ifLocation = block.getRelative(m_rotation).getLocation();
            ItemFrame tFrame = Stream.of(chunk.getEntities())
                    .filter(e -> EntityType.ITEM_FRAME.equals(e.getType()))
                    .map(e -> new Object() {
                        final ItemFrame frame = (ItemFrame) e;
                        final BlockFace facing = ((ItemFrame) e).getFacing();
                        final Location location = e.getLocation();
                    })
                    .filter(e -> m_rotation.equals(e.facing)
                            && ifLocation.getBlockX() == e.location.getBlockX()
                            && ifLocation.getBlockY() == e.location.getBlockY()
                            && ifLocation.getBlockZ() == e.location.getBlockZ())
                    .map(e -> e.frame)
                    .findAny()
                    .orElse(null);

            if (tFrame == null) {
                // spawn(Location, Class<ItemFrame>) already returns ItemFrame;
                // the previous explicit cast was redundant.
                tFrame = w.spawn(ifLocation, ItemFrame.class);
                tFrame.setFacingDirection(m_rotation, true);
                tFrame.setRotation(Rotation.NONE);
            }
            m_frame = tFrame;

            // Reuse the frame's existing map if it already holds one,
            // otherwise create a fresh map and put it into the frame.
            ItemStack frameContent = m_frame.getItem();
            MapView mapView;
            if (Material.FILLED_MAP.equals(frameContent.getType())) {
                mapView = Bukkit.getMap((short) ((MapMeta) frameContent.getItemMeta()).getMapId());
            } else {
                mapView = Bukkit.createMap(w);
                frameContent = new ItemStack(Material.FILLED_MAP, 1);
                MapMeta mm = (MapMeta) frameContent.getItemMeta();
                mm.setMapId(mapView.getId());
                frameContent.setItemMeta(mm);
                m_frame.setItem(frameContent);
            }
            m_mapView = mapView;

            m_mapHelper.storeMap(m_mapView, m_img);
            m_mapHelper.drawImage(m_mapView, m_img);
        }

        @Override
        public void undo() {
            // NOTE(review): undo is not implemented - the frame, the map and any
            // BARRIER block placed by redo() are left in place, even though
            // m_oldMaterial/m_frame/m_mapView record enough state to revert.
            // Confirm whether undo support was intentionally omitted.
        }

        @Override
        public Location getLocation() {
            return m_location;
        }
    }

    /**
     * Async worker: downloads the image, applies the player's filters,
     * validates the selection and logs one {@link DrawMapCommand} per tile.
     */
    private class CommandThread implements Runnable {

        private final CuboidSelection m_selection;
        private final String m_url;
        private final Player m_player;
        private final MCPainterMain m_sender;
        private final HdImageCommand m_this;
        private final IEditSession m_session;
        private final ILocalSession m_lSession;

        /** Wall facing derived from the player's yaw at command time. */
        private final BlockFace m_rotation;

        private CommandThread(HdImageCommand command, MCPainterMain sender, Player player,
                String url, IWorldEdit worldEdit, CuboidSelection selection) {
            m_this = command;
            m_sender = sender;
            m_player = player;
            m_url = url;
            m_selection = selection;

            // Capture session/heading on the calling thread, before async work starts.
            ILocalPlayer localPlayer = worldEdit.wrapPlayer(player);
            m_rotation = calcHeading(localPlayer.getYaw());
            m_lSession = worldEdit.getSession(player);
            m_session = m_lSession.createEditSession(localPlayer);
        }

        @Override
        public void run() {
            FilterManager fm = FilterManager.getFilterManager(m_player);
            double price = ConfigProvider.getCommandPrice("imagehd") + fm.getPrice();
            synchronized (FoundManager.getMutex()) {
                // Funds are checked up front but subtracted only on success.
                if (price > 0 && FoundManager.getMoney(m_player) < price) {
                    MCPainterMain.say(m_player, ChatColor.RED + "You don't have sufficient funds to apply all the filters and draw the map.");
                    return;
                }

                MCPainterMain.say(m_player, "Loading image...");
                BufferedImage img = ImageHelper.downloadImage(m_url);
                if (img == null) {
                    MCPainterMain.say(m_player, ChatColor.RED + "Error downloading image " + ChatColor.WHITE + m_url);
                    return;
                }
                img = fm.applyFilters(img, null);
                final BufferedImage fImg = img;

                int imgH = img.getHeight();
                int imgW = img.getWidth();
                if (!PermissionManager.checkImage(m_player, imgW, imgH)) {
                    return;
                }

                Vector minPoint = m_selection.getMinimumPoint();
                Vector maxPoint = m_selection.getMaximumPoint();
                int dx = (int) (maxPoint.getX() - minPoint.getX());
                int dz = (int) (maxPoint.getZ() - minPoint.getZ());
                int dy = (int) (maxPoint.getY() - minPoint.getY());

                // The selection must be a flat wall: thin along either X or Z.
                int kx, kz;
                if (dx > 1 && dz > 1) {
                    MCPainterMain.say(m_player, ChatColor.RED + "Invalid selection area.");
                    return;
                } else if (dx > dz) {
                    kx = 1;
                    kz = 0;
                } else {
                    kx = 0;
                    kz = 1;
                }

                // Number of 128px map tiles needed in each direction (ceiling division).
                int bHeight = imgH / 128 + (imgH % 128 != 0 ? 1 : 0);
                int bWidth = imgW / 128 + (imgW % 128 != 0 ? 1 : 0);
                if (dy < bHeight || (dx < bWidth && dz < bWidth)) {
                    MCPainterMain.say(m_player, ChatColor.RED + "The selection is too small, required: " + bWidth + "x" + bHeight);
                    return;
                }

                // Start at the top corner; mirror the horizontal direction for
                // north/east walls so the image is not drawn back to front.
                Location pos = new Location(m_selection.getWorld(), minPoint.getBlockX(), maxPoint.getBlockY(), minPoint.getBlockZ());
                if (m_rotation == BlockFace.NORTH
                        || m_rotation == BlockFace.EAST) {
                    pos = pos.add(kx * (bWidth - 1), 0, kz * (bWidth - 1));
                    kx *= -1;
                    kz *= -1;
                }

                MCPainterMain.say(m_player, "Drawing image...");
                BlockLoger loger = new BlockLoger(m_player, m_lSession, m_session, m_sender);
                for (int py = 0; py < bHeight; py++) {
                    Location tmp = pos.clone();
                    for (int px = 0; px < bWidth; px++) {
                        loger.logChange(new DrawMapCommand(tmp.clone(), m_rotation,
                                px * 128, py * 128, fImg, m_mapHelper));
                        tmp = tmp.add(kx, 0, kz);
                    }
                    pos = pos.add(0, -1, 0);
                }
                loger.logMessage("Drawing image done.");
                loger.logEndSession();
                loger.flush();

                FoundManager.subtractMoney(m_player, price);
            }
        }

        /**
         * Convert a yaw angle (degrees) to the nearest cardinal direction.
         */
        private BlockFace calcHeading(double yaw) {
            // Double modulo normalizes into [0, 360) even for yaw < -360,
            // which the previous single "+ 360" correction mishandled.
            yaw = ((yaw % 360) + 360) % 360;
            if (yaw < 45) {
                return BlockFace.NORTH;
            } else if (yaw < 135) {
                return BlockFace.EAST;
            } else if (yaw < 225) {
                return BlockFace.SOUTH;
            } else if (yaw < 315) {
                return BlockFace.WEST;
            } else {
                return BlockFace.NORTH;
            }
        }
    }
}
| |
package com.rackspacecloud.blueflood.io;
import com.rackspacecloud.blueflood.cache.MetadataCache;
import com.rackspacecloud.blueflood.exceptions.CacheException;
import com.rackspacecloud.blueflood.io.astyanax.ABasicMetricsRW;
import com.rackspacecloud.blueflood.io.datastax.DBasicMetricsRW;
import com.rackspacecloud.blueflood.io.datastax.DDelayedLocatorIO;
import com.rackspacecloud.blueflood.io.datastax.DLocatorIO;
import com.rackspacecloud.blueflood.rollup.Granularity;
import com.rackspacecloud.blueflood.rollup.SlotKey;
import com.rackspacecloud.blueflood.service.Configuration;
import com.rackspacecloud.blueflood.service.CoreConfig;
import com.rackspacecloud.blueflood.service.SingleRollupWriteContext;
import com.rackspacecloud.blueflood.types.*;
import com.rackspacecloud.blueflood.utils.DefaultClockImpl;
import com.rackspacecloud.blueflood.utils.TimeValue;
import com.rackspacecloud.blueflood.utils.Util;
import org.junit.Before;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
 * Base class for testing BasicMetricsRW implementations for writing/reading basic metrics (SimpleNumber, String,
* Booleans). This class mostly creates:
* <ul>
* <li>the sample data which is read and written using the MetricsRW implementations
* <li>helper methods for creating {@link com.rackspacecloud.blueflood.service.SingleRollupWriteContext}
* </ul>
*/
public class BasicMetricsRWIntegrationTest extends IntegrationTestBase {

    /** Allowed absolute error when comparing rolled-up double values. */
    protected static final double EPSILON = .5;

    private static final String TENANT1 = "123456";
    private static final String TENANT2 = "987654";
    private static final String TENANT3 = "123789";

    private static final TimeValue TTL = new TimeValue(24, TimeUnit.HOURS);

    protected LocatorIO locatorIO = new DLocatorIO();
    protected DelayedLocatorIO delayedLocatorIO = new DDelayedLocatorIO();
    protected MetricsRW datastaxMetricsRW = new DBasicMetricsRW(locatorIO, delayedLocatorIO, false, new DefaultClockImpl());
    protected MetricsRW astyanaxMetricsRW = new ABasicMetricsRW(false, new DefaultClockImpl());

    // Sample metrics generated before each test, keyed by locator.
    protected Map<Locator, IMetric> numericMap = new HashMap<Locator, IMetric>();
    protected Map<Locator, IMetric> stringMap = new HashMap<Locator, IMetric>();
    protected Map<Locator, IMetric> boolMap = new HashMap<Locator, IMetric>();

    protected static final long MAX_AGE_ALLOWED = Configuration.getInstance().getLongProperty(CoreConfig.ROLLUP_DELAY_MILLIS);

    protected static Granularity DELAYED_METRICS_STORAGE_GRANULARITY =
            Granularity.getRollupGranularity(Configuration.getInstance().getStringProperty(CoreConfig.DELAYED_METRICS_STORAGE_GRANULARITY));

    /**
     * Generate numeric, string and boolean metrics to be used by the tests -
     * one of each kind per tenant - and register their types in the metadata cache.
     *
     * @throws CacheException
     */
    @Before
    public void generateMetrics() throws CacheException {

        String className = getClass().getSimpleName();
        for( String tid : Arrays.asList( TENANT1, TENANT2, TENANT3 ) ) {

            // Numeric: Long.valueOf instead of the deprecated new Long(...) constructor.
            Locator locator = Locator.createLocatorFromPathComponents( tid, className + ".numeric.metric." + System.currentTimeMillis() );
            Metric metric = new Metric( locator,
                    Long.valueOf( System.currentTimeMillis() % 100 ),
                    System.currentTimeMillis(),
                    new TimeValue(1, TimeUnit.DAYS),
                    "unit" );
            numericMap.put( locator, metric );
            MetadataCache.getInstance().put( locator, MetricMetadata.TYPE.name().toLowerCase(), DataType.NUMERIC.toString() );

            // String
            locator = Locator.createLocatorFromPathComponents( tid, className + ".string.metric." + System.currentTimeMillis() );
            metric = new Metric( locator,
                    "String_value." + (System.currentTimeMillis() % 100),
                    System.currentTimeMillis(),
                    new TimeValue(1, TimeUnit.DAYS), "unit" );
            stringMap.put( locator, metric );
            MetadataCache.getInstance().put( locator, MetricMetadata.TYPE.name().toLowerCase(), DataType.STRING.toString() );

            // Boolean: the comparison itself is the value; the previous
            // "cond ? true : false" ternary was redundant.
            locator = Locator.createLocatorFromPathComponents( tid, className + ".boolean.metric." + System.currentTimeMillis() );
            metric = new Metric( locator,
                    System.currentTimeMillis() % 2 == 0,
                    System.currentTimeMillis(),
                    new TimeValue(1, TimeUnit.DAYS), "unit" );
            boolMap.put( locator, metric );
            MetadataCache.getInstance().put( locator, MetricMetadata.TYPE.name().toLowerCase(), DataType.BOOLEAN.toString() );
        }
    }

    /**
     * This method is to supply the granularity parameter to some test methods below.
     * It excludes the first (FULL) and the last (coarsest) granularity.
     *
     * @return array of granularities, typed as Object for parameterized-test frameworks
     */
    protected Object getGranularitiesToTest() {
        return Arrays.copyOfRange( Granularity.granularities(), 1, Granularity.granularities().length - 1);
    }

    /**
     * Converts the input metrics from a map of locator -> IMetric to a list of
     * {@link com.rackspacecloud.blueflood.service.SingleRollupWriteContext}
     * objects
     *
     * @param inputMetrics metrics to convert
     * @param destGran destination granularity of the write contexts
     * @return list of write contexts, one per input metric
     * @throws IOException
     */
    protected List<SingleRollupWriteContext> toWriteContext( Collection<IMetric> inputMetrics, Granularity destGran) throws IOException {

        List<SingleRollupWriteContext> resultList = new ArrayList<SingleRollupWriteContext>();
        for ( IMetric metric : inputMetrics ) {
            SingleRollupWriteContext writeContext = createSingleRollupWriteContext( destGran, metric );
            resultList.add(writeContext);
        }
        return resultList;
    }

    /**
     * Convert a list of {@link com.rackspacecloud.blueflood.types.Points} into a list of
     * {@link com.rackspacecloud.blueflood.service.SingleRollupWriteContext} for the given
     * Granularity and Locator.
     *
     * @param locator locator shared by all resulting write contexts
     * @param points rollup points keyed by timestamp
     * @param gran granularity of the write contexts
     * @return one write context per point
     */
    protected List<SingleRollupWriteContext> toWriteContext( Locator locator, Points<Rollup> points, Granularity gran ) {

        List<SingleRollupWriteContext> resultList = new ArrayList<SingleRollupWriteContext>();
        for( Map.Entry<Long, Points.Point<Rollup>> entry : points.getPoints().entrySet() ) {
            resultList.add( new SingleRollupWriteContext(
                    entry.getValue().getData(),
                    locator,
                    gran,
                    CassandraModel.getBasicColumnFamily( gran ),
                    entry.getKey() ) );
        }
        return resultList;
    }

    /**
     * Create a single {@link com.rackspacecloud.blueflood.service.SingleRollupWriteContext} from the given
     * {@link com.rackspacecloud.blueflood.types.IMetric} and Granularity, rolling the
     * metric's single raw sample up into a {@link BasicRollup}.
     *
     * @param destGran destination granularity
     * @param metric source metric providing locator, value and collection time
     * @return write context wrapping the rollup of the metric's single sample
     * @throws IOException
     */
    protected SingleRollupWriteContext createSingleRollupWriteContext( Granularity destGran, IMetric metric ) throws IOException {

        Locator locator = metric.getLocator();

        Points<SimpleNumber> points = new Points<SimpleNumber>();
        points.add( new Points.Point<SimpleNumber>( metric.getCollectionTime(), new SimpleNumber( metric.getMetricValue() ) ) );

        BasicRollup rollup = BasicRollup.buildRollupFromRawSamples( points );

        return new SingleRollupWriteContext(
                rollup,
                locator,
                destGran,
                CassandraModel.getBasicColumnFamily( destGran ),
                metric.getCollectionTime());
    }

    /**
     * For a given list of locators, figure the shard they belong to and for all those shards
     * get all the locators in metric_locator column family
     *
     * @param ingestedLocators locators that were written by the test
     * @return all locators stored for the shards covering the input
     * @throws IOException
     */
    protected Set<Locator> retrieveLocators(Set<Locator> ingestedLocators) throws IOException {

        // Collect the distinct shards first so each shard is queried only once.
        Set<Long> shards = new HashSet<Long>();
        for (Locator locator: ingestedLocators) {
            long shard = (long) Util.getShard(locator.toString());
            shards.add(shard);
        }

        Set<Locator> locatorsFromDB = new HashSet<Locator>();
        for (Long shard: shards) {
            locatorsFromDB.addAll(locatorIO.getLocators(shard));
        }

        return locatorsFromDB;
    }

    /**
     * For a given list of metrics, figure out the shard and slot they belong to and for those
     * shard and slot combinations, get all the locators from metrics_delayed_locator column family.
     *
     * @param metrics metrics that were written by the test
     * @return all delayed locators stored for the covering shard/slot combinations
     * @throws IOException
     */
    protected Set<Locator> retrieveLocatorsByShardAndSlot(List<IMetric> metrics) throws IOException {

        // Deduplicate shard/slot combinations via their string form before querying.
        Set<String> slotKeys = new HashSet<String>();
        for (IMetric metric: metrics) {
            int shard = Util.getShard(metric.getLocator().toString());
            int slot = DELAYED_METRICS_STORAGE_GRANULARITY.slot(metric.getCollectionTime());
            SlotKey slotKey = SlotKey.of(DELAYED_METRICS_STORAGE_GRANULARITY, slot, shard);
            slotKeys.add(slotKey.toString());
        }

        Set<Locator> locatorsFromDB = new HashSet<Locator>();
        for (String slotKeyStr: slotKeys) {
            locatorsFromDB.addAll(delayedLocatorIO.getLocators(SlotKey.parse(slotKeyStr)));
        }

        return locatorsFromDB;
    }
}
| |
/*
* Copyright 2013 Michael Mackenzie High
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package autumn.lang.compiler.ast.nodes;
import autumn.lang.compiler.ast.commons.ConstructList;
import autumn.lang.compiler.ast.commons.IAnnotated;
import autumn.lang.compiler.ast.commons.IBinaryOperation;
import autumn.lang.compiler.ast.commons.IConstruct;
import autumn.lang.compiler.ast.commons.IConversionOperation;
import autumn.lang.compiler.ast.commons.IDatum;
import autumn.lang.compiler.ast.commons.IDirective;
import autumn.lang.compiler.ast.commons.IDocumented;
import autumn.lang.compiler.ast.commons.IExpression;
import autumn.lang.compiler.ast.commons.IRecord;
import autumn.lang.compiler.ast.commons.IStatement;
import autumn.lang.compiler.ast.commons.IUnaryOperation;
import autumn.lang.compiler.ast.literals.BigDecimalLiteral;
import autumn.lang.compiler.ast.literals.BigIntegerLiteral;
import autumn.lang.compiler.ast.literals.ByteLiteral;
import autumn.lang.compiler.ast.literals.CharLiteral;
import autumn.lang.compiler.ast.literals.DoubleLiteral;
import autumn.lang.compiler.ast.literals.FloatLiteral;
import autumn.lang.compiler.ast.literals.IntLiteral;
import autumn.lang.compiler.ast.literals.LongLiteral;
import autumn.lang.compiler.ast.literals.ShortLiteral;
import java.io.File;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
/**
 * An instance of this class is an AST node that represents a list comprehension.
 *
 * <p>
 * <table border="1">
 * <tr> <td> <b>Property Name</b> </td> <td> <b>Property Description</b> </td> </tr>
 * <tr> <td> <code>modifier</code> </td> <td>This is the expression that transforms the value produced by the iterator.</td> </tr>
 * <tr> <td> <code>variable</code> </td> <td>This is the variable that will store values produced by the iterator.</td> </tr>
 * <tr> <td> <code>type</code> </td> <td>This is the static-type of the variable.</td> </tr>
 * <tr> <td> <code>iterable</code> </td> <td>This expression produces the iterable that will be iterated over.</td> </tr>
 * <tr> <td> <code>condition</code> </td> <td>(optional) This expression is used to determine whether an element should be skipped or not.</td> </tr>
 * <tr> <td> <code>location</code> </td> <td>This is the source-location information regarding this construct.</td> </tr>
 * </table>
 * </p>
 *
 * <p> Instances are effectively immutable: every setter returns a modified shallow copy
 * rather than mutating the receiver. (This class was originally auto-generated.) </p>
 */
@SuppressWarnings("unchecked")
public final class ListComprehensionExpression extends Object implements IExpression
{
    private IExpression modifier;

    private Variable variable;

    private TypeSpecifier type;

    private IExpression iterable;

    private IExpression condition;

    private SourceLocation location = new SourceLocation();

    /**
     * Setter.
     *
     * @param value is the new value of property <code>modifier</code>.
     * @return a copy of this object with property <code>modifier</code> set to value.
     */
    public ListComprehensionExpression setModifier(final IExpression value)
    {
        final ListComprehensionExpression copied = copy();
        copied.modifier = value;
        return copied;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>modifier</code>.
     */
    public IExpression getModifier()
    {
        return this.modifier;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>variable</code>.
     * @return a copy of this object with property <code>variable</code> set to value.
     */
    public ListComprehensionExpression setVariable(final Variable value)
    {
        final ListComprehensionExpression copied = copy();
        copied.variable = value;
        return copied;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>variable</code>.
     */
    public Variable getVariable()
    {
        return this.variable;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>type</code>.
     * @return a copy of this object with property <code>type</code> set to value.
     */
    public ListComprehensionExpression setType(final TypeSpecifier value)
    {
        final ListComprehensionExpression copied = copy();
        copied.type = value;
        return copied;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>type</code>.
     */
    public TypeSpecifier getType()
    {
        return this.type;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>iterable</code>.
     * @return a copy of this object with property <code>iterable</code> set to value.
     */
    public ListComprehensionExpression setIterable(final IExpression value)
    {
        final ListComprehensionExpression copied = copy();
        copied.iterable = value;
        return copied;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>iterable</code>.
     */
    public IExpression getIterable()
    {
        return this.iterable;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>condition</code>.
     * @return a copy of this object with property <code>condition</code> set to value.
     */
    public ListComprehensionExpression setCondition(final IExpression value)
    {
        final ListComprehensionExpression copied = copy();
        copied.condition = value;
        return copied;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>condition</code>.
     */
    public IExpression getCondition()
    {
        return this.condition;
    }

    /**
     * Setter.
     *
     * @param value is the new value of property <code>location</code>.
     * @return a copy of this object with property <code>location</code> set to value.
     */
    public ListComprehensionExpression setLocation(final SourceLocation value)
    {
        final ListComprehensionExpression copied = copy();
        copied.location = value;
        return copied;
    }

    /**
     * Getter.
     *
     * @return the value of property <code>location</code>.
     */
    public SourceLocation getLocation()
    {
        return this.location;
    }

    /**
     * This method creates a new instance of this class.
     *
     * @param modifier is the value for property <code>modifier</code>.
     * @param variable is the value for property <code>variable</code>.
     * @param type is the value for property <code>type</code>.
     * @param iterable is the value for property <code>iterable</code>.
     * @param condition is the value for property <code>condition</code>.
     * @param location is the value for property <code>location</code>.
     * @return a new instance of this class.
     */
    public static ListComprehensionExpression create(IExpression modifier, Variable variable, TypeSpecifier type, IExpression iterable, IExpression condition, SourceLocation location)
    {
        // Each setter returns a fresh copy, so the calls can simply be chained.
        return new ListComprehensionExpression()
                .setModifier(modifier)
                .setVariable(variable)
                .setType(type)
                .setIterable(iterable)
                .setCondition(condition)
                .setLocation(location);
    }

    /**
     * This method welcomes a visitor that wants to visit this object.
     *
     * @param visitor is the visitor that is visiting this object.
     */
    public void accept(final IAstVisitor visitor)
    {
        visitor.visit(this);
    }

    /**
     * This method creates a shallow copy of this object.
     *
     * @return a shallow copy of this object.
     */
    public ListComprehensionExpression copy()
    {
        final ListComprehensionExpression duplicate = new ListComprehensionExpression();
        duplicate.modifier = this.modifier;
        duplicate.variable = this.variable;
        duplicate.type = this.type;
        duplicate.iterable = this.iterable;
        duplicate.condition = this.condition;
        duplicate.location = this.location;
        return duplicate;
    }

    /**
     * This method creates a map representation of this struct.
     *
     * <p>
     * Each key is the name of a field.
     * Each value is the result of calling the key field's getter.
     * </p>
     *
     * @return a map containing the entries in this struct.
     */
    public Map<String, Object> toMap()
    {
        // TreeMap keeps the keys sorted, which fixes the toString() ordering.
        final Map<String, Object> map = new TreeMap<String, Object>();
        map.put("modifier", getModifier());
        map.put("variable", getVariable());
        map.put("type", getType());
        map.put("iterable", getIterable());
        map.put("condition", getCondition());
        map.put("location", getLocation());
        return map;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString()
    {
        return toMap().toString();
    }
}
| |
package com.sylva.vdhr.widget;
import android.content.Context;
import android.support.v4.widget.ViewDragHelper;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.FrameLayout;
/**
 * Created by tongzhichao on 16-3-22.
 * DrawerLayout extends FrameLayout by ViewDragHelper
 *
 * <p>A two-child drawer container: child 0 is the slide panel whose width defines
 * how far the drawer opens; child 1 is the draggable content view. The content
 * view is dragged horizontally between 0 and the slide panel's width.</p>
 */
public class DrawerLayout extends FrameLayout {
    private ViewDragHelper mDragger;
    private ViewDragHelper.Callback mCallback;
    // Horizontal travel distance (px) of the content view when fully open;
    // taken from the width of child 0 in onMeasure().
    private int mDefaultslideWidth;
    private boolean mIsOpen = false;
    // The draggable content view (child 1 of this layout).
    private View mContentView;
    // True while the user is actively dragging (STATE_DRAGGING).
    private boolean mIsScrolled = false;
    // True while the view is settling programmatically (STATE_SETTLING).
    private boolean mIsAutoScrolled = false;
    private OnStateChangedListener mOnStateChangedListener;

    public DrawerLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
        mCallback = new DrawerCallback();
        mDragger = ViewDragHelper.create(this, 1.0f, mCallback);
    }

    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        // The content view must be declared as the second child in the layout XML.
        mContentView = getChildAt(1);
        if (mContentView == null) {
            throw new NullPointerException("contentview is null");
        }
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        View slideView = getChildAt(0);
        if (slideView != null) {
            // NOTE(review): getWidth() may still be 0 during the first measure
            // pass; getMeasuredWidth() might be more appropriate here — confirm.
            mDefaultslideWidth = slideView.getWidth();
        } else {
            // Fix: the previous else-branch dereferenced the same null child again
            // inside a try/catch(NullPointerException) just to reach this log
            // statement. Log the error directly instead of provoking an NPE.
            Log.e("DrawerLayout", "Layout has at least one child view!");
        }
    }

    /** ViewDragHelper callback implementing the drawer's drag behaviour. */
    private class DrawerCallback extends ViewDragHelper.Callback {
        @Override
        public boolean tryCaptureView(View child, int pointerId) {
            // Refuse new captures while a programmatic settle is in flight.
            if (mIsAutoScrolled) {
                return false;
            }
            return child == mContentView;
        }

        @Override
        public int clampViewPositionHorizontal(View child, int left, int dx) {
            // Constrain horizontal movement to [0, mDefaultslideWidth].
            return Math.max(Math.min(mDefaultslideWidth, left), 0);
        }

        @Override
        public int getViewHorizontalDragRange(View child) {
            return Math.max(Math.min(mDefaultslideWidth, child.getLeft()), 0);
        }

        @Override
        public int getViewVerticalDragRange(View child) {
            return super.getViewVerticalDragRange(child);
        }

        @Override
        public int clampViewPositionVertical(View child, int top, int dy) {
            // No vertical movement allowed.
            return 0;
        }

        @Override
        public void onViewDragStateChanged(int state) {
            switch (state) {
                case ViewDragHelper.STATE_DRAGGING:
                    mIsScrolled = true;
                    break;
                case ViewDragHelper.STATE_IDLE:
                    // Movement finished: derive open/closed from the final position
                    // and notify the listener.
                    mIsAutoScrolled = false;
                    mIsScrolled = false;
                    if (mContentView.getLeft() == 0) {
                        mIsOpen = false;
                    } else {
                        mIsOpen = true;
                    }
                    if (mOnStateChangedListener != null) {
                        if (mIsOpen) {
                            mOnStateChangedListener.onOpen(mContentView);
                        } else {
                            mOnStateChangedListener.onClosed(mContentView);
                        }
                    }
                    break;
                case ViewDragHelper.STATE_SETTLING:
                    mIsAutoScrolled = true;
                    break;
            }
        }

        @Override
        public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) {
            if (changedView == mContentView) {
                if (mOnStateChangedListener != null) {
                    // Report progress as a percentage of the full slide width.
                    mOnStateChangedListener.onScrolled(mContentView, (int) (((float) left / (float) mDefaultslideWidth) * 100));
                }
            }
            super.onViewPositionChanged(changedView, left, top, dx, dy);
        }

        @Override
        public void onViewReleased(View releasedChild, float xvel, float yvel) {
            if (mIsAutoScrolled) {
                return;
            }
            if (releasedChild == mContentView) {
                if (mIsScrolled) {
                    // Released mid-drag: settle in the direction of the fling.
                    if (xvel <= 0) {
                        mDragger.settleCapturedViewAt(0, 0);
                    } else {
                        mDragger.settleCapturedViewAt(mDefaultslideWidth, 0);
                    }
                } else if (mIsOpen) {
                    // Tap/fling while open: only a leftward fling closes.
                    if (xvel <= 0) {
                        mDragger.settleCapturedViewAt(0, 0);
                    }
                } else {
                    // Tap/fling while closed: only a rightward fling opens.
                    if (xvel > 0) {
                        mDragger.settleCapturedViewAt(mDefaultslideWidth, 0);
                    }
                }
                invalidate();
            } else {
                super.onViewReleased(releasedChild, xvel, yvel);
            }
        }
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent event) {
        return mDragger.shouldInterceptTouchEvent(event);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        mDragger.processTouchEvent(event);
        return true;
    }

    @Override
    public void computeScroll() {
        // Keep invalidating until the ViewDragHelper finishes settling.
        if (mDragger.continueSettling(true)) {
            invalidate();
        }
    }

    /** Opens the drawer if it is currently closed. */
    public void open() {
        if (!mIsOpen) {
            controlView();
        }
    }

    /** Closes the drawer if it is currently open. */
    public void close() {
        if (mIsOpen) {
            controlView();
        }
    }

    /** Toggles the drawer, unless a drag or settle is already in progress. */
    public void controlView() {
        if (mIsScrolled || mIsAutoScrolled) {
            return;
        }
        if (mIsOpen) {
            mDragger.smoothSlideViewTo(mContentView, 0, 0);
        } else {
            mDragger.smoothSlideViewTo(mContentView, mDefaultslideWidth, 0);
        }
        invalidate();
    }

    /** Callbacks for drawer open/close/scroll-progress events. */
    public interface OnStateChangedListener {
        void onOpen(View view);

        void onClosed(View view);

        /** @param percentage 0 (closed) .. 100 (fully open). */
        void onScrolled(View view, int percentage);
    }

    public void setOnStateChangedListener(OnStateChangedListener onStateChangedListener) {
        mOnStateChangedListener = onStateChangedListener;
    }

    public boolean isOpen() {
        return mIsOpen;
    }
}
| |
/**
* Copyright (C) 2007 Logan Johnson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package atunit.spring;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Set;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;
import org.junit.runner.RunWith;
import atunit.AtUnit;
import atunit.Container;
import atunit.Mock;
import atunit.MockFramework;
import atunit.Stub;
import atunit.Unit;
import atunit.easymock.EasyMockFramework;
import atunit.jmock.JMockFramework;
/**
 * Integration tests for {@code SpringContainer}: each test runs a nested fixture
 * class through JUnit and asserts on the aggregate {@link Result}.
 */
public class SpringContainerTests {

    JUnitCore junit;

    @Before
    public void setUp() {
        junit = new JUnitCore();
    }

    @Test
    public void tNoBeans() {
        Result result = junit.run(TestClasses.NoBeans.class);
        assertTrue(result.wasSuccessful());
        assertEquals(1, result.getRunCount());
    }

    @Test
    public void tUndefinedBeanFilledIn() {
        Result result = junit.run(TestClasses.UndefinedBeanFilledIn.class);
        assertEquals(1, result.getRunCount());
        assertTrue(result.wasSuccessful());
    }

    @Test
    public void tMockIntegration() {
        Result result = junit.run(TestClasses.MockIntegration.class);
        assertEquals(1, result.getRunCount());
        assertTrue(result.wasSuccessful());
    }

    @Test
    public void tInheritance() {
        Result result = junit.run(TestClasses.Inheritance.class);
        assertEquals(1, result.getRunCount());
        assertTrue(result.wasSuccessful());
    }

    @Test
    public void tNameBindings() {
        // Fix: this test previously ran MockIntegration (duplicating
        // tMockIntegration) and the BindByName fixture was never exercised.
        Result result = junit.run(TestClasses.BindByName.class);
        assertEquals(1, result.getRunCount());
        assertTrue(result.wasSuccessful());
    }

    @Test
    public void tXmlContext() {
        Result result = junit.run(TestClasses.DefaultXmlContext.class);
        assertEquals(1, result.getRunCount());
        assertTrue(result.wasSuccessful());
    }

    @Test
    public void tMergedContexts() {
        Result result = junit.run(TestClasses.MergedContexts.class);
        assertEquals(1, result.getRunCount());
        assertTrue(result.wasSuccessful());
    }

    @Test
    public void tBrokenXmlContext() {
        // A broken context must fail the fixture and surface an exception.
        Result result = junit.run(TestClasses.BrokenXmlContext.class);
        assertEquals(1, result.getRunCount());
        assertFalse(result.wasSuccessful());
        assertNotNull(result.getFailures().get(0).getException());
    }

    @Test
    public void tSpecifiedContext() {
        Result result = junit.run(TestClasses.SpecifiedXmlContext.class);
        assertEquals(1, result.getRunCount());
        assertTrue(result.wasSuccessful());
    }

    @Test
    public void tSpecifiedBadXmlContext() {
        Result result = junit.run(TestClasses.SpecifiedBadXmlContext.class);
        assertEquals(1, result.getRunCount());
        assertFalse(result.wasSuccessful());
        assertNotNull(result.getFailures().get(0).getException());
    }

    /** Fixture classes executed by the tests above; not run directly by the suite. */
    protected static class TestClasses {

        @RunWith(AtUnit.class)
        @Container(SpringContainer.class)
        public static abstract class SpringTest {
        }

        public static class NoBeans extends SpringTest {
            @Unit
            String unit;

            @Test
            public void tPass() {
                assertTrue(true);
            }
        }

        public static class UndefinedBeanFilledIn extends SpringTest {
            @Bean
            @Unit
            String unit;

            @Test
            public void tPass() {
                assertTrue(true);
            }
        }

        @MockFramework(EasyMockFramework.class)
        public static class MockIntegration extends SpringTest {
            @Unit
            String unit;
            @Bean
            @Stub
            List<String> stringList;
            @Bean
            @Mock
            Set<String> stringSet;

            @Test
            public void tPass() {
                assertNotNull(stringList);
                assertNotNull(stringSet);
                EasyMock.expect(stringSet.contains("mystring")).andReturn(true);
                EasyMock.replay(stringSet);
                assertTrue(stringSet.contains("mystring"));
            }
        }

        public static class Inheritance extends MockIntegration {
        }

        @MockFramework(EasyMockFramework.class)
        public static class BindByName extends SpringTest {
            @Unit
            String unit;
            @Bean("stringList")
            @Stub
            List<String> stringList;
            @Bean("stringList2")
            @Stub
            List<String> stringList2;

            @Test
            public void tPass() {
                // Differently-named beans must resolve to distinct instances.
                assertNotNull(stringList);
                assertNotNull(stringList2);
                assertNotSame(stringList, stringList2);
            }
        }

        public static class DefaultXmlContext extends SpringTest {
            @Unit
            @Bean
            String unit;

            @Test
            public void tPass() {
                assertEquals("unit from context", unit);
            }
        }

        @Context("DefaultXmlContext.xml")
        public static class SpecifiedXmlContext extends SpringTest {
            @Unit
            @Bean
            String unit;

            @Test
            public void tPass() {
                assertEquals("unit from context", unit);
            }
        }

        @Context("ThisContextDoesNotExist")
        public static class SpecifiedBadXmlContext extends SpringTest {
            @Unit
            String unit;

            @Test
            public void tPass() {
                assertTrue(true);
            }
        }

        @MockFramework(JMockFramework.class)
        public static class MergedContexts extends SpringTest {
            @Bean
            @Stub
            List<String> list;
            @Bean
            @Unit
            StringListHolder holder;

            @Test
            public void tMergedContexts() {
                assertNotNull(list);
                assertNotNull(holder);
                assertSame(list, holder.list);
            }
        }

        public static class BrokenXmlContext extends SpringTest {
            @Bean
            @Unit
            String unit;

            @Test
            public void tXmlContext() {
                assertNotNull(unit);
            }
        }
    }

    /** Simple holder bean used by the MergedContexts fixture. */
    public static class StringListHolder {
        protected List<String> list;

        public void setList(List<String> list) {
            this.list = list;
        }
    }
}
| |
/*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.template.mgt.dao.impl;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.testng.PowerMockTestCase;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import org.wso2.carbon.identity.core.util.IdentityDatabaseUtil;
import org.wso2.carbon.identity.template.mgt.dao.TemplateManagerDAO;
import org.wso2.carbon.identity.template.mgt.exception.TemplateManagementException;
import org.wso2.carbon.identity.template.mgt.exception.TemplateManagementServerException;
import org.wso2.carbon.identity.template.mgt.model.Template;
import org.wso2.carbon.identity.template.mgt.model.TemplateInfo;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.powermock.api.mockito.PowerMockito.mock;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.wso2.carbon.identity.template.mgt.util.TestUtils.closeH2Base;
import static org.wso2.carbon.identity.template.mgt.util.TestUtils.getConnection;
import static org.wso2.carbon.identity.template.mgt.util.TestUtils.initiateH2Base;
import static org.wso2.carbon.identity.template.mgt.util.TestUtils.mockDataSource;
import static org.wso2.carbon.identity.template.mgt.util.TestUtils.spyConnection;
@PrepareForTest(IdentityDatabaseUtil.class)
public class TemplateManagerDAOImplTest extends PowerMockTestCase {
private static final Integer SAMPLE_TENANT_ID = -1234;
private static final Integer SAMPLE_TENANT_ID2 = 1;
private static String sampleScript = "<!-- You can customize the user prompt template here... -->\n" +
"\t\n" +
"<div class=\"uppercase\">\n" +
" <h3>Welcome {{name}}</h3>\n" +
"</div>\n" +
"\n" +
"<div class=\"boarder-all \">\n" +
" <div class=\"clearfix\"></div>\n" +
" <div class=\"padding-double login-form\">\n" +
"\n" +
" <form id=\"template-form\" method=\"POST\"> <!-- *DO NOT CHANGE THIS* -->\n" +
" <div class=\"col-xs-12 col-sm-12 col-md-12 col-lg-12 form-group required\">\n" +
"\n" +
" <!-- Add the required input field/s here...\n" +
" It should follow the below mentioned format-->\n" +
"\n" +
" <label for=\"sampleInput\" class=\"control-label\">sample input</label>\n" +
" <input type=\"text\" id=\"sampleInput\" name=\"sample_input\" class=\"form-control\" placeholder=\"sample input placeholder\" />\n" +
"\n" +
" </div>\n" +
"\n" +
" <input type=\"hidden\" id=\"promptResp\" name=\"promptResp\" value=\"true\"> <!-- *DO NOT CHANGE THIS* -->\n" +
" <input type=\"hidden\" id=\"promptId\" name=\"promptId\"> <!-- *DO NOT CHANGE THIS* -->\n" +
"\n" +
" <div class=\"col-xs-12 col-sm-12 col-md-12 col-lg-12 form-group required\">\n" +
" <input type=\"submit\" class=\"wr-btn grey-bg col-xs-12 col-md-12 col-lg-12 uppercase font-extra-large\" value=\"Submit\">\n" +
" </div>\n" +
" </form>\n" +
" <div class=\"clearfix\"></div>\n" +
" </div>\n" +
"</div>";
private static List<Template> templates = new ArrayList<>();
@BeforeMethod
public void setUp() throws Exception {
initiateH2Base();
Template template1 = new Template(SAMPLE_TENANT_ID, "T1", "Description 1", sampleScript);
Template template2 = new Template(SAMPLE_TENANT_ID2, "T2", "Description 2", sampleScript);
templates.add(template1);
templates.add(template2);
}
@AfterMethod
public void tearDown() throws Exception {
closeH2Base();
}
@DataProvider(name = "TemplateDataProvider")
public Object[][] addTemplateData() throws Exception {
Template template1 = new Template(SAMPLE_TENANT_ID, "T1", "Description 1", sampleScript);
Template template2 = new Template(SAMPLE_TENANT_ID2, "T2", "Description 2", sampleScript);
Template template3 = new Template(SAMPLE_TENANT_ID, "T3", "Description 3", sampleScript);
return new Object[][]{
{
template1
},
{
template2
},
{
template3,
},
};
}
@DataProvider(name = "getTemplateByNameDataProvider")
public Object[][] getTemplateByNameData() throws Exception {
Template template1 = new Template(SAMPLE_TENANT_ID, "T1", "Description 1", sampleScript);
Template template2 = new Template(SAMPLE_TENANT_ID2, "T2", "Description 2", sampleScript);
Template template3 = new Template(SAMPLE_TENANT_ID, "T3", "Description 3", sampleScript);
return new Object[][]{
{
template1,
SAMPLE_TENANT_ID
},
{
template2,
SAMPLE_TENANT_ID2
},
{
template3,
SAMPLE_TENANT_ID
},
};
}
@DataProvider(name = "UpdateTemplateDataProvider")
public Object[][] updateTemplateData() throws Exception {
Template template1 = new Template(SAMPLE_TENANT_ID, "T1", "Description 1", sampleScript);
Template template2 = new Template(SAMPLE_TENANT_ID2, "T2", "Description 2", sampleScript);
Template template3 = new Template(SAMPLE_TENANT_ID, "T3", "Description 3", sampleScript);
Template template1New = new Template(SAMPLE_TENANT_ID, "T1 Updated", "Updated Description 1", sampleScript);
Template template2New = new Template(SAMPLE_TENANT_ID2, "T2 Updated", "Updated Description 2", sampleScript);
Template template3New = new Template(SAMPLE_TENANT_ID, "T3 Updated", "Updated Description 3", sampleScript);
return new Object[][]{
{
"T1",
template1,
template1New
},
{
"T2",
template2,
template2New
},
{
"T3",
template3,
template3New
},
};
}
@DataProvider(name = "templateListProvider")
public Object[][] provideListData() throws Exception {
return new Object[][]{
// limit, offset, tenantId, resultSize
{0, 0, -1234, 0},
{1, 1, -1234, 1},
{10, 0, -1234, 3}
};
}
@Test(dataProvider = "TemplateDataProvider")
public void testAddTemplate(Object template) throws Exception {
DataSource dataSource = mock(DataSource.class);
mockDataSource(dataSource);
try (Connection connection = getConnection()) {
when(dataSource.getConnection()).thenReturn(connection);
TemplateManagerDAO templateManagerDAO = new TemplateManagerDAOImpl();
Template templateResult = templateManagerDAO.addTemplate(((Template) template));
Assert.assertEquals(templateResult.getTemplateName(), ((Template) template).getTemplateName());
Assert.assertEquals(templateResult.getTenantId(), ((Template) template).getTenantId());
}
}
@Test(dataProvider = "TemplateDataProvider", expectedExceptions = TemplateManagementServerException.class)
public void testAddTemplateServerException(Object template) throws Exception {
DataSource dataSource = mock(DataSource.class);
mockDataSource(dataSource);
try (Connection connection = getConnection()) {
when(dataSource.getConnection()).thenReturn(connection);
}
TemplateManagerDAO templateManagerDAO = new TemplateManagerDAOImpl();
templateManagerDAO.addTemplate(((Template) template));
Assert.fail("Expected: " + TemplateManagementServerException.class.getName());
}
@Test(dataProvider = "UpdateTemplateDataProvider")
public void testUpdateTemplate(String oldTemplateName, Object oldtemplate, Object newTemplate) throws Exception {
DataSource dataSource = mock(DataSource.class);
mockDataSource(dataSource);
try (Connection connection = getConnection()) {
when(dataSource.getConnection()).thenReturn(connection);
TemplateManagerDAO templateManagerDAO = new TemplateManagerDAOImpl();
addTemplates(templateManagerDAO, Collections.singletonList(oldtemplate), dataSource);
try (Connection connection1 = getConnection()) {
when(dataSource.getConnection()).thenReturn(connection1);
Template updatedTemplate = templateManagerDAO.updateTemplate(oldTemplateName, ((Template) newTemplate));
Assert.assertEquals(((Template) newTemplate).getTenantId(), updatedTemplate.getTenantId());
Assert.assertEquals(((Template) newTemplate).getTemplateName(), updatedTemplate.getTemplateName());
}
}
}
@Test(dataProvider = "UpdateTemplateDataProvider", expectedExceptions = TemplateManagementServerException.class)
public void testUpdateTemplateServerException(String oldTemplateName, Object oldtemplate, Object newTemplate) throws Exception {
DataSource dataSource = mock(DataSource.class);
mockDataSource(dataSource);
try (Connection connection = getConnection()) {
when(dataSource.getConnection()).thenReturn(connection);
TemplateManagerDAO templateManagerDAO = new TemplateManagerDAOImpl();
addTemplates(templateManagerDAO, Collections.singletonList(oldtemplate), dataSource);
templateManagerDAO.updateTemplate(oldTemplateName, ((Template) newTemplate));
Assert.fail("Expected: " + TemplateManagementServerException.class.getName());
}
}
@Test(dataProvider = "getTemplateByNameDataProvider")
public void testGetTemplateByName(Object templateObject, Integer tenantId) throws Exception {
DataSource dataSource = mock(DataSource.class);
mockDataSource(dataSource);
try (Connection connection = getConnection()) {
when(dataSource.getConnection()).thenReturn(connection);
TemplateManagerDAO templateManagerDAO = new TemplateManagerDAOImpl();
addTemplates(templateManagerDAO, Collections.singletonList(templateObject), dataSource);
try (Connection connection1 = getConnection()) {
when(dataSource.getConnection()).thenReturn(connection1);
Template templateByName = templateManagerDAO.getTemplateByName(((Template) templateObject).getTemplateName(), tenantId);
Assert.assertEquals(((Template) templateObject).getTemplateName(), templateByName.getTemplateName());
Assert.assertEquals(((Template) templateObject).getDescription(), templateByName.getDescription());
Assert.assertEquals(((Template) templateObject).getTemplateScript(), templateByName.getTemplateScript());
}
}
}
@Test(dataProvider = "getTemplateByNameDataProvider", expectedExceptions = TemplateManagementServerException.class)
public void testGetTemplateByNameDataAccessException(Object templateObject, Integer tenantId) throws Exception {
DataSource dataSource = mock(DataSource.class);
mockDataSource(dataSource);
try (Connection connection = getConnection()) {
when(dataSource.getConnection()).thenReturn(connection);
TemplateManagerDAO templateManagerDAO = new TemplateManagerDAOImpl();
addTemplates(templateManagerDAO, Collections.singletonList(templateObject), dataSource);
templateManagerDAO.getTemplateByName(((Template) templateObject).getTemplateName(), tenantId);
Assert.fail("Expected: " + TemplateManagementServerException.class.getName());
}
}
@Test(dataProvider = "templateListProvider")
public void testGetTemplateList(Integer limit, Integer offset, Integer tenantId, int resultSize) throws Exception {
Template template1 = new Template(SAMPLE_TENANT_ID, "T1", "Description 1", sampleScript);
Template template2 = new Template(SAMPLE_TENANT_ID, "T2", "Description 2", sampleScript);
Template template3 = new Template(SAMPLE_TENANT_ID, "Template3", "Description 3", "Script 3");
DataSource dataSource = mock(DataSource.class);
mockDataSource(dataSource);
try (Connection connection = getConnection()) {
Connection spyConnection = spyConnection(connection);
when(dataSource.getConnection()).thenReturn(spyConnection);
TemplateManagerDAO templateManagerDAO = new TemplateManagerDAOImpl();
Template templateResult1 = templateManagerDAO.addTemplate(template1);
Assert.assertEquals(templateResult1.getTemplateName(), template1.getTemplateName());
Template templateResult2 = templateManagerDAO.addTemplate(template2);
Assert.assertEquals(templateResult2.getTemplateName(), template2.getTemplateName());
Template templateResult3 = templateManagerDAO.addTemplate(template3);
Assert.assertEquals(templateResult3.getTemplateName(), template3.getTemplateName());
List<TemplateInfo> templateList = templateManagerDAO.getAllTemplates(tenantId, limit, offset);
Assert.assertEquals(templateList.size(), resultSize);
}
}
@Test(dataProvider = "templateListProvider", expectedExceptions = TemplateManagementServerException.class)
public void testGetTemplateListDataAccessException(Integer limit, Integer offset, Integer tenantId, int resultSize) throws Exception {
Template template1 = new Template(SAMPLE_TENANT_ID, "T1", "Description 1", sampleScript);
Template template2 = new Template(SAMPLE_TENANT_ID, "T2", "Description 2", sampleScript);
Template template3 = new Template(SAMPLE_TENANT_ID, "Template3", "Description 3", "Script 3");
DataSource dataSource = mock(DataSource.class);
mockDataSource(dataSource);
TemplateManagerDAO templateManagerDAO = new TemplateManagerDAOImpl();
try (Connection connection = getConnection()) {
Connection spyConnection = spyConnection(connection);
when(dataSource.getConnection()).thenReturn(spyConnection);
Template templateResult1 = templateManagerDAO.addTemplate(template1);
Assert.assertEquals(templateResult1.getTemplateName(), template1.getTemplateName());
Template templateResult2 = templateManagerDAO.addTemplate(template2);
Assert.assertEquals(templateResult2.getTemplateName(), template2.getTemplateName());
Template templateResult3 = templateManagerDAO.addTemplate(template3);
Assert.assertEquals(templateResult3.getTemplateName(), template3.getTemplateName());
}
templateManagerDAO.getAllTemplates(tenantId, limit, offset);
Assert.fail("Expected: " + TemplateManagementServerException.class.getName());
}
@Test(dataProvider = "TemplateDataProvider")
public void testDeleteTemplate(Object template) throws Exception {
DataSource dataSource = mock(DataSource.class);
mockDataSource(dataSource);
try (Connection connection = getConnection()) {
Connection spyConnection = spyConnection(connection);
when(dataSource.getConnection()).thenReturn(spyConnection);
TemplateManagerDAO templateManagerDAO = new TemplateManagerDAOImpl();
Template templateResult = templateManagerDAO.addTemplate(((Template) template));
Assert.assertEquals(templateResult.getTemplateName(), ((Template) template).getTemplateName());
templateManagerDAO.deleteTemplate(templateResult.getTemplateName(), templateResult.getTenantId());
}
}
// Failure path: once the connection backing the DAO is closed, deleting the
// previously persisted template must fail with TemplateManagementServerException
// (declared via expectedExceptions on the annotation).
@Test(dataProvider = "TemplateDataProvider", expectedExceptions = TemplateManagementServerException.class)
public void testDeleteTemplateDataAccessException(Object template) throws Exception {
    DataSource mockedSource = mock(DataSource.class);
    mockDataSource(mockedSource);
    TemplateManagerDAO dao = new TemplateManagerDAOImpl();
    Template input = (Template) template;
    try (Connection realConnection = getConnection()) {
        Connection trackedConnection = spyConnection(realConnection);
        when(mockedSource.getConnection()).thenReturn(trackedConnection);
        Template persisted = dao.addTemplate(input);
        Assert.assertEquals(persisted.getTemplateName(), input.getTemplateName());
    }
    // Connection is closed here - the delete below is expected to throw.
    dao.deleteTemplate(input.getTemplateName(), input.getTenantId());
    Assert.fail("Expected: " + TemplateManagementServerException.class.getName());
}
// Test helper: persists every template in the given list through the DAO.
// NOTE(review): 'dataSource' is unused and nothing here throws SQLException directly -
// both are presumably kept for signature compatibility with existing callers;
// confirm before cleaning them up.
private void addTemplates(TemplateManagerDAO templateManagerDAO, List<Object> templates, DataSource dataSource) throws SQLException, TemplateManagementException {
    for (Object template : templates) {
        templateManagerDAO.addTemplate((Template) template);
    }
}
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.sql.impl.exec.sort;
import com.hazelcast.sql.impl.LoggingQueryOperationHandler;
import com.hazelcast.sql.impl.QueryId;
import com.hazelcast.sql.impl.SqlTestSupport;
import com.hazelcast.sql.impl.UpstreamExec;
import com.hazelcast.sql.impl.exec.IterationResult;
import com.hazelcast.sql.impl.exec.fetch.Fetch;
import com.hazelcast.sql.impl.exec.fetch.FetchExec;
import com.hazelcast.sql.impl.exec.io.InboundBatch;
import com.hazelcast.sql.impl.exec.io.ReceiveSortMergeExec;
import com.hazelcast.sql.impl.exec.io.StripedInbox;
import com.hazelcast.sql.impl.exec.io.flowcontrol.simple.SimpleFlowControl;
import com.hazelcast.sql.impl.expression.ConstantExpression;
import com.hazelcast.sql.impl.expression.Expression;
import com.hazelcast.sql.impl.expression.ExpressionEvalContext;
import com.hazelcast.sql.impl.expression.SimpleExpressionEvalContext;
import com.hazelcast.sql.impl.row.EmptyRowBatch;
import com.hazelcast.sql.impl.row.Row;
import com.hazelcast.sql.impl.row.RowBatch;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.UUID;
import static com.hazelcast.sql.impl.type.QueryDataType.BIGINT;
import static com.hazelcast.sql.impl.type.QueryDataType.VARCHAR;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Tests for the sort/merge execution machinery: {@link SortKey} and its comparator,
 * the striped inbox, merge-sort sources, {@code ReceiveSortMergeExec}, and the
 * fetch (LIMIT/OFFSET) operators.
 *
 * <p>Fix: renamed {@code testLimitOfsetInvalid} to {@code testLimitOffsetInvalid}
 * ("Ofset" typo). JUnit discovers tests via the {@code @Test} annotation, so the
 * rename does not change which tests run.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class SortExecTest extends SqlTestSupport {

    /** SortKey accessors plus the equals/hashCode contract over (key, index). */
    @Test
    public void testSortKey() {
        Object[] key = new Object[]{1, null, "foo", Long.MAX_VALUE};

        SortKey sortKey = new SortKey(key, 5);
        assertTrue(Arrays.equals(sortKey.getKey(), key));
        assertEquals(5, sortKey.getIndex());

        // Same key and index -> equal both ways with equal hash codes.
        SortKey sortKey2 = new SortKey(key, 5);
        assertTrue(sortKey.equals(sortKey2));
        assertTrue(sortKey2.equals(sortKey));
        assertEquals(sortKey.hashCode(), sortKey2.hashCode());

        // Differing index or differing key content -> not equal.
        SortKey sortKey3 = new SortKey(key, 6);
        assertFalse(sortKey3.equals(sortKey));
        SortKey sortKey4 = new SortKey(new Object[]{1, null, "foo1", Long.MAX_VALUE}, 5);
        assertFalse(sortKey4.equals(sortKey));
    }

    /** Single ascending column: natural ordering, ties broken by index, null sorts low. */
    @Test
    public void testSortKeyComparatorAscending() {
        boolean[] ascs = new boolean[]{true};
        Comparator<SortKey> comparator = new SortKeyComparator(ascs);

        SortKey sortKey1 = new SortKey(new Object[]{1}, 5);
        SortKey sortKey2 = new SortKey(new Object[]{3}, 5);
        int cmp = comparator.compare(sortKey1, sortKey2);
        assertTrue(cmp < 0);
        cmp = comparator.compare(sortKey2, sortKey1);
        assertTrue(cmp > 0);
        cmp = comparator.compare(sortKey1, sortKey1);
        assertEquals(0, cmp);

        // Equal key values: the lower index wins.
        SortKey sortKey3 = new SortKey(new Object[]{3}, 6);
        cmp = comparator.compare(sortKey2, sortKey3);
        assertTrue(cmp < 0);

        // Null key value compares below a non-null value in ascending order.
        SortKey sortKey4 = new SortKey(new Object[]{null}, 7);
        cmp = comparator.compare(sortKey3, sortKey4);
        assertTrue(cmp > 0);
    }

    /** Single descending column: reversed value ordering, ties still broken by index. */
    @Test
    public void testSortKeyComparatorDescending() {
        boolean[] ascs = new boolean[]{false};
        Comparator<SortKey> comparator = new SortKeyComparator(ascs);

        SortKey sortKey1 = new SortKey(new Object[]{1}, 5);
        SortKey sortKey2 = new SortKey(new Object[]{3}, 5);
        int cmp = comparator.compare(sortKey1, sortKey2);
        assertTrue(cmp > 0);
        cmp = comparator.compare(sortKey2, sortKey1);
        assertTrue(cmp < 0);
        cmp = comparator.compare(sortKey1, sortKey1);
        assertEquals(0, cmp);

        // Equal key values: index tiebreak is not affected by the descending flag.
        SortKey sortKey3 = new SortKey(new Object[]{3}, 6);
        cmp = comparator.compare(sortKey2, sortKey3);
        assertTrue(cmp < 0);

        SortKey sortKey4 = new SortKey(new Object[]{null}, 7);
        cmp = comparator.compare(sortKey3, sortKey4);
        assertTrue(cmp < 0);
    }

    /** Two ascending columns: leading column dominates; nulls sort low per column. */
    @Test
    public void testSortKeyComparatorComposite() {
        boolean[] ascs = new boolean[]{true, true};
        Comparator<SortKey> comparator = new SortKeyComparator(ascs);

        SortKey sortKey1 = new SortKey(new Object[]{1, 1}, 5);
        SortKey sortKey2 = new SortKey(new Object[]{3, 1}, 5);
        int cmp = comparator.compare(sortKey1, sortKey2);
        assertTrue(cmp < 0);
        cmp = comparator.compare(sortKey2, sortKey1);
        assertTrue(cmp > 0);
        cmp = comparator.compare(sortKey1, sortKey1);
        assertEquals(0, cmp);

        // Second column decides when the first is equal.
        SortKey sortKey3 = new SortKey(new Object[]{1, 3}, 5);
        SortKey sortKey4 = new SortKey(new Object[]{1, 5}, 5);
        cmp = comparator.compare(sortKey3, sortKey4);
        assertTrue(cmp < 0);

        // Null in the leading column sorts before any non-null leading value.
        SortKey sortKey5 = new SortKey(new Object[]{null, 5}, 5);
        cmp = comparator.compare(sortKey5, sortKey3);
        assertTrue(cmp < 0);

        // Null in the trailing column sorts before non-null when leading values tie... here
        // leading values differ (1 < 2), so sortKey3 still comes first.
        SortKey sortKey6 = new SortKey(new Object[]{2, null}, 5);
        cmp = comparator.compare(sortKey3, sortKey6);
        assertTrue(cmp < 0);
    }

    /** One stripe per sender; batches are polled per stripe and report last-ness. */
    @Test
    public void testStripedInbox() {
        UUID localMemberId = UUID.randomUUID();
        List<UUID> senderMemberIds = Arrays.asList(localMemberId, UUID.randomUUID(), UUID.randomUUID());
        QueryId queryId = QueryId.create(UUID.randomUUID());
        LoggingQueryOperationHandler operationHandler = new LoggingQueryOperationHandler();
        StripedInbox inbox = new StripedInbox(
            operationHandler,
            queryId,
            1,
            true,
            1000,
            localMemberId,
            senderMemberIds,
            new SimpleFlowControl(1_000L, 0.5d)
        );
        inbox.setup();

        assertEquals(3, inbox.getStripeCount());

        // One non-final batch per sender lands in that sender's stripe.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(0, 10), 0, false, senderMemberIds.get(0)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(0, 12), 0, false, senderMemberIds.get(1)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(0, 14), 0, false, senderMemberIds.get(2)), 100L);

        InboundBatch pollBatch = inbox.poll(0);
        assertNotNull(pollBatch);
        assertFalse(pollBatch.isLast());
        assertEquals(localMemberId, pollBatch.getSenderId());
        assertEquals(10, pollBatch.getBatch().getRowCount());

        pollBatch = inbox.poll(1);
        assertNotNull(pollBatch);
        assertFalse(pollBatch.isLast());
        assertEquals(senderMemberIds.get(1), pollBatch.getSenderId());
        assertEquals(12, pollBatch.getBatch().getRowCount());

        pollBatch = inbox.poll(2);
        assertNotNull(pollBatch);
        assertFalse(pollBatch.isLast());
        assertEquals(senderMemberIds.get(2), pollBatch.getSenderId());
        assertEquals(14, pollBatch.getBatch().getRowCount());

        // A final batch from sender 1 is observed as last; other stripes stay empty.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(100, 12), 1, true, senderMemberIds.get(1)), 100L);
        pollBatch = inbox.poll(1);
        assertNotNull(pollBatch);
        assertTrue(pollBatch.isLast());
        assertEquals(senderMemberIds.get(1), pollBatch.getSenderId());
        assertEquals(12, pollBatch.getBatch().getRowCount());

        assertNull(inbox.poll(0));
        assertNull(inbox.poll(2));
    }

    /** Per-source advance()/peek semantics of the merge-sort inputs built over an inbox. */
    @Test
    public void testMergeSortSources() {
        UUID localMemberId = UUID.randomUUID();
        List<UUID> senderMemberIds = Arrays.asList(localMemberId, UUID.randomUUID(), UUID.randomUUID());
        QueryId queryId = QueryId.create(UUID.randomUUID());
        LoggingQueryOperationHandler operationHandler = new LoggingQueryOperationHandler();
        StripedInbox inbox = new StripedInbox(
            operationHandler,
            queryId,
            1,
            true,
            1000,
            localMemberId,
            senderMemberIds,
            new SimpleFlowControl(1_000L, 0.5d)
        );
        inbox.setup();

        ReceiveSortMergeExec receiveSortMergeExec =
            new ReceiveSortMergeExec(
                1,
                inbox,
                new int[]{0},
                new boolean[]{true},
                null,
                null
            );

        MergeSortSource[] sources = receiveSortMergeExec.getMergeSort().getSources();
        assertEquals(3, sources.length);

        // No data yet: advance fails, nothing is done, and peeks return null.
        assertFalse(sources[0].advance());
        assertFalse(sources[1].advance());
        assertFalse(sources[2].advance());
        assertFalse(sources[0].isDone());
        assertFalse(sources[1].isDone());
        assertFalse(sources[2].isDone());
        assertNull(sources[0].peekKey());
        assertNull(sources[1].peekKey());
        assertNull(sources[2].peekKey());
        assertNull(sources[0].peekRow());
        assertNull(sources[1].peekRow());
        assertNull(sources[2].peekRow());

        // First (non-final) batch for every stripe: 10 rows each.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(0, 10), 0, false, senderMemberIds.get(0)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(100, 10), 0, false, senderMemberIds.get(1)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(200, 10), 0, false, senderMemberIds.get(2)), 100L);

        for (int i = 0; i < 10; ++i) {
            assertTrue(sources[0].advance());
            assertTrue(sources[1].advance());
            assertTrue(sources[2].advance());
            assertFalse(sources[0].isDone());
            assertFalse(sources[1].isDone());
            assertFalse(sources[2].isDone());

            SortKey sortKey0 = sources[0].peekKey();
            SortKey sortKey1 = sources[1].peekKey();
            SortKey sortKey2 = sources[2].peekKey();
            assertEquals(1, sortKey0.getKey().length);
            assertEquals(1, sortKey1.getKey().length);
            assertEquals(1, sortKey2.getKey().length);
            assertEquals(i, sortKey0.getKey()[0]);
            assertEquals(100 + i, sortKey1.getKey()[0]);
            assertEquals(200 + i, sortKey2.getKey()[0]);
        }

        // Batches exhausted but streams not finished: advance fails, not done.
        assertFalse(sources[0].advance());
        assertFalse(sources[1].advance());
        assertFalse(sources[2].advance());
        assertFalse(sources[0].isDone());
        assertFalse(sources[1].isDone());
        assertFalse(sources[2].isDone());

        // Final batch for every stripe; done flips only on the very last row.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(10, 10), 1, true, senderMemberIds.get(0)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(110, 10), 1, true, senderMemberIds.get(1)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(210, 10), 1, true, senderMemberIds.get(2)), 100L);

        for (int i = 0; i < 10; ++i) {
            assertTrue(sources[0].advance());
            assertTrue(sources[1].advance());
            assertTrue(sources[2].advance());

            if (i == 9) {
                assertTrue(sources[0].isDone());
                assertTrue(sources[1].isDone());
                assertTrue(sources[2].isDone());
            } else {
                assertFalse(sources[0].isDone());
                assertFalse(sources[1].isDone());
                assertFalse(sources[2].isDone());
            }

            SortKey sortKey0 = sources[0].peekKey();
            SortKey sortKey1 = sources[1].peekKey();
            SortKey sortKey2 = sources[2].peekKey();
            assertEquals(1, sortKey0.getKey().length);
            assertEquals(1, sortKey1.getKey().length);
            assertEquals(1, sortKey2.getKey().length);
            assertEquals(10 + i, sortKey0.getKey()[0]);
            assertEquals(110 + i, sortKey1.getKey()[0]);
            assertEquals(210 + i, sortKey2.getKey()[0]);
        }

        assertFalse(sources[0].advance());
        assertFalse(sources[1].advance());
        assertFalse(sources[2].advance());
    }

    /** MergeSort emits rows only up to the minimum key confirmed across all live sources. */
    @Test
    public void testMergeSort() {
        UUID localMemberId = UUID.randomUUID();
        List<UUID> senderMemberIds = Arrays.asList(localMemberId, UUID.randomUUID(), UUID.randomUUID());
        QueryId queryId = QueryId.create(UUID.randomUUID());
        LoggingQueryOperationHandler operationHandler = new LoggingQueryOperationHandler();
        StripedInbox inbox = new StripedInbox(
            operationHandler,
            queryId,
            1,
            true,
            1000,
            localMemberId,
            senderMemberIds,
            new SimpleFlowControl(1_000L, 0.5d)
        );
        inbox.setup();

        ReceiveSortMergeExec receiveSortMergeExec =
            new ReceiveSortMergeExec(
                1,
                inbox,
                new int[]{0},
                new boolean[]{true},
                null,
                null
            );
        receiveSortMergeExec.setup(emptyFragmentContext());

        MergeSort mergeSort = receiveSortMergeExec.getMergeSort();
        assertEquals(3, mergeSort.getSources().length);
        assertFalse(mergeSort.isDone());

        // No input yet -> no batch.
        List<Row> batch = mergeSort.nextBatch();
        assertNull(batch);

        // Rows [0..9], [20..29], [30..39]: only [0..9] is safely below all other heads.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(0, 10), 0, false, senderMemberIds.get(0)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(20, 10), 0, false, senderMemberIds.get(1)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(30, 10), 0, false, senderMemberIds.get(2)), 100L);
        assertFalse(mergeSort.isDone());
        batch = mergeSort.nextBatch();
        assertEquals(10, batch.size());
        assertBatch(batch, 0, 9, true);
        assertFalse(mergeSort.isDone());
        assertNull(mergeSort.nextBatch());

        // Sender 0 finishes; its [20..29] plus sender 1's [20..29] become emittable.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(20, 10), 1, true, senderMemberIds.get(0)), 100L);
        batch = mergeSort.nextBatch();
        assertEquals(20, batch.size());
        assertBatch(batch, 20, 29, false);
        assertFalse(mergeSort.isDone());
        assertNull(mergeSort.nextBatch());

        inbox.onBatch(new InboundBatch(createMonotonicBatch(30, 20), 1, true, senderMemberIds.get(1)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(40, 10), 1, false, senderMemberIds.get(2)), 100L);
        batch = mergeSort.nextBatch();
        assertEquals(40, batch.size());
        assertBatch(batch, 30, 49, false);
        assertFalse(mergeSort.isDone());
        assertNull(mergeSort.nextBatch());

        // Last sender finishes; remaining rows drain and the sort completes.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(60, 10), 2, true, senderMemberIds.get(2)), 100L);
        batch = mergeSort.nextBatch();
        assertEquals(10, batch.size());
        assertBatch(batch, 60, 69, false);
        assertTrue(mergeSort.isDone());
        assertNull(mergeSort.nextBatch());
    }

    /** End-to-end advance()/currentBatch() protocol of ReceiveSortMergeExec. */
    @Test
    public void testReceiveSortMergeExec() {
        UUID localMemberId = UUID.randomUUID();
        List<UUID> senderMemberIds = Arrays.asList(localMemberId, UUID.randomUUID(), UUID.randomUUID());
        QueryId queryId = QueryId.create(UUID.randomUUID());
        LoggingQueryOperationHandler operationHandler = new LoggingQueryOperationHandler();
        StripedInbox inbox = new StripedInbox(
            operationHandler,
            queryId,
            1,
            true,
            1000,
            localMemberId,
            senderMemberIds,
            new SimpleFlowControl(1_000L, 0.5d)
        );
        inbox.setup();

        ReceiveSortMergeExec exec =
            new ReceiveSortMergeExec(
                1,
                inbox,
                new int[]{0},
                new boolean[]{true},
                null,
                null
            );
        exec.setup(emptyFragmentContext());

        assertEquals(IterationResult.WAIT, exec.advance());

        // Heads 7/3/5: only the minimum head (3) can be emitted.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(7, 1), 0, false, senderMemberIds.get(0)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(3, 1), 0, false, senderMemberIds.get(1)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(5, 1), 0, false, senderMemberIds.get(2)), 100L);
        assertEquals(IterationResult.FETCHED, exec.advance());
        assertBatch(exec.currentBatch(), 1, 3, 3, true);
        assertEquals(IterationResult.WAIT, exec.advance());

        inbox.onBatch(new InboundBatch(createMonotonicBatch(9, 1), 1, false, senderMemberIds.get(1)), 100L);
        assertEquals(IterationResult.FETCHED, exec.advance());
        assertBatch(exec.currentBatch(), 1, 5, 5, true);
        assertEquals(IterationResult.WAIT, exec.advance());

        inbox.onBatch(new InboundBatch(createMonotonicBatch(11, 2), 1, false, senderMemberIds.get(2)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(13, 3), 2, false, senderMemberIds.get(1)), 100L);
        assertEquals(IterationResult.FETCHED, exec.advance());
        assertBatch(exec.currentBatch(), 1, 7, 7, true);

        inbox.onBatch(new InboundBatch(createMonotonicBatch(15, 4), 1, true, senderMemberIds.get(0)), 100L);
        assertEquals(IterationResult.FETCHED, exec.advance());
        assertBatch(exec.currentBatch(), 3, 9, 12, true);

        // Final (possibly empty) batches complete every stream -> FETCHED_DONE.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(0, 0), 2, true, senderMemberIds.get(2)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(21, 7), 3, true, senderMemberIds.get(1)), 100L);
        assertEquals(IterationResult.FETCHED_DONE, exec.advance());
        assertBatch(exec.currentBatch(), 14, 13, 27, false);
    }

    /** Combined LIMIT + OFFSET applied by the Fetch processor. */
    @Test
    public void testLimitOffset() {
        Fetch fetchProcessor = newFetch(2L, 5L);
        RowBatch batch = fetchProcessor.apply(createMonotonicBatch(0, 10));
        assertBatch(batch, 5, 2, 6, true);

        fetchProcessor = newFetch(2L, 5L);
        batch = fetchProcessor.apply(createMonotonicBatch(0, 0));
        assertBatch(batch, 0, 0, 0, true);

        fetchProcessor = newFetch(2L, 5L);
        batch = fetchProcessor.apply(createMonotonicBatch(0, 1));
        assertBatch(batch, 0, 0, 0, true);

        fetchProcessor = newFetch(2L, 5L);
        batch = fetchProcessor.apply(createMonotonicBatch(0, 4));
        assertBatch(batch, 2, 2, 3, true);

        fetchProcessor = newFetch(5L, 10L);
        batch = fetchProcessor.apply(createMonotonicBatch(0, 12));
        assertBatch(batch, 7, 5, 11, true);
    }

    /** OFFSET without LIMIT: skip the first N rows, pass the rest. */
    @Test
    public void testOffsetOnly() {
        Fetch fetchProcessor = newFetch(2L, null);
        RowBatch batch = fetchProcessor.apply(createMonotonicBatch(0, 12));
        assertBatch(batch, 10, 2, 11, true);

        fetchProcessor = newFetch(10L, null);
        batch = fetchProcessor.apply(createMonotonicBatch(0, 5));
        assertBatch(batch, 0, 0, 0, true);

        fetchProcessor = newFetch(2L, null);
        batch = fetchProcessor.apply(createMonotonicBatch(0, 20));
        assertBatch(batch, 18, 2, 19, true);
    }

    /** LIMIT without OFFSET: cap the row count, including the LIMIT 0 edge case. */
    @Test
    public void testLimitOnly() {
        Fetch fetchProcessor = newFetch(null, 10L);
        RowBatch batch = fetchProcessor.apply(createMonotonicBatch(0, 12));
        assertBatch(batch, 10, 0, 9, true);

        fetchProcessor = newFetch(null, 10L);
        batch = fetchProcessor.apply(createMonotonicBatch(5, 10));
        assertBatch(batch, 10, 5, 14, true);

        fetchProcessor = newFetch(null, 0L);
        batch = fetchProcessor.apply(createMonotonicBatch(5, 10));
        assertBatch(batch, 0, 0, 0, true);
    }

    /** Negative LIMIT/OFFSET values are rejected by assertions. (Renamed from "Ofset".) */
    @Test
    public void testLimitOffsetInvalid() {
        assertThrows(AssertionError.class, () -> newFetch(-1L, 10L));
        assertThrows(AssertionError.class, () -> newFetch(1L, -10L));
        assertThrows(AssertionError.class, () -> newFetch(-1L, -10L));
    }

    /** FetchExec buffers upstream batches and applies LIMIT 10 OFFSET 5. */
    @Test
    public void testFetchExec() {
        UpstreamExec upstream = new UpstreamExec(1);

        Expression<?> limit = ConstantExpression.create(10, BIGINT);
        Expression<?> offset = ConstantExpression.create(5, BIGINT);

        FetchExec exec = new FetchExec(2, upstream, limit, offset);
        exec.setup(emptyFragmentContext());

        // Test empty state.
        assertEquals(IterationResult.WAIT, exec.advance());
        upstream.addResult(IterationResult.FETCHED, EmptyRowBatch.INSTANCE);
        assertEquals(IterationResult.WAIT, exec.advance());

        // Consume several batches, still insufficient to produce a result.
        upstream.addResult(IterationResult.FETCHED, createMonotonicBatch(0, 2));
        upstream.addResult(IterationResult.FETCHED, createMonotonicBatch(2, 2));
        assertEquals(IterationResult.WAIT, exec.advance());

        // One more batch, finally producing some rows.
        upstream.addResult(IterationResult.FETCHED, createMonotonicBatch(4, 5));
        assertEquals(IterationResult.FETCHED, exec.advance());
        assertBatch(exec.currentBatch(), 4, 5, 8, true);

        // One more batch
        upstream.addResult(IterationResult.FETCHED, createMonotonicBatch(9, 2));
        assertEquals(IterationResult.FETCHED, exec.advance());
        assertBatch(exec.currentBatch(), 2, 9, 10, true);

        // Final batch to finalize the result
        upstream.addResult(IterationResult.FETCHED, createMonotonicBatch(11, 20));
        assertEquals(IterationResult.FETCHED_DONE, exec.advance());
        assertBatch(exec.currentBatch(), 4, 11, 14, true);
    }

    /** Null or non-integer LIMIT/OFFSET expressions fail at setup time. */
    @Test
    public void testFetchExecInvalid() {
        UpstreamExec upstream = new UpstreamExec(1);

        Expression<?> limit = ConstantExpression.create(null, BIGINT);
        Expression<?> offset = ConstantExpression.create(5, BIGINT);
        FetchExec exec = new FetchExec(2, upstream, limit, offset);
        assertThrows(AssertionError.class, () -> exec.setup(emptyFragmentContext()));

        limit = ConstantExpression.create(5, BIGINT);
        offset = ConstantExpression.create(null, BIGINT);
        FetchExec exec2 = new FetchExec(2, upstream, limit, offset);
        assertThrows(AssertionError.class, () -> exec2.setup(emptyFragmentContext()));

        limit = ConstantExpression.create(5, BIGINT);
        offset = ConstantExpression.create("foo", VARCHAR);
        FetchExec exec3 = new FetchExec(2, upstream, limit, offset);
        assertThrows(AssertionError.class, () -> exec3.setup(emptyFragmentContext()));
    }

    /** Merge-sort receive with fetch=10/offset=2 applied on top of the merged stream. */
    @Test
    public void testReceiveSortMergeExecWithFetchAndOffset() {
        UUID localMemberId = UUID.randomUUID();
        List<UUID> senderMemberIds = Arrays.asList(localMemberId, UUID.randomUUID(), UUID.randomUUID());
        QueryId queryId = QueryId.create(UUID.randomUUID());
        LoggingQueryOperationHandler operationHandler = new LoggingQueryOperationHandler();
        StripedInbox inbox = new StripedInbox(
            operationHandler,
            queryId,
            1,
            true,
            1000,
            localMemberId,
            senderMemberIds,
            new SimpleFlowControl(1_000L, 0.5d)
        );
        inbox.setup();

        Expression<?> fetch = ConstantExpression.create(10, BIGINT);
        Expression<?> offset = ConstantExpression.create(2, BIGINT);

        ReceiveSortMergeExec exec =
            new ReceiveSortMergeExec(
                1,
                inbox,
                new int[]{0},
                new boolean[]{true},
                fetch,
                offset
            );
        exec.setup(emptyFragmentContext());

        assertEquals(IterationResult.WAIT, exec.advance());

        // Consume several batches, still insufficient to produce a result.
        inbox.onBatch(new InboundBatch(createMonotonicBatch(7, 2), 0, false, senderMemberIds.get(0)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(3, 2), 0, false, senderMemberIds.get(1)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(5, 2), 0, false, senderMemberIds.get(2)), 100L);
        assertEquals(IterationResult.WAIT, exec.advance());

        // Consume more batches
        inbox.onBatch(new InboundBatch(createMonotonicBatch(13, 2), 1, false, senderMemberIds.get(0)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(11, 2), 1, false, senderMemberIds.get(1)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(9, 2), 1, false, senderMemberIds.get(2)), 100L);
        assertEquals(IterationResult.FETCHED, exec.advance());
        assertBatch(exec.currentBatch(), 6, 5, 10, true);

        // Consume more batches
        inbox.onBatch(new InboundBatch(createMonotonicBatch(15, 2), 2, false, senderMemberIds.get(0)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(19, 2), 2, false, senderMemberIds.get(1)), 100L);
        inbox.onBatch(new InboundBatch(createMonotonicBatch(17, 2), 2, false, senderMemberIds.get(2)), 100L);
        assertEquals(IterationResult.FETCHED_DONE, exec.advance());
        assertBatch(exec.currentBatch(), 4, 11, 14, true);
    }

    /**
     * Builds a Fetch processor ready for use.
     * NOTE: parameter order here is (offset, limit), the reverse of the Fetch
     * constructor's (limit, offset) - be careful at call sites.
     */
    private Fetch newFetch(Long offset, Long limit) {
        Expression<?> limitExpr = limit == null ? null : ConstantExpression.create(limit, BIGINT);
        Expression<?> offsetExpr = offset == null ? null : ConstantExpression.create(offset, BIGINT);
        Fetch fetchProcessor = new Fetch(limitExpr, offsetExpr);
        ExpressionEvalContext evalContext = SimpleExpressionEvalContext.create();
        fetchProcessor.setup(evalContext);
        return fetchProcessor;
    }

    /**
     * Asserts that the row list starts at {@code low}, ends at {@code high}, and is
     * strictly ({@code less == true}) or weakly ({@code less == false}) increasing
     * on column 0. Assumes the batch is non-empty.
     */
    private void assertBatch(List<Row> batch, int low, int high, boolean less) {
        int actualLow = batch.get(0).get(0);
        int actualHigh = batch.get(batch.size() - 1).get(0);

        assertEquals(low, actualLow);
        assertEquals(high, actualHigh);

        Integer prev = null;
        for (int i = 0; i < batch.size(); ++i) {
            if (prev == null) {
                assertEquals(low, actualLow);
                prev = batch.get(i).get(0);
            } else {
                int actual = batch.get(i).get(0);
                int cmp = Integer.compare(prev, actual);
                if (less) {
                    assertTrue(cmp < 0);
                } else {
                    assertTrue(cmp <= 0);
                }
                prev = actual;
            }
        }
    }

    /**
     * Same monotonicity check for a {@link RowBatch}, plus an exact row-count
     * assertion; an empty batch is accepted when {@code expectedCount == 0}.
     */
    private void assertBatch(RowBatch batch, int expectedCount, int low, int high, boolean less) {
        assertEquals(expectedCount, batch.getRowCount());

        if (batch.getRowCount() == 0) {
            return;
        }

        int actualLow = batch.getRow(0).get(0);
        int actualHigh = batch.getRow(batch.getRowCount() - 1).get(0);

        assertEquals(low, actualLow);
        assertEquals(high, actualHigh);

        Integer prev = null;
        for (int i = 0; i < batch.getRowCount(); ++i) {
            if (prev == null) {
                assertEquals(low, actualLow);
                prev = batch.getRow(i).get(0);
            } else {
                int actual = batch.getRow(i).get(0);
                int cmp = Integer.compare(prev, actual);
                if (less) {
                    assertTrue(cmp < 0);
                } else {
                    assertTrue(cmp <= 0);
                }
                prev = actual;
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/
package org.apache.logging.log4j.core.appender;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.util.FileUtils;
import org.apache.logging.log4j.core.util.NullOutputStream;
/**
* Extends OutputStreamManager but instead of using a buffered output stream,
* this class uses a {@code ByteBuffer} and a {@code RandomAccessFile} to do the
* I/O.
*/
public class RandomAccessFileManager extends OutputStreamManager {
    static final int DEFAULT_BUFFER_SIZE = 256 * 1024;

    private static final RandomAccessFileManagerFactory FACTORY = new RandomAccessFileManagerFactory();

    private final String advertiseURI;
    private final RandomAccessFile randomAccessFile;

    // FIX: give the ThreadLocal an initial value so isEndOfBatch() never returns null.
    // Previously only the constructing thread called set(Boolean.FALSE); any other
    // thread reading the flag first observed null, causing an NPE wherever the
    // Boolean result was unboxed.
    private final ThreadLocal<Boolean> isEndOfBatch = new ThreadLocal<Boolean>() {
        @Override
        protected Boolean initialValue() {
            return Boolean.FALSE;
        }
    };

    protected RandomAccessFileManager(final LoggerContext loggerContext, final RandomAccessFile file, final String fileName,
            final OutputStream os, final int bufferSize, final String advertiseURI,
            final Layout<? extends Serializable> layout, final boolean writeHeader) {
        super(loggerContext, os, fileName, false, layout, writeHeader, ByteBuffer.wrap(new byte[bufferSize]));
        this.randomAccessFile = file;
        this.advertiseURI = advertiseURI;
        // No explicit isEndOfBatch.set(...) needed: initialValue() covers all threads.
    }

    /**
     * Returns the RandomAccessFileManager.
     *
     * @param fileName The name of the file to manage.
     * @param append true if the file should be appended to, false if it should
     *            be overwritten.
     * @param isFlush true if the contents should be flushed to disk on every
     *            write
     * @param bufferSize The buffer size.
     * @param advertiseURI the URI to use when advertising the file
     * @param layout The layout.
     * @param configuration The configuration.
     * @return A RandomAccessFileManager for the File.
     */
    public static RandomAccessFileManager getFileManager(final String fileName, final boolean append,
            final boolean isFlush, final int bufferSize, final String advertiseURI,
            final Layout<? extends Serializable> layout, final Configuration configuration) {
        return narrow(RandomAccessFileManager.class, getManager(fileName, new FactoryData(append,
                isFlush, bufferSize, advertiseURI, layout, configuration), FACTORY));
    }

    /**
     * Returns the end-of-batch flag for the calling thread; never {@code null}.
     */
    public Boolean isEndOfBatch() {
        return isEndOfBatch.get();
    }

    /**
     * Sets the end-of-batch flag for the calling thread.
     *
     * @param endOfBatch whether the current event ends a batch
     */
    public void setEndOfBatch(final boolean endOfBatch) {
        this.isEndOfBatch.set(Boolean.valueOf(endOfBatch));
    }

    @Override
    protected void writeToDestination(final byte[] bytes, final int offset, final int length) {
        try {
            randomAccessFile.write(bytes, offset, length);
        } catch (final IOException ex) {
            final String msg = "Error writing to RandomAccessFile " + getName();
            throw new AppenderLoggingException(msg, ex);
        }
    }

    @Override
    public synchronized void flush() {
        flushBuffer(byteBuffer);
    }

    @Override
    public synchronized boolean closeOutputStream() {
        flush();
        try {
            randomAccessFile.close();
            return true;
        } catch (final IOException ex) {
            logError("Unable to close RandomAccessFile", ex);
            return false;
        }
    }

    /**
     * Returns the name of the File being managed.
     *
     * @return The name of the File being managed.
     */
    public String getFileName() {
        return getName();
    }

    /**
     * Returns the buffer capacity.
     * @return the buffer size
     */
    public int getBufferSize() {
        return byteBuffer.capacity();
    }

    /**
     * Gets this FileManager's content format specified by:
     * <p>
     * Key: "fileURI" Value: provided "advertiseURI" param.
     * </p>
     *
     * @return Map of content format keys supporting FileManager
     */
    @Override
    public Map<String, String> getContentFormat() {
        final Map<String, String> result = new HashMap<>(
                super.getContentFormat());
        result.put("fileURI", advertiseURI);
        return result;
    }

    /**
     * Factory Data.
     */
    private static class FactoryData extends ConfigurationFactoryData {
        private final boolean append;
        private final boolean immediateFlush;
        private final int bufferSize;
        private final String advertiseURI;
        private final Layout<? extends Serializable> layout;

        /**
         * Constructor.
         *
         * @param append Append status.
         * @param immediateFlush true to flush on every write.
         * @param bufferSize size of the buffer
         * @param advertiseURI the URI to use when advertising the file
         * @param layout The layout.
         * @param configuration The configuration.
         */
        public FactoryData(final boolean append, final boolean immediateFlush, final int bufferSize,
                final String advertiseURI, final Layout<? extends Serializable> layout, final Configuration configuration) {
            super(configuration);
            this.append = append;
            this.immediateFlush = immediateFlush;
            this.bufferSize = bufferSize;
            this.advertiseURI = advertiseURI;
            this.layout = layout;
        }
    }

    /**
     * Factory to create a RandomAccessFileManager.
     */
    private static class RandomAccessFileManagerFactory implements
            ManagerFactory<RandomAccessFileManager, FactoryData> {

        /**
         * Create a RandomAccessFileManager.
         *
         * @param name The name of the File.
         * @param data The FactoryData
         * @return The RandomAccessFileManager for the File.
         */
        @Override
        public RandomAccessFileManager createManager(final String name, final FactoryData data) {
            final File file = new File(name);
            if (!data.append) {
                // Best effort: a failed delete is tolerated, setLength(0) below truncates anyway.
                file.delete();
            }

            final boolean writeHeader = !data.append || !file.exists();
            final OutputStream os = NullOutputStream.getInstance();
            RandomAccessFile raf;
            try {
                FileUtils.makeParentDirs(file);
                raf = new RandomAccessFile(name, "rw");
                if (data.append) {
                    raf.seek(raf.length());
                } else {
                    raf.setLength(0);
                }
                return new RandomAccessFileManager(data.getLoggerContext(), raf, name,
                        os, data.bufferSize, data.advertiseURI, data.layout, writeHeader);
            } catch (final Exception ex) {
                LOGGER.error("RandomAccessFileManager (" + name + ") " + ex, ex);
            }
            return null;
        }
    }
}
| |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.hyracks.examples.btree.client;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;
import edu.uci.ics.hyracks.api.client.HyracksConnection;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will insert tuples into the primary and secondary index using an insert pipeline
public class InsertPipelineExample {
    /** Command-line options for the example, parsed with args4j. */
    private static class Options {
        @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
        public String host;
        @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
        public int port = 1098;
        @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
        public String ncs;
        @Option(name = "-num-tuples", usage = "Total number of tuples to to be generated for insertion", required = true)
        public int numTuples;
        @Option(name = "-primary-btreename", usage = "B-Tree file name of primary index", required = true)
        public String primaryBTreeName;
        @Option(name = "-secondary-btreename", usage = "B-Tree file name of secondary index", required = true)
        public String secondaryBTreeName;
        @Option(name = "-frame-size", usage = "Hyracks frame size (default: 32768)", required = false)
        public int frameSize = 32768;
    }
    /**
     * Entry point: parses the options, submits the insert-pipeline job to the
     * cluster controller, waits for completion, and prints
     * "start end elapsed" timings (in milliseconds) to stderr.
     */
    public static void main(String[] args) throws Exception {
        Options options = new Options();
        CmdLineParser parser = new CmdLineParser(options);
        parser.parseArgument(args);
        IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
        JobSpecification job = createJob(options);
        long start = System.currentTimeMillis();
        JobId jobId = hcc.startJob(job);
        hcc.waitForCompletion(jobId);
        long end = System.currentTimeMillis();
        System.err.println(start + " " + end + " " + (end - start));
    }
    /**
     * Assembles the job: data generator -> (hash partition on field 0) ->
     * primary B-Tree insert -> secondary B-Tree insert -> null sink.
     *
     * @param options parsed command-line options
     * @return the fully wired job specification
     */
    private static JobSpecification createJob(Options options) {
        JobSpecification spec = new JobSpecification(options.frameSize);
        String[] splitNCs = options.ncs.split(",");
        // schema of tuples to be generated: 5 fields (string, string, int,
        // int, string);
        // we will use field 2 as primary key to fill a clustered index
        RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
                UTF8StringSerializerDeserializer.INSTANCE, // this field will
                                                           // not go into B-Tree
                UTF8StringSerializerDeserializer.INSTANCE, // we will use this
                                                           // as payload
                IntegerSerializerDeserializer.INSTANCE, // we will use this
                                                        // field as key
                IntegerSerializerDeserializer.INSTANCE, // we will use this as
                                                        // payload
                UTF8StringSerializerDeserializer.INSTANCE // we will use this as
                                                          // payload
        });
        // generate numRecords records with field 2 being unique, integer values
        // in [0, 100000], and strings with max length of 10 characters, and
        // random seed 100
        DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0,
                100000, 10, 100);
        // run data generator on first nodecontroller given
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dataGen, splitNCs[0]);
        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
        IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
        // prepare insertion into primary index
        // tuples to be put into B-Tree shall have 4 fields
        int primaryFieldCount = 4;
        ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
        primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
        primaryTypeTraits[2] = IntegerPointable.TYPE_TRAITS;
        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
        // comparator factories for primary index (single integer key)
        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        // the B-Tree expects its keyfields to be at the front of its input
        // tuple
        int[] primaryFieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input
                                                        // tuple to field 0 of
                                                        // B-Tree tuple, etc.
        IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
        // create operator descriptor
        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                spec, recDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
                primaryComparatorFactories, null, primaryFieldPermutation, IndexOperation.INSERT,
                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
        JobHelper.createPartitionConstraint(spec, primaryInsert, splitNCs);
        // prepare insertion into secondary index
        // tuples to be put into B-Tree shall have 2 fields
        int secondaryFieldCount = 2;
        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
        secondaryTypeTraits[1] = IntegerPointable.TYPE_TRAITS;
        // comparator factories for secondary index (composite string+int key)
        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
        // the B-Tree expects its keyfields to be at the front of its input
        // tuple
        int[] secondaryFieldPermutation = { 1, 2 };
        IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
                options.secondaryBTreeName);
        // create operator descriptor
        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                spec, recDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
                secondaryComparatorFactories, null, secondaryFieldPermutation, IndexOperation.INSERT,
                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
        JobHelper.createPartitionConstraint(spec, secondaryInsert, splitNCs);
        // end the insert pipeline at this sink operator
        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
        JobHelper.createPartitionConstraint(spec, nullSink, splitNCs);
        // distribute the records from the datagen via hashing (on field 0, a
        // string) to the insert
        // ops
        IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[1];
        hashFactories[0] = PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY);
        IConnectorDescriptor hashConn = new MToNPartitioningConnectorDescriptor(spec,
                new FieldHashPartitionComputerFactory(new int[] { 0 }, hashFactories));
        // connect the ops
        spec.connect(hashConn, dataGen, 0, primaryInsert, 0);
        spec.connect(new OneToOneConnectorDescriptor(spec), primaryInsert, 0, secondaryInsert, 0);
        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsert, 0, nullSink, 0);
        spec.addRoot(nullSink);
        return spec;
    }
}
| |
/*
* Copyright (c) 2005, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.awt;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.GraphicsEnvironment;
import java.awt.event.*;
import java.awt.AWTEvent;
import java.awt.AWTEventMulticaster;
import java.awt.EventQueue;
import java.awt.PopupMenu;
import java.awt.Image;
import java.util.EventListener;
import java.awt.peer.TrayIconPeer;
import sun.awt.AppContext;
import sun.awt.SunToolkit;
import sun.awt.HeadlessToolkit;
import java.util.EventObject;
import java.security.AccessControlContext;
import java.security.AccessController;
/**
* A <code>TrayIcon</code> object represents a tray icon that can be
* added to the {@link SystemTray system tray}. A
* <code>TrayIcon</code> can have a tooltip (text), an image, a popup
* menu, and a set of listeners associated with it.
*
* <p>A <code>TrayIcon</code> can generate various {@link MouseEvent
* MouseEvents} and supports adding corresponding listeners to receive
* notification of these events. <code>TrayIcon</code> processes some
* of the events by itself. For example, by default, when the
* right-mouse click is performed on the <code>TrayIcon</code> it
* displays the specified popup menu. When the mouse hovers
* over the <code>TrayIcon</code> the tooltip is displayed.
*
* <p><strong>Note:</strong> When the <code>MouseEvent</code> is
* dispatched to its registered listeners its <code>component</code>
* property will be set to <code>null</code>. (See {@link
* java.awt.event.ComponentEvent#getComponent}) The
* <code>source</code> property will be set to this
* <code>TrayIcon</code>. (See {@link
* java.util.EventObject#getSource})
*
* <p><b>Note:</b> A well-behaved {@link TrayIcon} implementation
* will assign different gestures to showing a popup menu and
* selecting a tray icon.
*
* <p>A <code>TrayIcon</code> can generate an {@link ActionEvent
* ActionEvent}. On some platforms, this occurs when the user selects
* the tray icon using either the mouse or keyboard.
*
* <p>If a SecurityManager is installed, the AWTPermission
* {@code accessSystemTray} must be granted in order to create
* a {@code TrayIcon}. Otherwise the constructor will throw a
* SecurityException.
*
* <p> See the {@link SystemTray} class overview for an example on how
* to use the <code>TrayIcon</code> API.
*
* @since 1.6
* @see SystemTray#add
* @see java.awt.event.ComponentEvent#getComponent
* @see java.util.EventObject#getSource
*
* @author Bino George
* @author Denis Mikhalkin
* @author Sharon Zakhour
* @author Anton Tarasov
*/
public class TrayIcon {
private Image image;
private String tooltip;
private PopupMenu popup;
private boolean autosize;
private int id;
private String actionCommand;
transient private TrayIconPeer peer;
transient MouseListener mouseListener;
transient MouseMotionListener mouseMotionListener;
transient ActionListener actionListener;
/*
* The tray icon's AccessControlContext.
*
* Unlike the acc in Component, this field is made final
* because TrayIcon is not serializable.
*/
private final AccessControlContext acc = AccessController.getContext();
/*
* Returns the acc this tray icon was constructed with.
*/
final AccessControlContext getAccessControlContext() {
if (acc == null) {
throw new SecurityException("TrayIcon is missing AccessControlContext");
}
return acc;
}
    static {
        // Load the native AWT libraries before any native call below.
        Toolkit.loadLibraries();
        if (!GraphicsEnvironment.isHeadless()) {
            // Native initialization is skipped entirely in headless mode.
            initIDs();
        }
    }
    /**
     * Common constructor shared by all public constructors: performs the
     * checks every TrayIcon must pass before it may exist.
     *
     * The order is deliberate: the security check runs first, then the
     * headless check, then the platform-support check.
     *
     * @throws SecurityException if {@code accessSystemTray} is not granted
     * @throws HeadlessException if the environment is headless
     * @throws UnsupportedOperationException if the system tray is unsupported
     */
    private TrayIcon()
      throws UnsupportedOperationException, HeadlessException, SecurityException
    {
        SystemTray.checkSystemTrayAllowed();
        if (GraphicsEnvironment.isHeadless()) {
            throw new HeadlessException();
        }
        if (!SystemTray.isSupported()) {
            throw new UnsupportedOperationException();
        }
        // Register this object with the current AppContext so it can be
        // tracked/disposed with that context.
        SunToolkit.insertTargetMapping(this, AppContext.getAppContext());
    }
/**
* Creates a <code>TrayIcon</code> with the specified image.
*
* @param image the <code>Image</code> to be used
* @throws IllegalArgumentException if <code>image</code> is
* <code>null</code>
* @throws UnsupportedOperationException if the system tray isn't
* supported by the current platform
* @throws HeadlessException if
* {@code GraphicsEnvironment.isHeadless()} returns {@code true}
* @throws SecurityException if {@code accessSystemTray} permission
* is not granted
* @see SystemTray#add(TrayIcon)
* @see TrayIcon#TrayIcon(Image, String, PopupMenu)
* @see TrayIcon#TrayIcon(Image, String)
* @see SecurityManager#checkPermission
* @see AWTPermission
*/
    public TrayIcon(Image image) {
        this();
        // NOTE(review): this() has already run the security/headless/support
        // checks and registered the target mapping by the time the argument
        // is validated, so a null image still performs that registration
        // before throwing.
        if (image == null) {
            throw new IllegalArgumentException("creating TrayIcon with null Image");
        }
        setImage(image);
    }
/**
* Creates a <code>TrayIcon</code> with the specified image and
* tooltip text.
*
* @param image the <code>Image</code> to be used
* @param tooltip the string to be used as tooltip text; if the
* value is <code>null</code> no tooltip is shown
* @throws IllegalArgumentException if <code>image</code> is
* <code>null</code>
* @throws UnsupportedOperationException if the system tray isn't
* supported by the current platform
* @throws HeadlessException if
* {@code GraphicsEnvironment.isHeadless()} returns {@code true}
* @throws SecurityException if {@code accessSystemTray} permission
* is not granted
* @see SystemTray#add(TrayIcon)
* @see TrayIcon#TrayIcon(Image)
* @see TrayIcon#TrayIcon(Image, String, PopupMenu)
* @see SecurityManager#checkPermission
* @see AWTPermission
*/
public TrayIcon(Image image, String tooltip) {
this(image);
setToolTip(tooltip);
}
/**
* Creates a <code>TrayIcon</code> with the specified image,
* tooltip and popup menu.
*
* @param image the <code>Image</code> to be used
* @param tooltip the string to be used as tooltip text; if the
* value is <code>null</code> no tooltip is shown
* @param popup the menu to be used for the tray icon's popup
* menu; if the value is <code>null</code> no popup menu is shown
* @throws IllegalArgumentException if <code>image</code> is <code>null</code>
* @throws UnsupportedOperationException if the system tray isn't
* supported by the current platform
* @throws HeadlessException if
* {@code GraphicsEnvironment.isHeadless()} returns {@code true}
* @throws SecurityException if {@code accessSystemTray} permission
* is not granted
* @see SystemTray#add(TrayIcon)
* @see TrayIcon#TrayIcon(Image, String)
* @see TrayIcon#TrayIcon(Image)
* @see PopupMenu
* @see MouseListener
* @see #addMouseListener(MouseListener)
* @see SecurityManager#checkPermission
* @see AWTPermission
*/
public TrayIcon(Image image, String tooltip, PopupMenu popup) {
this(image, tooltip);
setPopupMenu(popup);
}
/**
* Sets the image for this <code>TrayIcon</code>. The previous
* tray icon image is discarded without calling the {@link
* java.awt.Image#flush} method — you will need to call it
* manually.
*
* <p> If the image represents an animated image, it will be
* animated automatically.
*
* <p> See the {@link #setImageAutoSize(boolean)} property for
* details on the size of the displayed image.
*
* <p> Calling this method with the same image that is currently
* being used has no effect.
*
* @throws NullPointerException if <code>image</code> is <code>null</code>
* @param image the non-null <code>Image</code> to be used
* @see #getImage
* @see Image
* @see SystemTray#add(TrayIcon)
* @see TrayIcon#TrayIcon(Image, String)
*/
public void setImage(Image image) {
if (image == null) {
throw new NullPointerException("setting null Image");
}
this.image = image;
TrayIconPeer peer = this.peer;
if (peer != null) {
peer.updateImage();
}
}
/**
* Returns the current image used for this <code>TrayIcon</code>.
*
* @return the image
* @see #setImage(Image)
* @see Image
*/
public Image getImage() {
return image;
}
/**
* Sets the popup menu for this <code>TrayIcon</code>. If
* <code>popup</code> is <code>null</code>, no popup menu will be
* associated with this <code>TrayIcon</code>.
*
* <p>Note that this <code>popup</code> must not be added to any
* parent before or after it is set on the tray icon. If you add
* it to some parent, the <code>popup</code> may be removed from
* that parent.
*
* <p>The {@code popup} can be set on one {@code TrayIcon} only.
* Setting the same popup on multiple {@code TrayIcon}s will cause
* an {@code IllegalArgumentException}.
*
* <p><strong>Note:</strong> Some platforms may not support
* showing the user-specified popup menu component when the user
* right-clicks the tray icon. In this situation, either no menu
* will be displayed or, on some systems, a native version of the
* menu may be displayed.
*
* @throws IllegalArgumentException if the {@code popup} is already
* set for another {@code TrayIcon}
* @param popup a <code>PopupMenu</code> or <code>null</code> to
* remove any popup menu
* @see #getPopupMenu
*/
    public void setPopupMenu(PopupMenu popup) {
        // Setting the same popup again is a no-op.
        if (popup == this.popup) {
            return;
        }
        // TrayIcon.class is the lock because isTrayIconPopup is an ownership
        // flag shared across all TrayIcon instances: a popup may belong to at
        // most one tray icon at a time.
        synchronized (TrayIcon.class) {
            if (popup != null) {
                if (popup.isTrayIconPopup) {
                    throw new IllegalArgumentException("the PopupMenu is already set for another TrayIcon");
                }
                // Claim ownership of the new popup before releasing the old
                // one, so the invariant never observes two unowned popups.
                popup.isTrayIconPopup = true;
            }
            if (this.popup != null) {
                // Release ownership of the previously installed popup.
                this.popup.isTrayIconPopup = false;
            }
            this.popup = popup;
        }
    }
/**
* Returns the popup menu associated with this <code>TrayIcon</code>.
*
* @return the popup menu or <code>null</code> if none exists
* @see #setPopupMenu(PopupMenu)
*/
public PopupMenu getPopupMenu() {
return popup;
}
/**
* Sets the tooltip string for this <code>TrayIcon</code>. The
* tooltip is displayed automatically when the mouse hovers over
* the icon. Setting the tooltip to <code>null</code> removes any
* tooltip text.
*
* When displayed, the tooltip string may be truncated on some platforms;
* the number of characters that may be displayed is platform-dependent.
*
* @param tooltip the string for the tooltip; if the value is
* <code>null</code> no tooltip is shown
* @see #getToolTip
*/
public void setToolTip(String tooltip) {
this.tooltip = tooltip;
TrayIconPeer peer = this.peer;
if (peer != null) {
peer.setToolTip(tooltip);
}
}
/**
* Returns the tooltip string associated with this
* <code>TrayIcon</code>.
*
* @return the tooltip string or <code>null</code> if none exists
* @see #setToolTip(String)
*/
public String getToolTip() {
return tooltip;
}
/**
* Sets the auto-size property. Auto-size determines whether the
* tray image is automatically sized to fit the space allocated
* for the image on the tray. By default, the auto-size property
* is set to <code>false</code>.
*
* <p> If auto-size is <code>false</code>, and the image size
* doesn't match the tray icon space, the image is painted as-is
* inside that space — if larger than the allocated space, it will
* be cropped.
*
* <p> If auto-size is <code>true</code>, the image is stretched or shrunk to
* fit the tray icon space.
*
* @param autosize <code>true</code> to auto-size the image,
* <code>false</code> otherwise
* @see #isImageAutoSize
*/
public void setImageAutoSize(boolean autosize) {
this.autosize = autosize;
TrayIconPeer peer = this.peer;
if (peer != null) {
peer.updateImage();
}
}
/**
* Returns the value of the auto-size property.
*
* @return <code>true</code> if the image will be auto-sized,
* <code>false</code> otherwise
* @see #setImageAutoSize(boolean)
*/
public boolean isImageAutoSize() {
return autosize;
}
/**
* Adds the specified mouse listener to receive mouse events from
* this <code>TrayIcon</code>. Calling this method with a
* <code>null</code> value has no effect.
*
* <p><b>Note</b>: The {@code MouseEvent}'s coordinates (received
* from the {@code TrayIcon}) are relative to the screen, not the
* {@code TrayIcon}.
*
* <p> <b>Note: </b>The <code>MOUSE_ENTERED</code> and
* <code>MOUSE_EXITED</code> mouse events are not supported.
* <p>Refer to <a href="doc-files/AWTThreadIssues.html#ListenersThreads"
* >AWT Threading Issues</a> for details on AWT's threading model.
*
* @param listener the mouse listener
* @see java.awt.event.MouseEvent
* @see java.awt.event.MouseListener
* @see #removeMouseListener(MouseListener)
* @see #getMouseListeners
*/
public synchronized void addMouseListener(MouseListener listener) {
if (listener == null) {
return;
}
mouseListener = AWTEventMulticaster.add(mouseListener, listener);
}
/**
* Removes the specified mouse listener. Calling this method with
* <code>null</code> or an invalid value has no effect.
* <p>Refer to <a href="doc-files/AWTThreadIssues.html#ListenersThreads"
* >AWT Threading Issues</a> for details on AWT's threading model.
*
* @param listener the mouse listener
* @see java.awt.event.MouseEvent
* @see java.awt.event.MouseListener
* @see #addMouseListener(MouseListener)
* @see #getMouseListeners
*/
public synchronized void removeMouseListener(MouseListener listener) {
if (listener == null) {
return;
}
mouseListener = AWTEventMulticaster.remove(mouseListener, listener);
}
/**
* Returns an array of all the mouse listeners
* registered on this <code>TrayIcon</code>.
*
* @return all of the <code>MouseListeners</code> registered on
* this <code>TrayIcon</code> or an empty array if no mouse
* listeners are currently registered
*
* @see #addMouseListener(MouseListener)
* @see #removeMouseListener(MouseListener)
* @see java.awt.event.MouseListener
*/
public synchronized MouseListener[] getMouseListeners() {
return AWTEventMulticaster.getListeners(mouseListener, MouseListener.class);
}
/**
* Adds the specified mouse listener to receive mouse-motion
* events from this <code>TrayIcon</code>. Calling this method
* with a <code>null</code> value has no effect.
*
* <p><b>Note</b>: The {@code MouseEvent}'s coordinates (received
* from the {@code TrayIcon}) are relative to the screen, not the
* {@code TrayIcon}.
*
* <p> <b>Note: </b>The <code>MOUSE_DRAGGED</code> mouse event is not supported.
* <p>Refer to <a href="doc-files/AWTThreadIssues.html#ListenersThreads"
* >AWT Threading Issues</a> for details on AWT's threading model.
*
* @param listener the mouse listener
* @see java.awt.event.MouseEvent
* @see java.awt.event.MouseMotionListener
* @see #removeMouseMotionListener(MouseMotionListener)
* @see #getMouseMotionListeners
*/
public synchronized void addMouseMotionListener(MouseMotionListener listener) {
if (listener == null) {
return;
}
mouseMotionListener = AWTEventMulticaster.add(mouseMotionListener, listener);
}
/**
* Removes the specified mouse-motion listener. Calling this method with
* <code>null</code> or an invalid value has no effect.
* <p>Refer to <a href="doc-files/AWTThreadIssues.html#ListenersThreads"
* >AWT Threading Issues</a> for details on AWT's threading model.
*
* @param listener the mouse listener
* @see java.awt.event.MouseEvent
* @see java.awt.event.MouseMotionListener
* @see #addMouseMotionListener(MouseMotionListener)
* @see #getMouseMotionListeners
*/
public synchronized void removeMouseMotionListener(MouseMotionListener listener) {
if (listener == null) {
return;
}
mouseMotionListener = AWTEventMulticaster.remove(mouseMotionListener, listener);
}
/**
* Returns an array of all the mouse-motion listeners
* registered on this <code>TrayIcon</code>.
*
* @return all of the <code>MouseInputListeners</code> registered on
* this <code>TrayIcon</code> or an empty array if no mouse
* listeners are currently registered
*
* @see #addMouseMotionListener(MouseMotionListener)
* @see #removeMouseMotionListener(MouseMotionListener)
* @see java.awt.event.MouseMotionListener
*/
public synchronized MouseMotionListener[] getMouseMotionListeners() {
return AWTEventMulticaster.getListeners(mouseMotionListener, MouseMotionListener.class);
}
/**
* Returns the command name of the action event fired by this tray icon.
*
* @return the action command name, or <code>null</code> if none exists
* @see #addActionListener(ActionListener)
* @see #setActionCommand(String)
*/
public String getActionCommand() {
return actionCommand;
}
/**
* Sets the command name for the action event fired by this tray
* icon. By default, this action command is set to
* <code>null</code>.
*
* @param command a string used to set the tray icon's
* action command.
* @see java.awt.event.ActionEvent
* @see #addActionListener(ActionListener)
* @see #getActionCommand
*/
public void setActionCommand(String command) {
actionCommand = command;
}
/**
* Adds the specified action listener to receive
* <code>ActionEvent</code>s from this <code>TrayIcon</code>.
* Action events usually occur when a user selects the tray icon,
* using either the mouse or keyboard. The conditions in which
* action events are generated are platform-dependent.
*
* <p>Calling this method with a <code>null</code> value has no
* effect.
* <p>Refer to <a href="doc-files/AWTThreadIssues.html#ListenersThreads"
* >AWT Threading Issues</a> for details on AWT's threading model.
*
* @param listener the action listener
* @see #removeActionListener
* @see #getActionListeners
* @see java.awt.event.ActionListener
* @see #setActionCommand(String)
*/
public synchronized void addActionListener(ActionListener listener) {
if (listener == null) {
return;
}
actionListener = AWTEventMulticaster.add(actionListener, listener);
}
/**
* Removes the specified action listener. Calling this method with
* <code>null</code> or an invalid value has no effect.
* <p>Refer to <a href="doc-files/AWTThreadIssues.html#ListenersThreads"
* >AWT Threading Issues</a> for details on AWT's threading model.
*
* @param listener the action listener
* @see java.awt.event.ActionEvent
* @see java.awt.event.ActionListener
* @see #addActionListener(ActionListener)
* @see #getActionListeners
* @see #setActionCommand(String)
*/
public synchronized void removeActionListener(ActionListener listener) {
if (listener == null) {
return;
}
actionListener = AWTEventMulticaster.remove(actionListener, listener);
}
/**
* Returns an array of all the action listeners
* registered on this <code>TrayIcon</code>.
*
* @return all of the <code>ActionListeners</code> registered on
* this <code>TrayIcon</code> or an empty array if no action
* listeners are currently registered
*
* @see #addActionListener(ActionListener)
* @see #removeActionListener(ActionListener)
* @see java.awt.event.ActionListener
*/
public synchronized ActionListener[] getActionListeners() {
return AWTEventMulticaster.getListeners(actionListener, ActionListener.class);
}
/**
* The message type determines which icon will be displayed in the
* caption of the message, and a possible system sound a message
* may generate upon showing.
*
* @see TrayIcon
* @see TrayIcon#displayMessage(String, String, MessageType)
* @since 1.6
*/
public enum MessageType {
/** An error message */
ERROR,
/** A warning message */
WARNING,
/** An information message */
INFO,
/** Simple message */
NONE
};
/**
* Displays a popup message near the tray icon. The message will
* disappear after a time or if the user clicks on it. Clicking
* on the message may trigger an {@code ActionEvent}.
*
* <p>Either the caption or the text may be <code>null</code>, but an
* <code>NullPointerException</code> is thrown if both are
* <code>null</code>.
*
* When displayed, the caption or text strings may be truncated on
* some platforms; the number of characters that may be displayed is
* platform-dependent.
*
* <p><strong>Note:</strong> Some platforms may not support
* showing a message.
*
* @param caption the caption displayed above the text, usually in
* bold; may be <code>null</code>
* @param text the text displayed for the particular message; may be
* <code>null</code>
* @param messageType an enum indicating the message type
* @throws NullPointerException if both <code>caption</code>
* and <code>text</code> are <code>null</code>
*/
public void displayMessage(String caption, String text, MessageType messageType) {
if (caption == null && text == null) {
throw new NullPointerException("displaying the message with both caption and text being null");
}
TrayIconPeer peer = this.peer;
if (peer != null) {
peer.displayMessage(caption, text, messageType.name());
}
}
/**
* Returns the size, in pixels, of the space that the tray icon
* occupies in the system tray. For the tray icon that is not yet
* added to the system tray, the returned size is equal to the
* result of the {@link SystemTray#getTrayIconSize}.
*
* @return the size of the tray icon, in pixels
* @see TrayIcon#setImageAutoSize(boolean)
* @see java.awt.Image
* @see TrayIcon#getSize()
*/
public Dimension getSize() {
return SystemTray.getSystemTray().getTrayIconSize();
}
// ****************************************************************
// ****************************************************************
void addNotify()
throws AWTException
{
synchronized (this) {
if (peer == null) {
Toolkit toolkit = Toolkit.getDefaultToolkit();
if (toolkit instanceof SunToolkit) {
peer = ((SunToolkit)Toolkit.getDefaultToolkit()).createTrayIcon(this);
} else if (toolkit instanceof HeadlessToolkit) {
peer = ((HeadlessToolkit)Toolkit.getDefaultToolkit()).createTrayIcon(this);
}
}
}
peer.setToolTip(tooltip);
}
void removeNotify() {
TrayIconPeer p = null;
synchronized (this) {
p = peer;
peer = null;
}
if (p != null) {
p.dispose();
}
}
void setID(int id) {
this.id = id;
}
int getID(){
return id;
}
    void dispatchEvent(AWTEvent e) {
        // Record the event as the current/most-recent one for EventQueue
        // bookkeeping, mirror it to any toolkit-wide AWTEventListeners, then
        // run this icon's own processing.
        EventQueue.setCurrentEventAndMostRecentTime(e);
        Toolkit.getDefaultToolkit().notifyAWTEventListeners(e);
        processEvent(e);
    }
void processEvent(AWTEvent e) {
if (e instanceof MouseEvent) {
switch(e.getID()) {
case MouseEvent.MOUSE_PRESSED:
case MouseEvent.MOUSE_RELEASED:
case MouseEvent.MOUSE_CLICKED:
processMouseEvent((MouseEvent)e);
break;
case MouseEvent.MOUSE_MOVED:
processMouseMotionEvent((MouseEvent)e);
break;
default:
return;
}
} else if (e instanceof ActionEvent) {
processActionEvent((ActionEvent)e);
}
}
void processMouseEvent(MouseEvent e) {
MouseListener listener = mouseListener;
if (listener != null) {
int id = e.getID();
switch(id) {
case MouseEvent.MOUSE_PRESSED:
listener.mousePressed(e);
break;
case MouseEvent.MOUSE_RELEASED:
listener.mouseReleased(e);
break;
case MouseEvent.MOUSE_CLICKED:
listener.mouseClicked(e);
break;
default:
return;
}
}
}
void processMouseMotionEvent(MouseEvent e) {
MouseMotionListener listener = mouseMotionListener;
if (listener != null &&
e.getID() == MouseEvent.MOUSE_MOVED)
{
listener.mouseMoved(e);
}
}
void processActionEvent(ActionEvent e) {
ActionListener listener = actionListener;
if (listener != null) {
listener.actionPerformed(e);
}
}
private static native void initIDs();
}
| |
/**
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.apphosting.vmruntime;
import com.google.apphosting.api.ApiProxy;
import com.google.gson.Gson;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
/**
 * {@code VmRuntimeFileLogHandler} is installed on the root logger. It converts all messages
 * to the json format understood by the cloud logging agent and logs to a file in a volume shared
 * with the cloud logging agent.
 *
 */
public class VmRuntimeFileLogHandler extends FileHandler {
  // This exists for testing purposes only. If set, the cloud logger may lose logs.
  private static final String LOG_PATTERN_CONFIG_PROPERTY =
      "com.google.apphosting.vmruntime.VmRuntimeFileLogHandler.pattern";
  // Log files to /var/log/app_engine/app.[0-2].log.json
  private static final String DEFAULT_LOG_PATTERN = "/var/log/app_engine/app.%g.log.json";
  private static final String APP_ENGINE_LOG_CONFIG_PATTERN_ENV =
      "APP_ENGINE_LOG_CONFIG_PATTERN";
  // Rotate each log file at 100MB, keeping at most LOG_MAX_FILES generations.
  private static final int LOG_MAX_SIZE = 100 * 1024 * 1024;
  private static final int LOG_MAX_FILES = 3;

  /** Private: instances are created only through {@link #init()}. */
  private VmRuntimeFileLogHandler() throws IOException {
    // Append mode (last arg) so a restart does not clobber entries the
    // logging agent has not shipped yet.
    super(fileLogPattern(), LOG_MAX_SIZE, LOG_MAX_FILES, true);
    setLevel(Level.FINEST);
    setFormatter(new CustomFormatter());
  }

  /**
   * Resolves the log file pattern, in order of precedence: the
   * {@code APP_ENGINE_LOG_CONFIG_PATTERN} environment variable (Cloud SDK
   * usage only, for local Jetty processes), the
   * {@code LOG_PATTERN_CONFIG_PROPERTY} system property (testing), then the
   * default shared-volume path.
   */
  private static String fileLogPattern() {
    String pattern = System.getenv(APP_ENGINE_LOG_CONFIG_PATTERN_ENV);
    if (pattern != null) {
      return pattern;
    }
    pattern = System.getProperty(LOG_PATTERN_CONFIG_PROPERTY);
    if (pattern != null) {
      return pattern;
    }
    return DEFAULT_LOG_PATTERN;
  }

  /**
   * Initialize the {@code VmRuntimeFileLogHandler} by installing it on the root logger.
   * Idempotent: if a handler of this type is already installed, nothing happens.
   *
   * @throws IOException if the log file cannot be opened
   */
  public static void init() throws IOException {
    Logger rootLogger = Logger.getLogger("");
    for (Handler handler : rootLogger.getHandlers()) {
      if (handler instanceof VmRuntimeFileLogHandler) {
        return; // Already installed.
      }
    }
    rootLogger.addHandler(new VmRuntimeFileLogHandler());
  }

  /**
   * Convert from a Java Logging level to a cloud logs logging level.
   * SEVERE maps to error, WARNING to warn, INFO to info, and all
   * lower levels to debug. We reserve the fatal level for exceptions
   * that propagated outside of user code and forced us to kill the
   * request.
   */
  private static String convertLogLevel(Level level) {
    long intLevel = level.intValue();
    if (intLevel >= Level.SEVERE.intValue()) {
      return "ERROR";
    } else if (intLevel >= Level.WARNING.intValue()) {
      return "WARNING";
    } else if (intLevel >= Level.INFO.intValue()) {
      return "INFO";
    } else {
      // There's no trace, so we'll map everything below this to debug.
      return "DEBUG";
    }
  }

  /**
   * Used by LogData to format timestamps as (seconds, nanoseconds) pairs.
   */
  public static final class LogTimestamp {
    private long seconds;
    private long nanos;

    LogTimestamp(long seconds, long nanos) {
      this.seconds = seconds;
      this.nanos = nanos;
    }

    public long getSeconds() {
      return seconds;
    }

    public long getNanos() {
      return nanos;
    }
  }

  /**
   * Class for logging user data via GSON. Field names define the JSON keys
   * consumed by the cloud logging agent; do not rename them.
   */
  public static final class LogData {
    private String message;
    private LogTimestamp timestamp;
    private String thread;
    private String severity;
    private String traceId;

    LogData(String message, long seconds, long nanos, String thread, String severity,
        String traceId) {
      this.message = message;
      this.timestamp = new LogTimestamp(seconds, nanos);
      this.thread = thread;
      this.severity = severity;
      this.traceId = traceId;
    }

    public String getMessage() {
      return message;
    }

    public LogTimestamp getTimestamp() {
      return timestamp;
    }

    public String getThread() {
      return thread;
    }

    public String getSeverity() {
      return severity;
    }

    public String getTraceId() {
      return traceId;
    }
  }

  private static final class CustomFormatter extends Formatter {
    // Gson is thread-safe and relatively expensive to construct; share a
    // single instance instead of allocating one per formatted record.
    private static final Gson GSON = new Gson();

    /**
     * Format the given LogRecord as one JSON line understood by the cloud
     * logging agent.
     *
     * @param record the log record to be formatted.
     * @return a formatted log record, terminated by a newline
     */
    @Override
    public synchronized String format(LogRecord record) {
      // StringBuilder suffices here: format is synchronized and sb is local.
      StringBuilder sb = new StringBuilder();
      if (record.getSourceClassName() != null) {
        sb.append(record.getSourceClassName());
      } else {
        sb.append(record.getLoggerName());
      }
      if (record.getSourceMethodName() != null) {
        sb.append(" ");
        sb.append(record.getSourceMethodName());
      }
      sb.append(": ");
      String message = formatMessage(record);
      sb.append(message);
      if (record.getThrown() != null) {
        // Append the stack trace to the message body.
        try {
          sb.append("\n");
          StringWriter sw = new StringWriter();
          PrintWriter pw = new PrintWriter(sw);
          record.getThrown().printStackTrace(pw);
          pw.close();
          sb.append(sw.toString());
        } catch (Exception ex) {
          // Ignored. (Shouldn't happen and if it does we can't do much about it.)
        }
      }
      message = sb.toString();
      // Split epoch millis into whole seconds plus nanosecond remainder.
      long seconds = record.getMillis() / 1000;
      long nanos = (record.getMillis() % 1000) * 1000000;
      String thread = Integer.toString(record.getThreadID());
      String severity = convertLogLevel(record.getLevel());
      String traceId = getCurrentTraceId();
      return GSON.toJson(new LogData(message, seconds, nanos, thread, severity, traceId)) + "\n";
    }

    /**
     * Returns the current request's trace id, or null when there is no
     * environment, no trace header, or the header does not start with a hex
     * trace id.
     */
    private static String getCurrentTraceId() {
      // TODO(user, qike): Get the trace context directly.
      ApiProxy.Environment environment = ApiProxy.getCurrentEnvironment();
      if (environment == null) {
        return null;
      }
      Object value = environment.getAttributes()
          .get(VmApiProxyEnvironment.AttributeMapping.CLOUD_TRACE_CONTEXT.attributeKey);
      if (!(value instanceof String)) {
        return null;
      }
      String fullTraceId = (String) value;
      // Extract the leading run of hex digits from the header; the rest of
      // the context (span id, options) is dropped.
      // TODO(user, qike): Use the code from the Trace SDK when it's available in /third_party.
      if (fullTraceId.isEmpty() || Character.digit(fullTraceId.charAt(0), 16) < 0) {
        return null;
      }
      for (int index = 1; index < fullTraceId.length(); index++) {
        char ch = fullTraceId.charAt(index);
        if (Character.digit(ch, 16) < 0) {
          return fullTraceId.substring(0, index);
        }
      }
      // Entire header is hex: no trace id boundary found, treat as absent.
      return null;
    }
  }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import org.lwjgl.system.*;
import java.util.Set;
/** Defines the capabilities of a Vulkan {@code VkInstance} or {@code VkDevice}. */
public class VKCapabilities {
public final long
vkAcquireNextImageKHR,
vkAllocateCommandBuffers,
vkAllocateDescriptorSets,
vkAllocateMemory,
vkBeginCommandBuffer,
vkBindBufferMemory,
vkBindImageMemory,
vkCmdBeginQuery,
vkCmdBeginRenderPass,
vkCmdBindDescriptorSets,
vkCmdBindIndexBuffer,
vkCmdBindPipeline,
vkCmdBindVertexBuffers,
vkCmdBlitImage,
vkCmdClearAttachments,
vkCmdClearColorImage,
vkCmdClearDepthStencilImage,
vkCmdCopyBuffer,
vkCmdCopyBufferToImage,
vkCmdCopyImage,
vkCmdCopyImageToBuffer,
vkCmdCopyQueryPoolResults,
vkCmdDebugMarkerBeginEXT,
vkCmdDebugMarkerEndEXT,
vkCmdDebugMarkerInsertEXT,
vkCmdDispatch,
vkCmdDispatchIndirect,
vkCmdDraw,
vkCmdDrawIndexed,
vkCmdDrawIndexedIndirect,
vkCmdDrawIndexedIndirectCountAMD,
vkCmdDrawIndirect,
vkCmdDrawIndirectCountAMD,
vkCmdEndQuery,
vkCmdEndRenderPass,
vkCmdExecuteCommands,
vkCmdFillBuffer,
vkCmdNextSubpass,
vkCmdPipelineBarrier,
vkCmdPushConstants,
vkCmdResetEvent,
vkCmdResetQueryPool,
vkCmdResolveImage,
vkCmdSetBlendConstants,
vkCmdSetDepthBias,
vkCmdSetDepthBounds,
vkCmdSetEvent,
vkCmdSetLineWidth,
vkCmdSetScissor,
vkCmdSetStencilCompareMask,
vkCmdSetStencilReference,
vkCmdSetStencilWriteMask,
vkCmdSetViewport,
vkCmdUpdateBuffer,
vkCmdWaitEvents,
vkCmdWriteTimestamp,
vkCreateBuffer,
vkCreateBufferView,
vkCreateCommandPool,
vkCreateComputePipelines,
vkCreateDebugReportCallbackEXT,
vkCreateDescriptorPool,
vkCreateDescriptorSetLayout,
vkCreateDevice,
vkCreateDisplayModeKHR,
vkCreateDisplayPlaneSurfaceKHR,
vkCreateEvent,
vkCreateFence,
vkCreateFramebuffer,
vkCreateGraphicsPipelines,
vkCreateImage,
vkCreateImageView,
vkCreateInstance,
vkCreatePipelineCache,
vkCreatePipelineLayout,
vkCreateQueryPool,
vkCreateRenderPass,
vkCreateSampler,
vkCreateSemaphore,
vkCreateShaderModule,
vkCreateSharedSwapchainsKHR,
vkCreateSwapchainKHR,
vkCreateWin32SurfaceKHR,
vkCreateXlibSurfaceKHR,
vkDebugMarkerSetObjectNameEXT,
vkDebugMarkerSetObjectTagEXT,
vkDebugReportMessageEXT,
vkDestroyBuffer,
vkDestroyBufferView,
vkDestroyCommandPool,
vkDestroyDebugReportCallbackEXT,
vkDestroyDescriptorPool,
vkDestroyDescriptorSetLayout,
vkDestroyDevice,
vkDestroyEvent,
vkDestroyFence,
vkDestroyFramebuffer,
vkDestroyImage,
vkDestroyImageView,
vkDestroyInstance,
vkDestroyPipeline,
vkDestroyPipelineCache,
vkDestroyPipelineLayout,
vkDestroyQueryPool,
vkDestroyRenderPass,
vkDestroySampler,
vkDestroySemaphore,
vkDestroyShaderModule,
vkDestroySurfaceKHR,
vkDestroySwapchainKHR,
vkDeviceWaitIdle,
vkEndCommandBuffer,
vkEnumerateDeviceExtensionProperties,
vkEnumerateDeviceLayerProperties,
vkEnumerateInstanceExtensionProperties,
vkEnumerateInstanceLayerProperties,
vkEnumeratePhysicalDevices,
vkFlushMappedMemoryRanges,
vkFreeCommandBuffers,
vkFreeDescriptorSets,
vkFreeMemory,
vkGetBufferMemoryRequirements,
vkGetDeviceMemoryCommitment,
vkGetDeviceProcAddr,
vkGetDeviceQueue,
vkGetDisplayModePropertiesKHR,
vkGetDisplayPlaneCapabilitiesKHR,
vkGetDisplayPlaneSupportedDisplaysKHR,
vkGetEventStatus,
vkGetFenceStatus,
vkGetImageMemoryRequirements,
vkGetImageSparseMemoryRequirements,
vkGetImageSubresourceLayout,
vkGetInstanceProcAddr,
vkGetMemoryWin32HandleNV,
vkGetPhysicalDeviceDisplayPlanePropertiesKHR,
vkGetPhysicalDeviceDisplayPropertiesKHR,
vkGetPhysicalDeviceExternalImageFormatPropertiesNV,
vkGetPhysicalDeviceFeatures,
vkGetPhysicalDeviceFormatProperties,
vkGetPhysicalDeviceImageFormatProperties,
vkGetPhysicalDeviceMemoryProperties,
vkGetPhysicalDeviceProperties,
vkGetPhysicalDeviceQueueFamilyProperties,
vkGetPhysicalDeviceSparseImageFormatProperties,
vkGetPhysicalDeviceSurfaceCapabilitiesKHR,
vkGetPhysicalDeviceSurfaceFormatsKHR,
vkGetPhysicalDeviceSurfacePresentModesKHR,
vkGetPhysicalDeviceSurfaceSupportKHR,
vkGetPhysicalDeviceWin32PresentationSupportKHR,
vkGetPhysicalDeviceXlibPresentationSupportKHR,
vkGetPipelineCacheData,
vkGetQueryPoolResults,
vkGetRenderAreaGranularity,
vkGetSwapchainImagesKHR,
vkInvalidateMappedMemoryRanges,
vkMapMemory,
vkMergePipelineCaches,
vkQueueBindSparse,
vkQueuePresentKHR,
vkQueueSubmit,
vkQueueWaitIdle,
vkResetCommandBuffer,
vkResetCommandPool,
vkResetDescriptorPool,
vkResetEvent,
vkResetFences,
vkSetEvent,
vkUnmapMemory,
vkUpdateDescriptorSets,
vkWaitForFences;
/** The Vulkan API version number. */
public final int apiVersion;
/** When true, {@link VK10} is supported. */
public final boolean Vulkan10;
/** When true, {@link AMDDrawIndirectCount} is supported. */
public final boolean VK_AMD_draw_indirect_count;
/** When true, {@link AMDGCNShader} is supported. */
public final boolean VK_AMD_gcn_shader;
/** When true, {@link AMDGPUShaderHalfFloat} is supported. */
public final boolean VK_AMD_gpu_shader_half_float;
/** When true, {@link AMDNegativeViewportHeight} is supported. */
public final boolean VK_AMD_negative_viewport_height;
/** When true, {@link AMDRasterizationOrder} is supported. */
public final boolean VK_AMD_rasterization_order;
/** When true, {@link AMDShaderBallot} is supported. */
public final boolean VK_AMD_shader_ballot;
/** When true, {@link AMDShaderExplicitVertexParameter} is supported. */
public final boolean VK_AMD_shader_explicit_vertex_parameter;
/** When true, {@link AMDShaderTrinaryMinmax} is supported. */
public final boolean VK_AMD_shader_trinary_minmax;
/** When true, {@link EXTDebugMarker} is supported. */
public final boolean VK_EXT_debug_marker;
/** When true, {@link EXTDebugReport} is supported. */
public final boolean VK_EXT_debug_report;
/** When true, {@link EXTValidationFlags} is supported. */
public final boolean VK_EXT_validation_flags;
/** When true, {@link IMGFilterCubic} is supported. */
public final boolean VK_IMG_filter_cubic;
/** When true, {@link IMGFormatPVRTC} is supported. */
public final boolean VK_IMG_format_pvrtc;
/** When true, {@link KHRDisplay} is supported. */
public final boolean VK_KHR_display;
/** When true, {@link KHRDisplaySwapchain} is supported. */
public final boolean VK_KHR_display_swapchain;
/** When true, {@link KHRSamplerMirrorClampToEdge} is supported. */
public final boolean VK_KHR_sampler_mirror_clamp_to_edge;
/** When true, {@link KHRSurface} is supported. */
public final boolean VK_KHR_surface;
/** When true, {@link KHRSwapchain} is supported. */
public final boolean VK_KHR_swapchain;
/** When true, {@link KHRWin32Surface} is supported. */
public final boolean VK_KHR_win32_surface;
/** When true, {@link KHRXlibSurface} is supported. */
public final boolean VK_KHR_xlib_surface;
/** When true, {@link NVDedicatedAllocation} is supported. */
public final boolean VK_NV_dedicated_allocation;
/** When true, {@link NVExternalMemory} is supported. */
public final boolean VK_NV_external_memory;
/** When true, {@link NVExternalMemoryCapabilities} is supported. */
public final boolean VK_NV_external_memory_capabilities;
/** When true, {@link NVExternalMemoryWin32} is supported. */
public final boolean VK_NV_external_memory_win32;
/** When true, {@link NVGLSLShader} is supported. */
public final boolean VK_NV_glsl_shader;
/** When true, {@link NVWin32KeyedMutex} is supported. */
public final boolean VK_NV_win32_keyed_mutex;
VKCapabilities(FunctionProvider provider, int apiVersion, Set<String> ext) {
this.apiVersion = apiVersion;
vkAcquireNextImageKHR = provider.getFunctionAddress("vkAcquireNextImageKHR");
vkAllocateCommandBuffers = provider.getFunctionAddress("vkAllocateCommandBuffers");
vkAllocateDescriptorSets = provider.getFunctionAddress("vkAllocateDescriptorSets");
vkAllocateMemory = provider.getFunctionAddress("vkAllocateMemory");
vkBeginCommandBuffer = provider.getFunctionAddress("vkBeginCommandBuffer");
vkBindBufferMemory = provider.getFunctionAddress("vkBindBufferMemory");
vkBindImageMemory = provider.getFunctionAddress("vkBindImageMemory");
vkCmdBeginQuery = provider.getFunctionAddress("vkCmdBeginQuery");
vkCmdBeginRenderPass = provider.getFunctionAddress("vkCmdBeginRenderPass");
vkCmdBindDescriptorSets = provider.getFunctionAddress("vkCmdBindDescriptorSets");
vkCmdBindIndexBuffer = provider.getFunctionAddress("vkCmdBindIndexBuffer");
vkCmdBindPipeline = provider.getFunctionAddress("vkCmdBindPipeline");
vkCmdBindVertexBuffers = provider.getFunctionAddress("vkCmdBindVertexBuffers");
vkCmdBlitImage = provider.getFunctionAddress("vkCmdBlitImage");
vkCmdClearAttachments = provider.getFunctionAddress("vkCmdClearAttachments");
vkCmdClearColorImage = provider.getFunctionAddress("vkCmdClearColorImage");
vkCmdClearDepthStencilImage = provider.getFunctionAddress("vkCmdClearDepthStencilImage");
vkCmdCopyBuffer = provider.getFunctionAddress("vkCmdCopyBuffer");
vkCmdCopyBufferToImage = provider.getFunctionAddress("vkCmdCopyBufferToImage");
vkCmdCopyImage = provider.getFunctionAddress("vkCmdCopyImage");
vkCmdCopyImageToBuffer = provider.getFunctionAddress("vkCmdCopyImageToBuffer");
vkCmdCopyQueryPoolResults = provider.getFunctionAddress("vkCmdCopyQueryPoolResults");
vkCmdDebugMarkerBeginEXT = provider.getFunctionAddress("vkCmdDebugMarkerBeginEXT");
vkCmdDebugMarkerEndEXT = provider.getFunctionAddress("vkCmdDebugMarkerEndEXT");
vkCmdDebugMarkerInsertEXT = provider.getFunctionAddress("vkCmdDebugMarkerInsertEXT");
vkCmdDispatch = provider.getFunctionAddress("vkCmdDispatch");
vkCmdDispatchIndirect = provider.getFunctionAddress("vkCmdDispatchIndirect");
vkCmdDraw = provider.getFunctionAddress("vkCmdDraw");
vkCmdDrawIndexed = provider.getFunctionAddress("vkCmdDrawIndexed");
vkCmdDrawIndexedIndirect = provider.getFunctionAddress("vkCmdDrawIndexedIndirect");
vkCmdDrawIndexedIndirectCountAMD = provider.getFunctionAddress("vkCmdDrawIndexedIndirectCountAMD");
vkCmdDrawIndirect = provider.getFunctionAddress("vkCmdDrawIndirect");
vkCmdDrawIndirectCountAMD = provider.getFunctionAddress("vkCmdDrawIndirectCountAMD");
vkCmdEndQuery = provider.getFunctionAddress("vkCmdEndQuery");
vkCmdEndRenderPass = provider.getFunctionAddress("vkCmdEndRenderPass");
vkCmdExecuteCommands = provider.getFunctionAddress("vkCmdExecuteCommands");
vkCmdFillBuffer = provider.getFunctionAddress("vkCmdFillBuffer");
vkCmdNextSubpass = provider.getFunctionAddress("vkCmdNextSubpass");
vkCmdPipelineBarrier = provider.getFunctionAddress("vkCmdPipelineBarrier");
vkCmdPushConstants = provider.getFunctionAddress("vkCmdPushConstants");
vkCmdResetEvent = provider.getFunctionAddress("vkCmdResetEvent");
vkCmdResetQueryPool = provider.getFunctionAddress("vkCmdResetQueryPool");
vkCmdResolveImage = provider.getFunctionAddress("vkCmdResolveImage");
vkCmdSetBlendConstants = provider.getFunctionAddress("vkCmdSetBlendConstants");
vkCmdSetDepthBias = provider.getFunctionAddress("vkCmdSetDepthBias");
vkCmdSetDepthBounds = provider.getFunctionAddress("vkCmdSetDepthBounds");
vkCmdSetEvent = provider.getFunctionAddress("vkCmdSetEvent");
vkCmdSetLineWidth = provider.getFunctionAddress("vkCmdSetLineWidth");
vkCmdSetScissor = provider.getFunctionAddress("vkCmdSetScissor");
vkCmdSetStencilCompareMask = provider.getFunctionAddress("vkCmdSetStencilCompareMask");
vkCmdSetStencilReference = provider.getFunctionAddress("vkCmdSetStencilReference");
vkCmdSetStencilWriteMask = provider.getFunctionAddress("vkCmdSetStencilWriteMask");
vkCmdSetViewport = provider.getFunctionAddress("vkCmdSetViewport");
vkCmdUpdateBuffer = provider.getFunctionAddress("vkCmdUpdateBuffer");
vkCmdWaitEvents = provider.getFunctionAddress("vkCmdWaitEvents");
vkCmdWriteTimestamp = provider.getFunctionAddress("vkCmdWriteTimestamp");
vkCreateBuffer = provider.getFunctionAddress("vkCreateBuffer");
vkCreateBufferView = provider.getFunctionAddress("vkCreateBufferView");
vkCreateCommandPool = provider.getFunctionAddress("vkCreateCommandPool");
vkCreateComputePipelines = provider.getFunctionAddress("vkCreateComputePipelines");
vkCreateDebugReportCallbackEXT = provider.getFunctionAddress("vkCreateDebugReportCallbackEXT");
vkCreateDescriptorPool = provider.getFunctionAddress("vkCreateDescriptorPool");
vkCreateDescriptorSetLayout = provider.getFunctionAddress("vkCreateDescriptorSetLayout");
vkCreateDevice = provider.getFunctionAddress("vkCreateDevice");
vkCreateDisplayModeKHR = provider.getFunctionAddress("vkCreateDisplayModeKHR");
vkCreateDisplayPlaneSurfaceKHR = provider.getFunctionAddress("vkCreateDisplayPlaneSurfaceKHR");
vkCreateEvent = provider.getFunctionAddress("vkCreateEvent");
vkCreateFence = provider.getFunctionAddress("vkCreateFence");
vkCreateFramebuffer = provider.getFunctionAddress("vkCreateFramebuffer");
vkCreateGraphicsPipelines = provider.getFunctionAddress("vkCreateGraphicsPipelines");
vkCreateImage = provider.getFunctionAddress("vkCreateImage");
vkCreateImageView = provider.getFunctionAddress("vkCreateImageView");
vkCreateInstance = provider.getFunctionAddress("vkCreateInstance");
vkCreatePipelineCache = provider.getFunctionAddress("vkCreatePipelineCache");
vkCreatePipelineLayout = provider.getFunctionAddress("vkCreatePipelineLayout");
vkCreateQueryPool = provider.getFunctionAddress("vkCreateQueryPool");
vkCreateRenderPass = provider.getFunctionAddress("vkCreateRenderPass");
vkCreateSampler = provider.getFunctionAddress("vkCreateSampler");
vkCreateSemaphore = provider.getFunctionAddress("vkCreateSemaphore");
vkCreateShaderModule = provider.getFunctionAddress("vkCreateShaderModule");
vkCreateSharedSwapchainsKHR = provider.getFunctionAddress("vkCreateSharedSwapchainsKHR");
vkCreateSwapchainKHR = provider.getFunctionAddress("vkCreateSwapchainKHR");
vkCreateWin32SurfaceKHR = provider.getFunctionAddress("vkCreateWin32SurfaceKHR");
vkCreateXlibSurfaceKHR = provider.getFunctionAddress("vkCreateXlibSurfaceKHR");
vkDebugMarkerSetObjectNameEXT = provider.getFunctionAddress("vkDebugMarkerSetObjectNameEXT");
vkDebugMarkerSetObjectTagEXT = provider.getFunctionAddress("vkDebugMarkerSetObjectTagEXT");
vkDebugReportMessageEXT = provider.getFunctionAddress("vkDebugReportMessageEXT");
vkDestroyBuffer = provider.getFunctionAddress("vkDestroyBuffer");
vkDestroyBufferView = provider.getFunctionAddress("vkDestroyBufferView");
vkDestroyCommandPool = provider.getFunctionAddress("vkDestroyCommandPool");
vkDestroyDebugReportCallbackEXT = provider.getFunctionAddress("vkDestroyDebugReportCallbackEXT");
vkDestroyDescriptorPool = provider.getFunctionAddress("vkDestroyDescriptorPool");
vkDestroyDescriptorSetLayout = provider.getFunctionAddress("vkDestroyDescriptorSetLayout");
vkDestroyDevice = provider.getFunctionAddress("vkDestroyDevice");
vkDestroyEvent = provider.getFunctionAddress("vkDestroyEvent");
vkDestroyFence = provider.getFunctionAddress("vkDestroyFence");
vkDestroyFramebuffer = provider.getFunctionAddress("vkDestroyFramebuffer");
vkDestroyImage = provider.getFunctionAddress("vkDestroyImage");
vkDestroyImageView = provider.getFunctionAddress("vkDestroyImageView");
vkDestroyInstance = provider.getFunctionAddress("vkDestroyInstance");
vkDestroyPipeline = provider.getFunctionAddress("vkDestroyPipeline");
vkDestroyPipelineCache = provider.getFunctionAddress("vkDestroyPipelineCache");
vkDestroyPipelineLayout = provider.getFunctionAddress("vkDestroyPipelineLayout");
vkDestroyQueryPool = provider.getFunctionAddress("vkDestroyQueryPool");
vkDestroyRenderPass = provider.getFunctionAddress("vkDestroyRenderPass");
vkDestroySampler = provider.getFunctionAddress("vkDestroySampler");
vkDestroySemaphore = provider.getFunctionAddress("vkDestroySemaphore");
vkDestroyShaderModule = provider.getFunctionAddress("vkDestroyShaderModule");
vkDestroySurfaceKHR = provider.getFunctionAddress("vkDestroySurfaceKHR");
vkDestroySwapchainKHR = provider.getFunctionAddress("vkDestroySwapchainKHR");
vkDeviceWaitIdle = provider.getFunctionAddress("vkDeviceWaitIdle");
vkEndCommandBuffer = provider.getFunctionAddress("vkEndCommandBuffer");
vkEnumerateDeviceExtensionProperties = provider.getFunctionAddress("vkEnumerateDeviceExtensionProperties");
vkEnumerateDeviceLayerProperties = provider.getFunctionAddress("vkEnumerateDeviceLayerProperties");
vkEnumerateInstanceExtensionProperties = provider.getFunctionAddress("vkEnumerateInstanceExtensionProperties");
vkEnumerateInstanceLayerProperties = provider.getFunctionAddress("vkEnumerateInstanceLayerProperties");
vkEnumeratePhysicalDevices = provider.getFunctionAddress("vkEnumeratePhysicalDevices");
vkFlushMappedMemoryRanges = provider.getFunctionAddress("vkFlushMappedMemoryRanges");
vkFreeCommandBuffers = provider.getFunctionAddress("vkFreeCommandBuffers");
vkFreeDescriptorSets = provider.getFunctionAddress("vkFreeDescriptorSets");
vkFreeMemory = provider.getFunctionAddress("vkFreeMemory");
vkGetBufferMemoryRequirements = provider.getFunctionAddress("vkGetBufferMemoryRequirements");
vkGetDeviceMemoryCommitment = provider.getFunctionAddress("vkGetDeviceMemoryCommitment");
vkGetDeviceProcAddr = provider.getFunctionAddress("vkGetDeviceProcAddr");
vkGetDeviceQueue = provider.getFunctionAddress("vkGetDeviceQueue");
vkGetDisplayModePropertiesKHR = provider.getFunctionAddress("vkGetDisplayModePropertiesKHR");
vkGetDisplayPlaneCapabilitiesKHR = provider.getFunctionAddress("vkGetDisplayPlaneCapabilitiesKHR");
vkGetDisplayPlaneSupportedDisplaysKHR = provider.getFunctionAddress("vkGetDisplayPlaneSupportedDisplaysKHR");
vkGetEventStatus = provider.getFunctionAddress("vkGetEventStatus");
vkGetFenceStatus = provider.getFunctionAddress("vkGetFenceStatus");
vkGetImageMemoryRequirements = provider.getFunctionAddress("vkGetImageMemoryRequirements");
vkGetImageSparseMemoryRequirements = provider.getFunctionAddress("vkGetImageSparseMemoryRequirements");
vkGetImageSubresourceLayout = provider.getFunctionAddress("vkGetImageSubresourceLayout");
vkGetInstanceProcAddr = provider.getFunctionAddress("vkGetInstanceProcAddr");
vkGetMemoryWin32HandleNV = provider.getFunctionAddress("vkGetMemoryWin32HandleNV");
vkGetPhysicalDeviceDisplayPlanePropertiesKHR = provider.getFunctionAddress("vkGetPhysicalDeviceDisplayPlanePropertiesKHR");
vkGetPhysicalDeviceDisplayPropertiesKHR = provider.getFunctionAddress("vkGetPhysicalDeviceDisplayPropertiesKHR");
vkGetPhysicalDeviceExternalImageFormatPropertiesNV = provider.getFunctionAddress("vkGetPhysicalDeviceExternalImageFormatPropertiesNV");
vkGetPhysicalDeviceFeatures = provider.getFunctionAddress("vkGetPhysicalDeviceFeatures");
vkGetPhysicalDeviceFormatProperties = provider.getFunctionAddress("vkGetPhysicalDeviceFormatProperties");
vkGetPhysicalDeviceImageFormatProperties = provider.getFunctionAddress("vkGetPhysicalDeviceImageFormatProperties");
vkGetPhysicalDeviceMemoryProperties = provider.getFunctionAddress("vkGetPhysicalDeviceMemoryProperties");
vkGetPhysicalDeviceProperties = provider.getFunctionAddress("vkGetPhysicalDeviceProperties");
vkGetPhysicalDeviceQueueFamilyProperties = provider.getFunctionAddress("vkGetPhysicalDeviceQueueFamilyProperties");
vkGetPhysicalDeviceSparseImageFormatProperties = provider.getFunctionAddress("vkGetPhysicalDeviceSparseImageFormatProperties");
vkGetPhysicalDeviceSurfaceCapabilitiesKHR = provider.getFunctionAddress("vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
vkGetPhysicalDeviceSurfaceFormatsKHR = provider.getFunctionAddress("vkGetPhysicalDeviceSurfaceFormatsKHR");
vkGetPhysicalDeviceSurfacePresentModesKHR = provider.getFunctionAddress("vkGetPhysicalDeviceSurfacePresentModesKHR");
vkGetPhysicalDeviceSurfaceSupportKHR = provider.getFunctionAddress("vkGetPhysicalDeviceSurfaceSupportKHR");
vkGetPhysicalDeviceWin32PresentationSupportKHR = provider.getFunctionAddress("vkGetPhysicalDeviceWin32PresentationSupportKHR");
vkGetPhysicalDeviceXlibPresentationSupportKHR = provider.getFunctionAddress("vkGetPhysicalDeviceXlibPresentationSupportKHR");
vkGetPipelineCacheData = provider.getFunctionAddress("vkGetPipelineCacheData");
vkGetQueryPoolResults = provider.getFunctionAddress("vkGetQueryPoolResults");
vkGetRenderAreaGranularity = provider.getFunctionAddress("vkGetRenderAreaGranularity");
vkGetSwapchainImagesKHR = provider.getFunctionAddress("vkGetSwapchainImagesKHR");
vkInvalidateMappedMemoryRanges = provider.getFunctionAddress("vkInvalidateMappedMemoryRanges");
vkMapMemory = provider.getFunctionAddress("vkMapMemory");
vkMergePipelineCaches = provider.getFunctionAddress("vkMergePipelineCaches");
vkQueueBindSparse = provider.getFunctionAddress("vkQueueBindSparse");
vkQueuePresentKHR = provider.getFunctionAddress("vkQueuePresentKHR");
vkQueueSubmit = provider.getFunctionAddress("vkQueueSubmit");
vkQueueWaitIdle = provider.getFunctionAddress("vkQueueWaitIdle");
vkResetCommandBuffer = provider.getFunctionAddress("vkResetCommandBuffer");
vkResetCommandPool = provider.getFunctionAddress("vkResetCommandPool");
vkResetDescriptorPool = provider.getFunctionAddress("vkResetDescriptorPool");
vkResetEvent = provider.getFunctionAddress("vkResetEvent");
vkResetFences = provider.getFunctionAddress("vkResetFences");
vkSetEvent = provider.getFunctionAddress("vkSetEvent");
vkUnmapMemory = provider.getFunctionAddress("vkUnmapMemory");
vkUpdateDescriptorSets = provider.getFunctionAddress("vkUpdateDescriptorSets");
vkWaitForFences = provider.getFunctionAddress("vkWaitForFences");
Vulkan10 = ext.contains("Vulkan10") && VK.checkExtension("Vulkan10", VK10.isAvailable(this));
VK_AMD_draw_indirect_count = ext.contains("VK_AMD_draw_indirect_count") && VK.checkExtension("VK_AMD_draw_indirect_count", AMDDrawIndirectCount.isAvailable(this));
VK_AMD_gcn_shader = ext.contains("VK_AMD_gcn_shader");
VK_AMD_gpu_shader_half_float = ext.contains("VK_AMD_gpu_shader_half_float");
VK_AMD_negative_viewport_height = ext.contains("VK_AMD_negative_viewport_height");
VK_AMD_rasterization_order = ext.contains("VK_AMD_rasterization_order");
VK_AMD_shader_ballot = ext.contains("VK_AMD_shader_ballot");
VK_AMD_shader_explicit_vertex_parameter = ext.contains("VK_AMD_shader_explicit_vertex_parameter");
VK_AMD_shader_trinary_minmax = ext.contains("VK_AMD_shader_trinary_minmax");
VK_EXT_debug_marker = ext.contains("VK_EXT_debug_marker") && VK.checkExtension("VK_EXT_debug_marker", EXTDebugMarker.isAvailable(this));
VK_EXT_debug_report = ext.contains("VK_EXT_debug_report") && VK.checkExtension("VK_EXT_debug_report", EXTDebugReport.isAvailable(this));
VK_EXT_validation_flags = ext.contains("VK_EXT_validation_flags");
VK_IMG_filter_cubic = ext.contains("VK_IMG_filter_cubic");
VK_IMG_format_pvrtc = ext.contains("VK_IMG_format_pvrtc");
VK_KHR_display = ext.contains("VK_KHR_display") && VK.checkExtension("VK_KHR_display", KHRDisplay.isAvailable(this));
VK_KHR_display_swapchain = ext.contains("VK_KHR_display_swapchain") && VK.checkExtension("VK_KHR_display_swapchain", KHRDisplaySwapchain.isAvailable(this));
VK_KHR_sampler_mirror_clamp_to_edge = ext.contains("VK_KHR_sampler_mirror_clamp_to_edge");
VK_KHR_surface = ext.contains("VK_KHR_surface") && VK.checkExtension("VK_KHR_surface", KHRSurface.isAvailable(this));
VK_KHR_swapchain = ext.contains("VK_KHR_swapchain") && VK.checkExtension("VK_KHR_swapchain", KHRSwapchain.isAvailable(this));
VK_KHR_win32_surface = ext.contains("VK_KHR_win32_surface") && VK.checkExtension("VK_KHR_win32_surface", KHRWin32Surface.isAvailable(this));
VK_KHR_xlib_surface = ext.contains("VK_KHR_xlib_surface") && VK.checkExtension("VK_KHR_xlib_surface", KHRXlibSurface.isAvailable(this));
VK_NV_dedicated_allocation = ext.contains("VK_NV_dedicated_allocation");
VK_NV_external_memory = ext.contains("VK_NV_external_memory");
VK_NV_external_memory_capabilities = ext.contains("VK_NV_external_memory_capabilities") && VK.checkExtension("VK_NV_external_memory_capabilities", NVExternalMemoryCapabilities.isAvailable(this));
VK_NV_external_memory_win32 = ext.contains("VK_NV_external_memory_win32") && VK.checkExtension("VK_NV_external_memory_win32", NVExternalMemoryWin32.isAvailable(this));
VK_NV_glsl_shader = ext.contains("VK_NV_glsl_shader");
VK_NV_win32_keyed_mutex = ext.contains("VK_NV_win32_keyed_mutex");
}
}
| |
package de.atrsoft.successorofoak.utilities.logging.atrshmlog;
/**
* We import our jni test layer.
*/
import de.atrsoft.successorofoak.utilities.logging.atrshmlog.ATRSHMLOG;
import de.atrsoft.successorofoak.utilities.logging.atrshmlog.ATRSHMLOG.atrshmlog_error;
import de.atrsoft.successorofoak.utilities.logging.atrshmlog.ATRSHMLOG.atrshmlog_counter;
import de.atrsoft.successorofoak.utilities.logging.atrshmlog.ATRSHMLOG.atrshmlog_strategy;
/** \file ATRSHMLOGTest.java
* \brief The implementation of the jni layer test drive
*
* We simply try to use as many methods as we can.
*/
/**
* \brief the main class for usage
*/
public class ATRSHMLOGTest {
    ///////////////////////////////////////////////////////
    // we start here and execute as a demo.
    //
    // We attach to the shared memory log and exercise the JNI
    // entry points: gettime() and the write() overloads (no
    // payload, String, byte[], String with length, String with
    // offset and length), measuring how long the calls take.
    public static void main(String args[]) {
        ATRSHMLOG theLog = new ATRSHMLOG();

        // connect this process to the log; the return code is
        // printed below together with the first write result
        int result = theLog.attach();

        String payload2 = "Hello, world.";
        String payload3 = "Hello, welt.";

        // we do our first jni method calls
        // this will take a much longer time
        // so we simply are interested if it works.
        // not so much how fast it is
        long starttime = theLog.gettime(1);
        long endtime = theLog.gettime(1);

        int logresult = theLog.write(1,
                                     ATRSHMLOG.EVENT_INTERVAL_IN_TIME_C,
                                     1,
                                     starttime,
                                     endtime);

        // this is the second call.
        // so from here on we ARE interested in the time it takes.
        starttime = theLog.gettime(2);

        // we try to get the shortest time frame.
        // so we do not take the end time directly.
        // we use instead the internal gettime in the write
        // function's call.
        // so our endtime is 0 and we have an interval timing
        logresult = theLog.write(2,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_C,
                                 1,
                                 starttime,
                                 0);

        System.out.println("logging done. Times start "
                           + starttime
                           + " times end "
                           + endtime
                           + " return attach is "
                           + result
                           + " return write is "
                           + logresult);

        // now we want to know the overhead for things
        // we use a simple loop to get the time a thousand times
        long interrimtime = 0;
        long starttimeoverhead = theLog.gettime(3);
        for (int i = 0; i < 1000; i++) {
            interrimtime = theLog.gettime(3);
        }
        long endtimeoverhead = theLog.gettime(3);

        logresult = theLog.write(3,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_UCS,
                                 1,
                                 starttimeoverhead,
                                 endtimeoverhead,
                                 "loop 1000 times gettime");

        // now we measure one time taking with two times
        long start1 = theLog.gettime(4);
        long end1 = theLog.gettime(4);

        logresult = theLog.write(4,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_C,
                                 1,
                                 start1,
                                 end1);

        long end2 = theLog.gettime(5);

        logresult = theLog.write(5,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_UCS,
                                 1,
                                 end1,
                                 end2);

        // ok, this should give us the time for the gettimes and the logging
        long end3 = theLog.gettime(6);

        logresult = theLog.write(6,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_UCS,
                                 1,
                                 end2,
                                 end3,
                                 payload2);

        logresult = theLog.write(6,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_UCS,
                                 2,
                                 end3,
                                 0,
                                 "time for payload 2 logging");

        long end4 = theLog.gettime(7);

        // we try now byte arrays
        // this makes it possible to log everything.
        // but it's up to your converter to interpret it right
        byte[] payloadbytes = new byte[255];
        byte[] payloadbytes2 = new byte[255];

        // ASCII codes for "HELLO WELT".
        // NOTE(review): the original initializers (65, 68, 71, ...)
        // did not match their letter comments; fixed so the stored
        // values actually spell the intended text.
        payloadbytes[0] = 72; // H
        payloadbytes[1] = 69; // E
        payloadbytes[2] = 76; // L
        payloadbytes[3] = 76; // L
        payloadbytes[4] = 79; // O
        payloadbytes[5] = 32; //
        payloadbytes[6] = 87; // W
        payloadbytes[7] = 69; // E
        payloadbytes[8] = 76; // L
        payloadbytes[9] = 84; // T

        // ASCII codes for "HALLO WELT" (German variant, matching payload3)
        payloadbytes2[0] = 72; // H
        payloadbytes2[1] = 65; // A
        payloadbytes2[2] = 76; // L
        payloadbytes2[3] = 76; // L
        payloadbytes2[4] = 79; // O
        payloadbytes2[5] = 32; //
        payloadbytes2[6] = 87; // W
        payloadbytes2[7] = 69; // E
        payloadbytes2[8] = 76; // L
        payloadbytes2[9] = 84; // T

        long end5 = theLog.gettime(7);

        // interval end3..end4 brackets the byte array setup above
        logresult = theLog.write(7,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_C,
                                 1,
                                 end3,
                                 end4,
                                 payloadbytes);

        long end6 = theLog.gettime(8);

        logresult = theLog.write(8,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_C,
                                 1,
                                 end5,
                                 end6,
                                 payloadbytes2);

        long end7 = theLog.gettime(9);

        // String payload with an explicit length argument
        logresult = theLog.write(9,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_UCS,
                                 1,
                                 end6,
                                 end7,
                                 payload2,
                                 5);

        long end8 = theLog.gettime(10);

        logresult = theLog.write(10,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_UCS,
                                 1,
                                 end7,
                                 end8,
                                 payload3,
                                 5);

        long end9 = theLog.gettime(11);

        logresult = theLog.write(11,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_C,
                                 1002,
                                 end1,
                                 end2);

        // the static gettime variant
        long end10 = ATRSHMLOG.sgettime(12);
        long end11 = ATRSHMLOG.sgettime(12);
        long end12 = ATRSHMLOG.sgettime(12);

        logresult = theLog.write(12,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_C,
                                 1,
                                 end11,
                                 end12);

        // String payload with offset and length arguments
        logresult = theLog.write(13,
                                 ATRSHMLOG.EVENT_INTERVAL_IN_TIME_UCS,
                                 1,
                                 end7,
                                 end8,
                                 payload3,
                                 3,
                                 4);
    }
}
/* end of file */
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/video/stitcher/v1/video_stitcher_service.proto
package com.google.cloud.video.stitcher.v1;
/**
*
*
* <pre>
* Response message for VideoStitcherService.listSlates.
* </pre>
*
* Protobuf type {@code google.cloud.video.stitcher.v1.ListSlatesResponse}
*/
public final class ListSlatesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.video.stitcher.v1.ListSlatesResponse)
ListSlatesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSlatesResponse.newBuilder() to construct.
private ListSlatesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListSlatesResponse() {
slates_ = java.util.Collections.emptyList();
nextPageToken_ = "";
unreachable_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListSlatesResponse();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
  // Wire-format parsing constructor: consumes the whole input stream and
  // fills the three fields. Generated by protoc; logic kept byte-identical.
  private ListSlatesResponse(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    // bit 0: slates_ list allocated; bit 1: unreachable_ list allocated
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // end of stream
            done = true;
            break;
          case 10: // field 1, wire type 2: repeated Slate slates
            {
              if (!((mutable_bitField0_ & 0x00000001) != 0)) {
                slates_ = new java.util.ArrayList<com.google.cloud.video.stitcher.v1.Slate>();
                mutable_bitField0_ |= 0x00000001;
              }
              slates_.add(
                  input.readMessage(
                      com.google.cloud.video.stitcher.v1.Slate.parser(), extensionRegistry));
              break;
            }
          case 18: // field 2, wire type 2: string next_page_token
            {
              java.lang.String s = input.readStringRequireUtf8();
              nextPageToken_ = s;
              break;
            }
          case 26: // field 3, wire type 2: repeated string unreachable
            {
              java.lang.String s = input.readStringRequireUtf8();
              if (!((mutable_bitField0_ & 0x00000002) != 0)) {
                unreachable_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000002;
              }
              unreachable_.add(s);
              break;
            }
          default: // unknown field: preserve it, or stop if it ends the message
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // seal the repeated fields even when parsing failed part-way
      if (((mutable_bitField0_ & 0x00000001) != 0)) {
        slates_ = java.util.Collections.unmodifiableList(slates_);
      }
      if (((mutable_bitField0_ & 0x00000002) != 0)) {
        unreachable_ = unreachable_.getUnmodifiableView();
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_ListSlatesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_ListSlatesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.stitcher.v1.ListSlatesResponse.class,
com.google.cloud.video.stitcher.v1.ListSlatesResponse.Builder.class);
}
public static final int SLATES_FIELD_NUMBER = 1;
private java.util.List<com.google.cloud.video.stitcher.v1.Slate> slates_;
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.video.stitcher.v1.Slate> getSlatesList() {
return slates_;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.video.stitcher.v1.SlateOrBuilder>
getSlatesOrBuilderList() {
return slates_;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
@java.lang.Override
public int getSlatesCount() {
return slates_.size();
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
@java.lang.Override
public com.google.cloud.video.stitcher.v1.Slate getSlates(int index) {
return slates_.get(index);
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
@java.lang.Override
public com.google.cloud.video.stitcher.v1.SlateOrBuilder getSlatesOrBuilder(int index) {
return slates_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
private volatile java.lang.Object nextPageToken_;
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int UNREACHABLE_FIELD_NUMBER = 3;
private com.google.protobuf.LazyStringList unreachable_;
/**
*
*
* <pre>
* Locations that could not be reached.
* </pre>
*
* <code>repeated string unreachable = 3;</code>
*
* @return A list containing the unreachable.
*/
public com.google.protobuf.ProtocolStringList getUnreachableList() {
return unreachable_;
}
/**
*
*
* <pre>
* Locations that could not be reached.
* </pre>
*
* <code>repeated string unreachable = 3;</code>
*
* @return The count of unreachable.
*/
public int getUnreachableCount() {
return unreachable_.size();
}
/**
*
*
* <pre>
* Locations that could not be reached.
* </pre>
*
* <code>repeated string unreachable = 3;</code>
*
* @param index The index of the element to return.
* @return The unreachable at the given index.
*/
public java.lang.String getUnreachable(int index) {
return unreachable_.get(index);
}
/**
*
*
* <pre>
* Locations that could not be reached.
* </pre>
*
* <code>repeated string unreachable = 3;</code>
*
* @param index The index of the value to return.
* @return The bytes of the unreachable at the given index.
*/
public com.google.protobuf.ByteString getUnreachableBytes(int index) {
return unreachable_.getByteString(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  // Serializes the message in field-number order: slates (1),
  // next_page_token (2), unreachable (3), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < slates_.size(); i++) {
      output.writeMessage(1, slates_.get(i));
    }
    // the default (empty) token is omitted on the wire
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    for (int i = 0; i < unreachable_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, unreachable_.getRaw(i));
    }
    unknownFields.writeTo(output);
  }
  // Computes the serialized byte size of this message and caches it in
  // memoizedSize (-1 is the "not yet computed" sentinel).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < slates_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, slates_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < unreachable_.size(); i++) {
        dataSize += computeStringSizeNoTag(unreachable_.getRaw(i));
      }
      size += dataSize;
      // one tag byte per element of repeated string field 3
      size += 1 * getUnreachableList().size();
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.video.stitcher.v1.ListSlatesResponse)) {
return super.equals(obj);
}
com.google.cloud.video.stitcher.v1.ListSlatesResponse other =
(com.google.cloud.video.stitcher.v1.ListSlatesResponse) obj;
if (!getSlatesList().equals(other.getSlatesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnreachableList().equals(other.getUnreachableList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
  // Hash over the descriptor and every set field; memoized because the
  // message is immutable (0 serves as the "not yet computed" sentinel).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // repeated fields only contribute when non-empty
    if (getSlatesCount() > 0) {
      hash = (37 * hash) + SLATES_FIELD_NUMBER;
      hash = (53 * hash) + getSlatesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    if (getUnreachableCount() > 0) {
      hash = (37 * hash) + UNREACHABLE_FIELD_NUMBER;
      hash = (53 * hash) + getUnreachableList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.stitcher.v1.ListSlatesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.video.stitcher.v1.ListSlatesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for VideoStitcherService.listSlates.
* </pre>
*
* Protobuf type {@code google.cloud.video.stitcher.v1.ListSlatesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.video.stitcher.v1.ListSlatesResponse)
com.google.cloud.video.stitcher.v1.ListSlatesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_ListSlatesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_ListSlatesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.stitcher.v1.ListSlatesResponse.class,
com.google.cloud.video.stitcher.v1.ListSlatesResponse.Builder.class);
}
// Construct using com.google.cloud.video.stitcher.v1.ListSlatesResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSlatesFieldBuilder();
}
}
    // Resets the builder to the default (empty) message state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (slatesBuilder_ == null) {
        slates_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        slatesBuilder_.clear();
      }
      nextPageToken_ = "";
      unreachable_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_ListSlatesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.video.stitcher.v1.ListSlatesResponse getDefaultInstanceForType() {
return com.google.cloud.video.stitcher.v1.ListSlatesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.video.stitcher.v1.ListSlatesResponse build() {
com.google.cloud.video.stitcher.v1.ListSlatesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Builds a message from the builder's current state without an
    // isInitialized check; repeated fields are frozen before hand-off.
    @java.lang.Override
    public com.google.cloud.video.stitcher.v1.ListSlatesResponse buildPartial() {
      com.google.cloud.video.stitcher.v1.ListSlatesResponse result =
          new com.google.cloud.video.stitcher.v1.ListSlatesResponse(this);
      int from_bitField0_ = bitField0_;
      if (slatesBuilder_ == null) {
        // plain-list path: freeze the list and clear the "mutable" bit
        if (((bitField0_ & 0x00000001) != 0)) {
          slates_ = java.util.Collections.unmodifiableList(slates_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.slates_ = slates_;
      } else {
        result.slates_ = slatesBuilder_.build();
      }
      result.nextPageToken_ = nextPageToken_;
      if (((bitField0_ & 0x00000002) != 0)) {
        unreachable_ = unreachable_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000002);
      }
      result.unreachable_ = unreachable_;
      onBuilt();
      return result;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.video.stitcher.v1.ListSlatesResponse) {
return mergeFrom((com.google.cloud.video.stitcher.v1.ListSlatesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another ListSlatesResponse into this builder: repeated fields
    // are appended, scalar fields are overwritten when set on `other`.
    public Builder mergeFrom(com.google.cloud.video.stitcher.v1.ListSlatesResponse other) {
      if (other == com.google.cloud.video.stitcher.v1.ListSlatesResponse.getDefaultInstance())
        return this;
      if (slatesBuilder_ == null) {
        if (!other.slates_.isEmpty()) {
          if (slates_.isEmpty()) {
            // adopt the other message's (immutable) list wholesale
            slates_ = other.slates_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSlatesIsMutable();
            slates_.addAll(other.slates_);
          }
          onChanged();
        }
      } else {
        if (!other.slates_.isEmpty()) {
          if (slatesBuilder_.isEmpty()) {
            // drop the empty field builder and fall back to the plain list,
            // re-creating the builder only when the runtime requires it
            slatesBuilder_.dispose();
            slatesBuilder_ = null;
            slates_ = other.slates_;
            bitField0_ = (bitField0_ & ~0x00000001);
            slatesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSlatesFieldBuilder()
                    : null;
          } else {
            slatesBuilder_.addAllMessages(other.slates_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        onChanged();
      }
      if (!other.unreachable_.isEmpty()) {
        if (unreachable_.isEmpty()) {
          unreachable_ = other.unreachable_;
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          ensureUnreachableIsMutable();
          unreachable_.addAll(other.unreachable_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Parses a message from the stream and merges it into this builder.
    // On a parse error the partially parsed message is still merged (in
    // the finally block) before the exception is rethrown.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.video.stitcher.v1.ListSlatesResponse parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.video.stitcher.v1.ListSlatesResponse) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.video.stitcher.v1.Slate> slates_ =
java.util.Collections.emptyList();
private void ensureSlatesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
slates_ = new java.util.ArrayList<com.google.cloud.video.stitcher.v1.Slate>(slates_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.video.stitcher.v1.Slate,
com.google.cloud.video.stitcher.v1.Slate.Builder,
com.google.cloud.video.stitcher.v1.SlateOrBuilder>
slatesBuilder_;
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public java.util.List<com.google.cloud.video.stitcher.v1.Slate> getSlatesList() {
if (slatesBuilder_ == null) {
return java.util.Collections.unmodifiableList(slates_);
} else {
return slatesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public int getSlatesCount() {
if (slatesBuilder_ == null) {
return slates_.size();
} else {
return slatesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public com.google.cloud.video.stitcher.v1.Slate getSlates(int index) {
if (slatesBuilder_ == null) {
return slates_.get(index);
} else {
return slatesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public Builder setSlates(int index, com.google.cloud.video.stitcher.v1.Slate value) {
if (slatesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSlatesIsMutable();
slates_.set(index, value);
onChanged();
} else {
slatesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public Builder setSlates(
int index, com.google.cloud.video.stitcher.v1.Slate.Builder builderForValue) {
if (slatesBuilder_ == null) {
ensureSlatesIsMutable();
slates_.set(index, builderForValue.build());
onChanged();
} else {
slatesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public Builder addSlates(com.google.cloud.video.stitcher.v1.Slate value) {
if (slatesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSlatesIsMutable();
slates_.add(value);
onChanged();
} else {
slatesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public Builder addSlates(int index, com.google.cloud.video.stitcher.v1.Slate value) {
if (slatesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSlatesIsMutable();
slates_.add(index, value);
onChanged();
} else {
slatesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public Builder addSlates(com.google.cloud.video.stitcher.v1.Slate.Builder builderForValue) {
if (slatesBuilder_ == null) {
ensureSlatesIsMutable();
slates_.add(builderForValue.build());
onChanged();
} else {
slatesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public Builder addSlates(
int index, com.google.cloud.video.stitcher.v1.Slate.Builder builderForValue) {
if (slatesBuilder_ == null) {
ensureSlatesIsMutable();
slates_.add(index, builderForValue.build());
onChanged();
} else {
slatesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public Builder addAllSlates(
java.lang.Iterable<? extends com.google.cloud.video.stitcher.v1.Slate> values) {
if (slatesBuilder_ == null) {
ensureSlatesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, slates_);
onChanged();
} else {
slatesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public Builder clearSlates() {
if (slatesBuilder_ == null) {
slates_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
slatesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public Builder removeSlates(int index) {
if (slatesBuilder_ == null) {
ensureSlatesIsMutable();
slates_.remove(index);
onChanged();
} else {
slatesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public com.google.cloud.video.stitcher.v1.Slate.Builder getSlatesBuilder(int index) {
return getSlatesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public com.google.cloud.video.stitcher.v1.SlateOrBuilder getSlatesOrBuilder(int index) {
if (slatesBuilder_ == null) {
return slates_.get(index);
} else {
return slatesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public java.util.List<? extends com.google.cloud.video.stitcher.v1.SlateOrBuilder>
getSlatesOrBuilderList() {
if (slatesBuilder_ != null) {
return slatesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(slates_);
}
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public com.google.cloud.video.stitcher.v1.Slate.Builder addSlatesBuilder() {
return getSlatesFieldBuilder()
.addBuilder(com.google.cloud.video.stitcher.v1.Slate.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public com.google.cloud.video.stitcher.v1.Slate.Builder addSlatesBuilder(int index) {
return getSlatesFieldBuilder()
.addBuilder(index, com.google.cloud.video.stitcher.v1.Slate.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of slates
* </pre>
*
* <code>repeated .google.cloud.video.stitcher.v1.Slate slates = 1;</code>
*/
public java.util.List<com.google.cloud.video.stitcher.v1.Slate.Builder> getSlatesBuilderList() {
return getSlatesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.video.stitcher.v1.Slate,
com.google.cloud.video.stitcher.v1.Slate.Builder,
com.google.cloud.video.stitcher.v1.SlateOrBuilder>
getSlatesFieldBuilder() {
if (slatesBuilder_ == null) {
slatesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.video.stitcher.v1.Slate,
com.google.cloud.video.stitcher.v1.Slate.Builder,
com.google.cloud.video.stitcher.v1.SlateOrBuilder>(
slates_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
slates_ = null;
}
return slatesBuilder_;
}
    // Holds either a java.lang.String or a ByteString; decoded lazily
    // (standard generated-code caching idiom for proto string fields).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent reads skip UTF-8 decoding.
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString form, mirroring getNextPageToken().
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Reset to the default instance's value (the empty string for proto3).
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token identifying a page of results the server should return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 string fields must be valid UTF-8
      nextPageToken_ = value;
      onChanged();
      return this;
    }
    private com.google.protobuf.LazyStringList unreachable_ =
        com.google.protobuf.LazyStringArrayList.EMPTY;
    // Copy-on-write guard: bit 0x00000002 of bitField0_ records whether
    // unreachable_ is already a private mutable copy or still the shared
    // immutable EMPTY list. Mutators call this before any modification.
    private void ensureUnreachableIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        unreachable_ = new com.google.protobuf.LazyStringArrayList(unreachable_);
        bitField0_ |= 0x00000002;
      }
    }
    /**
     *
     *
     * <pre>
     * Locations that could not be reached.
     * </pre>
     *
     * <code>repeated string unreachable = 3;</code>
     *
     * @return A list containing the unreachable.
     */
    public com.google.protobuf.ProtocolStringList getUnreachableList() {
      // Expose a read-only view so callers cannot bypass ensureUnreachableIsMutable().
      return unreachable_.getUnmodifiableView();
    }
    /**
     *
     *
     * <pre>
     * Locations that could not be reached.
     * </pre>
     *
     * <code>repeated string unreachable = 3;</code>
     *
     * @return The count of unreachable.
     */
    public int getUnreachableCount() {
      return unreachable_.size();
    }
    /**
     *
     *
     * <pre>
     * Locations that could not be reached.
     * </pre>
     *
     * <code>repeated string unreachable = 3;</code>
     *
     * @param index The index of the element to return.
     * @return The unreachable at the given index.
     */
    public java.lang.String getUnreachable(int index) {
      return unreachable_.get(index);
    }
    /**
     *
     *
     * <pre>
     * Locations that could not be reached.
     * </pre>
     *
     * <code>repeated string unreachable = 3;</code>
     *
     * @param index The index of the value to return.
     * @return The bytes of the unreachable at the given index.
     */
    public com.google.protobuf.ByteString getUnreachableBytes(int index) {
      return unreachable_.getByteString(index);
    }
    /**
     *
     *
     * <pre>
     * Locations that could not be reached.
     * </pre>
     *
     * <code>repeated string unreachable = 3;</code>
     *
     * @param index The index to set the value at.
     * @param value The unreachable to set.
     * @return This builder for chaining.
     */
    public Builder setUnreachable(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureUnreachableIsMutable();
      unreachable_.set(index, value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Locations that could not be reached.
     * </pre>
     *
     * <code>repeated string unreachable = 3;</code>
     *
     * @param value The unreachable to add.
     * @return This builder for chaining.
     */
    public Builder addUnreachable(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureUnreachableIsMutable();
      unreachable_.add(value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Locations that could not be reached.
     * </pre>
     *
     * <code>repeated string unreachable = 3;</code>
     *
     * @param values The unreachable to add.
     * @return This builder for chaining.
     */
    public Builder addAllUnreachable(java.lang.Iterable<java.lang.String> values) {
      ensureUnreachableIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, unreachable_);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Locations that could not be reached.
     * </pre>
     *
     * <code>repeated string unreachable = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearUnreachable() {
      // Drop the mutable copy and clear the mutability bit in one step.
      unreachable_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Locations that could not be reached.
     * </pre>
     *
     * <code>repeated string unreachable = 3;</code>
     *
     * @param value The bytes of the unreachable to add.
     * @return This builder for chaining.
     */
    public Builder addUnreachableBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 string fields must be valid UTF-8
      ensureUnreachableIsMutable();
      unreachable_.add(value);
      onChanged();
      return this;
    }
    // Standard generated overrides: unknown-field handling is delegated
    // entirely to the GeneratedMessageV3.Builder superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.video.stitcher.v1.ListSlatesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.video.stitcher.v1.ListSlatesResponse)
  // Eagerly-created singleton default instance for ListSlatesResponse.
  private static final com.google.cloud.video.stitcher.v1.ListSlatesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.video.stitcher.v1.ListSlatesResponse();
  }
  public static com.google.cloud.video.stitcher.v1.ListSlatesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser; parsePartialFrom delegates to the message's
  // CodedInputStream constructor (older generated-code style).
  private static final com.google.protobuf.Parser<ListSlatesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListSlatesResponse>() {
        @java.lang.Override
        public ListSlatesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ListSlatesResponse(input, extensionRegistry);
        }
      };
  public static com.google.protobuf.Parser<ListSlatesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListSlatesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.video.stitcher.v1.ListSlatesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.gradle.service.project;
import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.ProjectKeys;
import com.intellij.openapi.externalSystem.model.project.*;
import com.intellij.openapi.externalSystem.model.task.ExternalSystemTaskId;
import com.intellij.openapi.externalSystem.model.task.ExternalSystemTaskNotificationListener;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import org.gradle.tooling.ProjectConnection;
import org.gradle.tooling.model.build.BuildEnvironment;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.gradle.model.data.BuildParticipant;
import org.jetbrains.plugins.gradle.model.data.BuildScriptClasspathData;
import org.jetbrains.plugins.gradle.model.data.CompositeBuildData;
import org.jetbrains.plugins.gradle.model.data.GradleSourceSetData;
import org.jetbrains.plugins.gradle.settings.DistributionType;
import org.jetbrains.plugins.gradle.settings.GradleExecutionSettings;
import org.jetbrains.plugins.gradle.util.GradleConstants;
import java.io.File;
import java.util.*;
import static com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil.*;
import static com.intellij.openapi.util.text.StringUtil.isEmpty;
import static com.intellij.openapi.util.text.StringUtil.isNotEmpty;
import static org.jetbrains.plugins.gradle.service.project.GradleProjectResolverUtil.BUILD_SRC_NAME;
import static org.jetbrains.plugins.gradle.service.project.GradleProjectResolverUtil.getDefaultModuleTypeId;
/**
 * Discovers the {@code buildSrc} project of the main Gradle build and of every
 * included build in a composite, imports each one in a nested sync, and appends
 * the resulting modules plus their runtime classpath to the main build's
 * project data graph (so build scripts get code insight for buildSrc classes).
 *
 * @author Vladislav.Soroka
 */
public class GradleBuildSrcProjectsResolver {
  /** Module property marking a module as belonging to a {@code buildSrc} project. */
  public static final String BUILD_SRC_MODULE_PROPERTY = "buildSrcModule";
  @NotNull
  private final GradleProjectResolver myProjectResolver;
  @NotNull
  private final DefaultProjectResolverContext myResolverContext;
  // Gradle user home of the main build; when known, a LOCAL distribution is
  // reused for the buildSrc import instead of resolving one again.
  @Nullable
  private final File myGradleUserHome;
  // Execution settings of the main build import; copied onto buildSrc imports.
  @Nullable
  private final GradleExecutionSettings myMainBuildExecutionSettings;
  @NotNull
  private final ExternalSystemTaskNotificationListener myListener;
  @NotNull
  private final ExternalSystemTaskId mySyncTaskId;
  @NotNull
  private final GradleProjectResolverExtension myResolverChain;

  public GradleBuildSrcProjectsResolver(@NotNull GradleProjectResolver projectResolver,
                                        @NotNull DefaultProjectResolverContext resolverContext,
                                        @Nullable File gradleUserHome,
                                        @Nullable GradleExecutionSettings mainBuildSettings,
                                        @NotNull ExternalSystemTaskNotificationListener listener,
                                        @NotNull ExternalSystemTaskId syncTaskId,
                                        @NotNull GradleProjectResolverExtension projectResolverChain) {
    myProjectResolver = projectResolver;
    myResolverContext = resolverContext;
    myGradleUserHome = gradleUserHome;
    myMainBuildExecutionSettings = mainBuildSettings;
    myListener = listener;
    mySyncTaskId = syncTaskId;
    myResolverChain = projectResolverChain;
  }

  /**
   * Finds every buildSrc project reachable from the given main-build project
   * node (one per participating build, including composite participants),
   * imports it and appends its data to {@code mainBuildProjectDataNode}.
   *
   * @param mainBuildProjectDataNode the already-resolved data node of the main build
   */
  public void discoverAndAppendTo(@NotNull DataNode<ProjectData> mainBuildProjectDataNode) {
    String gradleHome = myGradleUserHome == null ? null : myGradleUserHome.getPath();
    ProjectData mainBuildProjectData = mainBuildProjectDataNode.getData();
    String projectPath = mainBuildProjectData.getLinkedExternalProjectPath();

    // Map each project path of an included (composite) build to the root path
    // of the build it belongs to, and remember a display name per build root.
    Map<String, String> includedBuildsPaths = ContainerUtil.newHashMap();
    Map<String, String> buildNames = ContainerUtil.newHashMap();
    buildNames.put(projectPath, mainBuildProjectData.getExternalName());
    DataNode<CompositeBuildData> compositeBuildData = find(mainBuildProjectDataNode, CompositeBuildData.KEY);
    if (compositeBuildData != null) {
      for (BuildParticipant buildParticipant : compositeBuildData.getData().getCompositeParticipants()) {
        String buildParticipantRootPath = buildParticipant.getRootPath();
        buildNames.put(buildParticipantRootPath, buildParticipant.getRootProjectName());
        for (String path : buildParticipant.getProjects()) {
          includedBuildsPaths.put(path, buildParticipantRootPath);
        }
      }
    }

    // Group build-script classpath nodes by the root path of the build each
    // module belongs to; modules of the main build group under projectPath.
    MultiMap<String, DataNode<BuildScriptClasspathData>> buildClasspathNodesMap = MultiMap.createSmart();
    Map<String, ModuleData> includedModulesPaths = ContainerUtil.newHashMap();
    for (DataNode<ModuleData> moduleDataNode : findAll(mainBuildProjectDataNode, ProjectKeys.MODULE)) {
      String path = moduleDataNode.getData().getLinkedExternalProjectPath();
      includedModulesPaths.put(path, moduleDataNode.getData());
      DataNode<BuildScriptClasspathData> scriptClasspathDataNode = find(moduleDataNode, BuildScriptClasspathData.KEY);
      if (scriptClasspathDataNode != null) {
        String rootPath = includedBuildsPaths.get(path);
        buildClasspathNodesMap.putValue(rootPath != null ? rootPath : projectPath, scriptClasspathDataNode);
      }
    }

    List<String> jvmOptions = ContainerUtil.newSmartList();
    // the BuildEnvironment jvm arguments of the main build should be used for the 'buildSrc' import
    // to avoid spawning of the second gradle daemon
    BuildEnvironment mainBuildEnvironment = myResolverContext.getModels().getBuildEnvironment();
    if (mainBuildEnvironment != null) {
      jvmOptions.addAll(mainBuildEnvironment.getJava().getJvmArguments());
    }
    if (myMainBuildExecutionSettings != null) {
      jvmOptions.addAll(myMainBuildExecutionSettings.getJvmArguments());
    }

    // Run one nested buildSrc import per participating build root.
    for (String buildPath : buildClasspathNodesMap.keySet()) {
      Collection<DataNode<BuildScriptClasspathData>> buildClasspathNodes = buildClasspathNodesMap.get(buildPath);

      GradleExecutionSettings buildSrcProjectSettings;
      if (gradleHome != null) {
        if (myMainBuildExecutionSettings != null) {
          // Clone the main-build settings but force a LOCAL distribution so the
          // buildSrc import reuses the already-resolved Gradle installation.
          buildSrcProjectSettings = new GradleExecutionSettings(gradleHome,
                                                               myMainBuildExecutionSettings.getServiceDirectory(),
                                                               DistributionType.LOCAL,
                                                               myMainBuildExecutionSettings.isOfflineWork());
          buildSrcProjectSettings.setIdeProjectPath(myMainBuildExecutionSettings.getIdeProjectPath());
          buildSrcProjectSettings.setJavaHome(myMainBuildExecutionSettings.getJavaHome());
          buildSrcProjectSettings.setResolveModulePerSourceSet(myMainBuildExecutionSettings.isResolveModulePerSourceSet());
          buildSrcProjectSettings.setUseQualifiedModuleNames(myMainBuildExecutionSettings.isUseQualifiedModuleNames());
          buildSrcProjectSettings.setRemoteProcessIdleTtlInMs(myMainBuildExecutionSettings.getRemoteProcessIdleTtlInMs());
          buildSrcProjectSettings.setVerboseProcessing(myMainBuildExecutionSettings.isVerboseProcessing());
          buildSrcProjectSettings.setWrapperPropertyFile(myMainBuildExecutionSettings.getWrapperPropertyFile());
          buildSrcProjectSettings.withArguments(myMainBuildExecutionSettings.getArguments())
            .withEnvironmentVariables(myMainBuildExecutionSettings.getEnv())
            .passParentEnvs(myMainBuildExecutionSettings.isPassParentEnvs())
            .withVmOptions(jvmOptions);
        }
        else {
          buildSrcProjectSettings = new GradleExecutionSettings(gradleHome, null, DistributionType.LOCAL, false);
        }
      }
      else {
        buildSrcProjectSettings = myMainBuildExecutionSettings;
      }

      // By Gradle convention the buildSrc project lives directly under the build root.
      final String buildSrcProjectPath = buildPath + "/buildSrc";
      DefaultProjectResolverContext buildSrcResolverCtx =
        new DefaultProjectResolverContext(mySyncTaskId, buildSrcProjectPath, buildSrcProjectSettings, myListener, false);
      myResolverContext.copyUserDataTo(buildSrcResolverCtx);
      String buildName = buildNames.get(buildPath);
      String buildSrcGroup = getBuildSrcGroup(buildPath, buildName);
      buildSrcResolverCtx.setBuildSrcGroup(buildSrcGroup);
      handleBuildSrcProject(mainBuildProjectDataNode,
                            buildName,
                            buildClasspathNodes,
                            includedModulesPaths,
                            buildSrcResolverCtx,
                            myProjectResolver.getProjectDataFunction(buildSrcResolverCtx, myResolverChain, true));
    }
  }

  /**
   * Imports one buildSrc project and merges its modules into the main result.
   * Also appends the buildSrc runtime classpath (classes + sources) to every
   * build-script classpath node of the owning build.
   *
   * @param resultProjectDataNode          the main build's project data node to append to
   * @param buildName                      display name of the owning build (nullable)
   * @param buildClasspathNodes            build-script classpath nodes of the owning build
   * @param includedModulesPaths           already-known modules keyed by project path
   * @param buildSrcResolverCtx            resolver context pointing at the buildSrc directory
   * @param projectConnectionDataNodeFunction performs the actual nested import
   */
  private void handleBuildSrcProject(@NotNull DataNode<ProjectData> resultProjectDataNode,
                                     @Nullable String buildName,
                                     @NotNull Collection<DataNode<BuildScriptClasspathData>> buildClasspathNodes,
                                     @NotNull Map<String, ModuleData> includedModulesPaths,
                                     @NotNull DefaultProjectResolverContext buildSrcResolverCtx,
                                     @NotNull Function<ProjectConnection, DataNode<ProjectData>> projectConnectionDataNodeFunction) {
    final String projectPath = buildSrcResolverCtx.getProjectPath();
    File projectPathFile = new File(projectPath);
    if (!projectPathFile.isDirectory()) {
      return;
    }
    // Skip buildSrc dirs that contain nothing but Gradle's own output dirs.
    if (ArrayUtil.isEmpty(projectPathFile.list((dir, name) -> !name.equals(".gradle") && !name.equals("build")))) {
      return;
    }

    // In preview mode only register a stub module; no nested import is run.
    if (buildSrcResolverCtx.isPreviewMode()) {
      ModuleData buildSrcModuleData =
        new ModuleData(":buildSrc", GradleConstants.SYSTEM_ID, getDefaultModuleTypeId(), BUILD_SRC_NAME, projectPath, projectPath);
      buildSrcModuleData.setProperty(BUILD_SRC_MODULE_PROPERTY, "true");
      resultProjectDataNode.createChild(ProjectKeys.MODULE, buildSrcModuleData);
      return;
    }

    final DataNode<ProjectData> buildSrcProjectDataNode = myProjectResolver.getHelper().execute(
      projectPath, buildSrcResolverCtx.getSettings(), projectConnectionDataNodeFunction);

    if (buildSrcProjectDataNode == null) return;

    // Index imported modules by id and locate the "main" buildSrc module whose
    // runtime classpath feeds the build scripts below.
    Map<String, DataNode<? extends ModuleData>> buildSrcModules = ContainerUtil.newHashMap();
    boolean modulePerSourceSet = buildSrcResolverCtx.isResolveModulePerSourceSet();
    DataNode<? extends ModuleData> buildSrcModuleNode = null;
    for (DataNode<ModuleData> moduleNode : getChildren(buildSrcProjectDataNode, ProjectKeys.MODULE)) {
      final ModuleData moduleData = moduleNode.getData();
      buildSrcModules.put(moduleData.getId(), moduleNode);
      boolean isBuildSrcModule = BUILD_SRC_NAME.equals(moduleData.getExternalName());

      if (isBuildSrcModule && !modulePerSourceSet) {
        buildSrcModuleNode = moduleNode;
      }
      if (modulePerSourceSet) {
        for (DataNode<GradleSourceSetData> sourceSetNode : getChildren(moduleNode, GradleSourceSetData.KEY)) {
          buildSrcModules.put(sourceSetNode.getData().getId(), sourceSetNode);
          // With per-source-set modules, the "main" source set represents buildSrc.
          if (isBuildSrcModule && buildSrcModuleNode == null && sourceSetNode.getData().getExternalName().endsWith(":main")) {
            buildSrcModuleNode = sourceSetNode;
          }
        }
      }

      ModuleData includedModule = includedModulesPaths.get(moduleData.getLinkedExternalProjectPath());
      if (includedModule == null) {
        // New module: mark it as buildSrc-owned and attach it to the main result.
        moduleData.setProperty(BUILD_SRC_MODULE_PROPERTY, "true");
        resultProjectDataNode.addChild(moduleNode);
        if (!buildSrcResolverCtx.isUseQualifiedModuleNames()) {
          // adjust ide module group
          if (moduleData.getIdeModuleGroup() != null) {
            String[] moduleGroup = ArrayUtil.prepend(
              isNotEmpty(buildName) ? buildName : resultProjectDataNode.getData().getInternalName(),
              moduleData.getIdeModuleGroup());
            moduleData.setIdeModuleGroup(moduleGroup);
            for (DataNode<GradleSourceSetData> sourceSetNode : getChildren(moduleNode, GradleSourceSetData.KEY)) {
              sourceSetNode.getData().setIdeModuleGroup(moduleGroup);
            }
          }
        }
      }
      else {
        // Module already present from the main import; just mark it.
        includedModule.setProperty(BUILD_SRC_MODULE_PROPERTY, "true");
      }
    }

    if (buildSrcModuleNode != null) {
      // Collect the buildSrc production-runtime classpath: its own sources,
      // sources of runtime module dependencies, and library classes/sources.
      Set<String> buildSrcRuntimeSourcesPaths = ContainerUtil.newHashSet();
      Set<String> buildSrcRuntimeClassesPaths = ContainerUtil.newHashSet();
      addSourcePaths(buildSrcRuntimeSourcesPaths, buildSrcModuleNode);
      for (DataNode<?> child : buildSrcModuleNode.getChildren()) {
        Object childData = child.getData();
        if (childData instanceof ModuleDependencyData && ((ModuleDependencyData)childData).getScope().isForProductionRuntime()) {
          DataNode<? extends ModuleData> depModuleNode = buildSrcModules.get(((ModuleDependencyData)childData).getTarget().getId());
          if (depModuleNode != null) {
            addSourcePaths(buildSrcRuntimeSourcesPaths, depModuleNode);
          }
        }
        else if (childData instanceof LibraryDependencyData) {
          LibraryDependencyData dependencyData = (LibraryDependencyData)childData;
          // exclude generated gradle-api jar the gradle api classes/sources handled separately by BuildClasspathModuleGradleDataService
          if (dependencyData.getExternalName().startsWith("gradle-api-")) {
            continue;
          }
          LibraryData libraryData = dependencyData.getTarget();
          buildSrcRuntimeSourcesPaths.addAll(libraryData.getPaths(LibraryPathType.SOURCE));
          buildSrcRuntimeClassesPaths.addAll(libraryData.getPaths(LibraryPathType.BINARY));
        }
      }

      if (!buildSrcRuntimeSourcesPaths.isEmpty() || !buildSrcRuntimeClassesPaths.isEmpty()) {
        // Rebuild each build-script classpath node with the buildSrc entry
        // appended; the old node is cleared and replaced under the same parent.
        buildClasspathNodes.forEach(classpathNode -> {
          BuildScriptClasspathData data = classpathNode.getData();

          List<BuildScriptClasspathData.ClasspathEntry> classpathEntries = ContainerUtil.newArrayList();
          classpathEntries.addAll(data.getClasspathEntries());
          classpathEntries.add(new BuildScriptClasspathData.ClasspathEntry(
            new HashSet<>(buildSrcRuntimeClassesPaths),
            new HashSet<>(buildSrcRuntimeSourcesPaths),
            Collections.emptySet()
          ));

          BuildScriptClasspathData buildScriptClasspathData = new BuildScriptClasspathData(GradleConstants.SYSTEM_ID, classpathEntries);
          buildScriptClasspathData.setGradleHomeDir(data.getGradleHomeDir());

          DataNode<?> parent = classpathNode.getParent();
          assert parent != null;
          parent.createChild(BuildScriptClasspathData.KEY, buildScriptClasspathData);
          classpathNode.clear(true);
        });
      }
    }
  }

  /** Adds the SOURCE-root paths of all content roots of {@code moduleNode} to {@code paths}. */
  private static void addSourcePaths(Set<String> paths, DataNode<? extends ModuleData> moduleNode) {
    getChildren(moduleNode, ProjectKeys.CONTENT_ROOT)
      .stream()
      .flatMap(contentNode -> contentNode.getData().getPaths(ExternalSystemSourceType.SOURCE).stream())
      .map(ContentRootData.SourceRoot::getPath)
      .forEach(paths::add);
  }

  /**
   * Chooses the IDE group name for buildSrc modules: the owning build's name
   * when known, otherwise the directory name of the build root.
   */
  @NotNull
  private static String getBuildSrcGroup(String buildPath, String buildName) {
    if (isEmpty(buildName)) {
      return new File(buildPath).getName();
    } else {
      return buildName;
    }
  }
}
| |
/***************************************************
Authors: Joseph Olsen, Sean Jefferies, Russ Nelson, Judd Richards
PERSON:
Responsibilities:
Add each attribute in a GedCom individual record to Person
Print each attribute
Allow each attribute to be read
Collaborators:
gEvent
Attribute
*******************************************************/
import java.awt.*;
import java.util.*;
public class Person extends gEvent {

    // --- Individual attributes found in a GedCom INDI record ---
    char sex;               // sex of the individual as read from GEDCOM
    String gedid;           // GEDCOM record id of this individual
    String named_after;
    String occupation;
    Attribute name;
    Attribute nation;
    Attribute title;
    gEvent baptism;
    gEvent birth;
    gEvent death;
    gEvent burial;
    gEvent christened;
    gEvent grad_info;
    gEvent marriage;
    Kin spouse;
    Kin father;
    Kin mother;
    Vector children;        // elements are Kin
    // NOTE(review): this traversal cursor is static and therefore shared by
    // ALL Person instances -- interleaved Get_child() loops over two Persons
    // corrupt each other. Kept static for backward compatibility.
    static int child_index = 0;

    /** Person class constructor: initializes every attribute to an empty value. */
    public Person()
    {
        gedid = new String();
        name = new Attribute();
        named_after = new String();
        occupation = new String();
        nation = new Attribute();
        title = new Attribute();
        baptism = new gEvent();
        birth = new gEvent();
        death = new gEvent();
        burial = new gEvent();
        christened = new gEvent();
        grad_info = new gEvent();
        marriage = new gEvent();
        spouse = new Kin();
        father = new Kin();
        mother = new Kin();
        children = new Vector(1);
    }

    /**
     * Person class copy constructor.
     *
     * @param p Person to copy
     */
    public Person(Person p)
    {
        sex = p.sex;
        named_after = new String(p.named_after);
        occupation = new String(p.occupation);
        gedid = new String(p.gedid);
        name = new Attribute(p.name);
        nation = new Attribute(p.nation);
        title = new Attribute(p.title);
        baptism = new gEvent(p.baptism);
        birth = new gEvent(p.birth);
        death = new gEvent(p.death);
        burial = new gEvent(p.burial);
        christened = new gEvent(p.christened);
        marriage = new gEvent(p.marriage);
        grad_info = new gEvent(p.grad_info);
        spouse = new Kin(p.spouse);
        father = new Kin(p.father);
        mother = new Kin(p.mother);
        // shallow copy: the Kin elements themselves are shared with p
        children = (Vector)p.children.clone();
    }

    /**
     * Stores the sex of the individual.
     *
     * @param c character denoting the sex of the individual
     */
    public void Add_sex(char c)
    {
        sex = c;
    }

    /** @return the value of the sex attribute */
    public char Get_sex()
    {
        return sex;
    }

    /** @return the value of the title attribute */
    public String Get_title()
    {
        return title.Get_value();
    }

    /** @return the value of the nation attribute */
    public String Get_nation()
    {
        return nation.Get_value();
    }

    /**
     * Stores the named-after field of the individual.
     *
     * @param na named-after value
     */
    public void Add_named_after(String na)
    {
        named_after = new String(na);
    }

    /** @return the value of the named_after attribute */
    public String Get_named_after()
    {
        return named_after;
    }

    /**
     * Stores the occupation of the individual.
     *
     * @param o occupation value
     */
    public void Add_occup(String o)
    {
        occupation = new String(o);
    }

    /** @return the value of the occupation attribute */
    public String Get_occup()
    {
        return occupation;
    }

    /**
     * Stores the name of the individual. GEDCOM surname delimiters are
     * stripped: "John /Smith/" becomes "John Smith" (anything after the
     * closing "/" is discarded, as before).
     *
     * @param a quality level of the data
     * @param s source level of the data
     * @param n raw GEDCOM name of the individual
     */
    public void Add_name(int a, int s, String n)
    {
        // BUG FIX: the quality/source parameters were previously ignored,
        // unlike Add_nation/Add_title which record them.
        name.Add_qual(a);
        name.Add_source(s);
        int f = n.indexOf('/');
        int l = n.lastIndexOf('/');
        // ROBUSTNESS: only strip delimiters when a /surname/ pair is present;
        // previously a name without "/" threw StringIndexOutOfBoundsException.
        if (f >= 0 && l > f) {
            n = n.substring(0, f) + n.substring(f + 1, l);
        }
        name.Add_value(n);
    }

    /** @return the value of the name attribute */
    public String Get_name()
    {
        return name.Get_value();
    }

    /**
     * Stores the GEDCOM id of the individual.
     *
     * @param n id string of the individual
     */
    public void Add_id(String n)
    {
        gedid = new String(n);
    }

    /** @return the value of the id attribute */
    public String Get_id()
    {
        return gedid;
    }

    /**
     * Stores the three values of the nation attribute.
     *
     * @param a quality level of the data
     * @param s source level of the data
     * @param n actual value of the data
     */
    public void Add_nation(int a, int s, String n)
    {
        nation.Add_qual(a);
        nation.Add_source(s);
        nation.Add_value(n);
    }

    /**
     * Stores the three values of the title attribute.
     *
     * @param a quality level of the data
     * @param s source level of the data
     * @param t actual value of the data
     */
    public void Add_title(int a, int s, String t)
    {
        title.Add_qual(a);
        title.Add_source(s);
        title.Add_value(t);
    }

    /**
     * Stores the four values of the baptism event.
     *
     * @param p place of the event
     * @param d date of the event
     * @param q quality level of the data
     * @param s source level of the data
     */
    public void Add_baptism(String p, String d, int q, int s)
    {
        baptism.Add_qual(q);
        baptism.Add_source(s);
        baptism.Add_place(p);
        baptism.Add_date(d);
        baptism.Add_event_value("");
    }

    /**
     * Stores the four values of the birth event.
     *
     * @param p place of the event
     * @param d date of the event
     * @param q quality level of the data
     * @param s source level of the data
     */
    public void Add_birth(String p, String d, int q, int s)
    {
        birth.Add_qual(q);
        birth.Add_source(s);
        birth.Add_place(p);
        birth.Add_date(d);
        birth.Add_event_value("");
    }

    /**
     * Stores the four values of the death event.
     *
     * @param p place of the event
     * @param d date of the event
     * @param q quality level of the data
     * @param s source level of the data
     */
    public void Add_death(String p, String d, int q, int s)
    {
        death.Add_qual(q);
        death.Add_source(s);
        death.Add_place(p);
        death.Add_date(d);
        death.Add_event_value("");
    }

    /**
     * Stores the four values of the burial event.
     *
     * @param p place of the event
     * @param d date of the event
     * @param q quality level of the data
     * @param s source level of the data
     */
    public void Add_burial(String p, String d, int q, int s)
    {
        burial.Add_qual(q);
        burial.Add_source(s);
        burial.Add_place(p);
        burial.Add_date(d);
        burial.Add_event_value("");
    }

    /**
     * Stores the five values of the christened event.
     *
     * @param p place of the event
     * @param d date of the event
     * @param v value of the event (godparent)
     * @param q quality level of the data
     * @param s source level of the data
     */
    public void Add_christened(String p, String d, String v, int q, int s)
    {
        christened.Add_qual(q);
        christened.Add_source(s);
        christened.Add_place(p);
        christened.Add_date(d);
        christened.Add_event_value(v);
    }

    /**
     * Stores the five values of the graduation event.
     *
     * @param p place of the event
     * @param d date of the event
     * @param v value of the event (type of graduation)
     * @param q quality level of the data
     * @param s source level of the data
     */
    public void Add_grad_info(String p, String d, String v, int q, int s)
    {
        grad_info.Add_qual(q);
        grad_info.Add_source(s);
        grad_info.Add_place(p);
        grad_info.Add_date(d);
        grad_info.Add_event_value(v);
    }

    /**
     * Stores the place and date of a marriage event.
     *
     * @param marry gEvent carrying the marriage information
     */
    public void Add_marriage( gEvent marry )
    {
        marriage.Add_place(marry.Get_place());
        marriage.Add_date(marry.Get_date());
    }

    /**
     * Stores the name and id of the spouse.
     *
     * @param n name of the spouse
     * @param i id of the spouse
     */
    public void Add_spouse(String n, String i)
    {
        spouse.Add_name(n);
        spouse.Add_id(i);
    }

    /**
     * Stores the name and id of the father.
     *
     * @param n name of the father
     * @param i id of the father
     */
    public void Add_father(String n, String i)
    {
        father.Add_name(n);
        father.Add_id(i);
    }

    /**
     * Stores the name and id of the mother.
     *
     * @param n name of the mother
     * @param i id of the mother
     */
    public void Add_mother(String n, String i)
    {
        mother.Add_name(n);
        mother.Add_id(i);
    }

    /**
     * Appends a child to the children list.
     *
     * @param n name of the child
     * @param i id of the child
     */
    public void Add_child(String n, String i)
    {
        Kin child = new Kin();
        child.Add_name(n);
        child.Add_id(i);
        children.addElement(child);
    }

    /**
     * Returns the next child in sequence, or null (and resets the cursor)
     * when the list is exhausted. See the NOTE on {@code child_index}: the
     * cursor is shared across all Person instances.
     */
    public Kin Get_child()
    {
        if (child_index < children.size())
        {
            System.out.println("Child: " + ((Kin)children.elementAt(child_index)).Get_name());
            return (Kin)children.elementAt(child_index++);
        } else {
            child_index = 0;
            return null;
        }
    }

    /**
     * Returns the birth year: "unknown" when no date is recorded, the whole
     * date when it is shorter than four characters, otherwise the last four
     * characters of the date (GEDCOM dates end with the year).
     */
    public String Get_birth_year()
    {
        if ( birth.Get_date().length() == 0 ){
            return "unknown";
        } else if ( birth.Get_date().length() < 4 ){
            return birth.Get_date();
        } else {
            return birth.Get_date().substring(birth.Get_date().length() - 4);
        }
    }

    /** Prints the value of the sex attribute. */
    public void Print_sex()
    {
        System.out.println("Sex: " + sex);
    }

    /** Prints the value of the named_after attribute. */
    public void Print_named_after()
    {
        System.out.println("Named After: " + named_after);
    }

    /** Prints the value of the occupation attribute. */
    public void Print_occup()
    {
        System.out.println("Occupation: " + occupation);
    }

    /** Prints the values of the name attribute. */
    public void Print_name()
    {
        name.Print_qual();
        name.Print_source();
        name.Print_value("Name");
    }

    /** Prints the values of the nation attribute. */
    public void Print_nation()
    {
        nation.Print_qual();
        nation.Print_source();
        nation.Print_value("Nationality");
    }

    /** Prints the values of the title attribute. */
    public void Print_title()
    {
        title.Print_qual();
        title.Print_source();
        title.Print_value("Title");
    }

    /** Prints the values of the baptism event. */
    public void Print_baptism()
    {
        System.out.println("Baptism:\n");
        baptism.Print_place();
        baptism.Print_date();
        baptism.Print_qual();
        baptism.Print_source();
    }

    /** Prints the values of the birth event. */
    public void Print_birth()
    {
        System.out.println("Birth:\n");
        birth.Print_place();
        birth.Print_date();
        birth.Print_qual();
        birth.Print_source();
    }

    /** Prints the values of the death event. */
    public void Print_death()
    {
        System.out.println("Death:\n");
        death.Print_place();
        death.Print_date();
        death.Print_qual();
        death.Print_source();
    }

    /** Prints the values of the burial event. */
    public void Print_burial()
    {
        System.out.println("Burial:\n");
        burial.Print_place();
        burial.Print_date();
        burial.Print_qual();
        burial.Print_source();
    }

    /** Prints the values of the christened event. */
    public void Print_christened()
    {
        System.out.println("Christened:\n");
        christened.Print_place();
        christened.Print_date();
        christened.Print_event_value("Godparent");
        christened.Print_qual();
        christened.Print_source();
    }

    /** Prints the values of the graduation event. */
    public void Print_grad_info()
    {
        System.out.println("Graduation Information:\n");
        grad_info.Print_place();
        grad_info.Print_date();
        grad_info.Print_event_value("Type");
        grad_info.Print_qual();
        grad_info.Print_source();
    }

    /** Prints the name and id of the spouse. */
    public void Print_spouse()
    {
        spouse.Print_name();
        spouse.Print_id();
    }

    /** Prints the name and id of the father. */
    public void Print_father()
    {
        father.Print_name();
        father.Print_id();
    }

    /** Prints the name and id of the mother. */
    public void Print_mother()
    {
        mother.Print_name();
        mother.Print_id();
    }

    /** Prints the names of all children. */
    public void Print_child()
    {
        for (int i = 0; i < children.size(); i++)
        {
            ((Kin)children.elementAt(i)).Print_name();
        }
    }

    /**
     * Prints the main attributes of this Person.
     * NOTE(review): occupation, marriage, spouse, parents and children are
     * intentionally not printed here (preserved existing output format).
     */
    public void Print_Person() {
        Print_name();
        Print_title();
        Print_sex();
        Print_named_after();
        Print_nation();
        Print_birth();
        Print_christened();
        Print_baptism();
        Print_grad_info();
        Print_death();
        Print_burial();
    }
} /* END PERSON CLASS */
| |
/*
* Copyright (C) 2005 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.base;
import com.google.common.testing.GcFinalization;
import java.io.Closeable;
import junit.framework.TestCase;
import java.lang.ref.WeakReference;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.Permission;
import java.security.Policy;
import java.security.ProtectionDomain;
import java.util.concurrent.Callable;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
/**
* Tests that the {@code ClassLoader} of {@link FinalizableReferenceQueue} can be unloaded. These
* tests are separate from {@link FinalizableReferenceQueueTest} so that they can be excluded from
* coverage runs, as the coverage system interferes with them.
*
* @author Eamonn McManus
*/
public class FinalizableReferenceQueueClassLoaderUnloadingTest extends TestCase {
  /*
   * The following tests check that the use of FinalizableReferenceQueue does not prevent the
   * ClassLoader that loaded that class from later being garbage-collected. If anything continues
   * to reference the FinalizableReferenceQueue class then its ClassLoader cannot be
   * garbage-collected, even if there are no more instances of FinalizableReferenceQueue itself.
   * The code in FinalizableReferenceQueue goes to considerable trouble to ensure that there are
   * no such references and the tests here check that that trouble has not been in vain.
   *
   * When we reference FinalizableReferenceQueue in this test, we are referencing a class that is
   * loaded by this test and that will obviously remain loaded for as long as the test is running.
   * So in order to check ClassLoader garbage collection we need to create a new ClassLoader and
   * make it load its own version of FinalizableReferenceQueue. Then we need to interact with that
   * parallel version through reflection in order to exercise the parallel
   * FinalizableReferenceQueue, and then check that the parallel ClassLoader can be
   * garbage-collected after that.
   */

  /**
   * A FinalizableWeakReference subclass that exists so the parallel ClassLoader has a concrete
   * reference class of its own to load; its finalization behavior is irrelevant to these tests.
   */
  public static class MyFinalizableWeakReference extends FinalizableWeakReference<Object> {
    public MyFinalizableWeakReference(Object x, FinalizableReferenceQueue queue) {
      super(x, queue);
    }

    // Intentionally a no-op: these tests only care about class/classloader lifetime,
    // not about what happens when the referent is finalized.
    public void finalizeReferent() {
    }
  }

  /** A Policy that grants everything, so the SecurityManager tests don't fail on permissions. */
  private static class PermissivePolicy extends Policy {
    @Override
    public boolean implies(ProtectionDomain pd, Permission perm) {
      return true;
    }
  }

  private WeakReference<ClassLoader> useFrqInSeparateLoader() throws Exception {
    final URLClassLoader myLoader = (URLClassLoader) getClass().getClassLoader();
    final URL[] urls = myLoader.getURLs();
    URLClassLoader sepLoader = new URLClassLoader(urls, myLoader.getParent());
    // sepLoader is the loader that we will use to load the parallel FinalizableReferenceQueue (FRQ)
    // and friends, and that we will eventually expect to see garbage-collected. The assumption
    // is that the ClassLoader of this test is a URLClassLoader, and that it loads FRQ itself
    // rather than delegating to a parent ClassLoader. If this assumption is violated the test will
    // fail and will need to be rewritten.
    Class<?> frqC = FinalizableReferenceQueue.class;
    Class<?> sepFrqC = sepLoader.loadClass(frqC.getName());
    assertNotSame(frqC, sepFrqC);
    // Check the assumptions above.
    // FRQ tries to load the Finalizer class (for the reference-collecting thread) in a few ways.
    // If the class is accessible to the system ClassLoader (ClassLoader.getSystemClassLoader())
    // then FRQ does not bother to load Finalizer.class through a separate ClassLoader. That happens
    // in our test environment, which foils the purpose of this test, so we disable the logic for
    // our test by setting a static field. We are changing the field in the parallel version of FRQ
    // and each test creates its own one of those, so there is no test interference here.
    Class<?> sepFrqSystemLoaderC =
        sepLoader.loadClass(FinalizableReferenceQueue.SystemLoader.class.getName());
    Field disabled = sepFrqSystemLoaderC.getDeclaredField("disabled");
    disabled.setAccessible(true);
    disabled.set(null, true);
    // Now make a parallel FRQ and an associated FinalizableWeakReference to an object, in order to
    // exercise some classes from the parallel ClassLoader.
    AtomicReference<Object> sepFrqA = new AtomicReference<Object>(sepFrqC.newInstance());
    @SuppressWarnings("unchecked")
    Class<? extends WeakReference<?>> sepFwrC = (Class<? extends WeakReference<?>>)
        sepLoader.loadClass(MyFinalizableWeakReference.class.getName());
    Constructor<? extends WeakReference<?>> sepFwrCons =
        sepFwrC.getConstructor(Object.class, sepFrqC);
    // The object that we will wrap in FinalizableWeakReference is a Stopwatch.
    Class<?> sepStopwatchC = sepLoader.loadClass(Stopwatch.class.getName());
    assertSame(sepLoader, sepStopwatchC.getClassLoader());
    AtomicReference<Object> sepStopwatchA =
        new AtomicReference<Object>(sepStopwatchC.newInstance());
    AtomicReference<WeakReference<?>> sepStopwatchRef =
        new AtomicReference<WeakReference<?>>(
            sepFwrCons.newInstance(sepStopwatchA.get(), sepFrqA.get()));
    assertNotNull(sepStopwatchA.get());
    // Clear all references to the Stopwatch and wait for it to be gc'd.
    sepStopwatchA.set(null);
    GcFinalization.awaitClear(sepStopwatchRef.get());
    // Return a weak reference to the parallel ClassLoader. This is the reference that should
    // eventually become clear if there are no other references to the ClassLoader.
    return new WeakReference<ClassLoader>(sepLoader);
  }

  // Exercises an FRQ in a throwaway ClassLoader, then waits for that loader to be collected.
  private void doTestUnloadable() throws Exception {
    WeakReference<ClassLoader> loaderRef = useFrqInSeparateLoader();
    GcFinalization.awaitClear(loaderRef);
  }

  public void testUnloadableWithoutSecurityManager() throws Exception {
    // Test that the use of a FinalizableReferenceQueue does not subsequently prevent the
    // loader of that class from being garbage-collected.
    SecurityManager oldSecurityManager = System.getSecurityManager();
    try {
      System.setSecurityManager(null);
      doTestUnloadable();
    } finally {
      System.setSecurityManager(oldSecurityManager);
    }
  }

  public void testUnloadableWithSecurityManager() throws Exception {
    // Test that the use of a FinalizableReferenceQueue does not subsequently prevent the
    // loader of that class from being garbage-collected even if there is a SecurityManager.
    // The SecurityManager environment makes such leaks more likely because when you create
    // a URLClassLoader with a SecurityManager, the creating code's AccessControlContext is
    // captured, and that references the creating code's ClassLoader.
    Policy oldPolicy = Policy.getPolicy();
    SecurityManager oldSecurityManager = System.getSecurityManager();
    try {
      Policy.setPolicy(new PermissivePolicy());
      System.setSecurityManager(new SecurityManager());
      doTestUnloadable();
    } finally {
      System.setSecurityManager(oldSecurityManager);
      Policy.setPolicy(oldPolicy);
    }
  }

  /** Holds an FRQ in a static field so the enclosing ClassLoader would normally never be gc'd. */
  public static class FrqUser implements Callable<WeakReference<Object>> {
    public static FinalizableReferenceQueue frq = new FinalizableReferenceQueue();
    public static final Semaphore finalized = new Semaphore(0);

    @Override
    public WeakReference<Object> call() {
      // "new Integer(23)" (rather than Integer.valueOf) is deliberate: valueOf would return a
      // permanently-cached instance, so the weak reference could never be cleared.
      WeakReference<Object> wr = new FinalizableWeakReference<Object>(new Integer(23), frq) {
        @Override
        public void finalizeReferent() {
          finalized.release();
        }
      };
      return wr;
    }
  }

  public void testUnloadableInStaticFieldIfClosed() throws Exception {
    Policy oldPolicy = Policy.getPolicy();
    SecurityManager oldSecurityManager = System.getSecurityManager();
    try {
      Policy.setPolicy(new PermissivePolicy());
      System.setSecurityManager(new SecurityManager());
      WeakReference<ClassLoader> loaderRef = doTestUnloadableInStaticFieldIfClosed();
      GcFinalization.awaitClear(loaderRef);
    } finally {
      System.setSecurityManager(oldSecurityManager);
      Policy.setPolicy(oldPolicy);
    }
  }

  // If you have a FinalizableReferenceQueue that is a static field of one of the classes of your
  // app (like the FrqUser class above), then the app's ClassLoader will never be gc'd. The reason
  // is that we attempt to run a thread in a separate ClassLoader that will detect when the FRQ
  // is no longer referenced, meaning that the app's ClassLoader has been gc'd, and exit when that
  // happens. But the thread's supposedly separate ClassLoader actually has a reference to the app's
  // ClassLoader via its AccessControlContext. It does not seem to be possible to make a
  // URLClassLoader without capturing this reference, and it probably would not be desirable for
  // security reasons anyway. Therefore, the FRQ.close() method provides a way to stop the thread
  // explicitly. This test checks that calling that method does allow an app's ClassLoader to be
  // gc'd even if there is a still a FinalizableReferenceQueue in a static field. (Setting the field
  // to null would also work, but only if there are no references to the FRQ anywhere else.)
  private WeakReference<ClassLoader> doTestUnloadableInStaticFieldIfClosed() throws Exception {
    final URLClassLoader myLoader = (URLClassLoader) getClass().getClassLoader();
    final URL[] urls = myLoader.getURLs();
    URLClassLoader sepLoader = new URLClassLoader(urls, myLoader.getParent());
    Class<?> frqC = FinalizableReferenceQueue.class;
    Class<?> sepFrqC = sepLoader.loadClass(frqC.getName());
    assertNotSame(frqC, sepFrqC);
    // Disable the system-ClassLoader shortcut in the parallel FRQ, as in useFrqInSeparateLoader().
    Class<?> sepFrqSystemLoaderC =
        sepLoader.loadClass(FinalizableReferenceQueue.SystemLoader.class.getName());
    Field disabled = sepFrqSystemLoaderC.getDeclaredField("disabled");
    disabled.setAccessible(true);
    disabled.set(null, true);
    Class<?> frqUserC = FrqUser.class;
    Class<?> sepFrqUserC = sepLoader.loadClass(frqUserC.getName());
    assertNotSame(frqUserC, sepFrqUserC);
    assertSame(sepLoader, sepFrqUserC.getClassLoader());
    @SuppressWarnings("unchecked")
    Callable<WeakReference<Object>> sepFrqUser =
        (Callable<WeakReference<Object>>) sepFrqUserC.newInstance();
    WeakReference<Object> finalizableWeakReference = sepFrqUser.call();
    GcFinalization.awaitClear(finalizableWeakReference);
    // The parallel FrqUser's finalizer should have released its semaphore by now.
    Field sepFrqUserFinalizedF = sepFrqUserC.getField("finalized");
    Semaphore finalizeCount = (Semaphore) sepFrqUserFinalizedF.get(null);
    boolean finalized = finalizeCount.tryAcquire(5, TimeUnit.SECONDS);
    assertTrue(finalized);
    // Close the parallel FRQ so its helper thread stops pinning the parallel ClassLoader.
    Field sepFrqUserFrqF = sepFrqUserC.getField("frq");
    Closeable frq = (Closeable) sepFrqUserFrqF.get(null);
    frq.close();
    return new WeakReference<ClassLoader>(sepLoader);
  }
}
| |
package Parser;
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author Sriram
*/
import java.io.*;
import java.util.*;
import Data.*;
public class Parser {

    /**
     * Collective (and communicator-management) MPI operations recognised by this
     * parser, keyed by the function name as it appears in the ISP log (without the
     * "MPI_" prefix). A set lookup replaces the original long chain of equals().
     */
    private static final Set<String> COLLECTIVE_CALLS = new HashSet<String>(Arrays.asList(
            "Barrier", "Allreduce", "Bcast", "Reduce", "Reduce_scatter", "Scatter",
            "Gather", "Scatterv", "Gatherv", "Allgather", "Allgatherv", "Alltoall",
            "Alltoallv", "Scan", "Comm_create", "Cart_create", "Comm_dup", "Comm_split",
            "Comm_free", "AllReduce"));

    /** Point-to-point MPI operations recognised by this parser. */
    private static final Set<String> P2P_CALLS = new HashSet<String>(Arrays.asList(
            "Send", "Ssend", "Isend", "Irecv", "Recv", "Iprobe", "Probe", "Rsend"));

    // Anomalies observed while parsing; reported to stdout and reset at the end of parse().
    private boolean lines_ignored = false;
    private boolean fib_detected = false;
    private boolean type_mismatch_detected = false;

    /**
     * Returns true if the given function name (as found in the log) denotes a
     * collective or communicator-management operation.
     */
    public boolean isCollective(String funcname) {
        return COLLECTIVE_CALLS.contains(funcname);
    }

    /**
     * Returns true if the given function name (as found in the log) denotes a
     * point-to-point operation.
     */
    public boolean isp2p(String funcname) {
        return P2P_CALLS.contains(funcname);
    }

    /**
     * Parses an ISP log file and populates the GlobalStructure singleton with the
     * interleavings/transitions found in it. Errors are reported through
     * GlobalStructure.Exception rather than thrown.
     *
     * @param filename path of the ISP log file to parse
     */
    public void parse(String filename) {
        BufferedReader bReader = null;
        try {
            bReader = new BufferedReader(new FileReader(filename));
        } catch (FileNotFoundException e) {
            if (GlobalStructure.getInstance().Exception == null) {
                GlobalStructure.getInstance().Exception = "File Not Found";
            }
        }
        if (bReader == null) {
            // Nothing to parse; the error was recorded above. (The original code fell
            // through to a NullPointerException that was swallowed by the generic catch.)
            return;
        }
        String sourcefile = new String();
        boolean newProcFlag = false;
        int tP = 0; // total process count
        try {
            String line = bReader.readLine();
            int iIndex = 0;       // highest interleaving number seen so far (1-based in the log)
            int pIndex = 0;       // highest process id seen so far
            int intraCBcount = 0; // statistics only; counted but not reported
            while (line != null) {
                StringTokenizer sTokenizer = new StringTokenizer(line);
                if (sTokenizer.countTokens() == 1) {
                    // Single-token lines are either markers ([FIB], [TYPEMISMATCH],
                    // DEADLOCK) or the leading process count.
                    String token = sTokenizer.nextToken();
                    // for compatibility with newer ISP log files
                    /* == wfchiang begin == */
                    //Ganesh: 11/11/11: commented out System.out.println("wfchiang 1: " + token);
                    //Ganesh: 11/11/11: Added this if {..} block
                    if (token.equals("[TYPEMISMATCH]")) {
                        type_mismatch_detected = true;
                    }
                    //G:-- end of additions 11/11/11
                    if (token.equals("[FIB]")) {
                        //Ganesh: 11/11/11: Added just this following line:
                        fib_detected = true;
                        break;
                    }
                    /* == wfchiang end == */
                    // for older versions of ISP
                    if (token.equals("DEADLOCK")) {
                        GlobalStructure.getInstance().deadlocked = true;
                        break;
                    }
                    /* == wfchiang begin == */
                    else if (token.equals("[TYPEMISMATCH]")); // already handled above; do nothing
                    /* == wfchiang end == */
                    else {
                        tP = Integer.parseInt(token);
                        GlobalStructure.getInstance().noProcess = tP;
                    }
                }
                else {
                    // Multi-token lines describe one transition.
                    Transition tTemp = new Transition();
                    while (sTokenizer.hasMoreTokens()) {
                        // to handle older assert/leak from older versions of isp
                        String firstToken = sTokenizer.nextToken();
                        if (firstToken.equals("ASSERT") || firstToken.equals("LEAK")) {
                            tTemp = null;
                            break;
                        }
                        /* == wfchiang begin ==*/
                        int interleaving_no = -1;
                        boolean ignore_this_line = false;
                        try {
                            interleaving_no = Integer.parseInt(firstToken);
                        } catch (Exception e) {
                            ignore_this_line = true;
                        }
                        if (ignore_this_line) {
                            // Unknown leading token (newer log format); skip the whole line.
                            //Ganesh: 11/11/11: Added this line
                            lines_ignored = true;
                            tTemp = null;
                            break;
                        }
                        /* == wfchiang end == */
                        tTemp.interleaving_no = interleaving_no;
                        if (iIndex < interleaving_no) {
                            // First line of a new interleaving: allocate its container.
                            iIndex = interleaving_no;
                            pIndex = 0;
                            newProcFlag = true;
                            GlobalStructure.getInstance().interleavings.add(new Interleavings(tP));
                            GlobalStructure.getInstance().interleavings.get(iIndex - 1).iNo = iIndex;
                        }
                        String token = sTokenizer.nextToken();
                        if (token.equals("DEADLOCK")) {
                            GlobalStructure.getInstance().deadlocked = true;
                            tTemp = null;
                            break;
                        }
                        int pID = Integer.parseInt(token);
                        tTemp.pID = pID;
                        if (pIndex < tTemp.pID) {
                            pIndex = tTemp.pID;
                            newProcFlag = true;
                        }
                        token = sTokenizer.nextToken();
                        if (token.equals("ASSERT") || token.equals("Leak") || token.equalsIgnoreCase("FILE:")) {
                            tTemp = null;
                            break;
                        }
                        int index = Integer.parseInt(token);
                        tTemp.index = index;
                        tTemp.orderID = Integer.parseInt(sTokenizer.nextToken()); // order-id
                        tTemp.issueID = Integer.parseInt(sTokenizer.nextToken()); // issue-id
                        String funcname = sTokenizer.nextToken();
                        tTemp.function = funcname;
                        // Collectives carry a communicator; p2p ops carry src/dst, tag, communicator.
                        if (isCollective(funcname)) {
                            tTemp.Communicator = sTokenizer.nextToken();
                        } else if (isp2p(funcname)) {
                            tTemp.src_or_dst = sTokenizer.nextToken();
                            tTemp.tag = sTokenizer.nextToken();
                            tTemp.Communicator = sTokenizer.nextToken();
                        }
                        // First "{ ... }" group: intra-completes-before indices.
                        String fbBegin = sTokenizer.nextToken();
                        if (fbBegin.equals("{")) {
                            while (true) {
                                token = sTokenizer.nextToken();
                                if (token.equals("}")) {
                                    break;
                                }
                                tTemp.intraCB.add(Integer.parseInt(token));
                                intraCBcount++;
                            }
                        }
                        // Second "{ [ pid index ] ... }" group: inter-completes-before tuples.
                        fbBegin = sTokenizer.nextToken();
                        if (fbBegin.equals("{")) {
                            token = new String();
                            while (true) {
                                InterCBTemplate tempTuple = new InterCBTemplate();
                                token = sTokenizer.nextToken();
                                if (token.equals("}")) {
                                    break;
                                }
                                if (token.equals("[")) {
                                    String pId = sTokenizer.nextToken();
                                    String Index = sTokenizer.nextToken();
                                    tempTuple.pID = Integer.parseInt(pId);
                                    tempTuple.index = Integer.parseInt(Index);
                                    sTokenizer.nextToken(); // scans ]
                                }
                                tTemp.interCB.add(tempTuple);
                            }
                        }
                        String match = sTokenizer.nextToken();
                        if (match.equals("Match:")) {
                            tTemp.match_pID = Integer.parseInt(sTokenizer.nextToken());
                            tTemp.match_index = Integer.parseInt(sTokenizer.nextToken());
                        }
                        String file = sTokenizer.nextToken();
                        if (file.equals("File:")) {
                            // The file name's length precedes it; the name itself may contain
                            // spaces, so keep appending tokens until the length matches.
                            int fileNameLen = Integer.parseInt(sTokenizer.nextToken());
                            tTemp.filename = sTokenizer.nextToken();
                            while (tTemp.filename.length() != fileNameLen) {
                                tTemp.filename += " " + sTokenizer.nextToken();
                            }
                            tTemp.lineNo = Integer.parseInt(sTokenizer.nextToken());
                        }
                    }
                    if (tTemp != null) {
                        tTemp.CreateCell();
                        GlobalStructure.getInstance().interleavings.get(iIndex - 1).tList.get(pIndex).add(tTemp);
                    }
                }
                line = bReader.readLine();
            }
            GlobalStructure.getInstance().nInterleavings = iIndex;
            GlobalStructure.getInstance().initSystem();
            //Ganesh: 11/11/11: Added these if {..} blocks
            if (lines_ignored) {
                System.out.println("Some input lines were ignored because of log file format changes");
                lines_ignored = false;
            }
            if (fib_detected) {
                System.out.println("Functionally irrelevant barriers [FIB] were detected");
                fib_detected = false;
            }
            if (type_mismatch_detected) {
                System.out.println("Type mismatches [TYPEMISMATCH] were detected");
                type_mismatch_detected = false;
            }
            //G:-- end of additions 11/11/11
            System.out.println("Parsing Success : Firing up the UI");
        } catch (Exception e) {
            if (GlobalStructure.getInstance().Exception == null) {
                System.out.print(e.getMessage());
                GlobalStructure.getInstance().Exception = "Error in Parsing : Incorrect File";
            }
        } finally {
            // The original code leaked the reader; always close it.
            try {
                bReader.close();
            } catch (IOException ignored) {
                // nothing useful to do on close failure
            }
        }
    }
}
| |
/*
* Copyright 2005-2014 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.security.pox;
import org.apache.axiom.om.util.Base64;
import org.apache.axiom.soap.SOAPHeader;
import org.apache.axiom.soap.SOAPHeaderBlock;
import org.apache.axis2.AxisFault;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.description.AxisService;
import org.apache.axis2.description.HandlerDescription;
import org.apache.axis2.description.Parameter;
import org.apache.axis2.engine.Handler;
import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.axis2.util.JavaUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.rampart.util.Axis2Util;
import org.apache.ws.security.WSConstants;
import org.apache.ws.security.WSSecurityException;
import org.apache.ws.security.message.WSSecHeader;
import org.apache.ws.security.message.WSSecTimestamp;
import org.apache.ws.security.message.WSSecUsernameToken;
import org.w3c.dom.Document;
import org.wso2.carbon.base.ServerConfiguration;
import org.wso2.carbon.security.SecurityConfigException;
import org.wso2.carbon.security.SecurityConstants;
import org.wso2.carbon.security.config.SecurityConfigAdmin;
import org.wso2.carbon.security.config.service.SecurityScenarioData;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* Handler to convert the HTTP basic auth information into
* <code>wsse:UsernameToken</code>
*/
public class POXSecurityHandler implements Handler {

    private static final Log log = LogFactory.getLog(POXSecurityHandler.class);
    private static final String POX_SECURITY_MODULE = "POXSecurityModule";
    public static final String POX_CACHE_MANAGER = "POX_CACHE_MANAGER";
    public static final String POX_ENABLED = "pox-security";
    private HandlerDescription description;

    /**
     * @see org.apache.axis2.engine.Handler#cleanup()
     */
    public void cleanup() {
    }

    /**
     * @see org.apache.axis2.engine.Handler#init(org.apache.axis2.description.HandlerDescription)
     */
    public void init(HandlerDescription description) {
        this.description = description;
    }

    /**
     * Converts HTTP basic-auth credentials on an incoming REST/POX request into a
     * <code>wsse:UsernameToken</code> (plus Timestamp) security header, or replies
     * 401 with a WWW-Authenticate challenge when credentials are missing.
     *
     * @see org.apache.axis2.engine.Handler#invoke(org.apache.axis2.context.MessageContext)
     */
    public InvocationResponse invoke(MessageContext msgCtx) throws AxisFault {
        // Guard the null case up front. The original code only checked msgCtx for null
        // AFTER dereferencing it many times, so a null context could only ever NPE.
        if (msgCtx == null) {
            return InvocationResponse.CONTINUE;
        }
        if (!msgCtx.isEngaged(POX_SECURITY_MODULE)) {
            return InvocationResponse.CONTINUE;
        }
        AxisService service = msgCtx.getAxisService();
        if (service == null) {
            if (log.isDebugEnabled()) {
                log.debug("Service not dispatched");
            }
            return InvocationResponse.CONTINUE;
        }
        // We do not add details of admin services to the registry, hence if a rest call comes to a
        // admin service that does not require authentication we simply skip it
        String isAdminService = (String) service.getParameterValue("adminService");
        if (isAdminService != null && JavaUtils.isTrueExplicitly(isAdminService)) {
            return InvocationResponse.CONTINUE;
        }
        String isHiddenService = (String) service.getParameterValue("hiddenService");
        if (isHiddenService != null && JavaUtils.isTrueExplicitly(isHiddenService)) {
            return InvocationResponse.CONTINUE;
        }
        String isReverseProxy = System.getProperty("reverseProxyMode");
        if (isReverseProxy != null && JavaUtils.isTrueExplicitly(isReverseProxy)) {
            return InvocationResponse.CONTINUE;
        }
        // Cached per-service decision of whether POX security applies ("false" => skip).
        String isPox = null;
        Cache<String, String> cache = this.getPOXCache();
        if (cache != null) {
            // Single lookup; the original called cache.get twice for the same key.
            isPox = cache.get(service.getName());
        }
        if (isPox != null && JavaUtils.isFalseExplicitly(isPox)) {
            return InvocationResponse.CONTINUE;
        }
        if (msgCtx.isFault() && msgCtx.getFLOW() == MessageContext.OUT_FAULT_FLOW) {
            // we only need to execute this block in Unauthorized situations when basicAuth used
            // otherwise it should continue the message flow by throwing the incoming fault message since
            // this is already a fault response - ESBJAVA-2731
            try {
                String scenarioID = getScenarioId(msgCtx, service);
                if (scenarioID != null && scenarioID.equals(SecurityConstants.USERNAME_TOKEN_SCENARIO_ID)) {
                    setAuthHeaders(msgCtx);
                    return InvocationResponse.CONTINUE;
                }
            } catch (Exception e) {
                // throwing the same fault which returned by the messageCtx
                throw new AxisFault("System error", msgCtx.getFailureReason());
            }
            return InvocationResponse.CONTINUE;
        }
        if (msgCtx.getIncomingTransportName() == null) {
            return InvocationResponse.CONTINUE;
        }
        String basicAuthHeader = getBasicAuthHeaders(msgCtx);
        // This handler only intercepts REST calls (or SOAP without a security header)
        // arriving over https.
        if (!(msgCtx.isDoingREST() || isSOAPWithoutSecHeader(msgCtx)) ||
            !msgCtx.getIncomingTransportName().equals("https")) {
            return InvocationResponse.CONTINUE;
        }
        if (log.isDebugEnabled()) {
            log.debug("Admin service check failed OR cache miss");
        }
        try {
            String scenarioID = getScenarioId(msgCtx, service);
            if (scenarioID != null && scenarioID.equals(SecurityConstants.USERNAME_TOKEN_SCENARIO_ID)) {
                if (log.isDebugEnabled()) {
                    log.debug("Processing POX security");
                }
            } else {
                // Not a UsernameToken scenario: remember that POX does not apply here.
                if (cache != null) {
                    cache.put(service.getName(), "false");
                }
                return InvocationResponse.CONTINUE;
            }
            // Decode "Basic base64(user:pass)"; a missing ':' means no password was sent.
            String username = null;
            String password = null;
            if (basicAuthHeader != null && basicAuthHeader.startsWith("Basic ")) {
                basicAuthHeader = new String(Base64.decode(basicAuthHeader.substring(6)));
                int i = basicAuthHeader.indexOf(':');
                if (i == -1) {
                    username = basicAuthHeader;
                } else {
                    username = basicAuthHeader.substring(0, i);
                    password = basicAuthHeader.substring(i + 1);
                    if (password != null && password.equals("")) {
                        password = null;
                    }
                }
            }
            if (username == null || password == null || password.trim().length() == 0
                || username.trim().length() == 0) {
                // No usable credentials: challenge the client and stop the flow.
                setAuthHeaders(msgCtx);
                return InvocationResponse.ABORT;
            }
            // Build a wsse:UsernameToken (plain-text password) plus Timestamp into the envelope.
            Document doc = Axis2Util.getDocumentFromSOAPEnvelope(msgCtx.getEnvelope(), true);
            WSSecHeader secHeader = new WSSecHeader();
            secHeader.insertSecurityHeader(doc);
            WSSecUsernameToken utBuilder = new WSSecUsernameToken();
            utBuilder.setPasswordType(WSConstants.PASSWORD_TEXT);
            utBuilder.setUserInfo(username, password);
            utBuilder.build(doc, secHeader);
            WSSecTimestamp tsBuilder = new WSSecTimestamp();
            tsBuilder.build(doc, secHeader);
            // Set the new SOAPEnvelope.
            msgCtx.setEnvelope(Axis2Util.getSOAPEnvelopeFromDOMDocument(doc, false));
        } catch (AxisFault e) {
            throw e;
        } catch (WSSecurityException wssEx) {
            throw new AxisFault("WSDoAllReceiver: Error in converting to Document", wssEx);
        } catch (Exception e) {
            throw new AxisFault("System error", e);
        }
        return InvocationResponse.CONTINUE;
    }

    /**
     * Sends a 401 Unauthorized with a BASIC WWW-Authenticate challenge, via the
     * servlet response when present, otherwise via nhttp transport properties.
     */
    private void setAuthHeaders(MessageContext msgCtx) throws IOException {
        String serverName = ServerConfiguration.getInstance().getFirstProperty("Name");
        if (serverName == null || serverName.trim().length() == 0) {
            serverName = "WSO2 Carbon";
        }
        HttpServletResponse response = (HttpServletResponse)
                msgCtx.getProperty(HTTPConstants.MC_HTTP_SERVLETRESPONSE);
        if (response != null) {
            response.setContentLength(0);
            response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
            response.addHeader("WWW-Authenticate",
                    "BASIC realm=\"" + serverName + "\"");
            response.flushBuffer();
        } else {
            // if not servlet transport assume it to be nhttp transport
            msgCtx.setProperty("NIO-ACK-Requested", "true");
            msgCtx.setProperty("HTTP_SC", HttpServletResponse.SC_UNAUTHORIZED);
            Map<String, String> responseHeaders = new HashMap<String, String>();
            responseHeaders.put("WWW-Authenticate",
                    "BASIC realm=\"" + serverName + "\"");
            msgCtx.setProperty(MessageContext.TRANSPORT_HEADERS, responseHeaders);
        }
    }

    /**
     * Resolves the security scenario id for a service, first from the service
     * parameter, then (lazily, synchronized) from SecurityConfigAdmin, caching
     * the result back on the service as a parameter.
     */
    private String getScenarioId(MessageContext msgCtx, AxisService service) throws SecurityConfigException {
        String scenarioID = null;
        try {
            scenarioID = (String) service.getParameter(SecurityConstants.SCENARIO_ID_PARAM_NAME).getValue();
        } catch (Exception e) {
            // parameter absent: fall through and look it up below
        }
        if (scenarioID == null) {
            synchronized (this) {
                SecurityConfigAdmin securityAdmin = new SecurityConfigAdmin(msgCtx.
                        getConfigurationContext().getAxisConfiguration());
                SecurityScenarioData data = securityAdmin.getCurrentScenario(service.getName());
                if (data != null) {
                    scenarioID = data.getScenarioId();
                    try {
                        Parameter param = new Parameter();
                        param.setName(SecurityConstants.SCENARIO_ID_PARAM_NAME);
                        param.setValue(scenarioID);
                        service.addParameter(param);
                    } catch (AxisFault axisFault) {
                        log.error("Error while adding Scenario ID parameter", axisFault);
                    }
                }
            }
        }
        return scenarioID;
    }

    /**
     * @param msgCtx message going through the handler chain
     * @return true if it is a SOAP message without a security header
     */
    private boolean isSOAPWithoutSecHeader(MessageContext msgCtx) {
        // See whether a security header is present: if so return false.
        SOAPHeader soapHeader = msgCtx.getEnvelope().getHeader();
        if (soapHeader == null) {
            return true; // no security header
        }
        // Getting the set of security headers.
        ArrayList headerBlocks = soapHeader.getHeaderBlocksWithNSURI(WSConstants.WSSE_NS);
        // Issue is axiom - a returned collection must not be null
        if (headerBlocks != null) {
            Iterator headerBlocksIterator = headerBlocks.iterator();
            while (headerBlocksIterator.hasNext()) {
                SOAPHeaderBlock elem = (SOAPHeaderBlock) headerBlocksIterator.next();
                if (WSConstants.WSSE_LN.equals(elem.getLocalName())) {
                    return false; // security header already present. invalid request.
                }
            }
        }
        return true;
    }

    /**
     * Utility method to return the basic auth transport header if present.
     *
     * @return the raw "Basic ..." header value, or null if absent
     */
    private String getBasicAuthHeaders(MessageContext msgCtx) {
        Map map = (Map) msgCtx.getProperty(MessageContext.TRANSPORT_HEADERS);
        if (map == null) {
            return null;
        }
        // Header name lookup is case-sensitive on this map; try both common spellings.
        String tmp = (String) map.get("Authorization");
        if (tmp == null) {
            tmp = (String) map.get("authorization");
        }
        if (tmp != null && tmp.trim().startsWith("Basic ")) {
            return tmp;
        } else {
            return null;
        }
    }

    public void flowComplete(MessageContext msgContext) {
    }

    /**
     * @see org.apache.axis2.engine.Handler#getHandlerDesc()
     */
    public HandlerDescription getHandlerDesc() {
        return this.description;
    }

    /**
     * @see org.apache.axis2.engine.Handler#getName()
     */
    public String getName() {
        return "REST/POX Security handler";
    }

    /**
     * @see org.apache.axis2.engine.Handler#getParameter(java.lang.String)
     */
    public Parameter getParameter(String name) {
        return this.description.getParameter(name);
    }

    /**
     * Returns the default "POX_ENABLED" cache from the POX cache manager.
     */
    private Cache<String, String> getPOXCache() {
        CacheManager manager = Caching.getCacheManagerFactory().getCacheManager(POXSecurityHandler.POX_CACHE_MANAGER);
        Cache<String, String> cache = manager.getCache(POXSecurityHandler.POX_ENABLED);
        return cache;
    }
}
| |
package com.galadar.example.stockxchange;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import java.util.ArrayList;
/**
* This class is only for setting and retrieving values from/to the Database. It will only do a minimal amount of Calculations where absolutely required.
* Created by Galadar on 11/10/2015.
*/
public class MemoryDB extends SQLiteOpenHelper {
    // ---- Database identity ----
    public static final String DATABASE_NAME = "Galadar.DBStockXChange.db";
    public static final int DATABASE_VER = 9;
    // Row id column shared by every table.
    public static final String ALL_TABLES_COLUMN_ID = "_id";
    // ---- Companies table ----
    public static final String COMPANIES_TABLE_NAME = "Companies";
    public static final String COMPANIES_COLUMN_NAME = "Name";
    public static final String COMPANIES_COLUMN_TOTAL_VALUE = "TotalValue";
    public static final String COMPANIES_COLUMN_CURRENT_VALUE = "CurrValue";
    public static final String COMPANIES_COLUMN_PERCENTAGE_VALUE = "PercValue";
    public static final String COMPANIES_COLUMN_INVESTMENT = "Investment";
    public static final String COMPANIES_COLUMN_OUTLOOK = "Outlook";
    public static final String COMPANIES_COLUMN_SECTOR = "Sector";
    public static final String COMPANIES_COLUMN_MARKET_SHARE = "MarketShare";
    public static final String COMPANIES_COLUMN_REVENUE = "Revenue";
    public static final String COMPANIES_COLUMN_LAST_REVENUE = "LRevenue";
    public static final String COMPANIES_COLUMN_FAME = "Fame";
    public static final String COMPANIES_COLUMN_CID = "cid";
    // ---- Shares table ----
    public static final String SHARES_TABLE_NAME = "shares";
    public static final String SHARES_COLUMN_NAME = "name";
    public static final String SHARES_COLUMN_SID = "sid";
    public static final String SHARES_COLUMN_CURRENT_PRICE = "currvalue";
    // NOTE(review): "Lastllose" looks like a typo for "LastClose", but it is the
    // persisted column name in existing installs — do NOT change it without a
    // DATABASE_VER bump and an onUpgrade migration.
    public static final String SHARES_COLUMN_LAST_CLOSE = "Lastllose";
    public static final String SHARES_COLUMN_TOTAL_SHARES = "TotalShares";
    public static final String SHARES_COLUMN_REMAINING_SHARES = "remainingShares";
    // ---- Scams table ----
    public static final String SCAMS_TABLE_NAME = "scams";
    public static final String SCAMS_COLUMN_SID = "sid";
    public static final String SCAMS_COLUMN_TYPE = "category";
    public static final String SCAMS_COLUMN_RESOLUTION_DAY = "endDay";
    // ---- Sector outlooks table ----
    public static final String OUTLOOK_TABLE_NAME = "outlooks";
    public static final String OUTLOOK_COLUMN_NAME = "Sector";
    public static final String OUTLOOK_COLUMN_OUTLOOK = "Value";
    // ---- Generic key/value game-state table ----
    public static final String DATA_TABLE_NAME = "GameData";
    public static final String DATA_COLUMN_ENTRY_NAME = "Name";
    public static final String DATA_COLUMN_ENTRY_VALUE = "Value";
    // ---- Player-owned shares table ----
    public static final String PROPERTY_TABLE_NAME = "Owned";
    public static final String PROPERTY_COLUMN_SHARE = "Share";
    public static final String PROPERTY_COLUMN_AMOUNT = "Amount";
    // ---- Short-sale positions table ----
    public static final String SHORT_TABLE_NAME = "ShortSales";
    public static final String SHORT_COLUMN_SID = "sid";
    public static final String SHORT_COLUMN_AMOUNT = "amount";
    public static final String SHORT_COLUMN_TOTAL_SETTLE_DAYS = "totalDays";
    // ---- Active events table ----
    public static final String EVENTS_TABLE_NAME = "Events";
    public static final String EVENTS_COLUMN_TYPE = "title";
    public static final String EVENTS_COLUMN_MAGNITUDE = "body";
    public static final String EVENTS_COLUMN_END_DAY = "endday";
// Opens (or lazily creates) the game database. DATABASE_NAME and DATABASE_VER
// are class constants declared above this chunk.
public MemoryDB(Context context)
{
super(context, DATABASE_NAME, null, DATABASE_VER);
}
@Override
public void onCreate(SQLiteDatabase db) {
// Creates the full game schema. Every table gets an AUTOINCREMENT primary
// key; child tables (scams, Owned, ShortSales) cascade on share deletion.
// NOTE(review): FOREIGN KEY enforcement is off by default in Android SQLite
// unless enabled per-connection -- confirm it is turned on elsewhere.
db.execSQL(
"CREATE TABLE " + SCAMS_TABLE_NAME + "(" +
ALL_TABLES_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
SCAMS_COLUMN_SID + " INTEGER NOT NULL, " +
SCAMS_COLUMN_TYPE + " INTEGER, " +
SCAMS_COLUMN_RESOLUTION_DAY + " INTEGER, " +
" FOREIGN KEY (" + SCAMS_COLUMN_SID + ") REFERENCES " + SHARES_TABLE_NAME + "(" + SHARES_COLUMN_SID + ") ON DELETE CASCADE ON UPDATE CASCADE" +
");"
);
// Company fundamentals; name and cid are unique per company.
db.execSQL(
"CREATE TABLE " + COMPANIES_TABLE_NAME + "(" +
ALL_TABLES_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
COMPANIES_COLUMN_NAME + " CHAR(4) NOT NULL UNIQUE, " +
COMPANIES_COLUMN_CID + " INTEGER NOT NULL UNIQUE, " +
COMPANIES_COLUMN_TOTAL_VALUE + " INTEGER NOT NULL, " +
COMPANIES_COLUMN_CURRENT_VALUE + " INTEGER NOT NULL, " +
COMPANIES_COLUMN_PERCENTAGE_VALUE + " INTEGER NOT NULL, " +
COMPANIES_COLUMN_INVESTMENT + " INTEGER NOT NULL, " +
COMPANIES_COLUMN_OUTLOOK + " REAL NOT NULL, " +
COMPANIES_COLUMN_SECTOR + " TEXT NOT NULL, " +
COMPANIES_COLUMN_MARKET_SHARE + " REAL NOT NULL, " +
COMPANIES_COLUMN_REVENUE + " INTEGER NOT NULL, " +
COMPANIES_COLUMN_LAST_REVENUE + " INTEGER NOT NULL, " +
COMPANIES_COLUMN_FAME + " INTEGER NOT NULL" +
");"
);
//The game data table will include Player money, assets, fame, as well as economy size and various other numeric values not belonging to companies, shares or outlooks.
db.execSQL(
"CREATE TABLE " + DATA_TABLE_NAME + "(" +
ALL_TABLES_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
DATA_COLUMN_ENTRY_NAME + " TEXT NOT NULL UNIQUE, " +
DATA_COLUMN_ENTRY_VALUE + " INTEGER NOT NULL" +
");"
);
// One outlook value per sector name (seeded in PrepGame).
db.execSQL(
"CREATE TABLE " + OUTLOOK_TABLE_NAME + "(" +
ALL_TABLES_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
OUTLOOK_COLUMN_NAME + " TEXT NOT NULL UNIQUE, " +
OUTLOOK_COLUMN_OUTLOOK + " REAL NOT NULL" +
");"
);
// Per-share market state; sid is the share's public id used by child tables.
db.execSQL(
"CREATE TABLE " + SHARES_TABLE_NAME + "(" +
ALL_TABLES_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
SHARES_COLUMN_NAME + " CHAR(4) NOT NULL UNIQUE, " +
SHARES_COLUMN_SID + " INTEGER NOT NULL UNIQUE, " +
SHARES_COLUMN_CURRENT_PRICE + " INTEGER NOT NULL, " +
SHARES_COLUMN_LAST_CLOSE + " INTEGER NOT NULL, " +
SHARES_COLUMN_TOTAL_SHARES + " INTEGER NOT NULL, " +
SHARES_COLUMN_REMAINING_SHARES + " INTEGER NOT NULL" +
");"
);
// The player's holdings: one row per share (created in addShare).
db.execSQL(
"CREATE TABLE " + PROPERTY_TABLE_NAME + "(" +
ALL_TABLES_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
PROPERTY_COLUMN_SHARE + " INTEGER NOT NULL UNIQUE, " +
PROPERTY_COLUMN_AMOUNT + " INTEGER NOT NULL, " +
" FOREIGN KEY (" + PROPERTY_COLUMN_SHARE + ") REFERENCES " + SHARES_TABLE_NAME + "(" + SHARES_COLUMN_SID + ") ON DELETE CASCADE ON UPDATE CASCADE" +
");"
);
// Open short positions; sid is NOT unique here (several settle days allowed).
db.execSQL(
"CREATE TABLE " + SHORT_TABLE_NAME + "(" +
ALL_TABLES_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
SHORT_COLUMN_SID + " INTEGER NOT NULL, " +
SHORT_COLUMN_AMOUNT + " INTEGER NOT NULL, " +
SHORT_COLUMN_TOTAL_SETTLE_DAYS + " INTEGER NOT NULL, " +
" FOREIGN KEY (" + SHORT_COLUMN_SID + ") REFERENCES " + SHARES_TABLE_NAME + "(" + SHARES_COLUMN_SID + ") ON DELETE CASCADE ON UPDATE CASCADE" +
");"
);
// Game events; despite the column names ("title"/"body") both are INTEGERs.
db.execSQL(
"CREATE TABLE " + EVENTS_TABLE_NAME + "(" +
ALL_TABLES_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
EVENTS_COLUMN_MAGNITUDE + " INTEGER NOT NULL, " +
EVENTS_COLUMN_TYPE + " INTEGER NOT NULL, " +
EVENTS_COLUMN_END_DAY + " INTEGER NOT NULL" +
");"
);
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
// Intentionally empty: no migration path is implemented, so bumping
// DATABASE_VER will NOT change the schema of existing installs.
// NOTE(review): any future schema change needs real migration code here.
}
/** Inserts a new scam row targeting share {@code sid}. */
public void addScam(int sid, int type, int totalDays){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(SCAMS_COLUMN_SID, sid);
    row.put(SCAMS_COLUMN_TYPE, type);
    row.put(SCAMS_COLUMN_RESOLUTION_DAY, totalDays);
    database.insert(SCAMS_TABLE_NAME, null, row);
    database.close();
}
/**
 * Returns the number of active scam rows.
 * Fix: the readable database handle was never closed (resource leak) -- every
 * other getter in this class closes both cursor and db.
 */
public int getScamsNo(){
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + SCAMS_TABLE_NAME + " where 1;", null);
    int a = c.getCount(); // getCount() needs no prior moveToFirst()
    c.close();
    db.close();
    return a;
}
/**
 * Returns the scam category for share {@code sid}, or 0 when no scam exists.
 * Fix: the readable database handle was never closed (resource leak).
 */
public int getScamType(int sid){
    int type=0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + SCAMS_TABLE_NAME + " where " + SCAMS_COLUMN_SID + "=" + sid + ";", null);
    if(c.moveToFirst()) type=c.getInt(c.getColumnIndex(SCAMS_COLUMN_TYPE));
    c.close();
    db.close(); // previously leaked
    return type;
}
/**
 * Returns the day on which the scam against share {@code sid} resolves,
 * or 0 when no scam row exists.
 * Fix: the readable database handle was never closed (resource leak).
 */
public int getScamResolutionDay(int sid){
    int days = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + SCAMS_TABLE_NAME + " where " + SCAMS_COLUMN_SID + "=" + sid + ";", null);
    if(c.moveToFirst()) days=c.getInt(c.getColumnIndex(SCAMS_COLUMN_RESOLUTION_DAY));
    c.close();
    db.close(); // previously leaked
    return days;
}
// Records a short sale of NewAmount shares of `sid` settling after `days`
// total days, then persists the player's new cash balance.
// NOTE(review): the branches look inverted relative to the count check --
// when NO row matches (sid, days) this UPDATEs the row keyed by sid alone
// (presumably the days=-1 placeholder inserted by addShare), and when a
// matching row exists it accumulates into whatever row sid=? hits first.
// Confirm this is intentional before restructuring.
public void ShortShare(int sid, int NewAmount, int days, long Pmoney){ //SEND TOTAL DAYS
SQLiteDatabase db = this.getWritableDatabase();
Cursor c = db.rawQuery("select " + SHORT_COLUMN_AMOUNT + " from " + SHORT_TABLE_NAME + " where " + SHORT_COLUMN_SID + "=" + sid + " AND " + SHORT_COLUMN_TOTAL_SETTLE_DAYS + "=" + days + ";", null);
if(c.getCount()==0) {
// No open position for this settle day: repoint the share's row.
String where = SHORT_COLUMN_SID+"=?";
String[] args = {Integer.toString(sid)};
ContentValues values = new ContentValues();
values.put(SHORT_COLUMN_AMOUNT, NewAmount);
values.put(SHORT_COLUMN_TOTAL_SETTLE_DAYS, days);
db.update(SHORT_TABLE_NAME, values, where, args);
} else {
// Existing position: add the new amount to the stored one.
int amount=0;
if(c.moveToFirst())amount=c.getInt(c.getColumnIndex(SHORT_COLUMN_AMOUNT));
NewAmount+=amount;
String where = SHORT_COLUMN_SID+"=?";
String[] args = {Integer.toString(sid)};
ContentValues values = new ContentValues();
values.put(SHORT_COLUMN_AMOUNT, NewAmount);
db.update(SHORT_TABLE_NAME, values, where, args);
}
c.close();
db.close();
setPlayerMoney(Pmoney);
}
/** Deletes every short position settling on day {@code Totalday}. */
public void ShortSettle(int Totalday){
    final SQLiteDatabase database = this.getWritableDatabase();
    final String[] whereArgs = {Integer.toString(Totalday)};
    database.delete(SHORT_TABLE_NAME, SHORT_COLUMN_TOTAL_SETTLE_DAYS + "=?", whereArgs);
    database.close();
}
/**
 * Returns the amount of the earliest-settling short position on share
 * {@code sid}, or 0 when none is open.
 */
public int getShortAmount(int sid){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + SHORT_TABLE_NAME + " where " + SHORT_COLUMN_SID + "=" + sid + " ORDER BY " + SHORT_COLUMN_TOTAL_SETTLE_DAYS + " ASC;", null);
    int amount = 0;
    // First row = smallest settle day thanks to the ORDER BY.
    if (cursor.moveToFirst()) {
        amount = cursor.getInt(cursor.getColumnIndex(SHORT_COLUMN_AMOUNT));
    }
    cursor.close();
    database.close();
    return amount;
}
/**
 * Returns the settle-day of the earliest-settling short position on share
 * {@code sid}, or -1 when none is open.
 */
public int getShortDays(int sid){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + SHORT_TABLE_NAME + " where " + SHORT_COLUMN_SID + "=" + sid + " ORDER BY " + SHORT_COLUMN_TOTAL_SETTLE_DAYS + " ASC;", null);
    int settleDay = -1;
    if (cursor.moveToFirst()) {
        settleDay = cursor.getInt(cursor.getColumnIndex(SHORT_COLUMN_TOTAL_SETTLE_DAYS));
    }
    cursor.close();
    database.close();
    return settleDay;
}
/**
 * Overwrites the player's holding of share {@code SID} with {@code NewAmount}
 * and persists the new cash balance.
 */
public void TransactShare(int SID, int NewAmount, long newCash){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(PROPERTY_COLUMN_AMOUNT, NewAmount);
    database.update(PROPERTY_TABLE_NAME, row, PROPERTY_COLUMN_SHARE+"=?", new String[]{Integer.toString(SID)});
    database.close();
    setPlayerMoney(newCash);
}
/** Returns how many units of share {@code sid} the player owns (0 if no row). */
public int getOwnedShare(int sid){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + PROPERTY_TABLE_NAME + " where " + PROPERTY_COLUMN_SHARE + "=" + sid + ";", null);
    int amount = 0;
    // Share is UNIQUE in Owned, so at most one row; last row wins as before.
    if (cursor.moveToLast()) {
        amount = cursor.getInt(cursor.getColumnIndex(PROPERTY_COLUMN_AMOUNT));
    }
    cursor.close();
    database.close();
    return amount;
}
/**
 * Inserts a company row built from {@code company} with external id {@code CID}.
 * Fix: the writable database handle was never closed after the insert --
 * every sibling writer (addScam, addShare, ...) closes its handle.
 */
public void addCompany(Company company, int CID){
    SQLiteDatabase db = this.getWritableDatabase();
    ContentValues values = new ContentValues();
    values.put(COMPANIES_COLUMN_NAME, company.getName());
    values.put(COMPANIES_COLUMN_TOTAL_VALUE, company.getTotalValue());
    values.put(COMPANIES_COLUMN_CURRENT_VALUE, company.getCurrentValue());
    values.put(COMPANIES_COLUMN_PERCENTAGE_VALUE, company.getPercentageValue());
    values.put(COMPANIES_COLUMN_INVESTMENT, company.getInvestment());
    values.put(COMPANIES_COLUMN_OUTLOOK, company.getOutlook());
    values.put(COMPANIES_COLUMN_SECTOR, company.getSector());
    values.put(COMPANIES_COLUMN_MARKET_SHARE, company.getMarketShare());
    values.put(COMPANIES_COLUMN_REVENUE, company.getRevenue());
    values.put(COMPANIES_COLUMN_LAST_REVENUE, company.getLastRevenue());
    values.put(COMPANIES_COLUMN_FAME, company.getFame());
    values.put(COMPANIES_COLUMN_CID, CID);
    db.insert(COMPANIES_TABLE_NAME, null, values);
    db.close(); // previously leaked
}
/** Stores a new outlook value for the sector named {@code name}. */
public void setOutlook(String name, double outlook){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(OUTLOOK_COLUMN_OUTLOOK, outlook);
    database.update(OUTLOOK_TABLE_NAME, row, OUTLOOK_COLUMN_NAME+"=?", new String[]{name});
    database.close();
}
/**
 * Returns the stored outlook for sector {@code name}, or 0 when absent.
 * Fix: the name was concatenated into the SQL inside double quotes, so a name
 * containing a quote broke (or injected into) the query; use a bound parameter.
 */
public double getOutlook(String name){
    double last = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select " + OUTLOOK_COLUMN_OUTLOOK + " from " + OUTLOOK_TABLE_NAME + " where " + OUTLOOK_COLUMN_NAME + "=?;", new String[]{name});
    // Sector is UNIQUE, so at most one row; take the last value seen as before.
    if (c.moveToLast()) last = c.getDouble(c.getColumnIndex(OUTLOOK_COLUMN_OUTLOOK));
    c.close();
    db.close();
    return last;
}
/**
 * Returns the percentage value of company {@code name}, or 0 when absent.
 * Fix: parameterized query instead of quoting the raw name into the SQL.
 */
public int getCompPercValue(String name){
    int last = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select " + COMPANIES_COLUMN_PERCENTAGE_VALUE + " from " + COMPANIES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?;", new String[]{name});
    if (c.moveToLast()) last = c.getInt(c.getColumnIndex(COMPANIES_COLUMN_PERCENTAGE_VALUE));
    c.close();
    db.close();
    return last;
}
/**
 * Returns the company outlook scaled by 10000 and rounded to an int
 * (0 when the company is absent).
 * Fix: parameterized query instead of quoting the raw name into the SQL.
 */
public int get10000CompOutlook(String name){
    double temp = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select " + COMPANIES_COLUMN_OUTLOOK + " from " + COMPANIES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?;", new String[]{name});
    if (c.moveToLast()) temp = c.getDouble(c.getColumnIndex(COMPANIES_COLUMN_OUTLOOK));
    c.close();
    db.close();
    return (int) Math.round(temp * 10000);
}
/**
 * Returns the current revenue of company {@code name}, or 0 when absent.
 * Fix: parameterized query instead of quoting the raw name into the SQL.
 */
public long getCompRevenue(String name){
    long last = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select " + COMPANIES_COLUMN_REVENUE + " from " + COMPANIES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?;", new String[]{name});
    if (c.moveToLast()) last = c.getLong(c.getColumnIndex(COMPANIES_COLUMN_REVENUE));
    c.close();
    db.close();
    return last;
}
/**
 * Returns the market share of company {@code name}, or 0 when absent.
 * Fix: parameterized query instead of quoting the raw name into the SQL.
 */
public double getCompMarketShare(String name){
    double last = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select " + COMPANIES_COLUMN_MARKET_SHARE + " from " + COMPANIES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?;", new String[]{name});
    if (c.moveToLast()) last = c.getDouble(c.getColumnIndex(COMPANIES_COLUMN_MARKET_SHARE));
    c.close();
    db.close();
    return last;
}
/**
 * Registers a new share together with its bookkeeping rows: a zero holding in
 * Owned and a placeholder short position (amount 0, settle day -1). Half of
 * the total share count starts on the open market.
 */
public void addShare (Share share) {
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues shareRow = new ContentValues();
    shareRow.put(SHARES_COLUMN_NAME, share.getName());
    shareRow.put(SHARES_COLUMN_SID, share.getId());
    shareRow.put(SHARES_COLUMN_CURRENT_PRICE, share.getCurrentSharePrice());
    shareRow.put(SHARES_COLUMN_TOTAL_SHARES, share.getTotalShares());
    shareRow.put(SHARES_COLUMN_LAST_CLOSE, share.getPrevDayClose());
    shareRow.put(SHARES_COLUMN_REMAINING_SHARES, Math.round(share.getTotalShares() / 2));
    database.insert(SHARES_TABLE_NAME, null, shareRow);
    final ContentValues ownedRow = new ContentValues();
    ownedRow.put(PROPERTY_COLUMN_SHARE, share.getId());
    ownedRow.put(PROPERTY_COLUMN_AMOUNT, 0);
    database.insert(PROPERTY_TABLE_NAME, null, ownedRow);
    final ContentValues shortRow = new ContentValues();
    shortRow.put(SHORT_COLUMN_SID, share.getId());
    shortRow.put(SHORT_COLUMN_AMOUNT, 0);
    shortRow.put(SHORT_COLUMN_TOTAL_SETTLE_DAYS, -1);
    database.insert(SHORT_TABLE_NAME, null, shortRow);
    database.close();
}
/** Records {@code price} as the previous-day close for share {@code sid}. */
public void DayCloseShare(int sid, int price){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(SHARES_COLUMN_LAST_CLOSE, price);
    database.update(SHARES_TABLE_NAME, row, SHARES_COLUMN_SID+"=?", new String[]{Integer.toString(sid)});
    database.close();
}
/** Sets the number of share {@code sid} units remaining on the open market. */
public void setRemShares(int sid, int amount){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(SHARES_COLUMN_REMAINING_SHARES, amount);
    database.update(SHARES_TABLE_NAME, row, SHARES_COLUMN_SID+"=?", new String[]{Integer.toString(sid)});
    database.close();
}
/** Stores a new revenue figure for the company with cid {@code sid}. */
public void setCompRevenue(int sid, long amount){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_REVENUE, amount);
    database.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_CID+"=?", new String[]{Integer.toString(sid)});
    database.close();
}
/** Returns the remaining open-market units of share {@code sid}, -1 if unknown. */
public int getRemShares(int sid){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + SHARES_TABLE_NAME + " where " + SHARES_COLUMN_SID + "=" + sid + ";", null);
    int remaining = -1;
    // sid is UNIQUE; at most one row, last value wins as in the original scan.
    if (cursor.moveToLast()) {
        remaining = cursor.getInt(cursor.getColumnIndex(SHARES_COLUMN_REMAINING_SHARES));
    }
    cursor.close();
    database.close();
    return remaining;
}
/*
public int numberOfShares(){
SQLiteDatabase db = this.getReadableDatabase();
int numRows = (int) DatabaseUtils.queryNumEntries(db, SHARES_TABLE_NAME);
return numRows;
}
*/
/**
 * Returns the ticker name of share {@code sid}, or "" when unknown.
 * Fix: the empty-result early return leaked both the cursor and the db handle.
 */
public String getDBShareName(int sid){
    String name = "";
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + SHARES_TABLE_NAME + " where " + SHARES_COLUMN_SID + "=" + sid + ";", null);
    if (c.moveToLast()) name = c.getString(c.getColumnIndex(SHARES_COLUMN_NAME));
    c.close();
    db.close();
    return name;
}
/** Returns the current price of share {@code sid}, or -1 when unknown. */
public int getDBCurrPrice(int sid){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + SHARES_TABLE_NAME + " where " + SHARES_COLUMN_SID + "=" + sid + ";", null);
    int price = -1;
    if (cursor.moveToLast()) {
        price = cursor.getInt(cursor.getColumnIndex(SHARES_COLUMN_CURRENT_PRICE));
    }
    cursor.close();
    database.close();
    return price;
}
/** Returns the previous-day close of share {@code sid}, or -1 when unknown. */
public int getDBLastClose(int sid){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + SHARES_TABLE_NAME + " where " + SHARES_COLUMN_SID + "=" + sid + ";", null);
    int lastClose = -1;
    if (cursor.moveToLast()) {
        lastClose = cursor.getInt(cursor.getColumnIndex(SHARES_COLUMN_LAST_CLOSE));
    }
    cursor.close();
    database.close();
    return lastClose;
}
/**
 * Returns the total value of company {@code name}, or 0 when absent.
 * Fixes: parameterized query instead of quoting the raw name into the SQL;
 * the trailing Math.round(last) on a long was a no-op and is removed.
 */
public long getCompTotalValue(String name){
    long last = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select "+COMPANIES_COLUMN_TOTAL_VALUE+" from " + COMPANIES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?;", new String[]{name});
    if (c.moveToLast()) last = c.getLong(c.getColumnIndex(COMPANIES_COLUMN_TOTAL_VALUE));
    c.close();
    db.close();
    return last;
}
/**
 * Returns the fame of company {@code name}, or 0 when absent.
 * Fix: parameterized query instead of quoting the raw name into the SQL.
 */
public int getCompFame(String name){
    int last = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select "+COMPANIES_COLUMN_FAME+" from " + COMPANIES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?;", new String[]{name});
    if (c.moveToLast()) last = c.getInt(c.getColumnIndex(COMPANIES_COLUMN_FAME));
    c.close();
    db.close();
    return last;
}
/** Updates the current trading price of share {@code sid}. */
public void setDBCurrPrice(int sid, int newP) {
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(SHARES_COLUMN_CURRENT_PRICE, newP);
    database.update(SHARES_TABLE_NAME, row, SHARES_COLUMN_SID+"=?", new String[]{Integer.toString(sid)});
    database.close();
}
/** Persists the player's cash balance into the GameData "money" row. */
public void setPlayerMoney(long newCash){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, newCash);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"money"});
    database.close();
}
/** Stores a new percentage value for company {@code name}. */
public void setCompPercValue(String name, int newV){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_PERCENTAGE_VALUE, newV);
    database.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_NAME+"=?", new String[]{name});
    database.close();
}
/** Stores a new total value for company {@code name}. */
public void setCompTotValue(String name, long newV){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_TOTAL_VALUE, newV);
    database.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_NAME+"=?", new String[]{name});
    database.close();
}
/** Stores a new investment figure for company {@code name}. */
public void setCompInvest(String name, int newV){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_INVESTMENT, newV);
    database.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_NAME+"=?", new String[]{name});
    database.close();
}
/** Stores a new fame value for company {@code name}. */
public void setCompFame(String name, int newV){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_FAME, newV);
    database.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_NAME+"=?", new String[]{name});
    database.close();
}
/**
 * Returns the player's cash balance from the GameData "money" row (0 if absent).
 * Fix: the value was read with getInt into a long, silently truncating any
 * balance above Integer.MAX_VALUE; setPlayerMoney stores a long, so read one.
 */
public long getPlayerMoney(){
    long cash = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"money\" ;", null);
    if (c.moveToLast()) cash = c.getLong(c.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    c.close();
    db.close();
    return cash;
}
/**
 * Returns the player's full assets. Assets are stored as thousandths
 * (2 full assets = 2000), so the stored value is divided down.
 * Fix: the empty-result early return leaked both the cursor and the db handle.
 */
public int getAssets(){
    int assets=0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"assets\" ;", null);
    if (c.moveToLast()) assets = c.getInt(c.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    c.close();
    db.close();
    //the main app and the user never see the assets stored as thousands, they only get the full assets as int
    return (int)Math.floor( assets/1000 );
}
/**
 * Returns the player's assets including the fractional part
 * (stored value / 1000).
 * Fix: the empty-result early return leaked both the cursor and the db handle.
 */
public double getPartAssets(){
    double assets=0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"assets\" ;", null);
    if (c.moveToLast()) assets = c.getInt(c.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    c.close();
    db.close();
    //the main app and the user never see the assets stored as thousands, they only get the full assets as int
    return assets/1000;
}
/**
 * Adds {@code amount} (in full assets, may be fractional) to the stored asset
 * total, which is persisted as thousandths.
 */
public void incAssets(double amount){
    final double updated = getPartAssets() + amount;
    final int stored = (int) Math.round(updated * 1000);
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, stored);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"assets"});
    database.close();
}
/**
 * Persists the asset total. Assets are stored as thousandths (2 full assets
 * -> 2000); a small input (&lt;10) is interpreted as a count of full assets
 * and scaled up accordingly.
 */
public void setAssets(int newAssets){
    final int stored = (newAssets < 10) ? newAssets * 1000 : newAssets;
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, stored);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"assets"});
    database.close();
}
/** Persists the player's fame value. */
public void setFame(int newFame){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, newFame);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"fame"});
    database.close();
}
/** Returns the player's fame (0 when the "fame" row is absent). */
public int getFame(){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"fame\" ;", null);
    int fame = 0;
    if (cursor.moveToLast()) {
        fame = cursor.getInt(cursor.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    }
    cursor.close();
    database.close();
    return fame;
}
// Seeds a fresh game: wipes GameData, inserts every name/value row the game
// reads (money, level, assets, fame, economy sizes, term, day, sound, next
// invite time, event generation counter), and creates one outlook row for the
// overall "economy" plus one per sector.
// `time` is presumably the epoch-millis timestamp for the next invite --
// TODO confirm against the caller.
public void PrepGame(long time, int assets, Company.Sectors[] sectors, int sound){
assets *= 1000; //Because full assets are stored as thousands
SQLiteDatabase db = this.getWritableDatabase();
db.execSQL("DELETE FROM " + DATA_TABLE_NAME + " WHERE 1;");
ContentValues values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "money");
values.put(DATA_COLUMN_ENTRY_VALUE, 1000000);
db.insert(DATA_TABLE_NAME, null, values);
values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "level");
values.put(DATA_COLUMN_ENTRY_VALUE, 1);
db.insert(DATA_TABLE_NAME, null, values);
values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "PrevNetWorth");
values.put(DATA_COLUMN_ENTRY_VALUE, 1000000);
db.insert(DATA_TABLE_NAME, null, values);
values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "assets");
values.put(DATA_COLUMN_ENTRY_VALUE, assets);
db.insert(DATA_TABLE_NAME, null, values);
values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "fame");
values.put(DATA_COLUMN_ENTRY_VALUE, 0);
db.insert(DATA_TABLE_NAME, null, values);
values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "economysize1");
values.put(DATA_COLUMN_ENTRY_VALUE, 0);
db.insert(DATA_TABLE_NAME, null, values);
values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "economysize2");
values.put(DATA_COLUMN_ENTRY_VALUE, 0);
db.insert(DATA_TABLE_NAME, null, values);
values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "term");
values.put(DATA_COLUMN_ENTRY_VALUE, 1);
db.insert(DATA_TABLE_NAME, null, values);
values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "day");
values.put(DATA_COLUMN_ENTRY_VALUE, 1);
db.insert(DATA_TABLE_NAME, null, values);
values = new ContentValues();
values.put(DATA_COLUMN_ENTRY_NAME, "sound");
values.put(DATA_COLUMN_ENTRY_VALUE, sound);
db.insert(DATA_TABLE_NAME, null, values);
// From here the same ContentValues is recycled via clear() instead of a
// fresh allocation; behavior is identical.
values.clear();
values.put(DATA_COLUMN_ENTRY_NAME, "nextInvite");
values.put(DATA_COLUMN_ENTRY_VALUE, time);
db.insert(DATA_TABLE_NAME, null, values);
values.clear();
values.put(DATA_COLUMN_ENTRY_NAME, "eventGen");
values.put(DATA_COLUMN_ENTRY_VALUE, 0);
db.insert(DATA_TABLE_NAME, null, values);
values.clear();
values.put(OUTLOOK_COLUMN_NAME, "economy");
values.put(OUTLOOK_COLUMN_OUTLOOK, 0);
db.insert(OUTLOOK_TABLE_NAME, null, values);
// One neutral outlook row per sector.
for (Company.Sectors sector : sectors) {
values = new ContentValues();
values.put(OUTLOOK_COLUMN_NAME, sector.toString());
values.put(OUTLOOK_COLUMN_OUTLOOK, 0);
db.insert(OUTLOOK_TABLE_NAME, null, values);
}
db.close();
}
/** Returns the previous net worth stored in GameData (0 when absent). */
public int getPrevNetWorth(){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"PrevNetWorth\" ;", null);
    int worth = 0;
    if (cursor.moveToLast()) {
        worth = cursor.getInt(cursor.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    }
    cursor.close();
    database.close();
    return worth;
}
/** Returns the stored event-generation counter (0 when absent). */
public int getEventGen(){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"eventGen\" ;", null);
    int generation = 0;
    if (cursor.moveToLast()) {
        generation = cursor.getInt(cursor.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    }
    cursor.close();
    database.close();
    return generation;
}
/** Persists the previous net worth into GameData. */
public void setPrevNetWorth(long prev){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, prev);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"PrevNetWorth"});
    database.close();
}
/** Persists the event-generation counter into GameData. */
public void setEventGen(int gen){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, gen);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"eventGen"});
    database.close();
}
/** Returns true when the stored "sound" flag equals 1 (default: on). */
public boolean PlaySound(){
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"sound\" ;", null);
    int flag = 1;
    if (cursor.moveToLast()) {
        flag = cursor.getInt(cursor.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    }
    cursor.close();
    database.close();
    return flag == 1;
}
/** Persists the sound on/off preference as 1/0. */
public void setSound(boolean play){
    final int flag = play ? 1 : 0;
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, flag);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"sound"});
    database.close();
}
/**
 * Clears the per-game tables (holdings, companies, outlooks, shorts, shares,
 * events) so a new game can start. Note: GameData rows (money, assets, ...)
 * are NOT touched here -- they are rebuilt by PrepGame.
 * Fix: the writable database handle was never closed (resource leak).
 */
public void clearData(){
    SQLiteDatabase db = getWritableDatabase();
    db.execSQL("DELETE FROM " + PROPERTY_TABLE_NAME + " WHERE 1;");
    db.execSQL("DELETE FROM " + COMPANIES_TABLE_NAME + " WHERE 1;");
    db.execSQL("DELETE FROM " + OUTLOOK_TABLE_NAME + " WHERE 1;");
    db.execSQL("DELETE FROM " + SHORT_TABLE_NAME + " WHERE 1;");
    db.execSQL("DELETE FROM " + SHARES_TABLE_NAME + " WHERE 1;");
    db.execSQL("DELETE FROM " + EVENTS_TABLE_NAME + " WHERE 1;");
    db.close(); // previously leaked
}
/**
 * Removes a bankrupt company and its share row by name.
 * Fix: the name was concatenated into raw DELETE statements inside double
 * quotes, breaking on names containing quotes; use parameterized deletes.
 */
public void bankrupt(String name) {
    SQLiteDatabase db = getWritableDatabase();
    String[] args = {name};
    db.delete(COMPANIES_TABLE_NAME, COMPANIES_COLUMN_NAME + "=?", args);
    db.delete(SHARES_TABLE_NAME, SHARES_COLUMN_NAME + "=?", args);
    db.close();
}
/**
 * Returns the number of rows in the shares table.
 * Fix: the readable database handle was never closed (resource leak).
 */
public int getShareCount(){
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + SHARES_TABLE_NAME + " where 1;", null);
    int a = c.getCount(); // getCount() needs no prior moveToFirst()
    c.close();
    db.close();
    return a;
}
// Rewrites a share's public id (sid) and the matching company id (cid) after a
// renumbering; no-op when the id is unchanged.
// NOTE(review): both updates key on the same AUTOINCREMENT row id, which
// assumes a share row and its company row always share the same primary id in
// their respective tables -- confirm against the insertion order in the caller.
public void UpdateSID(int primaryID, int oldSID, int NewSID) {
if(oldSID==NewSID)return;
SQLiteDatabase db = this.getWritableDatabase();
String where = ALL_TABLES_COLUMN_ID + "=?";
String[] args = new String[]{Integer.toString(primaryID)};
ContentValues values = new ContentValues();
values.put(SHARES_COLUMN_SID, NewSID);
db.update(SHARES_TABLE_NAME, values, where, args);
values.clear();
values.put(COMPANIES_COLUMN_CID, NewSID);
db.update(COMPANIES_TABLE_NAME, values, where, args);
db.close();
}
/** Returns the stored game level (default 1 when the row is absent). */
public int getLevel() {
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"level\" ;", null);
    int level = 1;
    if (cursor.moveToLast()) {
        level = cursor.getInt(cursor.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    }
    cursor.close();
    database.close();
    return level;
}
/**
 * Returns the stored "nextInvite" time (0 when absent).
 * Fix: the value was read with getInt into a long, truncating it --
 * PrepGame/setNextInviteTime store a long timestamp, so read a long back.
 */
public long getNextInviteTime() {
    long time = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"nextInvite\" ;", null);
    if (c.moveToLast()) time = c.getLong(c.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    c.close();
    db.close();
    return time;
}
/** Returns the overall "economy" outlook value (0 when the row is absent). */
public double getEconomyOutlook() {
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + OUTLOOK_TABLE_NAME + " where " + OUTLOOK_COLUMN_NAME + " = \"economy\" ;", null);
    double outlook = 0;
    if (cursor.moveToLast()) {
        outlook = cursor.getDouble(cursor.getColumnIndex(OUTLOOK_COLUMN_OUTLOOK));
    }
    cursor.close();
    database.close();
    return outlook;
}
/** Scans the shares table and returns the highest sid in use (0 when empty). */
public int getMaxSID(){
    int max = 0;
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select "+ SHARES_COLUMN_SID + " from " + SHARES_TABLE_NAME + " where 1;", null);
    for (boolean hasRow = cursor.moveToFirst(); hasRow; hasRow = cursor.moveToNext()) {
        final int candidate = cursor.getInt(cursor.getColumnIndex(SHARES_COLUMN_SID));
        max = Math.max(max, candidate);
    }
    cursor.close();
    database.close();
    return max;
}
/** Persists a new game level into GameData. */
public void setLevel(int newLevel){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, newLevel);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"level"});
    database.close();
}
/** Persists the next-invite timestamp (milliseconds) into GameData. */
public void setNextInviteTime(long MStime){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, MStime);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"nextInvite"});
    database.close();
}
/** Returns the stored game term (default 1 when the row is absent). */
public int getTerm() {
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"term\" ;", null);
    int term = 1;
    if (cursor.moveToLast()) {
        term = cursor.getInt(cursor.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    }
    cursor.close();
    database.close();
    return term;
}
/** Persists a new game term into GameData. */
public void setTerm(int newTerm){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, newTerm);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"term"});
    database.close();
}
/** Returns the stored in-game day (default 1 when the row is absent). */
public int getDay() {
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + " = \"day\" ;", null);
    int day = 1;
    if (cursor.moveToLast()) {
        day = cursor.getInt(cursor.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
    }
    cursor.close();
    database.close();
    return day;
}
/** Persists the in-game day into GameData. */
public void setDay(int newDay){
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, newDay);
    database.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME+"=?", new String[]{"day"});
    database.close();
}
/** Renames the share identified by {@code sid}. */
public void setDBShareName(int sid, String newName) {
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(SHARES_COLUMN_NAME, newName);
    database.update(SHARES_TABLE_NAME, row, SHARES_COLUMN_SID+"=?", new String[]{Integer.toString(sid)});
    database.close();
}
/** Renames the company currently called {@code name} to {@code newName}. */
public void setDBCompName(String name, String newName) {
    final SQLiteDatabase database = this.getWritableDatabase();
    final ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_NAME, newName);
    database.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_NAME+"=?", new String[]{name});
    database.close();
}
/** Returns the total issued units of share {@code sid}, or -1 when unknown. */
public int getTotalShares(int sid) {
    final SQLiteDatabase database = this.getReadableDatabase();
    final Cursor cursor = database.rawQuery("select "+SHARES_COLUMN_TOTAL_SHARES+" from " + SHARES_TABLE_NAME + " where " + SHARES_COLUMN_SID + "=" + sid + ";", null);
    int total = -1;
    if (cursor.moveToLast()) {
        total = cursor.getInt(cursor.getColumnIndex(SHARES_COLUMN_TOTAL_SHARES));
    }
    cursor.close();
    database.close();
    return total;
}
/**
 * Returns the investment figure of company {@code name}, or -1 when absent.
 * Fix: parameterized query instead of quoting the raw name into the SQL.
 */
public long getInvestment(String name) {
    long last = -1;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select "+COMPANIES_COLUMN_INVESTMENT+" from " + COMPANIES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?;", new String[]{name});
    if (c.moveToLast()) last = c.getLong(c.getColumnIndex(COMPANIES_COLUMN_INVESTMENT));
    c.close();
    db.close();
    return last;
}
/**
 * Returns the last-revenue column of the company named {@code name},
 * or -1 when no matching row exists.
 */
public long getLastRevenue(String name){
    long last = -1;
    SQLiteDatabase db = this.getReadableDatabase();
    // Bind the name instead of concatenating it into the SQL (quote-safe,
    // injection-safe; the original query broke on names containing '"').
    Cursor c = db.rawQuery(
            "select " + COMPANIES_COLUMN_LAST_REVENUE + " from " + COMPANIES_TABLE_NAME
                    + " where " + COMPANIES_COLUMN_NAME + "=?",
            new String[]{name});
    try {
        while (c.moveToNext()) {
            last = c.getLong(c.getColumnIndex(COMPANIES_COLUMN_LAST_REVENUE));
        }
    } finally {
        c.close();
        db.close();
    }
    return last;
}
/**
 * Returns the sector of the company named {@code name}, or the empty
 * string when no matching row exists.
 */
public String getCompanySector(String name) {
    String sect = "";
    SQLiteDatabase db = this.getReadableDatabase();
    // Bind the name instead of concatenating it into the SQL (quote-safe,
    // injection-safe).
    Cursor c = db.rawQuery(
            "select " + COMPANIES_COLUMN_SECTOR + " from " + COMPANIES_TABLE_NAME
                    + " where " + COMPANIES_COLUMN_NAME + "=?",
            new String[]{name});
    try {
        while (c.moveToNext()) {
            sect = c.getString(c.getColumnIndex(COMPANIES_COLUMN_SECTOR));
        }
    } finally {
        c.close();
        db.close();
    }
    return sect;
}
/**
 * Returns the stored "economysize1" value, or 0 when the entry is absent.
 */
public long getEconomySize1() {
    long size = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery(
            "select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + "=?",
            new String[]{"economysize1"});
    try {
        while (c.moveToNext()) {
            // Was getInt(): setEconomySize1() stores a long, so reading with
            // getInt() silently truncated values above Integer.MAX_VALUE.
            size = c.getLong(c.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
        }
    } finally {
        c.close();
        db.close();
    }
    return size;
}
/**
 * Returns the stored "economysize2" value, or 0 when the entry is absent.
 */
public long getEconomySize2() {
    long size = 0;
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery(
            "select * from " + DATA_TABLE_NAME + " where " + DATA_COLUMN_ENTRY_NAME + "=?",
            new String[]{"economysize2"});
    try {
        while (c.moveToNext()) {
            // Was getInt(): setEconomySize2() stores a long, so reading with
            // getInt() silently truncated values above Integer.MAX_VALUE.
            size = c.getLong(c.getColumnIndex(DATA_COLUMN_ENTRY_VALUE));
        }
    } finally {
        c.close();
        db.close();
    }
    return size;
}
/** Stores {@code size} into the data table's "economysize1" entry. */
public void setEconomySize1(long size) {
    SQLiteDatabase db = this.getWritableDatabase();
    ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, size);
    db.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME + "=?",
            new String[]{"economysize1"});
    db.close();
}
/** Stores {@code size} into the data table's "economysize2" entry. */
public void setEconomySize2(long size) {
    SQLiteDatabase db = this.getWritableDatabase();
    ContentValues row = new ContentValues();
    row.put(DATA_COLUMN_ENTRY_VALUE, size);
    db.update(DATA_TABLE_NAME, row, DATA_COLUMN_ENTRY_NAME + "=?",
            new String[]{"economysize2"});
    db.close();
}
/** Updates the market-share column of the company named {@code name}. */
public void setCompMarketShare(String name, double newMS) {
    SQLiteDatabase db = this.getWritableDatabase();
    ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_MARKET_SHARE, newMS);
    db.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_NAME + "=?",
            new String[]{name});
    db.close();
}
/**
 * Updates the last-revenue column of the company named {@code name}.
 *
 * <p>NOTE(review): this setter takes an {@code int} while getLastRevenue()
 * reads the column as {@code long} — confirm whether revenues can exceed
 * the int range before relying on this path.
 */
public void setCompLastRevenue(String name, int revenue) {
    SQLiteDatabase db = this.getWritableDatabase();
    ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_LAST_REVENUE, revenue);
    db.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_NAME + "=?",
            new String[]{name});
    db.close();
}
/** Updates the outlook column of the company named {@code name}. */
public void setCompOutlook(String name, double newO) {
    SQLiteDatabase db = this.getWritableDatabase();
    ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_OUTLOOK, newO);
    db.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_NAME + "=?",
            new String[]{name});
    db.close();
}
/** Updates the current-value column of the company named {@code name}. */
public void setCompCurrValue(String name, long newV) {
    SQLiteDatabase db = this.getWritableDatabase();
    ContentValues row = new ContentValues();
    row.put(COMPANIES_COLUMN_CURRENT_VALUE, newV);
    db.update(COMPANIES_TABLE_NAME, row, COMPANIES_COLUMN_NAME + "=?",
            new String[]{name});
    db.close();
}
/**
 * Returns true when a scam row exists for the given sid.
 */
public boolean isScam(int sid){
    SQLiteDatabase db = this.getReadableDatabase();
    // Bind the id rather than concatenating it into the SQL.
    Cursor c = db.rawQuery(
            "select * from " + SCAMS_TABLE_NAME + " where " + SCAMS_COLUMN_SID + "=?",
            new String[]{String.valueOf(sid)});
    try {
        return c.getCount() > 0;
    } finally {
        c.close();
        // The original never closed the database here, leaking a connection
        // on every call.
        db.close();
    }
}
/**
 * Inserts a new event row; {@code totalDays} is stored as the event's
 * absolute end day.
 */
public void addEvent(Event event, int totalDays) {
    SQLiteDatabase db = getWritableDatabase();
    ContentValues row = new ContentValues();
    row.put(EVENTS_COLUMN_TYPE, event.getType());
    row.put(EVENTS_COLUMN_MAGNITUDE, event.getMagnitude());
    row.put(EVENTS_COLUMN_END_DAY, totalDays);
    db.insert(EVENTS_TABLE_NAME, null, row);
    db.close();
}
/**
 * Loads every stored event, converting each stored absolute end day into
 * days remaining relative to {@code currentDay}.
 */
public ArrayList<Event> retrieveEvents(int currentDay) {
    ArrayList<Event> events = new ArrayList<>();
    SQLiteDatabase db = this.getReadableDatabase();
    Cursor c = db.rawQuery("select * from " + EVENTS_TABLE_NAME + " where 1;", null);
    // moveToNext() from the initial before-first position visits every row.
    while (c.moveToNext()) {
        int type = c.getInt(c.getColumnIndex(EVENTS_COLUMN_TYPE));
        int magnitude = c.getInt(c.getColumnIndex(EVENTS_COLUMN_MAGNITUDE));
        int remDays = c.getInt(c.getColumnIndex(EVENTS_COLUMN_END_DAY)) - currentDay;
        events.add(new Event(type, magnitude, remDays));
    }
    c.close();
    db.close();
    return events;
}
/** Deletes every event whose stored end day equals {@code Totalday}. */
public void ClearCompleteEvents(int Totalday) {
    SQLiteDatabase db = this.getWritableDatabase();
    String[] whereArgs = {String.valueOf(Totalday)};
    db.delete(EVENTS_TABLE_NAME, EVENTS_COLUMN_END_DAY + "=?", whereArgs);
    db.close();
}
/** Deletes the scam row whose sid column equals {@code cid}. */
public void removeScam(int cid) {
    SQLiteDatabase db = this.getWritableDatabase();
    String[] whereArgs = {String.valueOf(cid)};
    db.delete(SCAMS_TABLE_NAME, SCAMS_COLUMN_SID + "=?", whereArgs);
    db.close();
}
/**
 * Returns the cid of the company named {@code name}, or -1 when no
 * matching row exists.
 */
public int getCompanyCID(String name) {
    int cid = -1;
    SQLiteDatabase db = this.getReadableDatabase();
    // Bind the caller-supplied name instead of concatenating it into the SQL:
    // the original query broke on names containing '"' and allowed injection.
    Cursor c = db.rawQuery(
            "select * from " + COMPANIES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?",
            new String[]{name});
    try {
        while (c.moveToNext()) {
            cid = c.getInt(c.getColumnIndex(COMPANIES_COLUMN_CID));
        }
    } finally {
        c.close();
        db.close();
    }
    return cid;
}
/**
 * Returns the primary-key id of the share row whose name column matches
 * {@code name}, or -1 when the name is empty or no row matches.
 */
public int getDBSharePrimaryID(String name) {
    int id = -1;
    if (name.equals("")) return id;
    SQLiteDatabase db = this.getReadableDatabase();
    // Bind the name (quote-safe, injection-safe) instead of concatenating it.
    Cursor c = db.rawQuery(
            "select * from " + SHARES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?",
            new String[]{name});
    try {
        // The original early-returned on an empty cursor without closing the
        // cursor or database, leaking both; try/finally guarantees cleanup.
        if (c.moveToFirst()) {
            id = c.getInt(c.getColumnIndex(ALL_TABLES_COLUMN_ID));
        }
    } finally {
        c.close();
        db.close();
    }
    return id;
}
/**
 * Returns the current value of the company named {@code name}, or -1 when
 * the name is empty or no row matches.
 */
public long getCompCurrValue(String name) {
    long value = -1;
    if (name.equals("")) return value;
    SQLiteDatabase db = this.getReadableDatabase();
    // Bind the name (quote-safe, injection-safe) instead of concatenating it.
    Cursor c = db.rawQuery(
            "select * from " + COMPANIES_TABLE_NAME + " where " + COMPANIES_COLUMN_NAME + "=?",
            new String[]{name});
    try {
        // The original early-returned on an empty cursor without closing the
        // cursor or database, leaking both; try/finally guarantees cleanup.
        if (c.moveToFirst()) {
            value = c.getLong(c.getColumnIndex(COMPANIES_COLUMN_CURRENT_VALUE));
        }
    } finally {
        c.close();
        db.close();
    }
    return value;
}
/** Updates the total-shares column of the share row identified by {@code sid}. */
public void setCompTotalShares(int sid, int totalShares) {
    SQLiteDatabase db = this.getWritableDatabase();
    ContentValues row = new ContentValues();
    row.put(SHARES_COLUMN_TOTAL_SHARES, totalShares);
    db.update(SHARES_TABLE_NAME, row, SHARES_COLUMN_SID + "=?",
            new String[]{String.valueOf(sid)});
    db.close();
}
}
| |
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.adwords.axis.utils;
import com.google.api.ads.adwords.lib.utils.BatchJobMutateResultInterface;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.io.ByteSource;
import org.apache.axis.Message;
import org.apache.axis.MessageContext;
import org.apache.axis.encoding.DeserializationContext;
import org.apache.axis.encoding.TypeMapping;
import org.apache.axis.encoding.TypeMappingRegistry;
import org.apache.axis.encoding.TypeMappingRegistryImpl;
import org.apache.axis.encoding.ser.BaseDeserializerFactory;
import org.apache.axis.encoding.ser.BaseSerializerFactory;
import org.apache.axis.encoding.ser.BeanDeserializerFactory;
import org.apache.axis.encoding.ser.BeanSerializerFactory;
import org.apache.axis.message.MessageElement;
import org.apache.axis.message.SOAPEnvelope;
import org.apache.axis.server.AxisServer;
import org.xml.sax.InputSource;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import javax.xml.namespace.QName;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
/**
* Utility for deserializing XML to Axis objects.
*/
/**
 * Utility for deserializing XML to Axis objects.
 *
 * <p>This class holds no mutable state and is safe to share across threads.
 */
public class AxisDeserializer {

  private static final String SOAP_START_BODY =
      "<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" "
          + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" "
          + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">"
          + "<soapenv:Body>";

  private static final String SOAP_END_BODY = "</soapenv:Body></soapenv:Envelope>";

  private static final String INDENT_AMOUNT = "4";

  /**
   * Creates and configures a new Transformer that indents output and omits the
   * XML declaration.
   *
   * <p>A fresh instance is created on every call because {@link Transformer} is
   * documented as not safe for concurrent use from multiple threads; the
   * previous approach of caching a single instance in a static
   * {@code AtomicReference} and reusing it was a data race when this utility
   * was used from several threads at once. The per-call factory cost is small
   * relative to the download/parse work this class performs.
   *
   * @throws TransformerConfigurationException if the transformer cannot be created.
   */
  private static Transformer getTransformer()
      throws TransformerConfigurationException, TransformerFactoryConfigurationError {
    Transformer transformer = TransformerFactory.newInstance().newTransformer();
    transformer.setOutputProperty(OutputKeys.INDENT, "yes");
    transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
    transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", INDENT_AMOUNT);
    return transformer;
  }

  /**
   * Downloads BatchJob results from a download URL and deserializes them into a list.
   *
   * @param url the download URL from the BatchJob.
   * @param serviceTypeMappings the TypeMapping collection from services whose operations
   *     BatchJobService supports.
   * @param resultClass the class of the BatchJobMutateResultInterface for the SOAP kit and API
   *     version.
   * @param resultQName the QName of the BatchJobMutateResultInterface for the SOAP kit and API
   *     version.
   * @param operandClass the class of the Operand for the SOAP kit and API version.
   * @param operandQName the QName of the Operand for the SOAP kit and API version.
   * @return the collection of downloaded BatchJobMutateResultInterface objects.
   */
  public <OperandT, ApiErrorT,
      ResultT extends BatchJobMutateResultInterface<OperandT, ApiErrorT>> List<ResultT>
      deserializeBatchJobMutateResults(URL url, List<TypeMapping> serviceTypeMappings,
          Class<ResultT> resultClass, QName resultQName, Class<OperandT> operandClass,
          QName operandQName) throws Exception {
    List<ResultT> results = Lists.newArrayList();
    // Build a wrapped input stream from the response so Axis can parse it as a
    // SOAP message.
    InputStream wrappedStream = buildWrappedInputStream(url.openStream());
    // Construct an Axis deserialization context.
    DeserializationContext deserializationContext = new DeserializationContext(
        new InputSource(wrappedStream), new MessageContext(new AxisServer()), Message.RESPONSE);
    // Register all type mappings with the deserialization context's registry.
    TypeMappingRegistry mappingRegistry = addTypeMappingDelegates(
        deserializationContext.getTypeMappingRegistry(), serviceTypeMappings);
    mappingRegistry = addTypeMappingDelegate(
        mappingRegistry, operandClass, operandQName, deserializationContext.getEncodingStyle());
    // Parse the wrapped input stream.
    deserializationContext.parse();
    // Read the deserialized mutate results from the parsed stream.
    SOAPEnvelope envelope = deserializationContext.getEnvelope();
    MessageElement body = envelope.getFirstBody();
    for (Iterator<?> iter = body.getChildElements(); iter.hasNext();) {
      Object child = iter.next();
      MessageElement childElm = (MessageElement) child;
      @SuppressWarnings("unchecked")
      ResultT mutateResult = (ResultT) childElm.getValueAsType(resultQName, resultClass);
      results.add(mutateResult);
    }
    return results;
  }

  /**
   * Returns a new input stream that wraps the download input stream in a SOAP body so
   * it can be parsed by Axis.
   */
  private InputStream buildWrappedInputStream(InputStream downloadInputStream)
      throws TransformerException, IOException {
    // Pass the download input stream through a transformer that removes the XML
    // declaration.
    Transformer omitXmlDeclarationTransformer = getTransformer();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    StreamResult streamResult = new StreamResult(outputStream);
    Source xmlSource = new StreamSource(downloadInputStream);
    omitXmlDeclarationTransformer.transform(xmlSource, streamResult);
    // Encode the SOAP wrapper explicitly as UTF-8; the no-arg getBytes() used
    // the platform-default charset, which is environment-dependent.
    return ByteSource.concat(
        ByteSource.wrap(SOAP_START_BODY.getBytes(java.nio.charset.StandardCharsets.UTF_8)),
        ByteSource.wrap(outputStream.toByteArray()),
        ByteSource.wrap(SOAP_END_BODY.getBytes(java.nio.charset.StandardCharsets.UTF_8)))
        .openStream();
  }

  /**
   * Adds to {@code startingRegistry} a new delegate that contains the list of
   * type mappings.
   *
   * @param startingRegistry the registry to which a new delegate will be added.
   * @param typeMappings the type mappings to register.
   * @return the new delegate (the new end of the delegate chain).
   */
  private TypeMappingRegistry addTypeMappingDelegates(
      TypeMappingRegistry startingRegistry, List<TypeMapping> typeMappings) {
    Preconditions.checkNotNull(typeMappings, "Null type mappings");
    Preconditions.checkArgument(!typeMappings.isEmpty(), "Empty type mappings");
    TypeMappingRegistry lastRegistry =
        Preconditions.checkNotNull(startingRegistry, "Null starting registry");
    // Each mapping becomes its own registry appended to the delegate chain.
    for (TypeMapping typeMapping : typeMappings) {
      TypeMappingRegistryImpl typeMappingReg = new TypeMappingRegistryImpl(false);
      typeMappingReg.registerDefault(typeMapping);
      lastRegistry.delegate(typeMappingReg);
      lastRegistry = typeMappingReg;
    }
    // Return the last registry for further delegate chaining.
    return lastRegistry;
  }

  /**
   * Adds to {@code prevRegistry} a new delegate that has the specified class and name
   * registered.
   *
   * @param prevRegistry the registry to which the new delegate will be added.
   * @param clazz the Class to register.
   * @param qname the QName for {@code clazz}.
   * @param encodingStyle the encoding style.
   * @return the new delegate (the new end of the delegate chain).
   */
  private TypeMappingRegistry addTypeMappingDelegate(
      TypeMappingRegistry prevRegistry, Class<?> clazz, QName qname, String encodingStyle) {
    Preconditions.checkNotNull(clazz, "Null class");
    Preconditions.checkNotNull(qname, "Null qname");
    // Create a new registry and register the class/qname.
    TypeMappingRegistryImpl newRegistry = new TypeMappingRegistryImpl(false);
    TypeMapping typeMapping = newRegistry.getOrMakeTypeMapping(encodingStyle);
    typeMapping.register(clazz, qname,
        BaseSerializerFactory.createFactory(BeanSerializerFactory.class, clazz, qname),
        BaseDeserializerFactory.createFactory(BeanDeserializerFactory.class, clazz, qname));
    newRegistry.registerDefault(typeMapping);
    // Validate post-condition.
    Preconditions.checkState(
        newRegistry.getOrMakeTypeMapping(encodingStyle).isRegistered(clazz, qname),
        "Class %s and QName %s failed to register", clazz, qname);
    // Add the new registry as a delegate of the previous registry.
    prevRegistry.delegate(newRegistry);
    // Return the new registry for further delegate chaining.
    return newRegistry;
  }
}
| |
/*
* Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.security.sasl;
import javax.security.sasl.*;
import javax.security.auth.callback.*;
import java.util.Random;
import java.util.Map;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.NoSuchAlgorithmException;
import java.util.logging.Logger;
import java.util.logging.Level;
/**
* Implements the CRAM-MD5 SASL server-side mechanism.
* (<A HREF="http://www.ietf.org/rfc/rfc2195.txt">RFC 2195</A>).
* CRAM-MD5 has no initial response.
*
* client <---- M={random, timestamp, server-fqdn} ------- server
* client ----- {username HMAC_MD5(pw, M)} --------------> server
*
* CallbackHandler must be able to handle the following callbacks:
* - NameCallback: default name is name of user for whom to get password
* - PasswordCallback: must fill in password; if empty, no pw
* - AuthorizeCallback: must setAuthorized() and canonicalized authorization id
* - auth id == authzid, but needed to get canonicalized authzid
*
* @author Rosanna Lee
*/
/**
 * Implements the CRAM-MD5 SASL server-side mechanism.
 * (<A HREF="http://www.ietf.org/rfc/rfc2195.txt">RFC 2195</A>).
 * CRAM-MD5 has no initial response.
 *
 * client <---- M={random, timestamp, server-fqdn} ------- server
 * client ----- {username HMAC_MD5(pw, M)} --------------> server
 *
 * CallbackHandler must be able to handle the following callbacks:
 * - NameCallback: default name is name of user for whom to get password
 * - PasswordCallback: must fill in password; if empty, no pw
 * - AuthorizeCallback: must setAuthorized() and canonicalized authorization id
 * - auth id == authzid, but needed to get canonicalized authzid
 *
 * @author Rosanna Lee
 */
final class CramMD5Server extends CramMD5Base implements SaslServer {
    private String fqdn;            // server's fully qualified domain name, used in the challenge
    private byte[] challengeData = null; // non-null once the first challenge has been issued
    private String authzid;         // canonicalized authorization id, set on success
    private CallbackHandler cbh;    // supplies name/password/authorize callbacks

    /**
     * Creates a SASL mechanism with client credentials that it needs
     * to participate in CRAM-MD5 authentication exchange with the server.
     *
     * @param protocol the protocol name (unused by CRAM-MD5 but part of the
     * mechanism-factory contract).
     * @param serverFqdn the non-null fully qualified server name, embedded in
     * the generated challenge.
     * @param props mechanism properties (unused).
     * @param cbh callback handler for name/password/authorization callbacks.
     * @throws SaslException if {@code serverFqdn} is null.
     */
    CramMD5Server(String protocol, String serverFqdn, Map props,
        CallbackHandler cbh) throws SaslException {
        if (serverFqdn == null) {
            throw new SaslException(
                "CRAM-MD5: fully qualified server name must be specified");
        }
        fqdn = serverFqdn;
        this.cbh = cbh;
    }

    /**
     * Generates challenge based on response sent by client.
     *
     * CRAM-MD5 has no initial response.
     * First call generates challenge.
     * Second call verifies client response. If authentication fails, throws
     * SaslException.
     *
     * @param responseData A non-null byte array containing the response
     * data from the client.
     * @return A non-null byte array containing the challenge to be sent to
     * the client for the first call; null when 2nd call is successful.
     * @throws SaslException If authentication fails.
     */
    public byte[] evaluateResponse(byte[] responseData)
        throws SaslException {

        // See if we've been here before
        if (completed) {
            throw new IllegalStateException(
                "CRAM-MD5 authentication already completed");
        }
        if (aborted) {
            throw new IllegalStateException(
                "CRAM-MD5 authentication previously aborted due to error");
        }

        try {
            if (challengeData == null) {
                if (responseData.length != 0) {
                    aborted = true;
                    throw new SaslException(
                        "CRAM-MD5 does not expect any initial response");
                }

                // Generate challenge {random, timestamp, fqdn}.
                // NOTE(review): java.util.Random is used here; RFC 2195 only
                // requires the challenge to be unique, but an unpredictable
                // source (SecureRandom) would be more conservative — confirm
                // before changing, as this matches the historical behavior.
                Random random = new Random();
                long rand = random.nextLong();
                long timestamp = System.currentTimeMillis();

                StringBuilder buf = new StringBuilder();
                buf.append('<');
                buf.append(rand);
                buf.append('.');
                buf.append(timestamp);
                buf.append('@');
                buf.append(fqdn);
                buf.append('>');
                String challengeStr = buf.toString();

                logger.log(Level.FINE,
                    "CRAMSRV01:Generated challenge: {0}", challengeStr);

                challengeData = challengeStr.getBytes("UTF8");
                return challengeData.clone();

            } else {
                // Examine response to see if correctly encrypted challengeData
                if(logger.isLoggable(Level.FINE)) {
                    logger.log(Level.FINE,
                        "CRAMSRV02:Received response: {0}",
                        new String(responseData, "UTF8"));
                }

                // Extract username from response: everything up to the first
                // space character.
                int ulen = 0;
                for (int i = 0; i < responseData.length; i++) {
                    if (responseData[i] == ' ') {
                        ulen = i;
                        break;
                    }
                }
                if (ulen == 0) {
                    aborted = true;
                    throw new SaslException(
                        "CRAM-MD5: Invalid response; space missing");
                }
                String username = new String(responseData, 0, ulen, "UTF8");

                logger.log(Level.FINE,
                    "CRAMSRV03:Extracted username: {0}", username);

                // Get user's password via callbacks.
                NameCallback ncb =
                    new NameCallback("CRAM-MD5 authentication ID: ", username);
                PasswordCallback pcb =
                    new PasswordCallback("CRAM-MD5 password: ", false);
                cbh.handle(new Callback[]{ncb,pcb});
                char pwChars[] = pcb.getPassword();
                if (pwChars == null || pwChars.length == 0) {
                    // user has no password; OK to disclose to server
                    aborted = true;
                    throw new SaslException(
                        "CRAM-MD5: username not found: " + username);
                }
                pcb.clearPassword();
                // NOTE(review): the password transits through an immutable
                // String here, which cannot be zeroed; only the char[] copy
                // below is cleared. Pre-existing limitation.
                String pwStr = new String(pwChars);
                for (int i = 0; i < pwChars.length; i++) {
                    pwChars[i] = 0;
                }
                pw = pwStr.getBytes("UTF8");

                // Generate a keyed-MD5 digest from the user's password and
                // original challenge.
                String digest = HMAC_MD5(pw, challengeData);

                logger.log(Level.FINE,
                    "CRAMSRV04:Expecting digest: {0}", digest);

                // clear pw when we no longer need it
                clearPassword();

                // Check whether digest is as expected.
                byte [] expectedDigest = digest.getBytes("UTF8");
                int digestLen = responseData.length - ulen - 1;
                if (expectedDigest.length != digestLen) {
                    aborted = true;
                    throw new SaslException("Invalid response");
                }
                // Compare the full digest without early exit so verification
                // time does not reveal the position of the first mismatching
                // byte (the original per-byte early-exit loop leaked timing).
                int mismatch = 0;
                for (int i = 0; i < expectedDigest.length; i++) {
                    mismatch |= expectedDigest[i] ^ responseData[ulen + 1 + i];
                }
                if (mismatch != 0) {
                    aborted = true;
                    throw new SaslException("Invalid response");
                }

                // All checks out, use AuthorizeCallback to canonicalize name
                AuthorizeCallback acb = new AuthorizeCallback(username, username);
                cbh.handle(new Callback[]{acb});
                if (acb.isAuthorized()) {
                    authzid = acb.getAuthorizedID();
                } else {
                    // Not authorized
                    aborted = true;
                    throw new SaslException(
                        "CRAM-MD5: user not authorized: " + username);
                }

                logger.log(Level.FINE,
                    "CRAMSRV05:Authorization id: {0}", authzid);

                completed = true;
                return null;
            }
        } catch (UnsupportedEncodingException e) {
            aborted = true;
            throw new SaslException("UTF8 not available on platform", e);
        } catch (NoSuchAlgorithmException e) {
            aborted = true;
            throw new SaslException("MD5 algorithm not available on platform", e);
        } catch (UnsupportedCallbackException e) {
            aborted = true;
            throw new SaslException("CRAM-MD5 authentication failed", e);
        } catch (SaslException e) {
            throw e; // rethrow
        } catch (IOException e) {
            aborted = true;
            throw new SaslException("CRAM-MD5 authentication failed", e);
        }
    }

    /**
     * Returns the canonicalized authorization id established by a completed
     * exchange.
     *
     * @throws IllegalStateException if authentication has not completed.
     */
    public String getAuthorizationID() {
        if (completed) {
            return authzid;
        } else {
            throw new IllegalStateException(
                "CRAM-MD5 authentication not completed");
        }
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.redshift.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.redshift.AmazonRedshift#authorizeSnapshotAccess(AuthorizeSnapshotAccessRequest) AuthorizeSnapshotAccess operation}.
* <p>
* Authorizes the specified AWS customer account to restore the
* specified snapshot.
* </p>
* <p>
* For more information about working with snapshots, go to
* <a href="http://docs.aws.amazon.com/redshift/latest/mgmt/working-with-snapshots.html"> Amazon Redshift Snapshots </a>
* in the <i>Amazon Redshift Cluster Management Guide</i> .
* </p>
*
* @see com.amazonaws.services.redshift.AmazonRedshift#authorizeSnapshotAccess(AuthorizeSnapshotAccessRequest)
*/
/**
 * Container for the parameters to the {@link com.amazonaws.services.redshift.AmazonRedshift#authorizeSnapshotAccess(AuthorizeSnapshotAccessRequest) AuthorizeSnapshotAccess operation}.
 * <p>
 * Authorizes the specified AWS customer account to restore the
 * specified snapshot.
 * </p>
 * <p>
 * For more information about working with snapshots, go to
 * <a href="http://docs.aws.amazon.com/redshift/latest/mgmt/working-with-snapshots.html"> Amazon Redshift Snapshots </a>
 * in the <i>Amazon Redshift Cluster Management Guide</i> .
 * </p>
 *
 * @see com.amazonaws.services.redshift.AmazonRedshift#authorizeSnapshotAccess(AuthorizeSnapshotAccessRequest)
 */
public class AuthorizeSnapshotAccessRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    // The identifier of the snapshot the account is authorized to restore.
    private String snapshotIdentifier;

    // The identifier of the cluster the snapshot was created from. Required if
    // the caller's IAM policy names a specific cluster in its snapshot
    // resource element (anything other than *).
    private String snapshotClusterIdentifier;

    // The identifier of the AWS customer account authorized to restore the
    // specified snapshot.
    private String accountWithRestoreAccess;

    /**
     * Returns the identifier of the snapshot the account is authorized to
     * restore.
     */
    public String getSnapshotIdentifier() {
        return snapshotIdentifier;
    }

    /**
     * Sets the identifier of the snapshot the account is authorized to
     * restore.
     */
    public void setSnapshotIdentifier(String snapshotIdentifier) {
        this.snapshotIdentifier = snapshotIdentifier;
    }

    /**
     * Fluent variant of {@link #setSnapshotIdentifier(String)}.
     *
     * @return this request, for method chaining.
     */
    public AuthorizeSnapshotAccessRequest withSnapshotIdentifier(String snapshotIdentifier) {
        setSnapshotIdentifier(snapshotIdentifier);
        return this;
    }

    /**
     * Returns the identifier of the cluster the snapshot was created from.
     * This parameter is required if your IAM user has a policy containing a
     * snapshot resource element that specifies anything other than * for the
     * cluster name.
     */
    public String getSnapshotClusterIdentifier() {
        return snapshotClusterIdentifier;
    }

    /**
     * Sets the identifier of the cluster the snapshot was created from.
     * This parameter is required if your IAM user has a policy containing a
     * snapshot resource element that specifies anything other than * for the
     * cluster name.
     */
    public void setSnapshotClusterIdentifier(String snapshotClusterIdentifier) {
        this.snapshotClusterIdentifier = snapshotClusterIdentifier;
    }

    /**
     * Fluent variant of {@link #setSnapshotClusterIdentifier(String)}.
     *
     * @return this request, for method chaining.
     */
    public AuthorizeSnapshotAccessRequest withSnapshotClusterIdentifier(String snapshotClusterIdentifier) {
        setSnapshotClusterIdentifier(snapshotClusterIdentifier);
        return this;
    }

    /**
     * Returns the identifier of the AWS customer account authorized to
     * restore the specified snapshot.
     */
    public String getAccountWithRestoreAccess() {
        return accountWithRestoreAccess;
    }

    /**
     * Sets the identifier of the AWS customer account authorized to restore
     * the specified snapshot.
     */
    public void setAccountWithRestoreAccess(String accountWithRestoreAccess) {
        this.accountWithRestoreAccess = accountWithRestoreAccess;
    }

    /**
     * Fluent variant of {@link #setAccountWithRestoreAccess(String)}.
     *
     * @return this request, for method chaining.
     */
    public AuthorizeSnapshotAccessRequest withAccountWithRestoreAccess(String accountWithRestoreAccess) {
        setAccountWithRestoreAccess(accountWithRestoreAccess);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Output format is unchanged from previous releases.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getSnapshotIdentifier() != null) {
            sb.append("SnapshotIdentifier: ").append(getSnapshotIdentifier()).append(",");
        }
        if (getSnapshotClusterIdentifier() != null) {
            sb.append("SnapshotClusterIdentifier: ").append(getSnapshotClusterIdentifier()).append(",");
        }
        if (getAccountWithRestoreAccess() != null) {
            sb.append("AccountWithRestoreAccess: ").append(getAccountWithRestoreAccess());
        }
        return sb.append("}").toString();
    }

    @Override
    public int hashCode() {
        // Same 31-based formula as before, so hash values are unchanged.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + hashOrZero(getSnapshotIdentifier());
        hashCode = prime * hashCode + hashOrZero(getSnapshotClusterIdentifier());
        hashCode = prime * hashCode + hashOrZero(getAccountWithRestoreAccess());
        return hashCode;
    }

    // Null-safe hashCode helper: null contributes 0.
    private static int hashOrZero(Object o) {
        return (o == null) ? 0 : o.hashCode();
    }

    // Returns true when exactly one argument is null, or both are non-null
    // and unequal.
    private static boolean differs(Object a, Object b) {
        return (a == null) ? (b != null) : !a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (!(obj instanceof AuthorizeSnapshotAccessRequest)) return false;
        AuthorizeSnapshotAccessRequest that = (AuthorizeSnapshotAccessRequest) obj;
        return !differs(that.getSnapshotIdentifier(), this.getSnapshotIdentifier())
            && !differs(that.getSnapshotClusterIdentifier(), this.getSnapshotClusterIdentifier())
            && !differs(that.getAccountWithRestoreAccess(), this.getAccountWithRestoreAccess());
    }

    @Override
    public AuthorizeSnapshotAccessRequest clone() {
        return (AuthorizeSnapshotAccessRequest) super.clone();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.controller;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.kafka.common.Endpoint;
import org.apache.kafka.common.errors.DuplicateBrokerRegistrationException;
import org.apache.kafka.common.errors.StaleBrokerEpochException;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.message.BrokerRegistrationRequestData;
import org.apache.kafka.common.metadata.FenceBrokerRecord;
import org.apache.kafka.common.metadata.RegisterBrokerRecord;
import org.apache.kafka.common.metadata.RegisterBrokerRecord.BrokerEndpoint;
import org.apache.kafka.common.metadata.RegisterBrokerRecord.BrokerEndpointCollection;
import org.apache.kafka.common.metadata.RegisterBrokerRecord.BrokerFeature;
import org.apache.kafka.common.metadata.RegisterBrokerRecord.BrokerFeatureCollection;
import org.apache.kafka.common.metadata.UnfenceBrokerRecord;
import org.apache.kafka.common.metadata.UnregisterBrokerRecord;
import org.apache.kafka.common.security.auth.SecurityProtocol;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.server.common.ApiMessageAndVersion;
import org.apache.kafka.metadata.BrokerRegistration;
import org.apache.kafka.metadata.BrokerRegistrationReply;
import org.apache.kafka.metadata.FeatureMapAndEpoch;
import org.apache.kafka.metadata.VersionRange;
import org.apache.kafka.timeline.SnapshotRegistry;
import org.apache.kafka.timeline.TimelineHashMap;
import org.slf4j.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import static org.apache.kafka.common.metadata.MetadataRecordType.REGISTER_BROKER_RECORD;
/**
* The ClusterControlManager manages all the hard state associated with the Kafka cluster.
* Hard state is state which appears in the metadata log, such as broker registrations,
* brokers being fenced or unfenced, and broker feature versions.
*/
public class ClusterControlManager {
    /**
     * Pairs a future with the minimum number of unfenced brokers required to
     * complete it.
     */
    class ReadyBrokersFuture {
        private final CompletableFuture<Void> future;
        private final int minBrokers;
        ReadyBrokersFuture(CompletableFuture<Void> future, int minBrokers) {
            this.future = future;
            this.minBrokers = minBrokers;
        }
        /**
         * @return true once at least {@code minBrokers} registered brokers are
         *         unfenced.
         */
        boolean check() {
            int numUnfenced = 0;
            for (BrokerRegistration registration : brokerRegistrations.values()) {
                if (!registration.fenced()) {
                    numUnfenced++;
                }
                if (numUnfenced >= minBrokers) {
                    return true;
                }
            }
            return false;
        }
    }
    /**
     * The SLF4J log context.
     */
    private final LogContext logContext;
    /**
     * The SLF4J log object.
     */
    private final Logger log;
    /**
     * The Kafka clock object to use.
     */
    private final Time time;
    /**
     * How long sessions should last, in nanoseconds.
     */
    private final long sessionTimeoutNs;
    /**
     * The replica placer to use.
     */
    private final ReplicaPlacer replicaPlacer;
    /**
     * Maps broker IDs to broker registrations.
     */
    private final TimelineHashMap<Integer, BrokerRegistration> brokerRegistrations;
    /**
     * The broker heartbeat manager, or null if this controller is on standby.
     */
    private BrokerHeartbeatManager heartbeatManager;
    /**
     * A future which is completed as soon as we have the given number of brokers
     * ready.
     */
    private Optional<ReadyBrokersFuture> readyBrokersFuture;
    ClusterControlManager(LogContext logContext,
                          Time time,
                          SnapshotRegistry snapshotRegistry,
                          long sessionTimeoutNs,
                          ReplicaPlacer replicaPlacer) {
        this.logContext = logContext;
        this.log = logContext.logger(ClusterControlManager.class);
        this.time = time;
        this.sessionTimeoutNs = sessionTimeoutNs;
        this.replicaPlacer = replicaPlacer;
        this.brokerRegistrations = new TimelineHashMap<>(snapshotRegistry, 0);
        this.heartbeatManager = null;
        this.readyBrokersFuture = Optional.empty();
    }
    /**
     * Transition this ClusterControlManager to active: create the heartbeat
     * manager and seed it with all currently registered brokers.
     */
    public void activate() {
        heartbeatManager = new BrokerHeartbeatManager(logContext, time, sessionTimeoutNs);
        for (BrokerRegistration registration : brokerRegistrations.values()) {
            heartbeatManager.touch(registration.id(), registration.fenced(), -1);
        }
    }
    /**
     * Transition this ClusterControlManager to standby.
     */
    public void deactivate() {
        heartbeatManager = null;
    }
    Map<Integer, BrokerRegistration> brokerRegistrations() {
        return brokerRegistrations;
    }
    /**
     * @return the IDs of all brokers that are currently fenced.
     */
    Set<Integer> fencedBrokerIds() {
        return brokerRegistrations.values()
            .stream()
            .filter(BrokerRegistration::fenced)
            .map(BrokerRegistration::id)
            .collect(Collectors.toSet());
    }
    /**
     * Process an incoming broker registration request.
     *
     * @param request           the registration request sent by the broker.
     * @param brokerEpoch       the epoch to assign to this registration.
     * @param finalizedFeatures the features finalized in the cluster, used to
     *                          validate the broker's supported feature ranges.
     * @return the records to append plus the reply to send back.
     * @throws DuplicateBrokerRegistrationException if another live incarnation
     *         already holds this broker id.
     * @throws UnsupportedVersionException if the broker's supported range for a
     *         finalized feature does not contain the finalized range.
     */
    public ControllerResult<BrokerRegistrationReply> registerBroker(
            BrokerRegistrationRequestData request,
            long brokerEpoch,
            FeatureMapAndEpoch finalizedFeatures) {
        if (heartbeatManager == null) {
            throw new RuntimeException("ClusterControlManager is not active.");
        }
        int brokerId = request.brokerId();
        BrokerRegistration existing = brokerRegistrations.get(brokerId);
        if (existing != null) {
            if (heartbeatManager.hasValidSession(brokerId)) {
                if (!existing.incarnationId().equals(request.incarnationId())) {
                    throw new DuplicateBrokerRegistrationException("Another broker is " +
                        "registered with that broker id.");
                }
            } else {
                if (!existing.incarnationId().equals(request.incarnationId())) {
                    // Remove any existing session for the old broker incarnation.
                    heartbeatManager.remove(brokerId);
                    existing = null;
                }
            }
        }
        RegisterBrokerRecord record = new RegisterBrokerRecord().setBrokerId(brokerId).
            setIncarnationId(request.incarnationId()).
            setBrokerEpoch(brokerEpoch).
            setRack(request.rack());
        for (BrokerRegistrationRequestData.Listener listener : request.listeners()) {
            record.endPoints().add(new BrokerEndpoint().
                setHost(listener.host()).
                setName(listener.name()).
                setPort(listener.port()).
                setSecurityProtocol(listener.securityProtocol()));
        }
        for (BrokerRegistrationRequestData.Feature feature : request.features()) {
            Optional<VersionRange> finalized = finalizedFeatures.map().get(feature.name());
            if (finalized.isPresent()) {
                if (!finalized.get().contains(new VersionRange(feature.minSupportedVersion(),
                        feature.maxSupportedVersion()))) {
                    throw new UnsupportedVersionException("Unable to register because " +
                        "the broker has an unsupported version of " + feature.name());
                }
            }
            record.features().add(new BrokerFeature().
                setName(feature.name()).
                setMinSupportedVersion(feature.minSupportedVersion()).
                setMaxSupportedVersion(feature.maxSupportedVersion()));
        }
        // A brand-new registration starts fenced; a re-registration keeps the
        // fencing state of the existing registration.
        if (existing == null) {
            heartbeatManager.touch(brokerId, true, -1);
        } else {
            heartbeatManager.touch(brokerId, existing.fenced(), -1);
        }
        List<ApiMessageAndVersion> records = new ArrayList<>();
        records.add(new ApiMessageAndVersion(record,
            REGISTER_BROKER_RECORD.highestSupportedVersion()));
        return ControllerResult.of(records, new BrokerRegistrationReply(brokerEpoch));
    }
    /**
     * Replay a RegisterBrokerRecord: rebuild the broker's registration from the
     * record and install it in the registration map.
     */
    public void replay(RegisterBrokerRecord record) {
        int brokerId = record.brokerId();
        List<Endpoint> listeners = new ArrayList<>();
        for (BrokerEndpoint endpoint : record.endPoints()) {
            listeners.add(new Endpoint(endpoint.name(),
                SecurityProtocol.forId(endpoint.securityProtocol()),
                endpoint.host(), endpoint.port()));
        }
        Map<String, VersionRange> features = new HashMap<>();
        for (BrokerFeature feature : record.features()) {
            features.put(feature.name(), new VersionRange(
                feature.minSupportedVersion(), feature.maxSupportedVersion()));
        }
        // BUG FIX: capture the previous registration BEFORE overwriting it.
        // The old code did the put first and then get(), so prevRegistration was
        // always the just-inserted registration: the "Registered new broker"
        // branch was unreachable and the incarnation comparison was self-vs-self.
        BrokerRegistration prevRegistration = brokerRegistrations.get(brokerId);
        // Update broker registrations.
        brokerRegistrations.put(brokerId, new BrokerRegistration(brokerId,
            record.brokerEpoch(), record.incarnationId(), listeners, features,
            Optional.ofNullable(record.rack()), record.fenced()));
        if (prevRegistration == null) {
            log.info("Registered new broker: {}", record);
        } else if (prevRegistration.incarnationId().equals(record.incarnationId())) {
            log.info("Re-registered broker incarnation: {}", record);
        } else {
            log.info("Re-registered broker id {}: {}", brokerId, record);
        }
    }
    /**
     * Replay an UnregisterBrokerRecord: remove the broker's registration,
     * validating the id and epoch first.
     */
    public void replay(UnregisterBrokerRecord record) {
        int brokerId = record.brokerId();
        BrokerRegistration registration = brokerRegistrations.get(brokerId);
        if (registration == null) {
            throw new RuntimeException(String.format("Unable to replay %s: no broker " +
                "registration found for that id", record.toString()));
        } else if (registration.epoch() != record.brokerEpoch()) {
            throw new RuntimeException(String.format("Unable to replay %s: no broker " +
                "registration with that epoch found", record.toString()));
        } else {
            brokerRegistrations.remove(brokerId);
            log.info("Unregistered broker: {}", record);
        }
    }
    /**
     * Replay a FenceBrokerRecord: mark the broker's registration as fenced.
     */
    public void replay(FenceBrokerRecord record) {
        int brokerId = record.id();
        BrokerRegistration registration = brokerRegistrations.get(brokerId);
        if (registration == null) {
            throw new RuntimeException(String.format("Unable to replay %s: no broker " +
                "registration found for that id", record.toString()));
        } else if (registration.epoch() != record.epoch()) {
            throw new RuntimeException(String.format("Unable to replay %s: no broker " +
                "registration with that epoch found", record.toString()));
        } else {
            brokerRegistrations.put(brokerId, registration.cloneWithFencing(true));
            log.info("Fenced broker: {}", record);
        }
    }
    /**
     * Replay an UnfenceBrokerRecord: mark the broker's registration as unfenced
     * and, if enough brokers are now ready, complete the pending ready-brokers
     * future.
     */
    public void replay(UnfenceBrokerRecord record) {
        int brokerId = record.id();
        BrokerRegistration registration = brokerRegistrations.get(brokerId);
        if (registration == null) {
            throw new RuntimeException(String.format("Unable to replay %s: no broker " +
                "registration found for that id", record.toString()));
        } else if (registration.epoch() != record.epoch()) {
            throw new RuntimeException(String.format("Unable to replay %s: no broker " +
                "registration with that epoch found", record.toString()));
        } else {
            brokerRegistrations.put(brokerId, registration.cloneWithFencing(false));
            log.info("Unfenced broker: {}", record);
        }
        if (readyBrokersFuture.isPresent()) {
            if (readyBrokersFuture.get().check()) {
                readyBrokersFuture.get().future.complete(null);
                readyBrokersFuture = Optional.empty();
            }
        }
    }
    /**
     * Place replicas for a range of partitions via the heartbeat manager.
     * Only valid while this manager is active.
     */
    public List<List<Integer>> placeReplicas(int startPartition,
                                             int numPartitions,
                                             short numReplicas) {
        if (heartbeatManager == null) {
            throw new RuntimeException("ClusterControlManager is not active.");
        }
        return heartbeatManager.placeReplicas(startPartition, numPartitions, numReplicas,
            id -> brokerRegistrations.get(id).rack(), replicaPlacer);
    }
    /**
     * @return true if the given broker is registered and not fenced.
     */
    public boolean unfenced(int brokerId) {
        BrokerRegistration registration = brokerRegistrations.get(brokerId);
        if (registration == null) return false;
        return !registration.fenced();
    }
    BrokerHeartbeatManager heartbeatManager() {
        if (heartbeatManager == null) {
            throw new RuntimeException("ClusterControlManager is not active.");
        }
        return heartbeatManager;
    }
    /**
     * Verify that the given broker epoch matches the registered one.
     *
     * @throws StaleBrokerEpochException if there is no registration for the
     *         broker, or the epoch does not match.
     */
    public void checkBrokerEpoch(int brokerId, long brokerEpoch) {
        BrokerRegistration registration = brokerRegistrations.get(brokerId);
        if (registration == null) {
            throw new StaleBrokerEpochException("No broker registration found for " +
                "broker id " + brokerId);
        }
        if (registration.epoch() != brokerEpoch) {
            throw new StaleBrokerEpochException("Expected broker epoch " +
                registration.epoch() + ", but got broker epoch " + brokerEpoch);
        }
    }
    /**
     * Register a future to be completed when at least {@code minBrokers}
     * brokers are unfenced. Completes immediately if the condition already
     * holds. Only one such future is tracked at a time.
     */
    public void addReadyBrokersFuture(CompletableFuture<Void> future, int minBrokers) {
        readyBrokersFuture = Optional.of(new ReadyBrokersFuture(future, minBrokers));
        if (readyBrokersFuture.get().check()) {
            readyBrokersFuture.get().future.complete(null);
            readyBrokersFuture = Optional.empty();
        }
    }
    /**
     * Iterates over the broker registrations visible at a given epoch,
     * producing one record batch per broker for snapshot generation.
     */
    class ClusterControlIterator implements Iterator<List<ApiMessageAndVersion>> {
        private final Iterator<Entry<Integer, BrokerRegistration>> iterator;
        ClusterControlIterator(long epoch) {
            this.iterator = brokerRegistrations.entrySet(epoch).iterator();
        }
        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }
        @Override
        public List<ApiMessageAndVersion> next() {
            if (!hasNext()) throw new NoSuchElementException();
            Entry<Integer, BrokerRegistration> entry = iterator.next();
            int brokerId = entry.getKey();
            BrokerRegistration registration = entry.getValue();
            BrokerEndpointCollection endpoints = new BrokerEndpointCollection();
            for (Entry<String, Endpoint> endpointEntry : registration.listeners().entrySet()) {
                endpoints.add(new BrokerEndpoint().setName(endpointEntry.getKey()).
                    setHost(endpointEntry.getValue().host()).
                    setPort(endpointEntry.getValue().port()).
                    setSecurityProtocol(endpointEntry.getValue().securityProtocol().id));
            }
            BrokerFeatureCollection features = new BrokerFeatureCollection();
            for (Entry<String, VersionRange> featureEntry : registration.supportedFeatures().entrySet()) {
                features.add(new BrokerFeature().setName(featureEntry.getKey()).
                    setMaxSupportedVersion(featureEntry.getValue().max()).
                    setMinSupportedVersion(featureEntry.getValue().min()));
            }
            List<ApiMessageAndVersion> batch = new ArrayList<>();
            batch.add(new ApiMessageAndVersion(new RegisterBrokerRecord().
                setBrokerId(brokerId).
                setIncarnationId(registration.incarnationId()).
                setBrokerEpoch(registration.epoch()).
                setEndPoints(endpoints).
                setFeatures(features).
                setRack(registration.rack().orElse(null)).
                setFenced(registration.fenced()),
                REGISTER_BROKER_RECORD.highestSupportedVersion()));
            return batch;
        }
    }
    ClusterControlIterator iterator(long epoch) {
        return new ClusterControlIterator(epoch);
    }
}
| |
/**
* Copyright 2014 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.viper.flume2storm.connection.sender;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.google.common.base.Preconditions;
/**
* Statistics related to the {@link EventSender}. This class is thread-safe.
*/
public final class EventSenderStats implements EventSenderStatsMBean {
  protected final String eventSenderId;
  protected final AtomicInteger nbClients;
  protected final AtomicLong nbEventIn;
  protected final AtomicLong nbEventOut;
  protected final AtomicLong nbEventFailed;
  /**
   * Default constructor
   *
   * @param eventSenderId
   *          The {@link EventSender} identifier
   */
  public EventSenderStats(final String eventSenderId) {
    this.eventSenderId = eventSenderId;
    nbClients = new AtomicInteger();
    nbEventIn = new AtomicLong();
    nbEventOut = new AtomicLong();
    nbEventFailed = new AtomicLong();
  }
  /**
   * @see com.comcast.viper.flume2storm.connection.sender.EventSenderStatsMBean#getEventSenderId()
   */
  @Override
  public String getEventSenderId() {
    return eventSenderId;
  }
  /**
   * @see com.comcast.viper.flume2storm.connection.sender.EventSenderStatsMBean#reset()
   */
  @Override
  public final EventSenderStats reset() {
    nbClients.set(0);
    nbEventIn.set(0);
    nbEventOut.set(0);
    nbEventFailed.set(0);
    return this;
  }
  /**
   * @see com.comcast.viper.flume2storm.connection.sender.EventSenderStatsMBean#getNbClients()
   */
  @Override
  public final int getNbClients() {
    return nbClients.get();
  }
  /**
   * See {@link #getNbClients()}
   *
   * @return This object
   */
  public final EventSenderStats incrClients() {
    nbClients.incrementAndGet();
    return this;
  }
  /**
   * See {@link #getNbClients()}
   *
   * @return This object
   */
  public final EventSenderStats decrClients() {
    nbClients.decrementAndGet();
    return this;
  }
  /**
   * See {@link #getNbClients()}
   *
   * @return This object
   */
  public final EventSenderStatsMBean resetClients() {
    nbClients.set(0);
    return this;
  }
  /**
   * @see com.comcast.viper.flume2storm.connection.sender.EventSenderStatsMBean#getNbEventsIn()
   */
  @Override
  public final long getNbEventsIn() {
    return nbEventIn.get();
  }
  /**
   * See {@link #getNbEventsIn()}
   *
   * @return This object
   */
  public final EventSenderStats incrEventsIn() {
    return incrEventsIn(1);
  }
  /**
   * See {@link #getNbEventsIn()}
   *
   * @param i
   *          The number of events to increment
   *
   * @return This object
   */
  public final EventSenderStats incrEventsIn(final int i) {
    nbEventIn.addAndGet(i);
    return this;
  }
  /**
   * @see com.comcast.viper.flume2storm.connection.sender.EventSenderStatsMBean#getNbEventsOut()
   */
  @Override
  public final long getNbEventsOut() {
    return nbEventOut.get();
  }
  /**
   * See {@link #getNbEventsOut()}
   *
   * @return This object
   */
  public final EventSenderStats incrEventsOut() {
    return incrEventsOut(1);
  }
  /**
   * See {@link #getNbEventsOut()}
   *
   * @param i
   *          The number of events to increment
   *
   * @return This object
   */
  public final EventSenderStats incrEventsOut(final int i) {
    nbEventOut.addAndGet(i);
    return this;
  }
  /**
   * @see com.comcast.viper.flume2storm.connection.sender.EventSenderStatsMBean#getNbEventsFailed()
   */
  @Override
  public final long getNbEventsFailed() {
    return nbEventFailed.get();
  }
  /**
   * See {@link #getNbEventsFailed()}
   *
   * @return This object
   */
  public final EventSenderStatsMBean incrEventsFailed() {
    return incrEventsFailed(1);
  }
  /**
   * See {@link #getNbEventsFailed()}
   *
   * @param i
   *          The number of events to increment
   *
   * @return This object
   */
  public final EventSenderStatsMBean incrEventsFailed(final int i) {
    nbEventFailed.addAndGet(i);
    return this;
  }
  /**
   * @param other
   *          Another KryoNet Event Sender statistics object
   * @return True if the stats are the same
   */
  public boolean sameAs(EventSenderStats other) {
    Preconditions.checkNotNull(other);
    if (getNbClients() != other.getNbClients())
      return false;
    if (getNbEventsIn() != other.getNbEventsIn())
      return false;
    if (getNbEventsOut() != other.getNbEventsOut())
      return false;
    if (getNbEventsFailed() != other.getNbEventsFailed())
      return false;
    return true;
  }
  /**
   * BUG FIX: hash the counter values, not the Atomic wrapper objects.
   * AtomicInteger/AtomicLong do not override hashCode(), so appending the
   * wrappers produced an identity-based hash that differed between
   * equal-valued instances, breaking the equals/hashCode contract.
   *
   * @see java.lang.Object#hashCode()
   */
  @Override
  public int hashCode() {
    return new HashCodeBuilder().append(getNbClients()).append(getNbEventsIn()).append(getNbEventsOut())
        .append(getNbEventsFailed()).toHashCode();
  }
  /**
   * BUG FIX: compare the counter values, not the Atomic wrapper objects.
   * AtomicInteger/AtomicLong do not override equals(), so the previous
   * reference comparison made equals() return false for any two distinct
   * instances, even when all counters matched (contradicting sameAs()).
   *
   * @see java.lang.Object#equals(java.lang.Object)
   */
  @Override
  public boolean equals(Object obj) {
    if (this == obj)
      return true;
    if (obj == null)
      return false;
    if (getClass() != obj.getClass())
      return false;
    EventSenderStats other = (EventSenderStats) obj;
    return new EqualsBuilder().append(this.getNbClients(), other.getNbClients())
        .append(this.getNbEventsIn(), other.getNbEventsIn())
        .append(this.getNbEventsOut(), other.getNbEventsOut())
        .append(this.getNbEventsFailed(), other.getNbEventsFailed()).isEquals();
  }
  /**
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString() {
    return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).append("NbClients", getNbClients())
        .append("NbEventsIn", getNbEventsIn()).append("NbEventsOut", getNbEventsOut())
        .append("NbEventsFailed", getNbEventsFailed()).toString();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.vfs2.provider.sftp;
import java.io.File;
import java.io.IOException;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemOptions;
import org.apache.commons.vfs2.util.Os;
import com.jcraft.jsch.ConfigRepository;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Logger;
import com.jcraft.jsch.OpenSSHConfig;
import com.jcraft.jsch.Proxy;
import com.jcraft.jsch.ProxyHTTP;
import com.jcraft.jsch.ProxySOCKS5;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.UserInfo;
/**
* Create a JSch Session instance.
*/
public final class SftpClientFactory {
    private static final String SSH_DIR_NAME = ".ssh";
    private static final String OPENSSH_CONFIG_NAME = "config";
    private static final Log LOG = LogFactory.getLog(SftpClientFactory.class);
    static {
        JSch.setLogger(new JSchLogger());
    }
    /** Utility class: no instances. */
    private SftpClientFactory() {
    }
    /**
     * Creates a new connection to the server.
     *
     * @param hostname The name of the host to connect to.
     * @param port The port to use.
     * @param username The user's id.
     * @param password The user's password.
     * @param fileSystemOptions The FileSystem options.
     * @return A Session, never null.
     * @throws FileSystemException if an error occurs.
     */
    public static Session createConnection(final String hostname, final int port, final char[] username,
            final char[] password, final FileSystemOptions fileSystemOptions) throws FileSystemException {
        final JSch jsch = new JSch();
        File sshDir = null;
        // new style - user passed
        final SftpFileSystemConfigBuilder builder = SftpFileSystemConfigBuilder.getInstance();
        final File knownHostsFile = builder.getKnownHosts(fileSystemOptions);
        final IdentityProvider[] identities = builder.getIdentityProvider(fileSystemOptions);
        final IdentityRepositoryFactory repositoryFactory = builder.getIdentityRepositoryFactory(fileSystemOptions);
        final ConfigRepository configRepository = builder.getConfigRepository(fileSystemOptions);
        final boolean loadOpenSSHConfig = builder.isLoadOpenSSHConfig(fileSystemOptions);
        sshDir = findSshDir();
        setKnownHosts(jsch, sshDir, knownHostsFile);
        if (repositoryFactory != null) {
            jsch.setIdentityRepository(repositoryFactory.create(jsch));
        }
        addIdentities(jsch, sshDir, identities);
        setConfigRepository(jsch, sshDir, configRepository, loadOpenSSHConfig);
        Session session;
        try {
            session = jsch.getSession(new String(username), hostname, port);
            if (password != null) {
                session.setPassword(new String(password));
            }
            final Integer sessionTimeout = builder.getSessionTimeoutMillis(fileSystemOptions);
            if (sessionTimeout != null) {
                session.setTimeout(sessionTimeout.intValue());
            }
            final UserInfo userInfo = builder.getUserInfo(fileSystemOptions);
            if (userInfo != null) {
                session.setUserInfo(userInfo);
            }
            final Properties config = new Properties();
            // set StrictHostKeyChecking property
            final String strictHostKeyChecking = builder.getStrictHostKeyChecking(fileSystemOptions);
            if (strictHostKeyChecking != null) {
                config.setProperty("StrictHostKeyChecking", strictHostKeyChecking);
            }
            // set PreferredAuthentications property
            final String preferredAuthentications = builder.getPreferredAuthentications(fileSystemOptions);
            if (preferredAuthentications != null) {
                config.setProperty("PreferredAuthentications", preferredAuthentications);
            }
            // set compression property
            final String compression = builder.getCompression(fileSystemOptions);
            if (compression != null) {
                config.setProperty("compression.s2c", compression);
                config.setProperty("compression.c2s", compression);
            }
            final String keyExchangeAlgorithm = builder.getKeyExchangeAlgorithm(fileSystemOptions);
            if (keyExchangeAlgorithm != null) {
                config.setProperty("kex", keyExchangeAlgorithm);
            }
            final String proxyHost = builder.getProxyHost(fileSystemOptions);
            if (proxyHost != null) {
                final int proxyPort = builder.getProxyPort(fileSystemOptions);
                final SftpFileSystemConfigBuilder.ProxyType proxyType = builder.getProxyType(fileSystemOptions);
                final String proxyUser = builder.getProxyUser(fileSystemOptions);
                final String proxyPassword = builder.getProxyPassword(fileSystemOptions);
                Proxy proxy = null;
                if (SftpFileSystemConfigBuilder.PROXY_HTTP.equals(proxyType)) {
                    proxy = createProxyHTTP(proxyHost, proxyPort);
                    ((ProxyHTTP)proxy).setUserPasswd(proxyUser, proxyPassword);
                } else if (SftpFileSystemConfigBuilder.PROXY_SOCKS5.equals(proxyType)) {
                    proxy = createProxySOCKS5(proxyHost, proxyPort);
                    ((ProxySOCKS5)proxy).setUserPasswd(proxyUser, proxyPassword);
                } else if (SftpFileSystemConfigBuilder.PROXY_STREAM.equals(proxyType)) {
                    proxy = createStreamProxy(proxyHost, proxyPort, fileSystemOptions, builder);
                }
                if (proxy != null) {
                    session.setProxy(proxy);
                }
            }
            // set properties for the session
            if (config.size() > 0) {
                session.setConfig(config);
            }
            session.setDaemonThread(true);
            session.connect();
        } catch (final Exception exc) {
            throw new FileSystemException("vfs.provider.sftp/connect.error", exc, hostname);
        }
        return session;
    }
    /**
     * Registers identities with JSch: either the explicitly configured
     * providers, or the default {@code ~/.ssh/id_rsa} key if none were given.
     */
    private static void addIdentities(final JSch jsch, final File sshDir, final IdentityProvider[] identities)
            throws FileSystemException {
        if (identities != null) {
            for (final IdentityProvider info : identities) {
                addIdentity(jsch, info);
            }
        } else {
            // Load the private key (rsa-key only)
            final File privateKeyFile = new File(sshDir, "id_rsa");
            if (privateKeyFile.isFile() && privateKeyFile.canRead()) {
                addIdentity(jsch, new IdentityInfo(privateKeyFile));
            }
        }
    }
    /**
     * Installs the configured {@link ConfigRepository}, or, if requested,
     * parses the OpenSSH config file from the ssh directory.
     */
    private static void setConfigRepository(final JSch jsch, final File sshDir, final ConfigRepository configRepository, final boolean loadOpenSSHConfig) throws FileSystemException {
        if (configRepository != null) {
            jsch.setConfigRepository(configRepository);
        } else if (loadOpenSSHConfig) {
            try {
                // loading openssh config (~/.ssh/config)
                final ConfigRepository openSSHConfig = OpenSSHConfig.parseFile(new File(sshDir, OPENSSH_CONFIG_NAME).getAbsolutePath());
                jsch.setConfigRepository(openSSHConfig);
            } catch (final IOException e) {
                throw new FileSystemException("vfs.provider.sftp/load-openssh-config.error", e);
            }
        }
    }
    /** Adds a single identity, translating JSch failures to FileSystemException. */
    private static void addIdentity(final JSch jsch, final IdentityProvider identity) throws FileSystemException {
        try {
            identity.addIdentity(jsch);
        } catch (final JSchException e) {
            throw new FileSystemException("vfs.provider.sftp/load-private-key.error", identity, e);
        }
    }
    /**
     * Points JSch at the known-hosts file: the explicitly configured one, or
     * {@code known_hosts} in the ssh directory when readable.
     */
    private static void setKnownHosts(final JSch jsch, final File sshDir, File knownHostsFile)
            throws FileSystemException {
        try {
            if (knownHostsFile != null) {
                jsch.setKnownHosts(knownHostsFile.getAbsolutePath());
            } else {
                // Load the known hosts file
                knownHostsFile = new File(sshDir, "known_hosts");
                if (knownHostsFile.isFile() && knownHostsFile.canRead()) {
                    jsch.setKnownHosts(knownHostsFile.getAbsolutePath());
                }
            }
        } catch (final JSchException e) {
            throw new FileSystemException("vfs.provider.sftp/known-hosts.error", knownHostsFile.getAbsolutePath(), e);
        }
    }
    private static Proxy createStreamProxy(final String proxyHost, final int proxyPort,
            final FileSystemOptions fileSystemOptions, final SftpFileSystemConfigBuilder builder) {
        Proxy proxy;
        // Use a stream proxy, i.e. it will use a remote host as a proxy
        // and run a command (e.g. netcat) that forwards input/output
        // to the target host.
        // Here we get the settings for connecting to the proxy:
        // user, password, options and a command
        final String proxyUser = builder.getProxyUser(fileSystemOptions);
        final String proxyPassword = builder.getProxyPassword(fileSystemOptions);
        final FileSystemOptions proxyOptions = builder.getProxyOptions(fileSystemOptions);
        final String proxyCommand = builder.getProxyCommand(fileSystemOptions);
        // Create the stream proxy
        proxy = new SftpStreamProxy(proxyCommand, proxyUser, proxyHost, proxyPort, proxyPassword, proxyOptions);
        return proxy;
    }
    private static ProxySOCKS5 createProxySOCKS5(final String proxyHost, final int proxyPort) {
        return proxyPort == 0 ? new ProxySOCKS5(proxyHost) : new ProxySOCKS5(proxyHost, proxyPort);
    }
    private static ProxyHTTP createProxyHTTP(final String proxyHost, final int proxyPort) {
        return proxyPort == 0 ? new ProxyHTTP(proxyHost) : new ProxyHTTP(proxyHost, proxyPort);
    }
    /**
     * Finds the {@code .ssh} directory.
     * <p>
     * The lookup order is:
     * </p>
     * <ol>
     * <li>The system property {@code vfs.sftp.sshdir} (the override mechanism)</li>
     * <li>{@code user.home}/.ssh</li>
     * <li>On Windows only: {@code C:\cygwin\home[user.name]\.ssh}</li>
     * <li>The current directory, as a last resort.</li>
     * </ol>
     *
     * <h2>Windows Notes</h2>
     * <p>
     * The default installation directory for Cygwin is {@code C:\cygwin}. On my set up (Gary here), I have Cygwin in
     * {@code C:\bin\cygwin}, not the default. Also, my .ssh directory was created in the {@code user.home} directory.
     * </p>
     *
     * @return The {@code .ssh} directory
     */
    private static File findSshDir() {
        String sshDirPath;
        sshDirPath = System.getProperty("vfs.sftp.sshdir");
        if (sshDirPath != null) {
            final File sshDir = new File(sshDirPath);
            if (sshDir.exists()) {
                return sshDir;
            }
        }
        File sshDir = new File(System.getProperty("user.home"), SSH_DIR_NAME);
        if (sshDir.exists()) {
            return sshDir;
        }
        if (Os.isFamily(Os.OS_FAMILY_WINDOWS)) {
            // TODO - this may not be true
            final String userName = System.getProperty("user.name");
            sshDir = new File("C:\\cygwin\\home\\" + userName + "\\" + SSH_DIR_NAME);
            if (sshDir.exists()) {
                return sshDir;
            }
        }
        return new File("");
    }
    /** Interface JSchLogger with JCL. */
    private static class JSchLogger implements Logger {
        @Override
        public boolean isEnabled(final int level) {
            switch (level) {
            case FATAL:
                return LOG.isFatalEnabled();
            case ERROR:
                return LOG.isErrorEnabled();
            case WARN:
                // BUG FIX: was LOG.isDebugEnabled(), which suppressed
                // warnings (emitted below via LOG.warn) whenever debug
                // logging was disabled.
                return LOG.isWarnEnabled();
            case DEBUG:
                return LOG.isDebugEnabled();
            case INFO:
                return LOG.isInfoEnabled();
            default:
                return LOG.isDebugEnabled();
            }
        }
        @Override
        public void log(final int level, final String msg) {
            switch (level) {
            case FATAL:
                LOG.fatal(msg);
                break;
            case ERROR:
                LOG.error(msg);
                break;
            case WARN:
                LOG.warn(msg);
                break;
            case DEBUG:
                LOG.debug(msg);
                break;
            case INFO:
                LOG.info(msg);
                break;
            default:
                LOG.debug(msg);
            }
        }
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.wm.impl;
import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.IdeTooltipManager;
import com.intellij.ide.dnd.DnDAware;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ui.Divider;
import com.intellij.openapi.ui.Painter;
import com.intellij.openapi.ui.impl.GlassPaneDialogWrapperPeer;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Weighted;
import com.intellij.openapi.wm.IdeGlassPane;
import com.intellij.openapi.wm.IdeGlassPaneUtil;
import com.intellij.util.containers.FactoryMap;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.MouseEventAdapter;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.MenuDragMouseEvent;
import javax.swing.text.html.HTMLEditorKit;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.util.List;
/**
 * The IDE frame's glass pane: routes mouse events through registered "preprocessor"
 * listeners before the target components see them, applies cursors requested by those
 * listeners, and hosts {@link Painter}s that draw over the whole root pane.
 * Registers itself with {@link IdeEventQueue} as a dispatcher only while it has
 * listeners or is visible (see {@link #applyActivationState()}).
 */
public class IdeGlassPaneImpl extends JPanel implements IdeGlassPaneEx, IdeEventQueue.EventDispatcher {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.wm.impl.IdeGlassPaneImpl");

  // Listeners in registration order; also the tie-breaker for the sorted set below.
  private final List<EventListener> myMouseListeners = new ArrayList<EventListener>();

  // Listeners ordered by Weighted.getWeight() ascending, then by registration order.
  private final Set<EventListener> mySortedMouseListeners = new TreeSet<EventListener>(new Comparator<EventListener>() {
    @Override
    public int compare(EventListener o1, EventListener o2) {
      double weight1 = 0;
      double weight2 = 0;
      if (o1 instanceof Weighted) {
        weight1 = ((Weighted)o1).getWeight();
      }
      if (o2 instanceof Weighted) {
        weight2 = ((Weighted)o2).getWeight();
      }
      return weight1 > weight2 ? 1 : weight1 < weight2 ? -1 : myMouseListeners.indexOf(o1) - myMouseListeners.indexOf(o2);
    }
  });

  private final JRootPane myRootPane;

  // Lazily creates one PaintersHelper per named painter group ("glass", "ide", "editor", ...).
  private final Map<String, PaintersHelper> myNamedPainters = new FactoryMap<String, PaintersHelper>() {
    @Nullable
    @Override
    protected PaintersHelper create(String key) {
      return new PaintersHelper(IdeGlassPaneImpl.this);
    }
  };

  // True while at least one mouse listener is registered.
  private boolean myPreprocessorActive;

  // Cursor requests keyed by requestor; the first entry wins (see preprocess()).
  private final Map<Object, Cursor> myListener2Cursor = new LinkedHashMap<Object, Cursor>();

  // Component whose cursor we last overrode, and its original cursor so it can be restored.
  private Component myLastCursorComponent;
  private Cursor myLastOriginalCursor;

  // Press event remembered so the matching release can be replayed to the same DnDAware target.
  private MouseEvent myPrevPressEvent;

  // Zero-size focusable panel used as a focus proxy for this glass pane.
  private JPanel myFocusProxy = new JPanel(){
    @Override
    public String toString() {
      return "FocusProxy";
    }
  };

  public IdeGlassPaneImpl(JRootPane rootPane) {
    myRootPane = rootPane;
    setOpaque(false);
    setVisible(false);
    // Null layout: children are positioned explicitly by whoever adds them.
    setLayout(null);
    if (myRootPane instanceof IdeRootPane) {
      IdeBackgroundUtil.initFramePainters(getNamedPainters("ide"));
      IdeBackgroundUtil.initEditorPainters(getNamedPainters("editor"));
    }
    myFocusProxy.setOpaque(false);
    myFocusProxy.setPreferredSize(JBUI.emptySize());
    myFocusProxy.setFocusable(true);
    UIUtil.setFocusProxy(myFocusProxy, true);
  }

  /** Re-parents the focus proxy next to this glass pane whenever it is added to a hierarchy. */
  @Override
  public void addNotify() {
    super.addNotify();
    if (myFocusProxy.getParent() != null) {
      myFocusProxy.getParent().remove(myFocusProxy);
    }
    if (myFocusProxy.getParent() != getParent()) {
      getParent().add(myFocusProxy);
      myFocusProxy.setBounds(0, 0, 0, 0);
    }
  }

  /**
   * {@link IdeEventQueue.EventDispatcher} entry point. Filters events down to mouse
   * events belonging to this glass pane's window, runs them through the registered
   * preprocessors, and adds special press/release handling for {@link DnDAware}
   * components when the press lands over their selection.
   *
   * @return true if the event was handled here and should not be dispatched further
   */
  public boolean dispatch(final AWTEvent e) {
    JRootPane eventRootPane = myRootPane;

    if (e instanceof MouseEvent) {
      MouseEvent me = (MouseEvent)e;
      Window eventWindow = UIUtil.getWindow(me.getComponent());

      if (isContextMenu(eventWindow)) return false;

      final Window thisGlassWindow = SwingUtilities.getWindowAncestor(myRootPane);

      if (eventWindow instanceof JWindow) {
        eventRootPane = ((JWindow)eventWindow).getRootPane();
        if (eventRootPane != null) {
          // A JWindow without an IdeGlassPane (e.g. a popup) delegates to its owner window.
          if (!(eventRootPane.getGlassPane() instanceof IdeGlassPane)) {
            final Container parentWindow = eventWindow.getParent();
            if (parentWindow instanceof Window) {
              eventWindow = (Window)parentWindow;
            }
          }
        }
      }

      // Only handle events targeted at the window this glass pane belongs to.
      if (eventWindow != thisGlassWindow) return false;
    }

    if (e.getID() == MouseEvent.MOUSE_DRAGGED) {
      if (ApplicationManager.getApplication() != null) {
        IdeTooltipManager.getInstance().hideCurrent((MouseEvent)e, null, null);
      }
    }

    boolean dispatched;
    if (e.getID() == MouseEvent.MOUSE_PRESSED || e.getID() == MouseEvent.MOUSE_RELEASED || e.getID() == MouseEvent.MOUSE_CLICKED) {
      dispatched = preprocess((MouseEvent)e, false, eventRootPane);
    }
    else if (e.getID() == MouseEvent.MOUSE_MOVED || e.getID() == MouseEvent.MOUSE_DRAGGED) {
      // Motion events go through MouseMotionListener preprocessors.
      dispatched = preprocess((MouseEvent)e, true, eventRootPane);
    }
    else if (e.getID() == MouseEvent.MOUSE_EXITED || e.getID() == MouseEvent.MOUSE_ENTERED) {
      dispatched = preprocess((MouseEvent)e, false, eventRootPane);
    }
    else {
      // Non-mouse events (and mouse wheel etc.) are not handled here.
      return false;
    }

    MouseEvent me = (MouseEvent)e;
    final Component meComponent = me.getComponent();
    if (!dispatched && meComponent != null) {
      final Window eventWindow = UIUtil.getWindow(meComponent);
      if (eventWindow != SwingUtilities.getWindowAncestor(myRootPane)) {
        return false;
      }

      // NOTE(review): mixes the legacy BUTTON1_MASK with the extended BUTTON1_DOWN_MASK;
      // the OR-test accepts either representation of "plain button 1, no modifiers" — confirm.
      int button1 = MouseEvent.BUTTON1_MASK | MouseEvent.BUTTON1_DOWN_MASK;
      final boolean pureMouse1Event = (me.getModifiersEx() | button1) == button1;
      if (pureMouse1Event && me.getClickCount() <= 1 && !me.isPopupTrigger()) {
        final Point point = SwingUtilities.convertPoint(meComponent, me.getPoint(), myRootPane.getContentPane());
        JMenuBar menuBar = myRootPane.getJMenuBar();
        // Content-pane coordinates exclude the menu bar; compensate before the deep hit test.
        point.y += menuBar != null ? menuBar.getHeight() : 0;

        final Component target =
          SwingUtilities.getDeepestComponentAt(myRootPane.getContentPane().getParent(), point.x, point.y);
        if (target instanceof DnDAware) {
          final Point targetPoint = SwingUtilities.convertPoint(myRootPane.getContentPane().getParent(), point.x, point.y, target);
          final boolean overSelection = ((DnDAware)target).isOverSelection(targetPoint);
          if (overSelection) {
            final MouseListener[] listeners = target.getListeners(MouseListener.class);
            final MouseEvent mouseEvent = convertEvent(me, target);
            switch (me.getID()) {
              case MouseEvent.MOUSE_PRESSED:
                boolean consumed = false;
                if (target.isFocusable()) target.requestFocus();

                // Fire the press to the target's own listeners, skipping the tree UI's
                // internal handlers so a press-over-selection does not clear the selection.
                for (final MouseListener listener : listeners) {
                  final String className = listener.getClass().getName();
                  if (className.indexOf("BasicTreeUI$") >= 0 || className.indexOf("MacTreeUI$") >= 0) continue;
                  fireMouseEvent(listener, mouseEvent);
                  if (mouseEvent.isConsumed()) {
                    consumed = true;
                    break;
                  }
                }

                if (!mouseEvent.isConsumed()) {
                  final AWTEventListener[] eventListeners = Toolkit.getDefaultToolkit().getAWTEventListeners(MouseEvent.MOUSE_EVENT_MASK);
                  if (eventListeners != null && eventListeners.length > 0) {
                    for (final AWTEventListener eventListener : eventListeners) {
                      eventListener.eventDispatched(me);
                      if (me.isConsumed()) break;
                    }

                    // NOTE(review): this 'break' exits the switch before dispatched is set
                    // and before myPrevPressEvent is recorded — presumably intentional
                    // (a consumed event needs no further handling), but worth confirming.
                    if (me.isConsumed()) {
                      consumed = true;
                      break;
                    }
                  }
                }

                if (!consumed) {
                  // Remember the press so the matching release can be replayed (see below).
                  myPrevPressEvent = mouseEvent;
                }
                else {
                  me.consume();
                }

                dispatched = true;
                break;
              case MouseEvent.MOUSE_RELEASED:
                // Only replay the release if it pairs with the press we intercepted.
                if (myPrevPressEvent != null && myPrevPressEvent.getComponent() == target) {
                  for (final MouseListener listener : listeners) {
                    final String className = listener.getClass().getName();
                    // Tree UI listeners get the deferred press first so selection handling
                    // happens on release rather than on press.
                    if (className.indexOf("BasicTreeUI$") >= 0 || className.indexOf("MacTreeUI$") >= 0) {
                      fireMouseEvent(listener, myPrevPressEvent);
                      fireMouseEvent(listener, mouseEvent);
                      if (mouseEvent.isConsumed()) {
                        break;
                      }
                    }

                    fireMouseEvent(listener, mouseEvent);
                    if (mouseEvent.isConsumed()) {
                      break;
                    }
                  }

                  if (mouseEvent.isConsumed()) {
                    me.consume();
                  }

                  myPrevPressEvent = null;
                  dispatched = true;
                }
                break;
              default:
                myPrevPressEvent = null;
                break;
            }
          }
        }
      }
    }


    if (isVisible() && getComponentCount() == 0) {
      // Painter-only mode: mirror the cursor of whatever component is underneath
      // so the glass pane stays visually transparent to the user.
      boolean cursorSet = false;
      if (meComponent != null) {
        final Point point = SwingUtilities.convertPoint(meComponent, me.getPoint(), myRootPane.getContentPane());

        if (myRootPane.getMenuBar() != null && myRootPane.getMenuBar().isVisible()) {
          point.y += myRootPane.getMenuBar().getHeight();
        }
        final Component target =
          SwingUtilities.getDeepestComponentAt(myRootPane.getContentPane().getParent(), point.x, point.y);
        if (target != null) {
          setCursor(target.getCursor());
          cursorSet = true;
        }
      }

      if (!cursorSet) {
        setCursor(Cursor.getDefaultCursor());
      }
    }

    return dispatched;
  }

  /** Returns true if the window currently shows a popup menu (such events are left alone). */
  private static boolean isContextMenu(Window window) {
    if (window != null) {
      for (Component component : window.getComponents()) {
        if (component instanceof JComponent
            && UIUtil.findComponentOfType((JComponent)component, JPopupMenu.class) != null) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Offers the event to every registered preprocessor in weight order; stops at the
   * first one that consumes it. The finally-block then applies (or clears) any cursor
   * requested via {@link #setCursor(Cursor, Object)} during preprocessing.
   *
   * @param motion true to notify MouseMotionListeners, false for MouseListeners
   * @return true if some preprocessor consumed the event
   */
  private boolean preprocess(final MouseEvent e, final boolean motion, JRootPane eventRootPane) {
    try {
      final MouseEvent event = convertEvent(e, eventRootPane);

      if (!IdeGlassPaneUtil.canBePreprocessed(e)) {
        return false;
      }

      for (EventListener each : mySortedMouseListeners) {
        if (motion && each instanceof MouseMotionListener) {
          fireMouseMotion((MouseMotionListener)each, event);
        }
        else if (!motion && each instanceof MouseListener) {
          fireMouseEvent((MouseListener)each, event);
        }

        if (event.isConsumed()) {
          e.consume();
          return true;
        }
      }

      return false;
    }
    finally {
      // Cursor bookkeeping only applies to our own root pane.
      if (eventRootPane == myRootPane) {
        Cursor cursor;
        if (!myListener2Cursor.isEmpty()) {
          // First requestor wins (LinkedHashMap preserves request order).
          cursor = myListener2Cursor.values().iterator().next();

          final Point point = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(), myRootPane.getContentPane());
          Component target =
            SwingUtilities.getDeepestComponentAt(myRootPane.getContentPane().getParent(), point.x, point.y);

          if (canProcessCursorFor(target)) {
            target = getCompWithCursor(target);

            restoreLastComponent(target);

            if (target != null) {
              if (myLastCursorComponent != target) {
                myLastCursorComponent = target;
                myLastOriginalCursor = target.getCursor();
              }

              if (cursor != null && !cursor.equals(target.getCursor())) {
                target.setCursor(cursor);
              }
            }

            getRootPane().setCursor(cursor);
          }
        }
        else {
          // No pending cursor requests: restore the default and undo any override.
          cursor = Cursor.getDefaultCursor();
          JRootPane rootPane = getRootPane();
          if (rootPane != null) {
            rootPane.setCursor(cursor);
          } else {
            LOG.warn("Root pane is null. Event: " + e);
          }
          restoreLastComponent(null);
          myLastOriginalCursor = null;
          myLastCursorComponent = null;
        }
        myListener2Cursor.clear();
      }
    }
  }

  /** Components with their own cursor semantics (menus, dividers, HTML panes) are left alone. */
  private boolean canProcessCursorFor(Component target) {
    if (target instanceof JMenu ||
        target instanceof JMenuItem ||
        target instanceof Divider ||
        target instanceof JSeparator ||
        (target instanceof JEditorPane && ((JEditorPane)target).getEditorKit() instanceof HTMLEditorKit)) {
      return false;
    }
    return true;
  }

  /** Walks up from c to the nearest ancestor that has an explicitly-set cursor. */
  private Component getCompWithCursor(Component c) {
    Component eachParentWithCursor = c;
    while (eachParentWithCursor != null) {
      if (eachParentWithCursor.isCursorSet()) return eachParentWithCursor;
      eachParentWithCursor = eachParentWithCursor.getParent();
    }

    return null;
  }

  /** Restores the original cursor of the previously-overridden component, if it changed. */
  private void restoreLastComponent(Component newC) {
    if (myLastCursorComponent != null && myLastCursorComponent != newC) {
      myLastCursorComponent.setCursor(myLastOriginalCursor);
    }
  }

  /**
   * Records (or, for null, withdraws) a cursor request on behalf of the given requestor.
   * Requests are applied at the end of {@link #preprocess}.
   */
  public void setCursor(Cursor cursor, @NotNull Object requestor) {
    if (cursor == null) {
      myListener2Cursor.remove(requestor);
    }
    else {
      myListener2Cursor.put(requestor, cursor);
    }
  }

  /** Retargets e into target's coordinate space, preserving id/time/modifiers. */
  private static MouseEvent convertEvent(final MouseEvent e, final Component target) {
    final Point point = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(), target);
    return MouseEventAdapter.convert(e, target, e.getID(), e.getWhen(), e.getModifiersEx(), point.x, point.y);
  }

  /** Invokes the MouseListener method matching the event id. */
  private static void fireMouseEvent(final MouseListener listener, final MouseEvent event) {
    switch (event.getID()) {
      case MouseEvent.MOUSE_PRESSED:
        listener.mousePressed(event);
        break;
      case MouseEvent.MOUSE_RELEASED:
        listener.mouseReleased(event);
        break;
      case MouseEvent.MOUSE_ENTERED:
        listener.mouseEntered(event);
        break;
      case MouseEvent.MOUSE_EXITED:
        listener.mouseExited(event);
        break;
      case MouseEvent.MOUSE_CLICKED:
        listener.mouseClicked(event);
        break;
    }
  }

  // NOTE(review): there is no 'break' after MOUSE_DRAGGED, so a drag event falls through
  // and also fires mouseMoved(). This looks deliberate (a drag implies motion for
  // preprocessors) but should be confirmed before anyone "fixes" it.
  private static void fireMouseMotion(MouseMotionListener listener, final MouseEvent event) {
    switch (event.getID()) {
      case MouseEvent.MOUSE_DRAGGED:
        listener.mouseDragged(event);
      case MouseEvent.MOUSE_MOVED:
        listener.mouseMoved(event);
    }
  }

  /** Registers a mouse-button preprocessor; auto-removed when parent is disposed. */
  public void addMousePreprocessor(final MouseListener listener, Disposable parent) {
    _addListener(listener, parent);
  }

  /** Registers a mouse-motion preprocessor; auto-removed when parent is disposed. */
  public void addMouseMotionPreprocessor(final MouseMotionListener listener, final Disposable parent) {
    _addListener(listener, parent);
  }

  private void _addListener(final EventListener listener, final Disposable parent) {
    if (!myMouseListeners.contains(listener)) {
      myMouseListeners.add(listener);
      updateSortedList();
    }
    activateIfNeeded();
    // Tie the listener's lifetime to the parent Disposable.
    Disposer.register(parent, new Disposable() {
      public void dispose() {
        UIUtil.invokeLaterIfNeeded(new Runnable() {
          public void run() {
            removeListener(listener);
          }
        });
      }
    });
  }

  public void removeMousePreprocessor(final MouseListener listener) {
    removeListener(listener);
  }

  public void removeMouseMotionPreprocessor(final MouseMotionListener listener) {
    removeListener(listener);
  }

  private void removeListener(final EventListener listener) {
    if (myMouseListeners.remove(listener)) {
      updateSortedList();
    }
    deactivateIfNeeded();
  }

  // Rebuilds the weight-sorted view after any change to the registration list.
  private void updateSortedList() {
    mySortedMouseListeners.clear();
    mySortedMouseListeners.addAll(myMouseListeners);
  }

  private void deactivateIfNeeded() {
    if (myPreprocessorActive && myMouseListeners.isEmpty()) {
      myPreprocessorActive = false;
    }

    applyActivationState();
  }

  private void activateIfNeeded() {
    if (!myPreprocessorActive && !myMouseListeners.isEmpty()) {
      myPreprocessorActive = true;
    }

    applyActivationState();
  }

  /**
   * Syncs derived state: visibility (driven by painters/children) and membership
   * in the IdeEventQueue dispatcher list (driven by preprocessors or visibility).
   */
  private void applyActivationState() {
    boolean wasVisible = isVisible();
    boolean hasWork = getPainters().hasPainters() || getComponentCount() > 0;

    if (wasVisible != hasWork) {
      setVisible(hasWork);
    }

    IdeEventQueue queue = IdeEventQueue.getInstance();
    if (!queue.containsDispatcher(this) && (myPreprocessorActive || isVisible())) {
      queue.addDispatcher(this, null);
    }
    else if (queue.containsDispatcher(this) && !myPreprocessorActive && !isVisible()) {
      queue.removeDispatcher(this);
    }

    if (wasVisible != isVisible()) {
      revalidate();
      repaint();
    }
  }

  /** Returns (creating on first use) the painter group registered under the given name. */
  @NotNull
  PaintersHelper getNamedPainters(@NotNull String name) {
    return myNamedPainters.get(name);
  }

  /** The default painter group backing this glass pane's own painting. */
  @NotNull
  private PaintersHelper getPainters() {
    return getNamedPainters("glass");
  }

  /** Adds a painter bound to component; auto-removed when parent is disposed. */
  public void addPainter(final Component component, final Painter painter, final Disposable parent) {
    getPainters().addPainter(painter, component);
    activateIfNeeded();
    Disposer.register(parent, new Disposable() {
      public void dispose() {
        SwingUtilities.invokeLater(new Runnable() {
          public void run() {
            removePainter(painter);
          }
        });
      }
    });
  }

  public void removePainter(final Painter painter) {
    getPainters().removePainter(painter);
    deactivateIfNeeded();
  }


  @Override
  protected void addImpl(Component comp, Object constraints, int index) {
    super.addImpl(comp, constraints, index);

    // Defer: activation depends on component count, which is only final after the add.
    SwingUtilities.invokeLater(new Runnable() {
      @Override
      public void run() {
        activateIfNeeded();
      }
    });
  }

  @Override
  public void remove(final Component comp) {
    super.remove(comp);

    SwingUtilities.invokeLater(new Runnable() {
      @Override
      public void run() {
        deactivateIfNeeded();
      }
    });
  }

  /** True when a glass-pane-based dialog (TransparentLayeredPane) is currently showing. */
  public boolean isInModalContext() {
    final Component[] components = getComponents();
    for (Component component : components) {
      if (component instanceof GlassPaneDialogWrapperPeer.TransparentLayeredPane) {
        return true;
      }
    }

    return false;
  }

  protected void paintComponent(final Graphics g) {
    getPainters().paint(g);
  }

  @Override
  protected void paintChildren(Graphics g) {
    super.paintChildren(g);
  }

  /** Finds the deepest component under the event, checking the layered pane before the content pane. */
  public Component getTargetComponentFor(MouseEvent e) {
    Component candidate = findComponent(e, myRootPane.getLayeredPane());
    if (candidate != null) return candidate;
    candidate = findComponent(e, myRootPane.getContentPane());
    if (candidate != null) return candidate;
    return e.getComponent();
  }

  private static Component findComponent(final MouseEvent e, final Container container) {
    final Point lpPoint = SwingUtilities.convertPoint(e.getComponent(), e.getPoint(), container);
    return SwingUtilities.getDeepestComponentAt(container, lpPoint.x, lpPoint.y);
  }

  @Override
  public boolean isOptimizedDrawingEnabled() {
    // Painters may draw anywhere, so optimized (non-overlapping) drawing is only safe without them.
    return !getPainters().hasPainters() && super.isOptimizedDrawingEnabled();
  }

  @Override
  public JComponent getProxyComponent() {
    return myFocusProxy;
  }
}
| |
/*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.sso.saml.dto;
import org.wso2.carbon.identity.application.common.model.ClaimMapping;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
 * Data transfer object carrying the state of a single SAML SSO authentication
 * request through the SSO flow: requester identity (issuer, subject), protocol
 * parameters (ACS URL, NameID format, destination), signing/encryption flags,
 * and the resolved user attributes and claim mappings.
 */
public class SAMLSSOAuthnReqDTO implements Serializable {

    private String username;
    private String password;
    private String issuer;
    private String subject;
    private String assertionConsumerURL;
    private String id;
    private String claim;
    private String audience;
    private String recipient;
    private String nameIDFormat;
    private String logoutURL;
    private String loginPageURL;
    private String rpSessionId;
    private String requestMessageString;
    private String queryString;
    private String destination;
    private String[] requestedClaims;
    private String[] requestedAudiences;
    private String[] requestedRecipients;
    private boolean doSingleLogout;
    private boolean doSignResponse;
    private boolean doSignAssertions;
    private boolean useFullyQualifiedUsernameAsSubject;
    private boolean isStratosDeployment = false;
    private int attributeConsumingServiceIndex;
    private String nameIdClaimUri;
    private boolean isIdPInitSSO;
    private boolean doEnableEncryptedAssertion;
    private boolean doValidateSignatureInRequests;
    private Map<ClaimMapping, String> userAttributes = new HashMap<ClaimMapping, String>();
    private Map<String, String> claimMapping = null;
    private String tenantDomain;
    private String certAlias;

    public String getNameIdClaimUri() {
        return nameIdClaimUri;
    }

    public void setNameIdClaimUri(String nameIdClaimUri) {
        this.nameIdClaimUri = nameIdClaimUri;
    }

    public int getAttributeConsumingServiceIndex() {
        return attributeConsumingServiceIndex;
    }

    public void setAttributeConsumingServiceIndex(
            int attributeConsumingServiceIndex) {
        this.attributeConsumingServiceIndex = attributeConsumingServiceIndex;
    }

    public String getCertAlias() {
        return certAlias;
    }

    public void setCertAlias(String certAlias) {
        this.certAlias = certAlias;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    /**
     * Returns the issuer with any tenant-domain suffix removed
     * (e.g. {@code "travelocity@tenant.com"} yields {@code "travelocity"}).
     * Use {@link #getIssuerWithDomain()} for the raw value.
     *
     * FIX: the previous implementation called {@code issuer.split("@")} and read
     * element [0], which threw NullPointerException when issuer was unset and
     * ArrayIndexOutOfBoundsException when issuer was exactly "@" (split drops
     * trailing empty strings, producing an empty array). This version is null-safe
     * and avoids the per-call regex.
     */
    public String getIssuer() {
        if (issuer != null) {
            int atIndex = issuer.indexOf('@');
            if (atIndex >= 0) {
                return issuer.substring(0, atIndex);
            }
        }
        return issuer;
    }

    public void setIssuer(String issuer) {
        this.issuer = issuer;
    }

    /** Returns the issuer exactly as set, including any tenant-domain suffix. */
    public String getIssuerWithDomain() {
        return issuer;
    }

    public String getSubject() {
        return subject;
    }

    public void setSubject(String subject) {
        this.subject = subject;
    }

    public String getAssertionConsumerURL() {
        return assertionConsumerURL;
    }

    public void setAssertionConsumerURL(String assertionConsumerURL) {
        this.assertionConsumerURL = assertionConsumerURL;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getNameIDFormat() {
        return nameIDFormat;
    }

    public void setNameIDFormat(String nameIDFormat) {
        this.nameIDFormat = nameIDFormat;
    }

    public String getClaim() {
        return claim;
    }

    public void setClaim(String claim) {
        this.claim = claim;
    }

    public String getAudience() {
        return audience;
    }

    public void setAudience(String audience) {
        this.audience = audience;
    }

    public String getRecipient() {
        return recipient;
    }

    public void setRecipient(String recipient) {
        this.recipient = recipient;
    }

    public String getLogoutURL() {
        return logoutURL;
    }

    public void setLogoutURL(String logoutURL) {
        this.logoutURL = logoutURL;
    }

    public boolean getUseFullyQualifiedUsernameAsSubject() {
        return useFullyQualifiedUsernameAsSubject;
    }

    public void setUseFullyQualifiedUsernameAsSubject(
            boolean useFullyQualifiedUsernameAsSubject) {
        this.useFullyQualifiedUsernameAsSubject = useFullyQualifiedUsernameAsSubject;
    }

    public boolean isDoSingleLogout() {
        return doSingleLogout;
    }

    public void setDoSingleLogout(boolean doSingleLogout) {
        this.doSingleLogout = doSingleLogout;
    }

    public String getLoginPageURL() {
        return loginPageURL;
    }

    public void setLoginPageURL(String loginPageURL) {
        this.loginPageURL = loginPageURL;
    }

    public String getRpSessionId() {
        return rpSessionId;
    }

    public void setRpSessionId(String rpSessionId) {
        this.rpSessionId = rpSessionId;
    }

    public boolean getDoSignAssertions() {
        return doSignAssertions;
    }

    public void setDoSignAssertions(boolean doSignAssertions) {
        this.doSignAssertions = doSignAssertions;
    }

    /**
     * @return the raw SAML request message, as received
     */
    public String getRequestMessageString() {
        return requestMessageString;
    }

    /**
     * @param requestMessageString the raw SAML request message
     */
    public void setRequestMessageString(String requestMessageString) {
        this.requestMessageString = requestMessageString;
    }

    public String[] getRequestedClaims() {
        return requestedClaims;
    }

    public void setRequestedClaims(String[] requestedClaims) {
        this.requestedClaims = requestedClaims;
    }

    public String[] getRequestedAudiences() {
        return requestedAudiences;
    }

    public void setRequestedAudiences(String[] requestedAudiences) {
        this.requestedAudiences = requestedAudiences;
    }

    public String[] getRequestedRecipients() {
        return requestedRecipients;
    }

    public void setRequestedRecipients(String[] requestedRecipients) {
        this.requestedRecipients = requestedRecipients;
    }

    public boolean isStratosDeployment() {
        return isStratosDeployment;
    }

    public void setStratosDeployment(boolean isStratosDeployment) {
        this.isStratosDeployment = isStratosDeployment;
    }

    /**
     * @return the queryString
     */
    public String getQueryString() {
        return queryString;
    }

    /**
     * @param queryString the queryString to set
     */
    public void setQueryString(String queryString) {
        this.queryString = queryString;
    }

    /**
     * @return the doSignResponse
     */
    public boolean isDoSignResponse() {
        return doSignResponse;
    }

    /**
     * @param doSignResponse the doSignResponse to set
     */
    public void setDoSignResponse(boolean doSignResponse) {
        this.doSignResponse = doSignResponse;
    }

    /**
     * @return the 'destination' attribute of the SAML request
     */
    public String getDestination() {
        return destination;
    }

    /**
     * @param destination Set the SAML request's 'destination' attribute
     */
    public void setDestination(String destination) {
        this.destination = destination;
    }

    public boolean isIdPInitSSO() {
        return isIdPInitSSO;
    }

    public void setIdPInitSSO(boolean isIdPInitSSO) {
        this.isIdPInitSSO = isIdPInitSSO;
    }

    public boolean isDoEnableEncryptedAssertion() {
        return doEnableEncryptedAssertion;
    }

    public void setDoEnableEncryptedAssertion(boolean doEnableEncryptedAssertion) {
        this.doEnableEncryptedAssertion = doEnableEncryptedAssertion;
    }

    public boolean isDoValidateSignatureInRequests() {
        return doValidateSignatureInRequests;
    }

    public void setDoValidateSignatureInRequests(
            boolean doValidateSignatureInRequests) {
        this.doValidateSignatureInRequests = doValidateSignatureInRequests;
    }

    public Map<ClaimMapping, String> getUserAttributes() {
        return userAttributes;
    }

    public void setUserAttributes(Map<ClaimMapping, String> subjectAttributes) {
        this.userAttributes = subjectAttributes;
    }

    public Map<String, String> getClaimMapping() {
        return claimMapping;
    }

    public void setClaimMapping(Map<String, String> claimMapping) {
        this.claimMapping = claimMapping;
    }

    public String getTenantDomain() {
        return tenantDomain;
    }

    public void setTenantDomain(String tenantDomain) {
        this.tenantDomain = tenantDomain;
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * LoxiGen-generated OpenFlow 1.4 OXM for a masked tunnel IPv4 source address.
 * Immutable value class; use {@link Builder} / {@link BuilderWithParent} to construct.
 * NOTE: generated code ("Do not modify") — only comments added in this review.
 */
class OFOxmTunnelIpv4SrcMaskedVer14 implements OFOxmTunnelIpv4SrcMasked {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmTunnelIpv4SrcMaskedVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int LENGTH = 12;

    private final static IPv4Address DEFAULT_VALUE = IPv4Address.NONE;
    private final static IPv4Address DEFAULT_VALUE_MASK = IPv4Address.NONE;

    // OF message fields
    private final IPv4Address value;
    private final IPv4Address mask;
//
    // Immutable default instance
    final static OFOxmTunnelIpv4SrcMaskedVer14 DEFAULT = new OFOxmTunnelIpv4SrcMaskedVer14(
        DEFAULT_VALUE, DEFAULT_VALUE_MASK
    );

    // package private constructor - used by readers, builders, and factory
    OFOxmTunnelIpv4SrcMaskedVer14(IPv4Address value, IPv4Address mask) {
        if(value == null) {
            throw new NullPointerException("OFOxmTunnelIpv4SrcMaskedVer14: property value cannot be null");
        }
        if(mask == null) {
            throw new NullPointerException("OFOxmTunnelIpv4SrcMaskedVer14: property mask cannot be null");
        }
        this.value = value;
        this.mask = mask;
    }

    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        // Fixed OXM header: class/field/hasMask/length packed into one int.
        return 0x13f08L;
    }

    @Override
    public IPv4Address getValue() {
        return value;
    }

    @Override
    public IPv4Address getMask() {
        return mask;
    }

    @Override
    public MatchField<IPv4Address> getMatchField() {
        return MatchField.TUNNEL_IPV4_SRC;
    }

    @Override
    public boolean isMasked() {
        return true;
    }

    // Canonicalization: NOTE(review) — presumably NO_MASK means "all bits significant"
    // (so the unmasked OXM is equivalent) and FULL_MASK means "no bits significant"
    // (so the OXM is dropped); confirm against the IPv4Address constants.
    public OFOxm<IPv4Address> getCanonical() {
        if (IPv4Address.NO_MASK.equals(mask)) {
            return new OFOxmTunnelIpv4SrcVer14(value);
        } else if(IPv4Address.FULL_MASK.equals(mask)) {
            return null;
        } else {
            return this;
        }
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }



    public OFOxmTunnelIpv4SrcMasked.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** Builder seeded from an existing message; unset fields fall back to the parent's values. */
    static class BuilderWithParent implements OFOxmTunnelIpv4SrcMasked.Builder {
        final OFOxmTunnelIpv4SrcMaskedVer14 parentMessage;

        // OF message fields
        private boolean valueSet;
        private IPv4Address value;
        private boolean maskSet;
        private IPv4Address mask;

        BuilderWithParent(OFOxmTunnelIpv4SrcMaskedVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

    @Override
    public long getTypeLen() {
        return 0x13f08L;
    }

    @Override
    public IPv4Address getValue() {
        return value;
    }

    @Override
    public OFOxmTunnelIpv4SrcMasked.Builder setValue(IPv4Address value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public IPv4Address getMask() {
        return mask;
    }

    @Override
    public OFOxmTunnelIpv4SrcMasked.Builder setMask(IPv4Address mask) {
        this.mask = mask;
        this.maskSet = true;
        return this;
    }

    @Override
    public MatchField<IPv4Address> getMatchField() {
        return MatchField.TUNNEL_IPV4_SRC;
    }

    @Override
    public boolean isMasked() {
        return true;
    }

    @Override
    public OFOxm<IPv4Address> getCanonical()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }



        @Override
        public OFOxmTunnelIpv4SrcMasked build() {
                IPv4Address value = this.valueSet ? this.value : parentMessage.value;
                if(value == null)
                    throw new NullPointerException("Property value must not be null");
                IPv4Address mask = this.maskSet ? this.mask : parentMessage.mask;
                if(mask == null)
                    throw new NullPointerException("Property mask must not be null");
                //
                return new OFOxmTunnelIpv4SrcMaskedVer14(
                    value,
                    mask
                );
        }

    }

    /** Stand-alone builder; unset fields fall back to the class defaults. */
    static class Builder implements OFOxmTunnelIpv4SrcMasked.Builder {
        // OF message fields
        private boolean valueSet;
        private IPv4Address value;
        private boolean maskSet;
        private IPv4Address mask;

    @Override
    public long getTypeLen() {
        return 0x13f08L;
    }

    @Override
    public IPv4Address getValue() {
        return value;
    }

    @Override
    public OFOxmTunnelIpv4SrcMasked.Builder setValue(IPv4Address value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public IPv4Address getMask() {
        return mask;
    }

    @Override
    public OFOxmTunnelIpv4SrcMasked.Builder setMask(IPv4Address mask) {
        this.mask = mask;
        this.maskSet = true;
        return this;
    }

    @Override
    public MatchField<IPv4Address> getMatchField() {
        return MatchField.TUNNEL_IPV4_SRC;
    }

    @Override
    public boolean isMasked() {
        return true;
    }

    @Override
    public OFOxm<IPv4Address> getCanonical()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

//
        @Override
        public OFOxmTunnelIpv4SrcMasked build() {
            IPv4Address value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            IPv4Address mask = this.maskSet ? this.mask : DEFAULT_VALUE_MASK;
            if(mask == null)
                throw new NullPointerException("Property mask must not be null");
            return new OFOxmTunnelIpv4SrcMaskedVer14(
                    value,
                    mask
                );
        }

    }


    final static Reader READER = new Reader();

    /** Deserializes one OXM from the wire: 4-byte header, then value and mask (4 bytes each). */
    static class Reader implements OFMessageReader<OFOxmTunnelIpv4SrcMasked> {
        @Override
        public OFOxmTunnelIpv4SrcMasked readFrom(ChannelBuffer bb) throws OFParseError {
            // fixed value property typeLen == 0x13f08L
            int typeLen = bb.readInt();
            if(typeLen != 0x13f08)
                throw new OFParseError("Wrong typeLen: Expected=0x13f08L(0x13f08L), got="+typeLen);
            IPv4Address value = IPv4Address.read4Bytes(bb);
            IPv4Address mask = IPv4Address.read4Bytes(bb);

            OFOxmTunnelIpv4SrcMaskedVer14 oxmTunnelIpv4SrcMaskedVer14 = new OFOxmTunnelIpv4SrcMaskedVer14(
                    value,
                      mask
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmTunnelIpv4SrcMaskedVer14);
            return oxmTunnelIpv4SrcMaskedVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmTunnelIpv4SrcMaskedVer14Funnel FUNNEL = new OFOxmTunnelIpv4SrcMaskedVer14Funnel();

    /** Feeds the message's wire-significant fields into a Guava hashing sink. */
    static class OFOxmTunnelIpv4SrcMaskedVer14Funnel implements Funnel<OFOxmTunnelIpv4SrcMaskedVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFOxmTunnelIpv4SrcMaskedVer14 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x13f08L
            sink.putInt(0x13f08);
            message.value.putTo(sink);
            message.mask.putTo(sink);
        }
    }


    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /** Serializes the OXM to the wire; mirror image of {@link Reader}. */
    static class Writer implements OFMessageWriter<OFOxmTunnelIpv4SrcMaskedVer14> {
        @Override
        public void write(ChannelBuffer bb, OFOxmTunnelIpv4SrcMaskedVer14 message) {
            // fixed value property typeLen = 0x13f08L
            bb.writeInt(0x13f08);
            message.value.write4Bytes(bb);
            message.mask.write4Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmTunnelIpv4SrcMaskedVer14(");
        b.append("value=").append(value);
        b.append(", ");
        b.append("mask=").append(mask);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmTunnelIpv4SrcMaskedVer14 other = (OFOxmTunnelIpv4SrcMaskedVer14) obj;

        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        if (mask == null) {
            if (other.mask != null)
                return false;
        } else if (!mask.equals(other.mask))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + ((value == null) ? 0 : value.hashCode());
        result = prime * result + ((mask == null) ? 0 : mask.hashCode());
        return result;
    }

}
| |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.fs;
import gw.test.TestClass;
import java.util.Arrays;
/**
* Created by IntelliJ IDEA.
* User: akeefer
* Date: Nov 19, 2010
* Time: 10:15:48 AM
* To change this template use File | Settings | File Templates.
*/
public class ResourcePathTest extends TestClass {
  // ---------- parse(): Windows drive letters ----------
  public void testParseWithWindowsDriveLetter() {
    ResourcePath path = ResourcePath.parse("C:");
    assertEquals("C:\\", path.getPathString("\\"));
    assertPathMatches(path, "C:");
  }
  public void testParseWithLowerCaseWindowsDriveLetter() {
    // Drive letters are normalized to upper case.
    ResourcePath path = ResourcePath.parse("c:");
    assertEquals("C:\\", path.getPathString("\\"));
    assertPathMatches(path, "C:");
  }
  public void testParseWithWindowsDriveLetterAndWindowsSlash() {
    ResourcePath path = ResourcePath.parse("C:\\");
    assertEquals("C:\\", path.getPathString("\\"));
    assertPathMatches(path, "C:");
  }
  public void testParseWithWindowsDriveLetterAndUnixSlash() {
    ResourcePath path = ResourcePath.parse("C:/");
    assertEquals("C:\\", path.getPathString("\\"));
    assertPathMatches(path, "C:");
  }
  public void testParseWithWindowsDriveLetterAndOneComponent() {
    ResourcePath path = ResourcePath.parse("C:\\foo");
    assertEquals("C:\\foo", path.getPathString("\\"));
    assertPathMatches(path, "foo", "C:");
  }
  public void testParseWithWindowsDriveLetterAndThreeComponents() {
    ResourcePath path = ResourcePath.parse("C:\\foo\\bar\\baz.txt");
    assertEquals("C:\\foo\\bar\\baz.txt", path.getPathString("\\"));
    assertPathMatches(path, "baz.txt", "bar", "foo", "C:");
  }
  public void testParseWithWindowsDriveLetterAndThreeComponentsAndMixedSlashes() {
    ResourcePath path = ResourcePath.parse("C:/foo\\bar/baz.txt");
    assertEquals("C:\\foo\\bar\\baz.txt", path.getPathString("\\"));
    assertPathMatches(path, "baz.txt", "bar", "foo", "C:");
  }
  public void testParseWithAlternativeWindowsDriveLetter() {
    ResourcePath path = ResourcePath.parse("k:/foo\\bar/baz.txt");
    assertEquals("K:\\foo\\bar\\baz.txt", path.getPathString("\\"));
    assertPathMatches(path, "baz.txt", "bar", "foo", "K:");
  }
  // ---------- parse(): Windows network (UNC) paths ----------
  // TODO - AHK - The tests here aren't 100% right . . .
  public void testParseWithWindowsNetworkStart() {
    ResourcePath path = ResourcePath.parse("\\\\");
    assertEquals("\\\\", path.getPathString("\\"));
    assertPathMatches(path, "\\\\");
  }
  public void testParseWithWindowsNetworkStartAndOneComponent() {
    ResourcePath path = ResourcePath.parse("\\\\foo");
    assertEquals("\\\\foo", path.getPathString("\\"));
    assertPathMatches(path, "foo", "\\\\");
  }
  public void testParseWithWindowsNetworkStartAndThreeComponents() {
    ResourcePath path = ResourcePath.parse("\\\\foo\\bar\\baz.txt");
    assertEquals("\\\\foo\\bar\\baz.txt", path.getPathString("\\"));
    assertPathMatches(path, "baz.txt", "bar", "foo", "\\\\");
  }
  public void testParseWithWindowsNetworkStartAndThreeComponentsAndMixedSlashes() {
    ResourcePath path = ResourcePath.parse("\\\\foo/bar\\baz.txt");
    assertEquals("\\\\foo\\bar\\baz.txt", path.getPathString("\\"));
    assertPathMatches(path, "baz.txt", "bar", "foo", "\\\\");
  }
  // ---------- parse(): Unix-rooted paths, dots, trailing slashes ----------
  public void testParseWithUnixRoot() {
    // The Unix root component has the empty string as its name.
    ResourcePath path = ResourcePath.parse("/");
    assertEquals("/", path.getPathString("/"));
    assertPathMatches(path, "");
  }
  public void testParseWithUnixRootAndOneComponent() {
    ResourcePath path = ResourcePath.parse("/foo");
    assertEquals("/foo", path.getPathString("/"));
    assertPathMatches(path, "foo", "");
  }
  public void testParseWithUnixRootAndThreeComponents() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz.txt");
    assertEquals("/foo/bar/baz.txt", path.getPathString("/"));
    assertPathMatches(path, "baz.txt", "bar", "foo", "");
  }
  public void testParseWithUnixRootAndThreeComponentsAndMixedSlashes() {
    ResourcePath path = ResourcePath.parse("/foo\\bar/baz.txt");
    assertEquals("/foo/bar/baz.txt", path.getPathString("/"));
    assertPathMatches(path, "baz.txt", "bar", "foo", "");
  }
  public void testParseIgnoresDots() {
    ResourcePath path = ResourcePath.parse("/./foo/bar/././baz.txt");
    assertEquals("/foo/bar/baz.txt", path.getPathString("/"));
    assertPathMatches(path, "baz.txt", "bar", "foo", "");
  }
  public void testParseTraversesBackwardsWithDoubleDots() {
    ResourcePath path = ResourcePath.parse("/foo/bar/../../baz.txt");
    assertEquals("/baz.txt", path.getPathString("/"));
    assertPathMatches(path, "baz.txt", "");
  }
  public void testParseIgnoresTrailingSlashes() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz/");
    assertEquals("/foo/bar/baz", path.getPathString("/"));
    assertPathMatches(path, "baz", "bar", "foo", "");
  }
  // TODO - Error conditions: just dot, double dots traversing backwards, etc.
  // tests for join(String)
  public void testJoinWithEmptyStringIsANoOp() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("");
    assertEquals("/foo/bar/baz", path.getPathString("/"));
    assertPathMatches(path, "baz", "bar", "foo", "");
  }
  public void testJoinWithDotIsANoOp() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join(".");
    assertEquals("/foo/bar/baz", path.getPathString("/"));
    assertPathMatches(path, "baz", "bar", "foo", "");
  }
  public void testJoinWithWindowsSlashIsANoOp() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("\\");
    assertEquals("/foo/bar/baz", path.getPathString("/"));
    assertPathMatches(path, "baz", "bar", "foo", "");
  }
  public void testJoinWithUnixSlashIsANoOp() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("/");
    assertEquals("/foo/bar/baz", path.getPathString("/"));
    assertPathMatches(path, "baz", "bar", "foo", "");
  }
  public void testJoinWithSimpleName() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("test");
    assertEquals("/foo/bar/baz/test", path.getPathString("/"));
    assertPathMatches(path, "test", "baz", "bar", "foo", "");
  }
  public void testJoinWithDotDot() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("..");
    assertEquals("/foo/bar", path.getPathString("/"));
    assertPathMatches(path, "bar", "foo", "");
  }
  public void testJoinWithThreePathComponentsWithWindowsSlashes() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("test1\\test2\\test3");
    assertEquals("/foo/bar/baz/test1/test2/test3", path.getPathString("/"));
    assertPathMatches(path, "test3", "test2", "test1", "baz", "bar", "foo", "");
  }
  public void testJoinWithThreePathComponentsWithUnixSlashes() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("test1/test2/test3");
    assertEquals("/foo/bar/baz/test1/test2/test3", path.getPathString("/"));
    assertPathMatches(path, "test3", "test2", "test1", "baz", "bar", "foo", "");
  }
  public void testJoinWithThreePathComponentsWithMixedSlashes() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("test1/test2\\test3");
    assertEquals("/foo/bar/baz/test1/test2/test3", path.getPathString("/"));
    assertPathMatches(path, "test3", "test2", "test1", "baz", "bar", "foo", "");
  }
  public void testJoinWithThreePathComponentsWithLeadingSlash() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("/test1/test2/test3");
    assertEquals("/foo/bar/baz/test1/test2/test3", path.getPathString("/"));
    assertPathMatches(path, "test3", "test2", "test1", "baz", "bar", "foo", "");
  }
  public void testJoinWithThreePathComponentsWithTrailingSlash() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("/test1/test2/test3/");
    assertEquals("/foo/bar/baz/test1/test2/test3", path.getPathString("/"));
    assertPathMatches(path, "test3", "test2", "test1", "baz", "bar", "foo", "");
  }
  public void testJoinWithConfusingMixOfDotsAndDotDotsAndSlashes() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("/./../test1/.././test2/test3/.././");
    assertEquals("/foo/bar/test2", path.getPathString("/"));
    assertPathMatches(path, "test2", "bar", "foo", "");
  }
  public void testJoinWithNullArgumentThrowsIllegalArgumentException() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    try {
      path.join(null);
      fail();
    } catch (IllegalArgumentException e) {
      // Expected
    }
  }
  public void testJoinThatTraversesBackToTheRoot() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    path = path.join("../../..");
    assertEquals("/", path.getPathString("/"));
    assertPathMatches(path, "");
  }
  public void testJoinThatTraversesBackPastTheRootThrowsIllegalArgumentException() {
    ResourcePath path = ResourcePath.parse("/foo/bar/baz");
    try {
      path.join("../../../..");
      fail();
    } catch (IllegalArgumentException e) {
      // Expected
    }
  }
  // TODO - AHK - Error conditions around join: mangled string, .. that recurses back past the root, etc.
  // tests for relativePath(ResourcePath)
  // TODO - AHK - Duplicate tests for use with alternative or mixed slashes?
  public void testRelativePathReturnsNullForIdenticalPath() {
    ResourcePath path = ResourcePath.parse("C:\\foo\\bar");
    assertNull(path.relativePath(path));
  }
  public void testRelativePathReturnsNullForPathWithNoCommonRoot() {
    ResourcePath path = ResourcePath.parse("C:\\foo\\bar");
    ResourcePath path2 = ResourcePath.parse("C:\\baz\\other");
    assertNull(path.relativePath(path2));
    assertNull(path2.relativePath(path));
  }
  public void testRelativePathReturnsNullForAncestorPath() {
    ResourcePath path = ResourcePath.parse("C:\\foo\\bar\\baz");
    ResourcePath path2 = ResourcePath.parse("C:\\foo");
    assertNull(path.relativePath(path2));
  }
  public void testRelativePathReturnsNullForParentPath() {
    ResourcePath path = ResourcePath.parse("C:\\foo\\bar\\baz");
    ResourcePath path2 = ResourcePath.parse("C:\\foo\\bar");
    assertNull(path.relativePath(path2));
  }
  public void testRelativePathReturnsOneElementPathForChildPath() {
    ResourcePath path = ResourcePath.parse("C:\\foo\\bar\\baz");
    ResourcePath path2 = ResourcePath.parse("C:\\foo\\bar");
    assertEquals("baz", path2.relativePath(path));
  }
  public void testRelativePathReturnsCorrectPathForDescendantPath() {
    ResourcePath path = ResourcePath.parse("C:\\foo\\bar\\baz\\boo\\other");
    ResourcePath path2 = ResourcePath.parse("C:\\foo\\bar");
    assertEquals("baz/boo/other", path2.relativePath(path, "/"));
  }
  // TODO - AHK - Make sure relativePath respects the option passed in, and that the default is the file system separator
  // --------------------- Private Helper Methods
  /**
   * Walks the path from leaf to root, asserting each component name in turn.
   *
   * @param reversedPath expected component names, leaf first, root last
   */
  // NOTE(review): the walk does not assert that it ends at the root
  // (currentPath.getParent() == null after the loop), so paths with extra
  // ancestors would pass undetected — confirm whether that is intentional.
  private void assertPathMatches(ResourcePath path, String... reversedPath) {
    ResourcePath currentPath = path;
    for (String s : reversedPath) {
      if (currentPath == null) {
        fail("Resource path did not contain " + s);
      }
      assertEquals(s, currentPath.getName());
      currentPath = currentPath.getParent();
    }
  }
}
| |
/*
* Copyright 2010 Vodafone Group Services Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.onesocialweb.gwt.client.ui.widget.compose;
import java.util.Date;
import org.onesocialweb.gwt.client.OswClient;
import org.onesocialweb.gwt.client.handler.PictureHandler;
import org.onesocialweb.gwt.client.i18n.UserInterfaceText;
import org.onesocialweb.gwt.client.task.DefaultTaskInfo;
import org.onesocialweb.gwt.client.task.TaskMonitor;
import org.onesocialweb.gwt.client.task.TaskInfo.Status;
import org.onesocialweb.gwt.client.ui.dialog.AlertDialog;
import org.onesocialweb.gwt.client.ui.dialog.PictureChooserDialog;
import org.onesocialweb.gwt.client.ui.event.ComponentEvent;
import org.onesocialweb.gwt.client.ui.event.ComponentHelper;
import org.onesocialweb.gwt.client.ui.event.ComponentListener;
import org.onesocialweb.gwt.service.OswService;
import org.onesocialweb.gwt.service.OswServiceFactory;
import org.onesocialweb.gwt.service.RequestCallback;
import org.onesocialweb.gwt.util.ListModel;
import org.onesocialweb.model.acl.AclAction;
import org.onesocialweb.model.acl.AclRule;
import org.onesocialweb.model.acl.AclSubject;
import org.onesocialweb.model.activity.ActivityEntry;
import org.onesocialweb.model.activity.ActivityObject;
import org.onesocialweb.model.activity.ActivityVerb;
import org.onesocialweb.model.atom.AtomFactory;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.PushButton;
/**
 * Composer widget for publishing a new activity (status update): a text area
 * plus optional picture, recipient ("shout") and privacy attachment panels.
 *
 * Fix: removed a leftover debug {@code System.out.println} from the resize
 * event relay.
 */
public class NewActivityPanel extends Composite {
    // internationalization
    private UserInterfaceText uiText = (UserInterfaceText) GWT.create(UserInterfaceText.class);
    // Pictures attached to the activity currently being composed.
    private final ListModel<ActivityObject> pictureAttachments = new ListModel<ActivityObject>();
    private final ComponentHelper componentHelper = new ComponentHelper();
    private final InternalComponentListener componentListener = new InternalComponentListener();

    public NewActivityPanel() {
        composePanel();
    }

    /**
     * Clears the composer: empties the text area, resets and hides every
     * attachment panel, and fires a single resize event for the whole change.
     */
    public void reset() {
        // Empty the text area
        textareaUpdate.setText("");
        // Tell the listener to ignore events, we'll fire a single event when we
        // are done
        componentListener.setIgnoreEvent(true);
        // Remove all attachments
        pictureAttachmentPanel.reset();
        privacyAttachmentPanel.reset();
        shoutAttachmentPanel.reset();
        // And hide the panels
        pictureAttachmentPanel.hide();
        privacyAttachmentPanel.hide();
        shoutAttachmentPanel.hide();
        // Fire a single resize event and reactivate the listener
        componentHelper.fireComponentResized(this);
        componentListener.setIgnoreEvent(false);
    }

    /** Registers a listener for this panel's component events. */
    public void addComponentListener(ComponentListener listener) {
        componentHelper.addComponentListener(listener);
    }

    /** Unregisters a previously added component listener. */
    public void removeComponentListener(ComponentListener listener) {
        componentHelper.removeComponentListener(listener);
    }

    /**
     * Validates the text area, builds the activity entry (content, verb,
     * attachments, recipients, ACL), resets the UI, and posts the entry
     * asynchronously while reporting progress through a task monitor.
     */
    private void postStatusUpdate() {
        // Reject empty updates up front with an alert dialog.
        if (textareaUpdate.getText().length() == 0) {
            AlertDialog
                    .getInstance()
                    .showDialog(
                            uiText.EmptyUpdate(),
                            uiText.Oops());
            return;
        }
        // TODO disable the update panel during the update
        final OswService service = OswServiceFactory.getService();
        final AtomFactory atomFactory = service.getAtomFactory();
        Date now = new Date();
        String status = textareaUpdate.getText();
        ActivityObject object = service.getActivityFactory().object(
                ActivityObject.STATUS_UPDATE);
        object.addContent(service.getAtomFactory().content(status,
                "text/plain", null));
        object.setPublished(now);
        // the basics
        ActivityEntry entry = service.getActivityFactory().entry();
        entry.setTitle(status);
        entry.addVerb(service.getActivityFactory().verb(ActivityVerb.POST));
        entry.addObject(object);
        entry.setPublished(now);
        // add attachments if there are any
        for (ActivityObject current : pictureAttachments) {
            entry.addObject(current);
        }
        // Add recipients if there are any
        for (String recipient : shoutAttachmentPanel.getRecipients()) {
            entry.addRecipient(atomFactory.reply(null, recipient, null, null));
        }
        // setup access control
        AclRule rule = service.getAclFactory().aclRule();
        rule.addAction(service.getAclFactory().aclAction(AclAction.ACTION_VIEW,
                AclAction.PERMISSION_GRANT));
        // check privacy settings
        String visibilityValue = privacyAttachmentPanel.getPrivacyValue();
        if (visibilityValue.equals(uiText.Everyone())) {
            rule.addSubject(service.getAclFactory().aclSubject(null,
                    AclSubject.EVERYONE));
        } else {
            rule.addSubject(service.getAclFactory().aclSubject(visibilityValue,
                    AclSubject.GROUP));
        }
        entry.addAclRule(rule);
        // we got everything we need -> clean up UI
        reset();
        // Prepare a task to monitor status
        final DefaultTaskInfo task = new DefaultTaskInfo(
                uiText.UpdatingStatus(), false);
        TaskMonitor.getInstance().addTask(task);
        service.post(entry, new RequestCallback<ActivityEntry>() {
            @Override
            public void onFailure() {
                task.complete(uiText.UpdateFailure(), Status.failure);
            }

            @Override
            public void onSuccess(ActivityEntry result) {
                // NOTE: "succes" is the spelling declared on the Status enum.
                task.complete(uiText.UpdateSuccess(), Status.succes);
            }
        });
    }

    // UI stuff
    /** Builds the widget tree, wires attachment dialogs, styles, and click handlers. */
    private void composePanel() {
        // Init attachment dialogs
        pictureChooserDialog = new PictureChooserDialog(new PictureHandler() {
            public void handlePicture(String pictureUrl) {
                if (pictureUrl != null && pictureUrl.length() > 0) {
                    OswService service = OswClient.getInstance().getService();
                    ActivityObject object = service.getActivityFactory()
                            .object(ActivityObject.PICTURE);
                    object.addLink(service.getAtomFactory().link(pictureUrl,
                            "alternate", null, null));
                    pictureAttachments.add(object);
                }
            }
        });
        // Add components to page
        flow.add(addRecipients);
        flow.add(addPhoto);
        flow.add(addPrivacy);
        flow.add(buttonUpdate);
        // Create panel
        statusPanel.add(textareaUpdate);
        statusPanel.add(attachmentsPanel);
        statusPanel.add(flow);
        // AttachmentsPanel
        pictureAttachmentPanel = new MultiplePictureAttachmentPanel();
        pictureAttachmentPanel.setModel(pictureAttachments);
        pictureAttachmentPanel.addComponentListener(componentListener);
        attachmentsPanel.add(pictureAttachmentPanel);
        shoutAttachmentPanel = new ShoutAttachmentPanel();
        shoutAttachmentPanel.addComponentListener(componentListener);
        attachmentsPanel.add(shoutAttachmentPanel);
        privacyAttachmentPanel = new PrivacyAttachmentPanel();
        privacyAttachmentPanel.addComponentListener(componentListener);
        attachmentsPanel.add(privacyAttachmentPanel);
        // Add CSS classes
        buttonUpdate.addStyleName("buttonUpdate");
        attachment.addStyleName("updateLabel");
        flow.addStyleName("options");
        attachmentsPanel.addStyleName("attachmentWrapper");
        statusPanel.setStyleName("topPanel");
        // Set tooltips
        addPhoto.setTitle(uiText.AddPicture());
        addPrivacy.setTitle(uiText.ChangePrivacy());
        addRecipients.setTitle(uiText.ShoutToContacts());
        initWidget(statusPanel);
        buttonUpdate.addClickHandler(new ClickHandler() {
            public void onClick(ClickEvent event) {
                postStatusUpdate();
            }
        });
        addRecipients.addClickHandler(new ClickHandler() {
            public void onClick(ClickEvent event) {
                shoutAttachmentPanel.show();
            }
        });
        addPrivacy.addClickHandler(new ClickHandler() {
            public void onClick(ClickEvent event) {
                privacyAttachmentPanel.show();
            }
        });
        addPhoto.addClickHandler(new ClickHandler() {
            public void onClick(ClickEvent event) {
                pictureChooserDialog.show();
            }
        });
    }

    private MultiplePictureAttachmentPanel pictureAttachmentPanel;
    private PrivacyAttachmentPanel privacyAttachmentPanel;
    private ShoutAttachmentPanel shoutAttachmentPanel;
    private PictureChooserDialog pictureChooserDialog;
    private final FlowPanel statusPanel = new FlowPanel();
    private final FlowPanel attachmentsPanel = new FlowPanel();
    private final FlowPanel flow = new FlowPanel();
    private final Label attachment = new Label(uiText.Add());
    private final Button buttonUpdate = new Button(uiText.Share());
    private final TextareaUpdate textareaUpdate = new TextareaUpdate();
    private final PushButton addRecipients = new PushButton(new Image(OswClient
            .getInstance().getPreference("theme_folder")
            + "assets/i-shout.png"));
    private final PushButton addPrivacy = new PushButton(new Image(OswClient
            .getInstance().getPreference("theme_folder")
            + "assets/i-key.png"));
    private final PushButton addPhoto = new PushButton(new Image(OswClient
            .getInstance().getPreference("theme_folder")
            + "assets/i-camera2.png"));

    /**
     * Relays child-component events to this panel's listeners, with a switch to
     * suppress event storms during bulk operations such as {@link #reset()}.
     */
    private class InternalComponentListener implements ComponentListener {
        private boolean ignoreEvent = false;

        public void setIgnoreEvent(boolean ignoreEvent) {
            this.ignoreEvent = ignoreEvent;
        }

        @Override
        public void componentHidden(ComponentEvent e) {
            if (!ignoreEvent) {
                componentHelper.fireComponentHidden(e);
            }
        }

        @Override
        public void componentMoved(ComponentEvent e) {
            if (!ignoreEvent) {
                componentHelper.fireComponentMoved(e);
            }
        }

        @Override
        public void componentResized(ComponentEvent e) {
            if (!ignoreEvent) {
                // Debug println removed here; only the event relay remains.
                componentHelper.fireComponentResized(e);
            }
        }

        @Override
        public void componentShown(ComponentEvent e) {
            if (!ignoreEvent) {
                componentHelper.fireComponentShown(e);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.aws.wag;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processors.aws.AbstractAWSCredentialsProviderProcessor;
import org.apache.nifi.processors.aws.credentials.provider.service.AWSCredentialsProviderControllerService;
import org.apache.nifi.provenance.ProvenanceEventRecord;
import org.apache.nifi.provenance.ProvenanceEventType;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.junit.Assert;
import org.junit.Test;
public abstract class TestInvokeAWSGatewayApiCommon {
private static final String SUCCESS_RESPONSE_BODY = "{\"status\":\"200\"}";
private static final String APPLICATION_JSON = "application/json";
public TestRunner runner;
protected MockWebServer mockWebServer;
    /**
     * Registers an AWS credentials-provider controller service with dummy keys
     * and points the processor's credentials-provider property at it.
     */
    protected void setupControllerService() throws InitializationException {
        final AWSCredentialsProviderControllerService serviceImpl = new AWSCredentialsProviderControllerService();
        runner.addControllerService("awsCredentialsProvider", serviceImpl);
        runner.setProperty(serviceImpl, InvokeAWSGatewayApi.ACCESS_KEY, "awsAccessKey");
        runner.setProperty(serviceImpl, InvokeAWSGatewayApi.SECRET_KEY, "awsSecretKey");
        runner.enableControllerService(serviceImpl);
        runner.setProperty(InvokeAWSGatewayApi.AWS_CREDENTIALS_PROVIDER_SERVICE,
            "awsCredentialsProvider");
    }
    /** Configures inline access/secret key properties on the processor under test. */
    protected void setupAuth() {
        runner.setProperty(InvokeAWSGatewayApi.ACCESS_KEY, "testAccessKey");
        runner.setProperty(InvokeAWSGatewayApi.SECRET_KEY, "testSecretKey");
    }
    /** Points the processor at the mock AWS credentials properties file. */
    protected void setupCredFile() {
        runner.setProperty(AbstractAWSCredentialsProviderProcessor.CREDENTIALS_FILE,
            "src/test/resources/mock-aws-credentials.properties");
    }
    /** Sets region, API key, and the mock web server's URL as the gateway endpoint. */
    public void setupEndpointAndRegion() {
        runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_GATEWAY_API_REGION, "us-east-1");
        runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_API_KEY, "abcd");
        runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_GATEWAY_API_ENDPOINT, mockWebServer.url("/").toString());
    }
    /** Queues a 200 JSON response ({@code SUCCESS_RESPONSE_BODY}) on the mock server. */
    private void enqueueSuccess() {
        mockWebServer.enqueue(
                new MockResponse()
                        .setResponseCode(200)
                        .addHeader("Content-Type", APPLICATION_JSON)
                        .setBody(SUCCESS_RESPONSE_BODY)
        );
    }
@Test
public void test200() throws Exception {
enqueueSuccess();
enqueueSuccess();
setupEndpointAndRegion();
runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
createFlowFiles(runner);
// verify that call works with or without flowfile being sent for GET
// there should only be 1 REQ, but 2 RESPONSE
runner.run();
runner.run();
runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 2);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);
runner.assertPenalizeCount(0);
// expected in request status.code and status.message
// original flow file (+attributes)
final MockFlowFile bundle = runner
.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME).get(0);
bundle.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
bundle.assertAttributeEquals("Foo", "Bar");
// expected in response of each message
// status code, status message, all headers from server response --> ff attributes
// server response message body into payload of ff
assert200Response(runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0), true);
assert200Response(runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(1), false);
final List<ProvenanceEventRecord> provEvents = runner.getProvenanceEvents();
assertEquals(3, provEvents.size());
boolean forkEvent = false;
boolean fetchEvent = false;
boolean recieveEvent = false;
for (final ProvenanceEventRecord event : provEvents) {
if (event.getEventType() == ProvenanceEventType.FORK) {
forkEvent = true;
} else if (event.getEventType() == ProvenanceEventType.FETCH) {
fetchEvent = true;
} else if (event.getEventType() == ProvenanceEventType.RECEIVE) {
recieveEvent = true;
}
}
assertTrue(forkEvent);
assertTrue(fetchEvent);
assertTrue(recieveEvent);
}
private void assert200Response(final MockFlowFile bundle, final boolean requestWithInput) throws IOException {
bundle.assertContentEquals("{\"status\":\"200\"}".getBytes(StandardCharsets.UTF_8));
bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
bundle.assertAttributeEquals("Content-Type", "application/json");
// check any input FlowFile attributes were included in the Response FlowFile
if (requestWithInput) {
bundle.assertAttributeEquals("Foo", "Bar");
} else {
bundle.assertAttributeNotExists("Foo");
}
}
    /**
     * With OUTPUT_RESPONSE_REGARDLESS enabled, a 404 must still route a copy to
     * the response relationship in addition to the no-retry relationship.
     */
    @Test
    public void testOutputResponseRegardless() throws Exception {
        mockWebServer.enqueue(new MockResponse().setResponseCode(404));
        setupEndpointAndRegion();
        runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
        runner.setProperty(InvokeAWSGatewayApi.PROP_OUTPUT_RESPONSE_REGARDLESS, "true");
        createFlowFiles(runner);
        runner.run();
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 1);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);
        runner.assertPenalizeCount(0);
        // expected in request status.code and status.message
        // original flow file (+attributes)
        final MockFlowFile bundle = runner
            .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_NO_RETRY_NAME).get(0);
        bundle.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
        bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "404");
        bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Not Found");
        bundle.assertAttributeEquals("Foo", "Bar");
        // expected in response
        // status code, status message, all headers from server response --> ff attributes
        // server response message body into payload of ff
        final MockFlowFile bundle1 = runner
            .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
        bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "404");
        bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Not Found");
        bundle1.assertAttributeEquals("Foo", "Bar");
    }
    /**
     * As {@link #testOutputResponseRegardless()} but additionally routing the
     * response body into the "outputBody" attribute.
     */
    @Test
    public void testOutputResponseRegardlessWithOutputInAttribute() throws Exception {
        mockWebServer.enqueue(new MockResponse().setResponseCode(404));
        setupEndpointAndRegion();
        runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
        runner.setProperty(InvokeAWSGatewayApi.PROP_OUTPUT_RESPONSE_REGARDLESS, "true");
        runner.setProperty(InvokeAWSGatewayApi.PROP_PUT_OUTPUT_IN_ATTRIBUTE, "outputBody");
        createFlowFiles(runner);
        runner.run();
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 1);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);
        runner.assertPenalizeCount(0);
        // expected in request status.code and status.message
        // original flow file (+attributes)
        final MockFlowFile bundle = runner
            .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_NO_RETRY_NAME).get(0);
        bundle.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
        bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "404");
        bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Not Found");
        bundle.assertAttributeEquals("Foo", "Bar");
        // expected in response
        // status code, status message, all headers from server response --> ff attributes
        // server response message body into payload of ff
        final MockFlowFile bundle1 = runner
            .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
        bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "404");
        bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Not Found");
        bundle1.assertAttributeEquals("Foo", "Bar");
    }
    /**
     * The response FlowFile's mime.type attribute must mirror the server's
     * Content-Type header.
     */
    // NOTE(review): createFlowFiles(runner) is called twice (before and after
    // the property changes) while only one response is enqueued — confirm the
    // duplicate call is intentional and not a copy/paste leftover.
    @Test
    public void testOutputResponseSetMimeTypeToResponseContentType() throws Exception {
        enqueueSuccess();
        setupEndpointAndRegion();
        runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
        createFlowFiles(runner);
        runner.setProperty(InvokeAWSGatewayApi.PROP_OUTPUT_RESPONSE_REGARDLESS, "true");
        runner.setProperty(InvokeAWSGatewayApi.PROP_PUT_OUTPUT_IN_ATTRIBUTE, "outputBody");
        createFlowFiles(runner);
        runner.run();
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);
        runner.assertPenalizeCount(0);
        // expected in request status.code and status.message
        // original flow file (+attributes)
        final MockFlowFile bundle = runner
            .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME).get(0);
        bundle.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
        bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
        bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
        bundle.assertAttributeEquals("Foo", "Bar");
        // expected in response
        // status code, status message, all headers from server response --> ff attributes
        // server response message body into payload of ff
        final MockFlowFile bundle1 = runner
            .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
        bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
        bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
        bundle1.assertAttributeEquals("Foo", "Bar");
        bundle1.assertAttributeEquals("Content-Type", "application/json");
        bundle1.assertAttributeEquals("mime.type", "application/json");
    }
    /**
     * Output-in-attribute with PUT_ATTRIBUTE_MAX_LENGTH set to 11, so the
     * captured body attribute is truncated.
     */
    // NOTE(review): createFlowFiles(runner) is called twice here as well while
    // only one response is enqueued — confirm the duplicate call is intentional.
    @Test
    public void testOutputResponseRegardlessWithOutputInAttributeLarge() throws Exception {
        mockWebServer.enqueue(new MockResponse().setResponseCode(404));
        setupEndpointAndRegion();
        runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
        createFlowFiles(runner);
        runner.setProperty(InvokeAWSGatewayApi.PROP_OUTPUT_RESPONSE_REGARDLESS, "true");
        runner.setProperty(InvokeAWSGatewayApi.PROP_PUT_OUTPUT_IN_ATTRIBUTE, "outputBody");
        runner.setProperty(InvokeAWSGatewayApi.PROP_PUT_ATTRIBUTE_MAX_LENGTH, "11");
        createFlowFiles(runner);
        runner.run();
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 1);
        runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);
        runner.assertPenalizeCount(0);
        // expected in request status.code and status.message
        // original flow file (+attributes)
        final MockFlowFile bundle = runner
            .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_NO_RETRY_NAME).get(0);
        bundle.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
        bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "404");
        bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Not Found");
        bundle.assertAttributeEquals("Foo", "Bar");
        // expected in response
        // status code, status message, all headers from server response --> ff attributes
        // server response message body into payload of ff
        final MockFlowFile bundle1 = runner
            .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
        bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "404");
        bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Not Found");
        bundle1.assertAttributeEquals("Foo", "Bar");
    }
@Test
// NOTE : Amazon does not support multiple headers with the same name!!!
public void testMultipleSameHeaders() throws Exception {
    // The mock server replies with a duplicated "double" header; the AWS layer
    // collapses duplicates (headers arrive as Map<String,String>), so only a single
    // value survives on the response flow file.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("double", "2").addHeader("double", "2"));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
    createFlowFiles(runner);
    runner.run();

    // Exactly one request and one response flow file; no errors, no penalty.
    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    // Original request flow file keeps its content/attributes and gains status info.
    final MockFlowFile requestFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME).get(0);
    requestFlowFile.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    requestFlowFile.assertAttributeEquals("Foo", "Bar");

    // Response flow file: status attributes plus the (de-duplicated) server header.
    final MockFlowFile responseFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Foo", "Bar");
    responseFlowFile.assertAttributeEquals("double", "2");
}
@Test
public void testPutResponseHeadersInRequest() throws Exception {
    // With PROP_ADD_HEADERS_TO_REQUEST enabled, the server's response headers are
    // copied onto the original request flow file as well as the response flow file.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", APPLICATION_JSON).addHeader("double", "2"));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
    runner.setProperty(InvokeAWSGatewayApi.PROP_ADD_HEADERS_TO_REQUEST, "true");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    // Request flow file: original content plus status AND response headers.
    final MockFlowFile requestFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME).get(0);
    requestFlowFile.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    requestFlowFile.assertAttributeEquals("Foo", "Bar");
    requestFlowFile.assertAttributeEquals("double", "2");
    requestFlowFile.assertAttributeEquals("Content-Type", APPLICATION_JSON);

    // Response flow file: status plus the same server headers.
    final MockFlowFile responseFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Foo", "Bar");
    responseFlowFile.assertAttributeEquals("double", "2");
    responseFlowFile.assertAttributeEquals("Content-Type", APPLICATION_JSON);
}
@Test
public void testToRequestAttribute() throws Exception {
    // With PROP_PUT_OUTPUT_IN_ATTRIBUTE set, the response body is captured into an
    // attribute on the original request flow file and no separate response flow file
    // is produced.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
    runner.setProperty(InvokeAWSGatewayApi.PROP_PUT_OUTPUT_IN_ATTRIBUTE, "outputBody");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    // Original request flow file with status attributes attached.
    final MockFlowFile requestFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME).get(0);
    requestFlowFile.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    requestFlowFile.assertAttributeEquals("Foo", "Bar");
}
@Test
public void testNoInput() {
    // Without an incoming connection the processor should still fire and emit a
    // response flow file on its own.
    enqueueSuccess();
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
    runner.setIncomingConnection(false);
    runner.setNonLoopConnection(false);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    // Response flow file carries status and server headers.
    final MockFlowFile responseFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Content-Type", APPLICATION_JSON);
}
@Test
public void testNoInputWithAttributes() {
    // Same as testNoInput, but with an attributes-to-send pattern configured; with no
    // input flow file there are no attributes to forward, and the call still succeeds.
    enqueueSuccess();
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
    runner.setProperty(InvokeAWSGatewayApi.PROP_ATTRIBUTES_TO_SEND, "myAttribute");
    runner.setIncomingConnection(false);
    runner.setNonLoopConnection(false);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    // Response flow file carries status and server headers.
    final MockFlowFile responseFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Content-Type", "application/json");
}
@Test
public void testNoInputSendToAttribute() throws Exception {
    // With no incoming connection and PROP_PUT_OUTPUT_IN_ATTRIBUTE set, the processor
    // creates a new flow file with empty content and routes it to the request-success
    // relationship, with the response body captured as an attribute.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
    runner.setProperty(InvokeAWSGatewayApi.PROP_PUT_OUTPUT_IN_ATTRIBUTE, "outputBody");
    runner.setIncomingConnection(false);
    runner.setNonLoopConnection(false);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE, 0);

    // Newly created flow file: empty body, status attributes present.
    final MockFlowFile requestFlowFile = runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ).get(0);
    requestFlowFile.assertContentEquals("".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
}
@Test
public void test500() {
    // A server error (HTTP 500) should penalize the flow file and route it to retry,
    // leaving its original content and attributes intact.
    mockWebServer.enqueue(new MockResponse().setResponseCode(500));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/500");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    // Retried flow file: original payload plus status code/message attributes.
    final MockFlowFile retried = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RETRY_NAME).get(0);
    retried.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "500");
    retried.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Internal Server Error");
    retried.assertAttributeEquals("Foo", "Bar");
    final String responseBody = new String(retried.toByteArray(), StandardCharsets.UTF_8);
    Assert.assertEquals("Hello", responseBody);
}
@Test
public void test300() {
    // A redirect (HTTP 302) is not retriable: the flow file routes to no-retry with
    // its original content intact and status attributes attached.
    mockWebServer.enqueue(new MockResponse().setResponseCode(302));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/302");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    final MockFlowFile noRetryFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_NO_RETRY_NAME).get(0);
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "302");
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Moved Temporarily");
    noRetryFlowFile.assertAttributeEquals("Foo", "Bar");
    final String responseBody = new String(noRetryFlowFile.toByteArray(), StandardCharsets.UTF_8);
    Assert.assertEquals("Hello", responseBody);
}
@Test
public void test304() {
    // HTTP 304 Not Modified is treated as a non-retriable status: the flow file goes
    // to no-retry with its original content and the status attributes set.
    mockWebServer.enqueue(new MockResponse().setResponseCode(304));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/304");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    final MockFlowFile noRetryFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_NO_RETRY_NAME).get(0);
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "304");
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Not Modified");
    noRetryFlowFile.assertAttributeEquals("Foo", "Bar");
    final String responseBody = new String(noRetryFlowFile.toByteArray(), StandardCharsets.UTF_8);
    Assert.assertEquals("Hello", responseBody);
}
@Test
public void test400() {
    // A client error (HTTP 400) routes the flow file to no-retry without penalizing.
    mockWebServer.enqueue(new MockResponse().setResponseCode(400));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/400");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    final MockFlowFile noRetryFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_NO_RETRY_NAME).get(0);
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "400");
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Bad Request");
    noRetryFlowFile.assertAttributeEquals("Foo", "Bar");
    final String responseBody = new String(noRetryFlowFile.toByteArray(), StandardCharsets.UTF_8);
    Assert.assertEquals("Hello", responseBody);
}
@Test
public void test400WithPenalizeNoRetry() {
    // Same as test400, but with PROP_PENALIZE_NO_RETRY enabled the no-retry flow file
    // is additionally penalized.
    mockWebServer.enqueue(new MockResponse().setResponseCode(400));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/400");
    runner.setProperty(InvokeAWSGatewayApi.PROP_PENALIZE_NO_RETRY, "true");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    final MockFlowFile noRetryFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_NO_RETRY_NAME).get(0);
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "400");
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Bad Request");
    noRetryFlowFile.assertAttributeEquals("Foo", "Bar");
    final String responseBody = new String(noRetryFlowFile.toByteArray(), StandardCharsets.UTF_8);
    Assert.assertEquals("Hello", responseBody);
}
@Test
public void test412() {
    // HTTP 412 Precondition Failed routes the flow file to no-retry without penalty.
    mockWebServer.enqueue(new MockResponse().setResponseCode(412));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/412");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    final MockFlowFile noRetryFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_NO_RETRY_NAME).get(0);
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "412");
    noRetryFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "Precondition Failed");
    noRetryFlowFile.assertAttributeEquals("Foo", "Bar");
    final String responseBody = new String(noRetryFlowFile.toByteArray(), StandardCharsets.UTF_8);
    Assert.assertEquals("Hello", responseBody);
}
@Test
public void testHead() throws Exception {
    // A HEAD request succeeds; the response flow file has an empty body because HEAD
    // responses carry no content.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "HEAD");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    // Request flow file keeps its original content/attributes.
    final MockFlowFile requestFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME).get(0);
    requestFlowFile.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    requestFlowFile.assertAttributeEquals("Foo", "Bar");

    // Response flow file is empty but carries status attributes.
    final MockFlowFile responseFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
    responseFlowFile.assertContentEquals("".getBytes(StandardCharsets.UTF_8));
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Foo", "Bar");
    Assert.assertEquals("", new String(responseFlowFile.toByteArray(), StandardCharsets.UTF_8));
}
@Test
public void testPost() throws Exception {
    // A POST with a body succeeds; the response flow file is empty (the mock reply
    // has no body) and carries no Content-Type attribute.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/post");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "POST");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);

    // Request flow file keeps its original content/attributes.
    final MockFlowFile requestFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME).get(0);
    requestFlowFile.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    requestFlowFile.assertAttributeEquals("Foo", "Bar");

    // Response flow file: empty body, no Content-Type attribute.
    final MockFlowFile responseFlowFile = runner
        .getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
    responseFlowFile.assertContentEquals("".getBytes(StandardCharsets.UTF_8));
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Foo", "Bar");
    responseFlowFile.assertAttributeNotExists("Content-Type");
    Assert.assertEquals("", new String(responseFlowFile.toByteArray(), StandardCharsets.UTF_8));
}
@Test
public void testPostWithMimeType() {
    // POST where the flow file's mime.type attribute supplies the request content type.
    final String contentType = "text/plain";
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", contentType));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/post");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "POST");
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), contentType);
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
}
@Test
public void testPostWithEmptyELExpression() {
    // POST where the mime.type attribute is empty: the processor should still
    // succeed, falling back to its default content type.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", InvokeAWSGatewayApi.DEFAULT_CONTENT_TYPE));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/post");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "POST");
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), "");
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
}
@Test
public void testPostWithContentTypeProperty() {
    // POST where the processor's Content-Type property overrides the flow file's
    // mime.type attribute (text/csv here).
    final String contentType = "text/plain";
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", contentType));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/post");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "POST");
    runner.setProperty(InvokeAWSGatewayApi.PROP_CONTENT_TYPE, contentType);
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), "text/csv");
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
}
@Test
public void testPostWithEmptyBodySet() {
    // POST with PROP_SEND_BODY disabled and an empty content type: the request body
    // is omitted and the call still succeeds.
    final String contentType = "";
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", contentType));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/post");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "POST");
    runner.setProperty(InvokeAWSGatewayApi.PROP_CONTENT_TYPE, contentType);
    runner.setProperty(InvokeAWSGatewayApi.PROP_SEND_BODY, "false");
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), contentType);
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
}
@Test
public void testPutWithMimeType() {
    // PUT where the flow file's mime.type attribute supplies the request content type.
    final String contentType = "text/plain";
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", contentType));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/post");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "PUT");
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), contentType);
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
}
@Test
public void testPutWithEmptyELExpression() {
    // PUT where the mime.type attribute is empty: the processor should still
    // succeed, falling back to its default content type.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", InvokeAWSGatewayApi.DEFAULT_CONTENT_TYPE));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/post");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "PUT");
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), "");
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
}
@Test
public void testPutWithContentTypeProperty() {
    // PUT where the processor's Content-Type property overrides the flow file's
    // mime.type attribute (text/csv here).
    final String contentType = "text/plain";
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", contentType));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/post");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "PUT");
    runner.setProperty(InvokeAWSGatewayApi.PROP_CONTENT_TYPE, contentType);
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), "text/csv");
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
}
@Test
public void testPut() throws Exception {
    // A PUT with a body succeeds; the response flow file is empty (the mock reply
    // has no body) and carries no Content-Type attribute.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/post");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "PUT");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE, 0);

    // Request flow file keeps its original content/attributes.
    final MockFlowFile requestFlowFile = runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ).get(0);
    requestFlowFile.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    requestFlowFile.assertAttributeEquals("Foo", "Bar");

    // Response flow file: empty body, no Content-Type attribute.
    final MockFlowFile responseFlowFile = runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE).get(0);
    responseFlowFile.assertContentEquals("".getBytes(StandardCharsets.UTF_8));
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Foo", "Bar");
    responseFlowFile.assertAttributeNotExists("Content-Type");
    Assert.assertEquals("", new String(responseFlowFile.toByteArray(), StandardCharsets.UTF_8));
}
@Test
public void testPatch() throws Exception {
    // A PATCH with a body succeeds; the response flow file is empty (the mock reply
    // has no body) and carries no Content-Type attribute.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/patch");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "PATCH");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE, 0);

    // Request flow file keeps its original content/attributes.
    final MockFlowFile requestFlowFile = runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ).get(0);
    requestFlowFile.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    requestFlowFile.assertAttributeEquals("Foo", "Bar");

    // Response flow file: empty body, no Content-Type attribute.
    final MockFlowFile responseFlowFile = runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE).get(0);
    responseFlowFile.assertContentEquals("".getBytes(StandardCharsets.UTF_8));
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Foo", "Bar");
    responseFlowFile.assertAttributeNotExists("Content-Type");
    Assert.assertEquals("", new String(responseFlowFile.toByteArray(), StandardCharsets.UTF_8));
}
@Test
public void testPatchWithMimeType() {
    // PATCH where the flow file's mime.type attribute supplies the request content type.
    final String contentType = "text/plain";
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", contentType));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/patch");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "PATCH");
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), contentType);
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
}
@Test
public void testPatchWithEmptyELExpression() {
    // PATCH where the mime.type attribute is empty: the processor should still
    // succeed, falling back to its default content type.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", InvokeAWSGatewayApi.DEFAULT_CONTENT_TYPE));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/patch");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "PATCH");
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), "");
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
}
@Test
public void testPatchWithContentTypeProperty() {
    // PATCH where the processor's Content-Type property overrides the flow file's
    // mime.type attribute (text/csv here).
    final String contentType = "text/plain";
    mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", contentType));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/patch");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "PATCH");
    runner.setProperty(InvokeAWSGatewayApi.PROP_CONTENT_TYPE, contentType);
    final Map<String, String> requestAttributes = new HashMap<>();
    requestAttributes.put(CoreAttributes.MIME_TYPE.key(), "text/csv");
    runner.enqueue("Hello".getBytes(), requestAttributes);
    runner.run(1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
}
@Test
public void testDelete() throws Exception {
    // A DELETE succeeds; the response flow file is empty because the mock reply has
    // no body.
    mockWebServer.enqueue(new MockResponse().setResponseCode(200));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "DELETE");
    createFlowFiles(runner);
    runner.run();

    runner.assertPenalizeCount(0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE, 0);

    // Request flow file keeps its original content/attributes.
    final MockFlowFile requestFlowFile = runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ).get(0);
    requestFlowFile.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    requestFlowFile.assertAttributeEquals("Foo", "Bar");

    // Response flow file: empty body, status attributes present.
    final MockFlowFile responseFlowFile = runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE).get(0);
    responseFlowFile.assertContentEquals("".getBytes(StandardCharsets.UTF_8));
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Foo", "Bar");
    Assert.assertEquals("", new String(responseFlowFile.toByteArray(), StandardCharsets.UTF_8));
}
@Test
public void testOptions() throws Exception {
    mockWebServer.enqueue(new MockResponse().setResponseCode(200));
    setupEndpointAndRegion();
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
    runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "OPTIONS");
    createFlowFiles(runner);
    runner.run();

    // One request and one response flow file; nothing retried, failed, or penalized.
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE, 0);
    runner.assertPenalizeCount(0);

    // Original request flow file: unchanged body, status attributes added.
    final MockFlowFile requestFlowFile =
            runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ).get(0);
    requestFlowFile.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    requestFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    requestFlowFile.assertAttributeEquals("Foo", "Bar");

    // Response flow file: status attributes copied through.
    final MockFlowFile responseFlowFile =
            runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE).get(0);
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
    responseFlowFile.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
    responseFlowFile.assertAttributeEquals("Foo", "Bar");
}
@Test
public void testSendAttributes() throws Exception {
enqueueSuccess();
setupEndpointAndRegion();
runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
// Only attributes whose names match this regex ("Foo" does, mime.type does not)
// should be forwarded as request headers.
runner.setProperty(InvokeAWSGatewayApi.PROP_ATTRIBUTES_TO_SEND, "F.*");
// Dynamic (user-defined) processor properties should also become request headers.
final String dynamicValue = "testing";
runner.setProperty("dynamicHeader", dynamicValue);
createFlowFiles(runner);
runner.run();
runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 1);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 1);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 0);
runner.assertPenalizeCount(0);
//expected in request status.code and status.message
//original flow file (+attributes)
final MockFlowFile bundle = runner
.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME).get(0);
bundle.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
bundle.assertAttributeEquals("Foo", "Bar");
//expected in response
//status code, status message, all headers from server response --> ff attributes
//server response message body into payload of ff
final MockFlowFile bundle1 = runner
.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE_NAME).get(0);
bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
bundle1.assertAttributeEquals("Foo", "Bar");
bundle1.assertAttributeEquals("Content-Type", APPLICATION_JSON);
// Verify on the wire that the dynamic property actually arrived as a header.
final RecordedRequest recordedRequest = mockWebServer.takeRequest();
assertEquals(dynamicValue, recordedRequest.getHeader("dynamicHeader"));
}
@Test
public void testReadTimeout() {
// Server delays its headers for 1 s while the processor timeout is 500 ms,
// so the request must fail with a read timeout.
mockWebServer.enqueue(new MockResponse().setResponseCode(200).setHeadersDelay(1, TimeUnit.SECONDS));
setupEndpointAndRegion();
runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
runner.setProperty(InvokeAWSGatewayApi.TIMEOUT, "500 ms");
createFlowFiles(runner);
runner.run();
// Timeout routes the flow file to failure and penalizes it; no success/response.
runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 1);
runner.assertPenalizeCount(1);
// The failed flow file keeps its original body and attributes.
final MockFlowFile bundle = runner
.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_FAILURE_NAME).get(0);
final String actual = new String(bundle.toByteArray(), StandardCharsets.UTF_8);
final String expected = "Hello";
Assert.assertEquals(expected, actual);
bundle.assertAttributeEquals("Foo", "Bar");
}
@Test
public void testConnectFailBadPort() {
    setupEndpointAndRegion();
    // Point the processor at a port nothing is listening on so the connection fails.
    final String unreachableEndpoint = "http://localhost:" + 445;
    runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_GATEWAY_API_ENDPOINT, unreachableEndpoint);
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/doesnotExist");
    runner.setProperty(InvokeAWSGatewayApi.TIMEOUT, "1 sec");
    createFlowFiles(runner);
    runner.run();

    // Connection failure: flow file goes to failure and is penalized.
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 1);
    runner.assertPenalizeCount(1);

    // The failed flow file retains its original body and attributes.
    final MockFlowFile failedFlowFile =
            runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_FAILURE_NAME).get(0);
    final String failedBody = new String(failedFlowFile.toByteArray(), StandardCharsets.UTF_8);
    Assert.assertEquals("Hello", failedBody);
    failedFlowFile.assertAttributeEquals("Foo", "Bar");
}
@Test
public void testConnectFailBadHost() {
    setupEndpointAndRegion();
    // An unresolvable hostname must produce a connection failure.
    final String unresolvableEndpoint = "http://localhOOst:" + 445;
    runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_GATEWAY_API_ENDPOINT, unresolvableEndpoint);
    runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/doesnotExist");
    runner.setProperty(InvokeAWSGatewayApi.TIMEOUT, "1 sec");
    createFlowFiles(runner);
    runner.run();

    // Failure routing only; the flow file is penalized.
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY_NAME, 0);
    runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE_NAME, 1);
    runner.assertPenalizeCount(1);

    // The failed flow file retains its original body and attributes.
    final MockFlowFile failedFlowFile =
            runner.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_FAILURE_NAME).get(0);
    final String failedBody = new String(failedFlowFile.toByteArray(), StandardCharsets.UTF_8);
    Assert.assertEquals("Hello", failedBody);
    failedFlowFile.assertAttributeEquals("Foo", "Bar");
}
@Test
public void testArbitraryRequestFailsValidation() {
setupEndpointAndRegion();
runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
// "FETCH" is not a supported HTTP method, so processor validation must reject it
// and TestRunner.run() must throw an AssertionError before any request is made.
runner.setProperty(InvokeAWSGatewayApi.PROP_METHOD, "FETCH");
createFlowFiles(runner);
assertThrows(AssertionError.class, runner::run);
}
@Test
public void testProxy() throws Exception {
final String contentType = "text/plain;charset=iso-8859-1";
mockWebServer.enqueue(new MockResponse().setResponseCode(200).addHeader("Content-Type", contentType));
setupEndpointAndRegion();
// The mock web server plays the role of the HTTP proxy; the endpoint itself is
// an external URL that is only reachable through that proxy in this test setup.
URL proxyURL = mockWebServer.url("/").url();
// Proxy settings are supplied via expression-language variables to exercise EL support.
runner.setVariable("proxy.host", proxyURL.getHost());
runner.setVariable("proxy.port", String.valueOf(proxyURL.getPort()));
runner.setVariable("proxy.username", "username");
runner.setVariable("proxy.password", "password");
runner.setProperty(InvokeAWSGatewayApi.PROP_AWS_GATEWAY_API_ENDPOINT, "http://nifi.apache.org/");
runner.setProperty(InvokeAWSGatewayApi.PROP_RESOURCE_NAME, "/status/200");
runner.setProperty(InvokeAWSGatewayApi.PROXY_HOST, "${proxy.host}");
runner.setProperty(InvokeAWSGatewayApi.PROXY_HOST_PORT, "${proxy.port}");
// Username without a password must leave the processor invalid.
runner.setProperty(InvokeAWSGatewayApi.PROXY_USERNAME, "${proxy.username}");
runner.assertNotValid();
runner.setProperty(InvokeAWSGatewayApi.PROXY_PASSWORD, "${proxy.password}");
createFlowFiles(runner);
runner.run();
runner.assertTransferCount(InvokeAWSGatewayApi.REL_SUCCESS_REQ, 1);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_RESPONSE, 1);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_RETRY, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_NO_RETRY, 0);
runner.assertTransferCount(InvokeAWSGatewayApi.REL_FAILURE, 0);
runner.assertPenalizeCount(0);
//expected in request status.code and status.message
//original flow file (+attributes)
final MockFlowFile bundle = runner
.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_SUCCESS_REQ).get(0);
bundle.assertContentEquals("Hello".getBytes(StandardCharsets.UTF_8));
bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
bundle.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
bundle.assertAttributeEquals("Foo", "Bar");
//expected in response
//status code, status message, all headers from server response --> ff attributes
//server response message body into payload of ff
final MockFlowFile bundle1 = runner
.getFlowFilesForRelationship(InvokeAWSGatewayApi.REL_RESPONSE).get(0);
bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_CODE, "200");
bundle1.assertAttributeEquals(InvokeAWSGatewayApi.STATUS_MESSAGE, "OK");
bundle1.assertAttributeEquals("Foo", "Bar");
bundle1.assertAttributeEquals("Content-Type", contentType);
}
/**
 * Enqueues the standard test flow file: body "Hello" with a text/plain
 * mime type and a "Foo" = "Bar" attribute.
 */
public static void createFlowFiles(final TestRunner testRunner) {
    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain");
    flowFileAttributes.put("Foo", "Bar");
    testRunner.enqueue("Hello".getBytes(StandardCharsets.UTF_8), flowFileAttributes);
}
}
| |
import com.cycling74.max.*;
import com.cycling74.msp.MSPBuffer;
import core.*;
import effects.*;
/**
 * Max/MSP external implementing statistical concatenative synthesis:
 * corpus audio is segmented into fixed-size units, stored in a feature
 * database, then recombined unit-by-unit to approximate a target buffer.
 *
 * NOTE(review): all state lives in static fields that are (re)assigned from
 * the instance constructor, so only a single Statconcat instance per JVM
 * behaves predictably — confirm this matches the patch's usage.
 */
public class Statconcat extends MaxObject {
    private static int unitsize;            // samples per unit (default 1024)
    private static int position;            // index of the unit currently being assembled
    private static int windowtype;          // window selector passed to Window/Corpus
    private static boolean normalize;       // normalize output amplitude after synthesis
    private static boolean resynthesis;     // true -> markov mosaic, false -> plain mosaic
    private static float[] weights;         // 7 feature weights for unit matching
    private static int[] markov;            // 2 parameters for markov_mosaic
    private static boolean[] corpusprint;   // debug-print flags for corpus analysis (4)
    private static boolean[] targetprint;   // debug-print flags for target analysis (5)
    private static boolean[] mosaicprint;   // debug-print flags for mosaicking (5)
    private static boolean[] databaseprint; // debug-print flags for the database (2)
    private static boolean[] clusterprint;  // debug-print flags for clustering (2)
    private static float[] buffer;          // assembled output signal
    private static Corpus corpus;
    private static Database database;
    private static Target target;
    private static Window window;
    private static AmplitudeFX ampFX;
    private static GranularFX grnFX;

    public Statconcat() {
        database = new Database();
        window = new Window();
        ampFX = new AmplitudeFX();
        grnFX = new GranularFX();
        corpusprint = new boolean[4];
        targetprint = new boolean[5];
        mosaicprint = new boolean[5];
        databaseprint = new boolean[2];
        clusterprint = new boolean[2];
        weights = new float[7];
        markov = new int[2];
        unitsize = 1024;
        position = 0;
        normalize = true;
        // One signal inlet, one signal outlet, plus an info outlet.
        declareTypedIO("a", "a");
        createInfoOutlet(false);
    }

    /** "weights" message: copies the 7 feature weights used for unit matching. */
    public void weights(Atom[] w) {
        for (int i = 0; i < weights.length; i++) {
            weights[i] = w[i].getFloat();
        }
    }

    /** "markov" message: copies the 2 markov-chain parameters. */
    public void markov(Atom[] w) {
        for (int i = 0; i < markov.length; i++) {
            markov[i] = w[i].getInt();
        }
    }

    /**
     * Copies int atoms into a boolean flag array (non-zero -> true).
     * Shared by the five *print messages below, which previously duplicated
     * this loop verbatim.
     */
    private static void readFlags(Atom[] w, boolean[] flags) {
        for (int i = 0; i < flags.length; i++) {
            flags[i] = w[i].getInt() != 0;
        }
    }

    public void corpusprint(Atom[] w) {
        readFlags(w, corpusprint);
    }

    public void targetprint(Atom[] w) {
        readFlags(w, targetprint);
    }

    public void mosaicprint(Atom[] w) {
        readFlags(w, mosaicprint);
    }

    public void databaseprint(Atom[] w) {
        readFlags(w, databaseprint);
    }

    public void clusterprint(Atom[] w) {
        readFlags(w, clusterprint);
    }

    /** "resynthesis" message: 1 enables markov-based mosaicking. */
    public void resynthesis(float f) {
        resynthesis = (f == 1f);
    }

    /** "window" message: selects the envelope window type. */
    public void window(float f) {
        windowtype = (int) f;
    }

    /** "normalize" message: 1 enables output normalization. */
    public void normalize(float f) {
        normalize = (f == 1f);
    }

    /** "reset" message: clears the unit database and reports the new size. */
    public void reset() {
        database.removeAll();
        pd_prt_databaseSize();
    }

    /** "unitsize" message: sets the per-unit length in samples. */
    public void unitsize(float f) {
        unitsize = (int) f;
    }

    /**
     * "extract" message: analyses the named buffer into units and stores them
     * in the database, then asks the patch for the next corpus entry.
     */
    public void extract(String buffer_name, String filename) throws Exception {
        database.storeAll(new Corpus(MSPBuffer.peek(buffer_name),
                filename,
                unitsize,
                windowtype,
                corpusprint));
        pd_prt_databaseSize();
        MaxSystem.post("Completed Analysis: " + filename);
        MaxSystem.sendMessageToBoundObject("search_next_set", "bang", null);
    }

    /**
     * "mosaic" message: analyses the target buffer, matches database units
     * against it (markov or plain, per the resynthesis flag), and starts the
     * incremental build.
     */
    public void mosaic(String buffer_name, String filename) throws Exception {
        MaxSystem.post("Start: " + filename);
        corpus = new Corpus(MSPBuffer.peek(buffer_name),
                filename,
                unitsize,
                windowtype,
                targetprint);
        target = new Target(corpus, database, databaseprint);
        // resynthesis is private to this class and cannot change between the
        // two checks, so a single if/else replaces the original pair of ifs.
        if (resynthesis) {
            target.markov_mosaic(markov[0], markov[1], weights, clusterprint);
        } else {
            target.mosaic(weights);
        }
        buffer = new float[target.getbuffersize()];
        position = 0;
        Atom[] a = {Atom.newAtom(true)};
        build(a);
    }

    /**
     * Builds the mosaic one unit at a time. A 3-atom message carries a unit
     * (buffer name, start, end) to splice into the output; a 1-atom message
     * just advances to the next unit. Reaching the final unit triggers the
     * resynthesis pass.
     */
    public void build(Atom[] a) {
        if (position != target.getlength() - 1) {
            if (position < target.getlength() && a.length == 3) {
                jv_dsp_concatenate(a);
                outlet(0, pd_sym_unitFileLocation());
            } else if (a.length == 1) {
                outlet(0, pd_sym_unitFileLocation());
            }
        } else {
            pd_dsp_resynthesis();
        }
    }

    /** Windows the referenced buffer segment and copies it into the output at the current position. */
    private void jv_dsp_concatenate(Atom[] a) {
        String name = a[0].getString();
        int start = a[1].getInt();
        int end = a[2].getInt();
        float[] grain = window.envelope(MSPBuffer.peek(name, 1, start, end - start), windowtype);
        int base = position * unitsize;
        for (int i = 0; i < unitsize; i++) {
            buffer[base + i] = grain[i];
        }
    }

    /** Applies the granular offset, optionally normalizes, and writes the "mosaic" buffer. */
    private void pd_dsp_resynthesis() {
        buffer = grnFX.offset(buffer, unitsize);
        if (normalize) {
            buffer = ampFX.normalize(buffer);
        }
        target.removeall();
        MSPBuffer.setSize("mosaic", 1, buffer.length);
        MSPBuffer.poke("mosaic", buffer);
        MaxSystem.post("Statistical Concatenative Synthesis Complete!");
    }

    /** Advances to the next unit and returns its (filename, start, end) as atoms. */
    private Atom[] pd_sym_unitFileLocation() {
        position += 1;
        Unit u = target.getunitat(position);
        Atom[] b = {
            Atom.newAtom(u.getFilename()),
            Atom.newAtom(u.start()),
            Atom.newAtom(u.end())
        };
        return b;
    }

    /** Reports the current database size to the bound "units_set" object. */
    private void pd_prt_databaseSize() {
        Atom[] a = {Atom.newAtom(database.size())};
        MaxSystem.sendMessageToBoundObject("units_set", "float", a);
    }
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kns.web.struts.action;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.actions.DispatchAction;
import org.kuali.rice.coreservice.framework.CoreFrameworkServiceLocator;
import org.kuali.rice.coreservice.framework.parameter.ParameterService;
import org.kuali.rice.core.api.CoreApiServiceLocator;
import org.kuali.rice.core.api.encryption.EncryptionService;
import org.kuali.rice.core.api.util.RiceConstants;
import org.kuali.rice.kew.api.KewApiConstants;
import org.kuali.rice.kim.api.KimConstants;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;
import org.kuali.rice.kns.document.authorization.DocumentAuthorizerBase;
import org.kuali.rice.kns.lookup.LookupUtils;
import org.kuali.rice.kns.service.BusinessObjectAuthorizationService;
import org.kuali.rice.kns.service.KNSServiceLocator;
import org.kuali.rice.kns.util.KNSGlobalVariables;
import org.kuali.rice.kns.util.WebUtils;
import org.kuali.rice.kns.web.struts.form.KualiDocumentFormBase;
import org.kuali.rice.kns.web.struts.form.KualiForm;
import org.kuali.rice.kns.web.struts.form.LookupForm;
import org.kuali.rice.kns.web.struts.form.pojo.PojoForm;
import org.kuali.rice.kns.web.struts.form.pojo.PojoFormBase;
import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.rice.krad.exception.AuthorizationException;
import org.kuali.rice.krad.service.KRADServiceLocatorWeb;
import org.kuali.rice.krad.service.KualiModuleService;
import org.kuali.rice.krad.service.ModuleService;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.KRADConstants;
import org.kuali.rice.krad.util.KRADUtils;
import org.kuali.rice.krad.util.UrlFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* <p>The base {@link org.apache.struts.action.Action} class for all KNS-based Actions. Extends from the standard
* {@link org.apache.struts.actions.DispatchAction} which allows for a <i>methodToCall</i> request parameter to
* be used to indicate which method to invoke.</p>
*
* <p>This Action overrides #execute to set methodToCall for image submits. Also performs other setup
* required for KNS framework calls.</p>
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*
* @deprecated KNS Struts deprecated, use KRAD and the Spring MVC framework.
*/
@Deprecated
public abstract class KualiAction extends DispatchAction {
private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(KualiAction.class);
// Lazily-initialized static caches of shared services, populated on first use.
private static KualiModuleService kualiModuleService = null;
private static BusinessObjectAuthorizationService businessObjectAuthorizationService = null;
private static EncryptionService encryptionService = null;
// Cached system-parameter flag; null until first read. NOTE(review): presumably
// controls warning output around encrypted field values -- confirm against the
// parameter lookup later in this class.
private static Boolean OUTPUT_ENCRYPTION_WARNING = null;
private static String applicationBaseUrl = null;
// methodToCall values that execute() dispatches WITHOUT the per-module
// authorization check (see checkAuthorization call in execute()).
private Set<String> methodToCallsToNotCheckAuthorization = new HashSet<String>();
{
methodToCallsToNotCheckAuthorization.add( "performLookup" );
methodToCallsToNotCheckAuthorization.add( "performQuestion" );
methodToCallsToNotCheckAuthorization.add( "performQuestionWithInput" );
methodToCallsToNotCheckAuthorization.add( "performQuestionWithInputAgainBecauseOfErrors" );
methodToCallsToNotCheckAuthorization.add( "performQuestionWithoutInput" );
methodToCallsToNotCheckAuthorization.add( "performWorkgroupLookup" );
}
/**
* Entry point to all actions.
*
* NOTE: No need to hook into execute for handling framework setup anymore. Just implement the methodToCall for the framework
* setup, Constants.METHOD_REQUEST_PARAMETER will contain the full parameter, which can be sub stringed for getting framework
* parameters.
*
* @see org.apache.struts.action.Action#execute(org.apache.struts.action.ActionMapping, org.apache.struts.action.ActionForm,
* javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
*/
public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
ActionForward returnForward = null;
String methodToCall = findMethodToCall(form, request);
// If the owning module is locked, short-circuit straight to the module-locked page.
if(isModuleLocked(form, methodToCall, request)) {
return mapping.findForward(RiceConstants.MODULE_LOCKED_MAPPING);
}
// Restore the page anchor so the browser returns to where the user was:
// prefer the anchor embedded in the image-submit context, then the plain
// request parameter, else default to the top of the form.
if (form instanceof KualiForm && StringUtils.isNotEmpty(((KualiForm) form).getMethodToCall())) {
if (StringUtils.isNotBlank(getImageContext(request, KRADConstants.ANCHOR))) {
((KualiForm) form).setAnchor(getImageContext(request, KRADConstants.ANCHOR));
}
else if (StringUtils.isNotBlank(request.getParameter(KRADConstants.ANCHOR))) {
((KualiForm) form).setAnchor(request.getParameter(KRADConstants.ANCHOR));
}
else {
((KualiForm) form).setAnchor(KRADConstants.ANCHOR_TOP_OF_FORM);
}
}
// if found methodToCall, pass control to that method, else return the basic forward
if (StringUtils.isNotBlank(methodToCall)) {
if ( LOG.isDebugEnabled() ) {
LOG.debug("methodToCall: '" + methodToCall+"'");
}
returnForward = dispatchMethod(mapping, form, request, response, methodToCall);
// Redirects to the KRAD initiated-document view bypass the authorization
// check below and return immediately.
if ( returnForward!=null && returnForward.getRedirect() && returnForward.getName()!=null && returnForward.getName().equals(KRADConstants.KRAD_INITIATED_DOCUMENT_VIEW_NAME)) {
return returnForward;
}
}
else {
returnForward = defaultDispatch(mapping, form, request, response);
}
// make sure the user can do what they're trying to according to the module that owns the functionality
// NOTE: authorization is checked AFTER dispatch; methods in
// methodToCallsToNotCheckAuthorization (e.g. performLookup) are exempt.
if ( !methodToCallsToNotCheckAuthorization.contains(methodToCall) ) {
if ( LOG.isDebugEnabled() ) {
LOG.debug( "'" + methodToCall + "' not in set of excempt methods: " + methodToCallsToNotCheckAuthorization);
}
checkAuthorization(form, methodToCall);
} else {
if ( LOG.isDebugEnabled() ) {
LOG.debug("'" + methodToCall + "' is exempt from auth checks." );
}
}
// Add the ActionForm to GlobalVariables
// This will allow developers to retrieve both the Document and any request parameters that are not
// part of the Form and make them available in ValueFinder classes and other places where they are needed.
if(KNSGlobalVariables.getKualiForm() == null) {
KNSGlobalVariables.setKualiForm((KualiForm)form);
}
return returnForward;
}
/**
* When no methodToCall is specified, the defaultDispatch method is invoked. Default implementation
* returns the "basic" ActionForward.
*/
// Invoked by execute() when no methodToCall was found; subclasses may override.
protected ActionForward defaultDispatch(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
@Override
protected ActionForward dispatchMethod(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, String methodToCall) throws Exception {
// Record the method being dispatched on the user session (under the key defined
// by DocumentAuthorizerBase) before delegating to the Struts DispatchAction.
GlobalVariables.getUserSession().addObject(DocumentAuthorizerBase.USER_SESSION_METHOD_TO_CALL_OBJECT_KEY, methodToCall);
return super.dispatchMethod(mapping, form, request, response, methodToCall);
}
/**
 * Resolves the methodToCall for this request: the value carried on the form
 * wins; otherwise it is parsed out of the request by WebUtils.
 */
protected String findMethodToCall(ActionForm form, HttpServletRequest request) throws Exception {
    if (form instanceof KualiForm && StringUtils.isNotEmpty(((KualiForm) form).getMethodToCall())) {
        return ((KualiForm) form).getMethodToCall();
    }
    // call utility method to parse the methodToCall from the request.
    return WebUtils.parseMethodToCall(form, request);
}
/**
* Toggles the tab state in the ui
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
/**
 * Toggles the requested tab between OPEN and CLOSE in the form's tab-state map.
 */
public ActionForward toggleTab(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
    KualiForm kualiForm = (KualiForm) form;
    String tabToToggle = getTabToToggle(request);
    if (StringUtils.isNotBlank(tabToToggle)) {
        // Flip OPEN <-> CLOSE for the requested tab, then re-insert its entry.
        final String flippedState = kualiForm.getTabState(tabToToggle).equals(KualiForm.TabState.OPEN.name())
                ? KualiForm.TabState.CLOSE.name()
                : KualiForm.TabState.OPEN.name();
        kualiForm.getTabStates().remove(tabToToggle);
        kualiForm.getTabStates().put(tabToToggle, flippedState);
    }
    doProcessingAfterPost(kualiForm, request);
    return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
/**
* Toggles all tabs to open
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
// Delegates to doTabOpenOrClose with open=true to open every tab.
public ActionForward showAllTabs(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
return this.doTabOpenOrClose(mapping, form, request, response, true);
}
/**
* Toggles all tabs to closed
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
// Delegates to doTabOpenOrClose with open=false to close every tab.
public ActionForward hideAllTabs(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
return this.doTabOpenOrClose(mapping, form, request, response, false);
}
/**
*
* Toggles all tabs to open or closed depending on the boolean flag.
*
* @param mapping the mapping
* @param form the form
* @param request the request
* @param response the response
* @param open whether to open or close the tabs
* @return the action forward
*/
private ActionForward doTabOpenOrClose(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, boolean open) {
    KualiForm kualiForm = (KualiForm) form;
    // Use the TabState enum names instead of the bare "OPEN"/"CLOSE" literals the
    // original used, for consistency with toggleTab; Enum.name() returns exactly
    // the constant name, so the stored values are unchanged.
    final String targetState = open ? KualiForm.TabState.OPEN.name() : KualiForm.TabState.CLOSE.name();
    Map<String, String> tabStates = kualiForm.getTabStates();
    // Build a fresh map with every known tab forced to the target state.
    Map<String, String> newTabStates = new HashMap<String, String>();
    for (String tabKey : tabStates.keySet()) {
        newTabStates.put(tabKey, targetState);
    }
    kualiForm.setTabStates(newTabStates);
    doProcessingAfterPost(kualiForm, request);
    return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
/**
* Default refresh method. Called from returning frameworks.
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
// Default refresh target for returning lookup/inquiry frameworks; simply re-renders.
public ActionForward refresh(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
/**
* Parses the method to call attribute to pick off the line number which should be deleted.
*
* @param request
* @return
*/
// Alias of getSelectedLine for delete actions; returns -1 when no line is encoded.
protected int getLineToDelete(HttpServletRequest request) {
return getSelectedLine(request);
}
/**
* Parses the method to call attribute to pick off the line number which should be edited.
*
* @param request
* @return
*/
// Alias of getSelectedLine for edit actions; returns -1 when no line is encoded.
protected int getLineToEdit(HttpServletRequest request) {
return getSelectedLine(request);
}
/**
* Parses the method to call attribute to pick off the line number which should have an action performed on it.
*
* @param request
* @return
*/
/**
 * Extracts the line number encoded as ".line<N>." in the methodToCall request
 * attribute. Returns -1 when no line number is present.
 */
protected int getSelectedLine(HttpServletRequest request) {
    final String methodToCallAttribute = (String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
    if (StringUtils.isBlank(methodToCallAttribute)) {
        return -1;
    }
    final String lineNumber = StringUtils.substringBetween(methodToCallAttribute, ".line", ".");
    if (StringUtils.isEmpty(lineNumber)) {
        return -1;
    }
    return Integer.parseInt(lineNumber);
}
/**
* Determines which tab was requested to be toggled
*
* @param request
* @return
*/
/**
 * Extracts the tab id encoded as ".tab<ID>." in the methodToCall request
 * attribute; "" when the attribute is blank (may be null when the delimiters
 * are absent, matching StringUtils.substringBetween).
 */
protected String getTabToToggle(HttpServletRequest request) {
    final String methodToCallAttribute = (String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
    if (StringUtils.isBlank(methodToCallAttribute)) {
        return "";
    }
    return StringUtils.substringBetween(methodToCallAttribute, ".tab", ".");
}
/**
* Retrieves the header tab to navigate to.
*
* @param request
* @return
*/
/**
 * Resolves the header-tab navigation target from the image context, falling
 * back to the basic mapping when none is present.
 */
protected String getHeaderTabNavigateTo(HttpServletRequest request) {
    final String imageContext = getImageContext(request, KRADConstants.NAVIGATE_TO);
    return StringUtils.isNotBlank(imageContext) ? imageContext : RiceConstants.MAPPING_BASIC;
}
/**
* Retrieves the header tab dispatch.
*
* @param request
* @return
*/
/**
 * Resolves the header-tab dispatch method: the image context wins; otherwise
 * the methodToCall request parameter is used (may be null).
 */
protected String getHeaderTabDispatch(HttpServletRequest request) {
    final String imageContext = getImageContext(request, KRADConstants.HEADER_DISPATCH);
    if (StringUtils.isNotBlank(imageContext)) {
        return imageContext;
    }
    // In some cases it might be in request params instead
    return request.getParameter(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
}
/**
* Retrieves the image context
*
* @param request
* @param contextKey
* @return
*/
/**
 * Pulls the value encoded between contextKey and the next "." out of the
 * methodToCall attribute (or, failing that, the methodToCallPath parameter).
 * Returns "" when neither source is present; may return null when the
 * delimiters do not match (StringUtils.substringBetween semantics).
 */
protected String getImageContext(HttpServletRequest request, String contextKey) {
    String methodToCallSource = (String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
    if (StringUtils.isBlank(methodToCallSource)) {
        methodToCallSource = request.getParameter("methodToCallPath");
    }
    if (StringUtils.isBlank(methodToCallSource)) {
        return "";
    }
    return StringUtils.substringBetween(methodToCallSource, contextKey, ".");
}
/**
 * Builds the absolute return URL for the current mapping; lookup, maintenance
 * and multiple-value-lookup mappings are served under the "/kr" context.
 */
protected String getReturnLocation(HttpServletRequest request, ActionMapping mapping) {
    final String mappingPath = mapping.getPath();
    final boolean underKrContext = "/lookup".equals(mappingPath)
            || "/maintenance".equals(mappingPath)
            || "/multipleValueLookup".equals(mappingPath);
    final String contextPrefix = underKrContext ? "/kr" : "";
    return getApplicationBaseUrl() + contextPrefix + mappingPath + ".do";
}
/**
* Retrieves the value of a parameter to be passed into the lookup or inquiry frameworks. The default implementation of this method will attempt to look
* in the request to determine whether the appropriate value exists as a request parameter. If not, it will attempt to look through the form object to find
* the property.
*
* @param boClass a class implementing boClass, representing the BO that will be looked up
* @param parameterName the name of the parameter
* @param parameterValuePropertyName the property (relative to the form object) where the value to be passed into the lookup/inquiry may be found
* @param form
* @param request
* @return
*/
protected String retrieveLookupParameterValue(Class<? extends BusinessObject> boClass, String parameterName, String parameterValuePropertyName, ActionForm form, HttpServletRequest request) throws Exception {
String value;
// A quoted name ('literal') is treated as a literal value, not a property path.
if (StringUtils.contains(parameterValuePropertyName, "'")) {
value = StringUtils.replace(parameterValuePropertyName, "'", "");
} else if (request.getParameterMap().containsKey(parameterValuePropertyName)) {
// Plain request parameter takes precedence over form lookup.
value = request.getParameter(parameterValuePropertyName);
} else if (request.getParameterMap().containsKey(KewApiConstants.DOCUMENT_ATTRIBUTE_FIELD_PREFIX + parameterValuePropertyName)) {
// Document-search forms prefix their attribute fields; check that variant too.
value = request.getParameter(KewApiConstants.DOCUMENT_ATTRIBUTE_FIELD_PREFIX + parameterValuePropertyName);
} else {
// Fall back to resolving the property against the form itself.
if (form instanceof KualiForm) {
value = ((KualiForm) form).retrieveFormValueForLookupInquiryParameters(parameterName, parameterValuePropertyName);
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Unable to retrieve lookup/inquiry parameter value for parameter name " + parameterName + " parameter value property " + parameterValuePropertyName);
}
value = null;
}
}
// Security: never pass along values that must be encrypted on forms/links.
if (value != null && boClass != null && getBusinessObjectAuthorizationService().attributeValueNeedsToBeEncryptedOnFormsAndLinks(boClass, parameterName)) {
LOG.warn("field name " + parameterName + " is a secure value and not returned in parameter result value");
value = null;
}
return value;
}
/**
* Takes care of storing the action form in the User session and forwarding to the lookup action.
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
@SuppressWarnings("unchecked")
public ActionForward performLookup(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
// parse out the important strings from our methodToCall parameter
String fullParameter = (String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
validateLookupInquiryFullParameter(request, form, fullParameter);
KualiForm kualiForm = (KualiForm) form;
// when we return from the lookup, our next request's method to call is going to be refresh
kualiForm.registerEditableProperty(KRADConstants.DISPATCH_REQUEST_PARAMETER);
// parse out the baseLookupUrl if there is one
String baseLookupUrl = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM14_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM14_RIGHT_DEL);
// parse out business object class name for lookup
String boClassName = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_BOPARM_LEFT_DEL, KRADConstants.METHOD_TO_CALL_BOPARM_RIGHT_DEL);
if (StringUtils.isBlank(boClassName)) {
throw new RuntimeException("Illegal call to perform lookup, no business object class name specified.");
}
Class boClass = null;
try{
boClass = Class.forName(boClassName);
} catch(ClassNotFoundException cnfex){
if ((StringUtils.isNotEmpty(baseLookupUrl) && baseLookupUrl.startsWith(getApplicationBaseUrl() + "/kr/"))
|| StringUtils.isEmpty(baseLookupUrl)) {
throw new IllegalArgumentException("The class (" + boClassName + ") cannot be found by this particular "
+ "application. " + "ApplicationBaseUrl: " + getApplicationBaseUrl()
+ " ; baseLookupUrl: " + baseLookupUrl);
} else {
LOG.info("The class (" + boClassName + ") cannot be found by this particular application. "
+ "ApplicationBaseUrl: " + getApplicationBaseUrl() + " ; baseLookupUrl: " + baseLookupUrl);
}
}
// build the parameters for the lookup url
Properties parameters = new Properties();
String conversionFields = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM1_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM1_RIGHT_DEL);
if (StringUtils.isNotBlank(conversionFields)) {
parameters.put(KRADConstants.CONVERSION_FIELDS_PARAMETER, conversionFields);
// register each of the destination parameters of the field conversion string as editable
String[] fieldConversions = conversionFields.split(KRADConstants.FIELD_CONVERSIONS_SEPARATOR);
for (String fieldConversion : fieldConversions) {
String destination = fieldConversion.split(KRADConstants.FIELD_CONVERSION_PAIR_SEPARATOR, 2)[1];
kualiForm.registerEditableProperty(destination);
}
}
// pass values from form that should be pre-populated on lookup search
String parameterFields = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM2_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM2_RIGHT_DEL);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "fullParameter: " + fullParameter );
LOG.debug( "parameterFields: " + parameterFields );
}
if (StringUtils.isNotBlank(parameterFields)) {
String[] lookupParams = parameterFields.split(KRADConstants.FIELD_CONVERSIONS_SEPARATOR);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "lookupParams: " + Arrays.toString(lookupParams) );
}
for (String lookupParam : lookupParams) {
String[] keyValue = lookupParam.split(KRADConstants.FIELD_CONVERSION_PAIR_SEPARATOR, 2);
if (keyValue.length != 2) {
throw new RuntimeException("malformed field conversion pair: " + Arrays.toString(keyValue));
}
String lookupParameterValue = retrieveLookupParameterValue(boClass, keyValue[1], keyValue[0], form, request);
if (StringUtils.isNotBlank(lookupParameterValue)) {
parameters.put(keyValue[1], lookupParameterValue);
}
if ( LOG.isDebugEnabled() ) {
LOG.debug( "keyValue[0]: " + keyValue[0] );
LOG.debug( "keyValue[1]: " + keyValue[1] );
}
}
}
// pass values from form that should be read-Only on lookup search
String readOnlyFields = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM8_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM8_RIGHT_DEL);
if (StringUtils.isNotBlank(readOnlyFields)) {
parameters.put(KRADConstants.LOOKUP_READ_ONLY_FIELDS, readOnlyFields);
}
if ( LOG.isDebugEnabled() ) {
LOG.debug( "fullParameter: " + fullParameter );
LOG.debug( "readOnlyFields: " + readOnlyFields );
}
// grab whether or not the "return value" link should be hidden or not
String hideReturnLink = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM3_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM3_RIGHT_DEL);
if (StringUtils.isNotBlank(hideReturnLink)) {
parameters.put(KRADConstants.HIDE_LOOKUP_RETURN_LINK, hideReturnLink);
}
// add the optional extra button source and parameters string
String extraButtonSource = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM4_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM4_RIGHT_DEL);
if (StringUtils.isNotBlank(extraButtonSource)) {
parameters.put(KRADConstants.EXTRA_BUTTON_SOURCE, extraButtonSource);
}
String extraButtonParams = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM5_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM5_RIGHT_DEL);
if (StringUtils.isNotBlank(extraButtonParams)) {
parameters.put(KRADConstants.EXTRA_BUTTON_PARAMS, extraButtonParams);
}
String lookupAction = KRADConstants.LOOKUP_ACTION;
// is this a multi-value return?
boolean isMultipleValue = false;
String multipleValues = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM6_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM6_RIGHT_DEL);
if ((new Boolean(multipleValues).booleanValue())) {
parameters.put(KRADConstants.MULTIPLE_VALUE, multipleValues);
lookupAction = KRADConstants.MULTIPLE_VALUE_LOOKUP_ACTION;
isMultipleValue = true;
}
// the name of the collection being looked up (primarily for multivalue lookups
String lookedUpCollectionName = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM11_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM11_RIGHT_DEL);
if (StringUtils.isNotBlank(lookedUpCollectionName)) {
parameters.put(KRADConstants.LOOKED_UP_COLLECTION_NAME, lookedUpCollectionName);
}
// grab whether or not the "suppress actions" column should be hidden or not
String suppressActions = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM7_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM7_RIGHT_DEL);
if (StringUtils.isNotBlank(suppressActions)) {
parameters.put(KRADConstants.SUPPRESS_ACTIONS, suppressActions);
}
// grab the references that should be refreshed upon returning from the lookup
String referencesToRefresh = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM10_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM10_RIGHT_DEL);
if (StringUtils.isNotBlank(referencesToRefresh)) {
parameters.put(KRADConstants.REFERENCES_TO_REFRESH, referencesToRefresh);
}
// anchor, if it exists
if (form instanceof KualiForm && StringUtils.isNotEmpty(((KualiForm) form).getAnchor())) {
parameters.put(KRADConstants.LOOKUP_ANCHOR, ((KualiForm) form).getAnchor());
}
// now add required parameters
parameters.put(KRADConstants.DISPATCH_REQUEST_PARAMETER, "start");
// pass value from form that shows if autoSearch is desired for lookup search
String autoSearch = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM9_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM9_RIGHT_DEL);
if (StringUtils.isNotBlank(autoSearch)) {
parameters.put(KRADConstants.LOOKUP_AUTO_SEARCH, autoSearch);
if ("YES".equalsIgnoreCase(autoSearch)){
parameters.put(KRADConstants.DISPATCH_REQUEST_PARAMETER, "search");
}
}
parameters.put(KRADConstants.DOC_FORM_KEY, GlobalVariables.getUserSession().addObjectWithGeneratedKey(form));
parameters.put(KRADConstants.BUSINESS_OBJECT_CLASS_ATTRIBUTE, boClassName);
parameters.put(KRADConstants.RETURN_LOCATION_PARAMETER, getReturnLocation(request, mapping));
if (form instanceof KualiDocumentFormBase) {
String docNum = ((KualiDocumentFormBase) form).getDocument().getDocumentNumber();
if(docNum != null){
parameters.put(KRADConstants.DOC_NUM, docNum);
}
}else if(form instanceof LookupForm){
String docNum = ((LookupForm) form).getDocNum();
if(docNum != null){
parameters.put(KRADConstants.DOC_NUM, ((LookupForm) form).getDocNum());
}
}
if (boClass != null) {
ModuleService responsibleModuleService = getKualiModuleService().getResponsibleModuleService(boClass);
if(responsibleModuleService!=null && responsibleModuleService.isExternalizable(boClass)){
Map<String, String> parameterMap = new HashMap<String, String>();
Enumeration<Object> e = parameters.keys();
while (e.hasMoreElements()) {
String paramName = (String) e.nextElement();
parameterMap.put(paramName, parameters.getProperty(paramName));
}
return new ActionForward(responsibleModuleService.getExternalizableBusinessObjectLookupUrl(boClass, parameterMap), true);
}
}
if (StringUtils.isBlank(baseLookupUrl)) {
baseLookupUrl = getApplicationBaseUrl() + "/kr/" + lookupAction;
} else {
if (isMultipleValue) {
LookupUtils.transformLookupUrlToMultiple(baseLookupUrl);
}
}
String lookupUrl = UrlFactory.parameterizeUrl(baseLookupUrl, parameters);
return new ActionForward(lookupUrl, true);
}
protected void validateLookupInquiryFullParameter(HttpServletRequest request, ActionForm form, String fullParameter){
PojoFormBase pojoFormBase = (PojoFormBase) form;
if(WebUtils.isFormSessionDocument((PojoFormBase) form)){
if(!pojoFormBase.isPropertyEditable(fullParameter)) {
throw new RuntimeException("The methodToCallAttribute is not registered as an editable property.");
}
}
}
@SuppressWarnings("unchecked")
public ActionForward performInquiry(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
// parse out the important strings from our methodToCall parameter
String fullParameter = (String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
validateLookupInquiryFullParameter(request, form, fullParameter);
// when javascript is disabled, the inquiry will appear in the same window as the document. when we close the inquiry,
// our next request's method to call is going to be refresh
KualiForm kualiForm = (KualiForm) form;
kualiForm.registerEditableProperty(KRADConstants.DISPATCH_REQUEST_PARAMETER);
// parse out business object class name for lookup
String boClassName = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_BOPARM_LEFT_DEL, KRADConstants.METHOD_TO_CALL_BOPARM_RIGHT_DEL);
if (StringUtils.isBlank(boClassName)) {
throw new RuntimeException("Illegal call to perform inquiry, no business object class name specified.");
}
// build the parameters for the inquiry url
Properties parameters = new Properties();
parameters.put(KRADConstants.BUSINESS_OBJECT_CLASS_ATTRIBUTE, boClassName);
parameters.put(KRADConstants.RETURN_LOCATION_PARAMETER, getReturnLocation(request, mapping));
// pass values from form that should be pre-populated on inquiry
String parameterFields = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM2_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM2_RIGHT_DEL);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "fullParameter: " + fullParameter );
LOG.debug( "parameterFields: " + parameterFields );
}
if (StringUtils.isNotBlank(parameterFields)) {
// TODO : create a method for this to be used by both lookup & inquiry ?
String[] inquiryParams = parameterFields.split(KRADConstants.FIELD_CONVERSIONS_SEPARATOR);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "inquiryParams: " + inquiryParams );
}
Class<? extends BusinessObject> boClass = (Class<? extends BusinessObject>) Class.forName(boClassName);
for (String inquiryParam : inquiryParams) {
String[] keyValue = inquiryParam.split(KRADConstants.FIELD_CONVERSION_PAIR_SEPARATOR, 2);
String inquiryParameterValue = retrieveLookupParameterValue(boClass, keyValue[1], keyValue[0], form, request);
if (inquiryParameterValue == null) {
parameters.put(keyValue[1], "directInquiryKeyNotSpecified");
}
else {
parameters.put(keyValue[1], inquiryParameterValue);
}
if ( LOG.isDebugEnabled() ) {
LOG.debug( "keyValue[0]: " + keyValue[0] );
LOG.debug( "keyValue[1]: " + keyValue[1] );
}
}
}
parameters.put(KRADConstants.DISPATCH_REQUEST_PARAMETER, "start");
parameters.put(KRADConstants.DOC_FORM_KEY, GlobalVariables.getUserSession().addObjectWithGeneratedKey(form));
String inquiryUrl = null;
try {
Class.forName(boClassName);
inquiryUrl = getApplicationBaseUrl() + "/kr/" + KRADConstants.DIRECT_INQUIRY_ACTION;
} catch ( ClassNotFoundException ex ) {
// allow inquiry url to be null (and therefore no inquiry link will be displayed) but at least log a warning
LOG.warn("Class name does not represent a valid class which this application understands: " + boClassName);
}
inquiryUrl = UrlFactory.parameterizeUrl(inquiryUrl, parameters);
return new ActionForward(inquiryUrl, true);
}
    /**
     * Renders the question component without a reason/input field and without
     * any of the extra error fields. Delegates to performQuestion with
     * showReasonField = false and empty error arguments.
     *
     * @param mapping current action mapping
     * @param form current action form
     * @param request HTTP request
     * @param response HTTP response
     * @param questionId identifier of the question instance
     * @param questionText text displayed to the user
     * @param questionType question implementation/type name
     * @param caller method name to return control to after the question
     * @param context caller-supplied context echoed back on return
     * @return ActionForward redirecting to the question action
     * @throws Exception propagated from performQuestion
     */
    protected ActionForward performQuestionWithoutInput(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, String questionId, String questionText, String questionType, String caller, String context) throws Exception {
        return performQuestion(mapping, form, request, response, questionId, questionText, questionType, caller, context, false, "", "", "", "");
    }
    /**
     * Renders a question prompt that includes the reason/input field. Delegates
     * to performQuestion with showReasonField = true and empty error arguments.
     *
     * @param mapping current action mapping
     * @param form current action form
     * @param request HTTP request
     * @param response HTTP response
     * @param questionId identifier of the question instance
     * @param questionText text displayed to the user
     * @param questionType question implementation/type name
     * @param caller method name to return control to after the question
     * @param context caller-supplied context echoed back on return
     * @return ActionForward redirecting to the question action
     * @throws Exception propagated from performQuestion
     */
    protected ActionForward performQuestionWithInput(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, String questionId, String questionText, String questionType, String caller, String context) throws Exception {
        return performQuestion(mapping, form, request, response, questionId, questionText, questionType, caller, context, true, "", "", "", "");
    }
    /**
     * Re-renders a question prompt (with the reason/input field) because the
     * previous submission failed validation; the error information is passed
     * through so the question page can display it.
     *
     * @param mapping current action mapping
     * @param form current action form
     * @param request HTTP request
     * @param response HTTP response
     * @param questionId identifier of the question instance
     * @param questionText text displayed to the user
     * @param questionType question implementation/type name
     * @param caller method name to return control to after the question
     * @param context caller-supplied context echoed back on return
     * @param reason previously entered reason text, redisplayed to the user
     * @param errorKey message key for the validation error
     * @param errorPropertyName property the error is attached to
     * @param errorParameter parameter substituted into the error message
     * @return ActionForward redirecting to the question action
     * @throws Exception propagated from performQuestion
     */
    protected ActionForward performQuestionWithInputAgainBecauseOfErrors(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, String questionId, String questionText, String questionType, String caller, String context, String reason, String errorKey, String errorPropertyName, String errorParameter) throws Exception {
        return performQuestion(mapping, form, request, response, questionId, questionText, questionType, caller, context, true, reason, errorKey, errorPropertyName, errorParameter);
    }
    /**
     * Renders a question prompt with a specified context: stores the form in the
     * user session, assembles all question parameters, and redirects to the
     * question action.
     *
     * @param mapping current action mapping
     * @param form current action form
     * @param request HTTP request
     * @param response HTTP response
     * @param questionId identifier of the question instance
     * @param questionText text displayed to the user (stored in session, not on the URL)
     * @param questionType question implementation/type name
     * @param caller method name to return control to after the question
     * @param context caller-supplied context echoed back on return
     * @param showReasonField whether to render the reason/input field
     * @param reason previously entered reason text, if any
     * @param errorKey message key for a validation error, if any
     * @param errorPropertyName property a validation error is attached to, if any
     * @param errorParameter parameter substituted into the error message, if any
     * @return ActionForward redirecting to the question action
     * @throws Exception propagated from session/url helpers
     */
    private ActionForward performQuestion(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response, String questionId, String questionText, String questionType, String caller, String context, boolean showReasonField, String reason, String errorKey, String errorPropertyName, String errorParameter) throws Exception {
        Properties parameters = new Properties();
        // Dispatch/session bookkeeping plus the question metadata the question action needs.
        parameters.put(KRADConstants.DISPATCH_REQUEST_PARAMETER, "start");
        parameters.put(KRADConstants.DOC_FORM_KEY, GlobalVariables.getUserSession().addObjectWithGeneratedKey(form));
        parameters.put(KRADConstants.CALLING_METHOD, caller);
        parameters.put(KRADConstants.QUESTION_INST_ATTRIBUTE_NAME, questionId);
        parameters.put(KRADConstants.QUESTION_IMPL_ATTRIBUTE_NAME, questionType);
        //parameters.put(KRADConstants.QUESTION_TEXT_ATTRIBUTE_NAME, questionText);
        parameters.put(KRADConstants.RETURN_LOCATION_PARAMETER, getReturnLocation(request, mapping));
        parameters.put(KRADConstants.QUESTION_CONTEXT, context);
        parameters.put(KRADConstants.QUESTION_SHOW_REASON_FIELD, Boolean.toString(showReasonField));
        parameters.put(KRADConstants.QUESTION_REASON_ATTRIBUTE_NAME, reason);
        parameters.put(KRADConstants.QUESTION_ERROR_KEY, errorKey);
        parameters.put(KRADConstants.QUESTION_ERROR_PROPERTY_NAME, errorPropertyName);
        parameters.put(KRADConstants.QUESTION_ERROR_PARAMETER, errorParameter);
        parameters.put(KRADConstants.QUESTION_ANCHOR, form instanceof KualiForm ? ObjectUtils.toString(((KualiForm) form).getAnchor()) : "");
        Object methodToCallAttribute = request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
        if (methodToCallAttribute != null) {
            parameters.put(KRADConstants.METHOD_TO_CALL_PATH, methodToCallAttribute);
            ((PojoForm) form).registerEditableProperty(String.valueOf(methodToCallAttribute));
        }
        if (form instanceof KualiDocumentFormBase) {
            String docNum = ((KualiDocumentFormBase) form).getDocument().getDocumentNumber();
            if(docNum != null){
                parameters.put(KRADConstants.DOC_NUM, ((KualiDocumentFormBase) form)
                        .getDocument().getDocumentNumber());
            }
        }
        // KULRICE-8077: PO Quote Limitation of Only 9 Vendors
        // The question text is stashed in the user session keyed by question id instead of
        // being passed on the URL (see the commented-out QUESTION_TEXT put above) —
        // presumably to avoid URL length limits; verify against the question action.
        String questionTextAttributeName = KRADConstants.QUESTION_TEXT_ATTRIBUTE_NAME + questionId;
        GlobalVariables.getUserSession().addObject(questionTextAttributeName, (Object)questionText);
        String questionUrl = UrlFactory.parameterizeUrl(getApplicationBaseUrl() + "/kr/" + KRADConstants.QUESTION_ACTION, parameters);
        return new ActionForward(questionUrl, true);
    }
/**
* Takes care of storing the action form in the User session and forwarding to the workflow workgroup lookup action.
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
public ActionForward performWorkgroupLookup(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
String returnUrl = null;
if ("/kr".equals(mapping.getModuleConfig().getPrefix())) {
returnUrl = getApplicationBaseUrl() + mapping.getModuleConfig().getPrefix() + mapping.getPath() + ".do";
} else {
returnUrl = getApplicationBaseUrl() + mapping.getPath() + ".do";
}
String fullParameter = (String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
String conversionFields = StringUtils.substringBetween(fullParameter, KRADConstants.METHOD_TO_CALL_PARM1_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM1_RIGHT_DEL);
String deploymentBaseUrl = CoreApiServiceLocator.getKualiConfigurationService().getPropertyValueAsString(
KRADConstants.WORKFLOW_URL_KEY);
String workgroupLookupUrl = deploymentBaseUrl + "/Lookup.do?lookupableImplServiceName=WorkGroupLookupableImplService&methodToCall=start&docFormKey=" + GlobalVariables.getUserSession().addObjectWithGeneratedKey(form);
if (conversionFields != null) {
workgroupLookupUrl += "&conversionFields=" + conversionFields;
}
if (form instanceof KualiDocumentFormBase) {
workgroupLookupUrl +="&docNum="+ ((KualiDocumentFormBase) form).getDocument().getDocumentNumber();
}
workgroupLookupUrl += "&returnLocation=" + returnUrl;
return new ActionForward(workgroupLookupUrl, true);
}
/**
* Handles requests that originate via Header Tabs.
*
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
public ActionForward headerTab(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
// header tab actions can do two things - 1, call into an action and perform what needs to happen in there and 2, forward to
// a new location.
String headerTabDispatch = getHeaderTabDispatch(request);
if (StringUtils.isNotEmpty(headerTabDispatch)) {
ActionForward forward = dispatchMethod(mapping, form, request, response, headerTabDispatch);
if (GlobalVariables.getMessageMap().getNumberOfPropertiesWithErrors() > 0) {
return mapping.findForward(RiceConstants.MAPPING_BASIC);
}
this.doTabOpenOrClose(mapping, form, request, response, false);
if (forward.getRedirect()) {
return forward;
}
}
return dispatchMethod(mapping, form, request, response, getHeaderTabNavigateTo(request));
}
/**
* Override this method to provide action-level access controls to the application.
*
* @param form
* @throws AuthorizationException
*/
protected void checkAuthorization( ActionForm form, String methodToCall) throws AuthorizationException
{
String principalId = GlobalVariables.getUserSession().getPrincipalId();
Map<String, String> roleQualifier = new HashMap<String, String>(getRoleQualification(form, methodToCall));
Map<String, String> permissionDetails = KRADUtils.getNamespaceAndActionClass(this.getClass());
if (!KimApiServiceLocator.getPermissionService().isAuthorizedByTemplate(principalId,
KRADConstants.KNS_NAMESPACE, KimConstants.PermissionTemplateNames.USE_SCREEN, permissionDetails,
roleQualifier))
{
throw new AuthorizationException(GlobalVariables.getUserSession().getPerson().getPrincipalName(),
methodToCall,
this.getClass().getSimpleName());
}
}
    /**
     * Hook for subclasses: contribute data from the form as role qualification
     * for the authorization checks in checkAuthorization / isModuleLocked.
     *
     * @param form current action form
     * @param methodToCall method being authorized
     * @return mutable map of qualifier name/value pairs; empty by default
     */
    protected Map<String,String> getRoleQualification(ActionForm form, String methodToCall) {
        return new HashMap<String,String>();
    }
    /**
     * Lazily resolves and caches the KualiModuleService.
     * NOTE(review): the unsynchronized lazy init can race on first use; harmless
     * if the locator always returns the same instance — confirm.
     */
    protected static KualiModuleService getKualiModuleService() {
        if ( kualiModuleService == null ) {
            kualiModuleService = KRADServiceLocatorWeb.getKualiModuleService();
        }
        return kualiModuleService;
    }
    /**
     * Constant defined to match with TextArea.jsp and updateTextArea function in core.js
     * <p>Value is textAreaFieldName
     */
    public static final String TEXT_AREA_FIELD_NAME="textAreaFieldName";
    /**
     * Constant defined to match with TextArea.jsp and updateTextArea function in core.js
     * <p>Value is textAreaFieldLabel
     */
    public static final String TEXT_AREA_FIELD_LABEL="textAreaFieldLabel";
    /**
     * Constant defined to match with TextArea.jsp and updateTextArea function in core.js
     * <p>Value is textAreaReadOnly
     */
    public static final String TEXT_AREA_READ_ONLY="textAreaReadOnly";
    /**
     * Constant defined to match with TextArea.jsp and updateTextArea function in core.js
     * <p>Value is textAreaFieldAnchor
     */
    public static final String TEXT_AREA_FIELD_ANCHOR="textAreaFieldAnchor";
    /**
     * Constant defined to match with TextArea.jsp and updateTextArea function in core.js
     * <p>Value is textAreaMaxLength
     */
    public static final String TEXT_AREA_MAX_LENGTH="textAreaMaxLength";
    /**
     * Constant defined to match with TextArea.jsp and updateTextArea function in core.js
     * <p>Value is htmlFormAction
     */
    public static final String FORM_ACTION="htmlFormAction";
    /**
     * Constant defined to match input parameter from URL and from TextArea.jsp.
     * <p>Value is methodToCall
     */
    public static final String METHOD_TO_CALL="methodToCall";
    /**
     * Constant defined to match with global forwarding in struts-config.xml
     * for Text Area Update.
     * <p>Value is updateTextArea
     */
    public static final String FORWARD_TEXT_AREA_UPDATE="updateTextArea";
    /**
     * Constant defined to match with method to call in TextArea.jsp.
     * <p>Value is postTextAreaToParent
     */
    public static final String POST_TEXT_AREA_TO_PARENT="postTextAreaToParent";
    /**
     * Constant defined to match with local forwarding in struts-config.xml
     * for the parent of the Updated Text Area.
     * <p>Value is forwardNext
     */
    public static final String FORWARD_NEXT="forwardNext";
/**
* This method is invoked when Java Script is turned off from the web browser. It
* setup the information that the update text area requires for copying current text
* in the calling page text area and returning to the calling page. The information
* is passed to the JSP through Http Request attributes. All other parameters are
* forwarded
*
* @param mapping
* @param form
* @param request
* @param response
* @return
*/
public ActionForward updateTextArea(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) {
if (LOG.isTraceEnabled()) {
String lm=String.format("ENTRY %s%n%s", form.getClass().getSimpleName(),
request.getRequestURI());
LOG.trace(lm);
}
final String[] keyValue = getTextAreaParams(request);
request.setAttribute(TEXT_AREA_FIELD_NAME, keyValue[0]);
request.setAttribute(FORM_ACTION,keyValue[1]);
request.setAttribute(TEXT_AREA_FIELD_LABEL,keyValue[2]);
request.setAttribute(TEXT_AREA_READ_ONLY,keyValue[3]);
request.setAttribute(TEXT_AREA_MAX_LENGTH,keyValue[4]);
if (form instanceof KualiForm && StringUtils.isNotEmpty(((KualiForm) form).getAnchor())) {
request.setAttribute(TEXT_AREA_FIELD_ANCHOR,((KualiForm) form).getAnchor());
}
// Set document related parameter
String docWebScope=(String)request.getAttribute(KRADConstants.DOCUMENT_WEB_SCOPE);
if (docWebScope != null && docWebScope.trim().length() >= 0) {
request.setAttribute(KRADConstants.DOCUMENT_WEB_SCOPE, docWebScope);
}
request.setAttribute(KRADConstants.DOC_FORM_KEY, GlobalVariables.getUserSession().addObjectWithGeneratedKey(form));
ActionForward forward=mapping.findForward(FORWARD_TEXT_AREA_UPDATE);
if (LOG.isTraceEnabled()) {
String lm=String.format("EXIT %s", (forward==null)?"null":forward.getPath());
LOG.trace(lm);
}
return forward;
}
/**
* This method takes the {@link org.kuali.rice.krad.util.KRADConstants.METHOD_TO_CALL_ATTRIBUTE} out of the request
* and parses it returning the required fields needed for a text area. The fields returned
* are the following in this order.
* <ol>
* <li>{@link #TEXT_AREA_FIELD_NAME}</li>
* <li>{@link #FORM_ACTION}</li>
* <li>{@link #TEXT_AREA_FIELD_LABEL}</li>
* <li>{@link #TEXT_AREA_READ_ONLY}</li>
* <li>{@link #TEXT_AREA_MAX_LENGTH}</li>
* </ol>
*
* @param request the request to retrieve the textarea parameters
* @return a string array holding the parsed fields
*/
private String[] getTextAreaParams(HttpServletRequest request) {
// parse out the important strings from our methodToCall parameter
String fullParameter = (String) request.getAttribute(
KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
// parse textfieldname:htmlformaction
String parameterFields = StringUtils.substringBetween(fullParameter,
KRADConstants.METHOD_TO_CALL_PARM2_LEFT_DEL,
KRADConstants.METHOD_TO_CALL_PARM2_RIGHT_DEL);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "fullParameter: " + fullParameter );
LOG.debug( "parameterFields: " + parameterFields );
}
String[] keyValue = null;
if (StringUtils.isNotBlank(parameterFields)) {
String[] textAreaParams = parameterFields.split(
KRADConstants.FIELD_CONVERSIONS_SEPARATOR);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "lookupParams: " + textAreaParams );
}
for (final String textAreaParam : textAreaParams) {
keyValue = textAreaParam.split(KRADConstants.FIELD_CONVERSION_PAIR_SEPARATOR, 2);
if ( LOG.isDebugEnabled() ) {
LOG.debug( "keyValue[0]: " + keyValue[0] );
LOG.debug( "keyValue[1]: " + keyValue[1] );
LOG.debug( "keyValue[2]: " + keyValue[2] );
LOG.debug( "keyValue[3]: " + keyValue[3] );
LOG.debug( "keyValue[4]: " + keyValue[4] );
}
}
}
return keyValue;
}
/**
* This method is invoked from the TextArea.jsp for posting its value to the parent
* page that called the extended text area page. The invocation is done through
* Struts action. The default forwarding id is RiceContants.MAPPING_BASIC. This
* can be overridden using the parameter key FORWARD_NEXT.
*
* @param mapping
* @param form
* @param request
* @param response
* @return
*/
public ActionForward postTextAreaToParent(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) {
if (LOG.isTraceEnabled()) {
String lm=String.format("ENTRY %s%n%s", form.getClass().getSimpleName(),
request.getRequestURI());
LOG.trace(lm);
}
String forwardingId=request.getParameter(FORWARD_NEXT);
if (forwardingId == null) {
forwardingId=RiceConstants.MAPPING_BASIC;
}
ActionForward forward=mapping.findForward(forwardingId);
if (LOG.isTraceEnabled()) {
String lm=String.format("EXIT %s", (forward==null)?"null":forward.getPath());
LOG.trace(lm);
}
return forward;
}
    /**
     * Use to add a methodToCall to the a list which will not have authorization checks.
     * This assumes that the call will be redirected (as in the case of a lookup) that will perform
     * the authorization.
     *
     * @param methodToCall method name to exempt from the authorization check
     */
    protected final void addMethodToCallToUncheckedList( String methodToCall ) {
        methodToCallsToNotCheckAuthorization.add(methodToCall);
    }
    /**
     * This method does all special processing on a document that should happen on each HTTP post (ie, save, route, approve, etc).
     * Intentionally a no-op here; subclasses override to add behavior.
     */
    protected void doProcessingAfterPost( KualiForm form, HttpServletRequest request ) {
    }
protected BusinessObjectAuthorizationService getBusinessObjectAuthorizationService() {
if (businessObjectAuthorizationService == null) {
businessObjectAuthorizationService = KNSServiceLocator.getBusinessObjectAuthorizationService();
}
return businessObjectAuthorizationService;
}
protected EncryptionService getEncryptionService() {
if (encryptionService == null) {
encryptionService = CoreApiServiceLocator.getEncryptionService();
}
return encryptionService;
}
public static String getApplicationBaseUrl() {
if ( applicationBaseUrl == null ) {
applicationBaseUrl = CoreApiServiceLocator.getKualiConfigurationService().getPropertyValueAsString(
KRADConstants.APPLICATION_URL_KEY);
}
return applicationBaseUrl;
}
protected boolean isModuleLocked(ActionForm form, String methodToCall, HttpServletRequest request) {
String boClass = request.getParameter(KRADConstants.BUSINESS_OBJECT_CLASS_ATTRIBUTE);
ModuleService moduleService = null;
if(StringUtils.isNotBlank(boClass)) {
try {
moduleService = getKualiModuleService().getResponsibleModuleService(Class.forName(boClass));
} catch (ClassNotFoundException classNotFoundException) {
LOG.warn("BO class not found: " + boClass, classNotFoundException);
}
} else {
moduleService = getKualiModuleService().getResponsibleModuleService(this.getClass());
}
if(moduleService != null && moduleService.isLocked()) {
String principalId = GlobalVariables.getUserSession().getPrincipalId();
String namespaceCode = KRADConstants.KUALI_RICE_SYSTEM_NAMESPACE;
String permissionName = KimConstants.PermissionNames.ACCESS_LOCKED_MODULE;
Map<String, String> qualification = getRoleQualification(form, methodToCall);
if(!KimApiServiceLocator.getPermissionService().isAuthorized(principalId, namespaceCode, permissionName, qualification)) {
ParameterService parameterSerivce = CoreFrameworkServiceLocator.getParameterService();
String messageParamNamespaceCode = moduleService.getModuleConfiguration().getNamespaceCode();
String messageParamComponentCode = KRADConstants.DetailTypes.ALL_DETAIL_TYPE;
String messageParamName = KRADConstants.SystemGroupParameterNames.OLTP_LOCKOUT_MESSAGE_PARM;
String lockoutMessage = parameterSerivce.getParameterValueAsString(messageParamNamespaceCode, messageParamComponentCode, messageParamName);
if(StringUtils.isBlank(lockoutMessage)) {
String defaultMessageParamName = KRADConstants.SystemGroupParameterNames.OLTP_LOCKOUT_DEFAULT_MESSAGE;
lockoutMessage = parameterSerivce.getParameterValueAsString(KRADConstants.KNS_NAMESPACE, messageParamComponentCode, defaultMessageParamName);
}
request.setAttribute(KRADConstants.MODULE_LOCKED_MESSAGE_REQUEST_PARAMETER, lockoutMessage);
return true;
}
}
return false;
}
}
| |
package pgcon2;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Queue;
import java.util.Set;
public class MazeBFSMax {
    final int _intMax = Integer.MAX_VALUE; //=2147483647>10^9
    final long _longMax = Long.MAX_VALUE; //=9223372036854775807L>10^18
    static boolean bElapsed = true;
    // 4-neighborhood offsets (right, down, left, up) used by the search.
    final int[] dx = { 1, 0, -1, 0 };
    final int[] dy = { 0, 1, 0, -1 };
    // Board size; the grid arrays are (n+2) x (n+2) with a sentinel wall border.
    int n;
    boolean[][] map;       // map[y][x] == true means wall
    int[][] history;       // per-cell step counts, initialized to _intMax ("unvisited")
    Queue<History> queue = new ArrayDeque<>();   // BFS frontier of partial routes
    Set<Point> answerSet = new HashSet<>();      // cells on the best route found so far
    int answerCnt = -1;                          // best (longest) route length; -1 if none
    long funcCnt;                                // loop counter for progress reporting
    // Reads the maze, runs the exhaustive search, then prints the annotated
    // board, the best route length, and the iteration count.
    void solve(BufferedReader br) throws Exception {
        initMap(br);
        search();
        printMap();
        //if (answerCnt == _intMax) {
        //	answerCnt = -1;
        //}
        pln(answerCnt);
        pln("funcCnt="+funcCnt);
    }
void initMap(BufferedReader br) throws Exception {
String line = br.readLine();
n = Integer.parseInt(line);
map = new boolean[n+2][n+2];
history = new int[n+2][n+2];
for (int i=0; i<n+2; i++) {
map[0][i] = true;
map[n+1][i] = true;
map[i][0] = true;
map[i][n+1] = true;
}
for (int y=1; y<=n; y++) {
line = br.readLine();
for (int x=1; x<=n; x++) {
if (line.charAt(x-1) == '#') {
map[y][x] = true;
}
history[y][x] = _intMax;
}
}
}
    // Dumps the board: '#' wall, 'o' cell on the best route, 'x' visited cell
    // (history updated), '.' untouched.
    void printMap() {
        for (int y=0; y<=n+1; y++) {
            for (int x=0; x<=n+1; x++) {
                if (map[y][x]) p("#");
                else if (answerSet.contains(new Point(x, y))) p("o");
                else if (history[y][x] < _intMax) p("x");
                else p(".");
            }
            pln("");
        }
    }
    // Exhaustive breadth-first enumeration of self-avoiding paths from (1,1),
    // keeping the LONGEST route that reaches (n,n) (see check()). Each queue
    // entry carries its whole route plus a set copy, so time and memory grow
    // combinatorially — only viable for tiny boards. The per-poll pln() trace
    // looks like leftover debugging output.
    void search() {
        Point pt = new Point(1, 1);
        List<Point> route = new ArrayList<>();
        route.add(pt);
        queue.offer(new History(pt, route));
        //history[1][1] = 0;
        while (queue.size() > 0) {
            funcCnt++;
            // Progress heartbeat every 10^6 iterations.
            if (funcCnt % 1000000 == 0) pln(funcCnt/1000000);
            History h = queue.poll();
            pln("poll("+h.pt.x+", "+h.pt.y+")");
            int cnt = h.route.size()-1;
            if (check(h.pt.x, h.pt.y, cnt, h.route)) {
                continue;
            }
            cnt++;
            for (int i=0; i<dx.length; i++) {
                int nx = h.pt.x+dx[i];
                int ny = h.pt.y+dy[i];
                //pln("check("+nx+", "+ny+")");
                if (isMove(nx, ny, cnt)) {
                    pt = new Point(nx, ny);
                    // Skip cells already on this route (self-avoiding walk).
                    if (h.set.contains(pt)) continue;
                    route = new ArrayList<>(h.route);
                    route.add(pt);
                    queue.offer(new History(pt, route));
                    //history[ny][nx] = cnt;
                }
            }
        }
    }
boolean isMove(int x, int y, int cnt) {
if (map[y][x]) {
return false;
}
//if (cnt >= answerCnt) {
// return false;
//}
//if (cnt >= history[y][x]) {
// return false;
//}
return true;
}
boolean check(int x, int y, int cnt, List<Point> route) {
if (map[y][x]) {
return true;
}
//if (cnt >= answerCnt) {
// return true;
//}
if (x == n && y == n) {
pln(route.toString());
if (cnt > answerCnt) {
answerCnt = cnt;
answerSet = new HashSet<>(route);
}
return true;
}
return false;
}
class History {
Point pt;
List<Point> route = new ArrayList<>();
Set<Point> set;
public History(Point p, List<Point> r) {
pt = p;
route = r;
set = new HashSet<>(route);
}
}
class Point {
int x;
int y;
public Point(int x, int y) {
this.x = x;
this.y = y;
}
public Point(Point p) {
x = p.x;
y = p.y;
}
public boolean equals(Object o) {
if (o instanceof Point) {
Point that = (Point)o;
return (x == that.x) && (y == that.y);
}
return false;
}
public int hashCode() {
return x + (y << 16);
}
public String toString() {
return "(" + x + ", " + y + ")";
}
}
class Info implements Comparable<Info> {
int idx;
int val;
public Info(int idx, int val) {
this.idx = idx;
this.val = val;
}
public int compareTo(Info o) {
return idx - o.idx;
}
public boolean equals(Object o) {
if (o instanceof Info) {
Info that = (Info)o;
return 0 == compareTo(that);
}
return false;
}
public int hashCode() {
return idx + (val << 16);
}
public String toString() {
return "(" + idx + ", " + val + ")";
}
}
class InfoComp implements Comparator<Info> {
public int compare(Info o1, Info o2) {
return o1.val - o2.val;
}
}
void p(char c) {
System.out.print(c);
}
void pln(char c) {
System.out.println(c);
}
void p(double d) {
System.out.print(d);
}
void pln(double d) {
System.out.println(d);
}
void p(long l) {
System.out.print(l);
}
void pln(long l) {
System.out.println(l);
}
void p(String s) {
System.out.print(s);
}
void pln(String s) {
System.out.println(s);
}
String _line;
String[] _flds;
int[] _nums;
static BufferedReader _in;
static PrintWriter _out;
public static void main(String[] args) throws Exception {
long start = System.currentTimeMillis();
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
new MazeBFSMax().solve(br);
long end = System.currentTimeMillis();
if (bElapsed) {
System.out.println((end-start) + "ms");
}
}
}
| |
// ========================================================================
// Copyright (c) 2010 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.util;
import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;
/* ------------------------------------------------------------ */
/**
* Internet address map to object
* <p>
* Internet addresses may be specified as absolute address or as a combination of
* four octet wildcard specifications (a.b.c.d) that are defined as follows.
* </p>
* <pre>
* nnn - an absolute value (0-255)
* mmm-nnn - an inclusive range of absolute values,
* with following shorthand notations:
* nnn- => nnn-255
* -nnn => 0-nnn
* - => 0-255
* a,b,... - a list of wildcard specifications
* </pre>
*/
public class IPAddressMap<TYPE> extends HashMap<String, TYPE>
{
    // Compiled pattern cache, keyed by the same trimmed spec string used as the map key.
    private final HashMap<String,IPAddrPattern> _patterns = new HashMap<String,IPAddrPattern>();

    /* --------------------------------------------------------------- */
    /** Construct empty IPAddressMap with a small default capacity.
     */
    public IPAddressMap()
    {
        super(11);
    }

    /* --------------------------------------------------------------- */
    /** Construct empty IPAddressMap.
     *
     * @param capacity initial capacity
     */
    public IPAddressMap(int capacity)
    {
        super (capacity);
    }

    /* ------------------------------------------------------------ */
    /**
     * Insert a new internet address pattern into the map.
     * <p>
     * The pattern is compiled eagerly, so a malformed specification fails
     * here rather than on a later {@link #match(String)} call.
     *
     * @param addrSpec wildcard address specification (see class javadoc)
     * @param object value to associate with the pattern
     * @return value previously associated with the trimmed pattern, or null
     * @throws IllegalArgumentException if addrSpec is null, blank or malformed
     * @see java.util.HashMap#put(java.lang.Object, java.lang.Object)
     */
    @Override
    public TYPE put(String addrSpec, TYPE object)
        throws IllegalArgumentException
    {
        if (addrSpec == null || addrSpec.trim().length() == 0)
            throw new IllegalArgumentException("Invalid IP address pattern: "+addrSpec);

        String spec = addrSpec.trim();
        if (_patterns.get(spec) == null)
            _patterns.put(spec,new IPAddrPattern(spec));

        return super.put(spec, object);
    }

    /* ------------------------------------------------------------ */
    /**
     * Retrieve the object mapped to the specified pattern literal.
     * No wildcard matching is performed here; use {@link #match(String)} for that.
     *
     * @see java.util.HashMap#get(java.lang.Object)
     */
    @Override
    public TYPE get(Object key)
    {
        return super.get(key);
    }

    /* ------------------------------------------------------------ */
    /**
     * Retrieve the first object that is associated with the specified
     * internet address by taking into account the wildcard specifications.
     *
     * @param addr internet address
     * @return associated object, or null when no pattern matches
     */
    public TYPE match(String addr)
    {
        Map.Entry<String, TYPE> entry = getMatch(addr);
        return entry==null ? null : entry.getValue();
    }

    /* ------------------------------------------------------------ */
    /**
     * Retrieve the first map entry that is associated with the specified
     * internet address by taking into account the wildcard specifications.
     * Iteration follows HashMap order, so "first" is not deterministic when
     * several patterns match.
     *
     * @param addr internet address
     * @return matching map entry, or null when none matches or addr is null
     */
    public Map.Entry<String, TYPE> getMatch(String addr)
    {
        if (addr != null)
        {
            for(Map.Entry<String, TYPE> entry: super.entrySet())
            {
                if (_patterns.get(entry.getKey()).match(addr))
                {
                    return entry;
                }
            }
        }
        return null;
    }

    /* ------------------------------------------------------------ */
    /**
     * Retrieve a lazy list of map entries associated with specified
     * internet address by taking into account the wildcard specifications.
     *
     * @param addr internet address; null selects all entries
     * @return lazy list of map entries (see LazyList for the representation)
     */
    public Object getLazyMatches(String addr)
    {
        if (addr == null)
            return LazyList.getList(super.entrySet());

        Object entries = null;
        for(Map.Entry<String, TYPE> entry: super.entrySet())
        {
            if (_patterns.get(entry.getKey()).match(addr))
            {
                entries = LazyList.add(entries,entry);
            }
        }
        return entries;
    }

    /* ------------------------------------------------------------ */
    /**
     * IPAddrPattern
     *
     * Represents an internet address wildcard as four octet patterns.
     * Missing trailing octets default to the full 0-255 range.
     */
    private static class IPAddrPattern
    {
        private final OctetPattern[] _octets = new OctetPattern[4];

        /* ------------------------------------------------------------ */
        /**
         * Create new IPAddrPattern
         *
         * @param value internet address wildcard specification
         * @throws IllegalArgumentException if wildcard specification is invalid
         */
        public IPAddrPattern(String value)
            throws IllegalArgumentException
        {
            if (value == null || value.trim().length() == 0)
                throw new IllegalArgumentException("Invalid IP address pattern: "+value);

            try
            {
                StringTokenizer parts = new StringTokenizer(value, ".");
                String part;
                for (int idx=0; idx<4; idx++)
                {
                    // Missing octets (e.g. "10.10") are treated as full wildcards.
                    part = parts.hasMoreTokens() ? parts.nextToken().trim() : "0-255";

                    int len = part.length();
                    if (len == 0 && parts.hasMoreTokens())
                        throw new IllegalArgumentException("Invalid IP address pattern: "+value);

                    _octets[idx] = new OctetPattern(len==0 ? "0-255" : part);
                }
            }
            catch (IllegalArgumentException ex)
            {
                throw new IllegalArgumentException("Invalid IP address pattern: "+value, ex);
            }
        }

        /* ------------------------------------------------------------ */
        /**
         * Match the specified internet address against the wildcard
         *
         * @param value internet address (dotted quad)
         * @return true if specified internet address matches wildcard specification
         *
         * @throws IllegalArgumentException if specified internet address is invalid
         */
        public boolean match(String value)
            throws IllegalArgumentException
        {
            if (value == null || value.trim().length() == 0)
                throw new IllegalArgumentException("Invalid IP address: "+value);

            try
            {
                StringTokenizer parts = new StringTokenizer(value, ".");

                boolean result = true;
                for (int idx=0; idx<4; idx++)
                {
                    if (!parts.hasMoreTokens())
                        throw new IllegalArgumentException("Invalid IP address: "+value);

                    // Stop at the first non-matching octet.
                    if (!(result &= _octets[idx].match(parts.nextToken())))
                        break;
                }
                return result;
            }
            catch (IllegalArgumentException ex)
            {
                throw new IllegalArgumentException("Invalid IP address: "+value, ex);
            }
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * OctetPattern
     *
     * Represents a single octet wildcard as a 256-entry bit mask.
     * Matches the wildcard to the specified octet value.
     */
    private static class OctetPattern extends BitSet
    {
        private final BitSet _mask = new BitSet(256);

        /* ------------------------------------------------------------ */
        /**
         * Create new OctetPattern
         *
         * @param octetSpec octet wildcard specification
         * @throws IllegalArgumentException if wildcard specification is invalid
         */
        public OctetPattern(String octetSpec)
            throws IllegalArgumentException
        {
            try
            {
                if (octetSpec != null)
                {
                    String spec = octetSpec.trim();
                    if(spec.length() == 0)
                    {
                        // Full wildcard: all values 0-255. BitSet.set(from, to) is
                        // exclusive of 'to', so the bound must be 256; the previous
                        // set(0, 255) wrongly excluded the value 255.
                        _mask.set(0, 256);
                    }
                    else
                    {
                        StringTokenizer parts = new StringTokenizer(spec,",");
                        while (parts.hasMoreTokens())
                        {
                            String part = parts.nextToken().trim();
                            if (part.length() > 0)
                            {
                                if (part.indexOf('-') < 0)
                                {
                                    // Single absolute value
                                    int value = Integer.parseInt(part);
                                    _mask.set(value);
                                }
                                else
                                {
                                    // Range with optional bounds: "m-n", "m-", "-n" or "-"
                                    int low = 0, high = 255;

                                    String[] bounds = part.split("-",-2);
                                    if (bounds.length != 2)
                                    {
                                        throw new IllegalArgumentException("Invalid octet spec: "+octetSpec);
                                    }

                                    if (bounds[0].length() > 0)
                                    {
                                        low = Integer.parseInt(bounds[0]);
                                    }

                                    if (bounds[1].length() > 0)
                                    {
                                        high = Integer.parseInt(bounds[1]);
                                    }

                                    if (low > high)
                                    {
                                        throw new IllegalArgumentException("Invalid octet spec: "+octetSpec);
                                    }

                                    // high+1 because BitSet.set(from, to) excludes 'to'.
                                    _mask.set(low, high+1);
                                }
                            }
                        }
                    }
                }
            }
            catch (NumberFormatException ex)
            {
                throw new IllegalArgumentException("Invalid octet spec: "+octetSpec, ex);
            }
        }

        /* ------------------------------------------------------------ */
        /**
         * Match specified octet value against the wildcard
         *
         * @param value octet value as a decimal string
         * @return true if specified octet value matches the wildcard
         * @throws IllegalArgumentException if specified octet value is invalid
         */
        public boolean match(String value)
            throws IllegalArgumentException
        {
            if (value == null || value.trim().length() == 0)
                throw new IllegalArgumentException("Invalid octet: "+value);

            try
            {
                int number = Integer.parseInt(value);
                return match(number);
            }
            catch (NumberFormatException ex)
            {
                // Preserve the cause, consistent with the other wrap sites.
                throw new IllegalArgumentException("Invalid octet: "+value, ex);
            }
        }

        /* ------------------------------------------------------------ */
        /**
         * Match specified octet value against the wildcard
         *
         * @param number octet value
         * @return true if specified octet value matches the wildcard
         * @throws IllegalArgumentException if the value is outside 0-255
         */
        public boolean match(int number)
            throws IllegalArgumentException
        {
            if (number < 0 || number > 255)
                throw new IllegalArgumentException("Invalid octet: "+number);

            return _mask.get(number);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.protocol.core.impl;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import io.netty.channel.ChannelPipeline;
import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.ActiveMQExceptionType;
import org.apache.activemq.artemis.api.core.ActiveMQInterruptedException;
import org.apache.activemq.artemis.api.core.Interceptor;
import org.apache.activemq.artemis.api.core.Pair;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.client.ActiveMQClient;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.core.client.ActiveMQClientMessageBundle;
import org.apache.activemq.artemis.core.client.impl.ClientSessionFactoryInternal;
import org.apache.activemq.artemis.core.protocol.ClientPacketDecoder;
import org.apache.activemq.artemis.core.protocol.core.Channel;
import org.apache.activemq.artemis.core.protocol.core.ChannelHandler;
import org.apache.activemq.artemis.core.protocol.core.CoreRemotingConnection;
import org.apache.activemq.artemis.core.protocol.core.Packet;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.CheckFailoverMessage;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.CheckFailoverReplyMessage;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.ClusterTopologyChangeMessage;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.ClusterTopologyChangeMessage_V2;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.ClusterTopologyChangeMessage_V3;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.CreateSessionMessage;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.CreateSessionResponseMessage;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.DisconnectMessage;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.DisconnectMessage_V2;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.Ping;
import org.apache.activemq.artemis.core.protocol.core.impl.wireformat.SubscribeClusterTopologyUpdatesMessageV2;
import org.apache.activemq.artemis.core.remoting.impl.netty.ActiveMQFrameDecoder2;
import org.apache.activemq.artemis.core.version.Version;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.spi.core.remoting.ClientProtocolManager;
import org.apache.activemq.artemis.spi.core.remoting.Connection;
import org.apache.activemq.artemis.spi.core.remoting.SessionContext;
import org.apache.activemq.artemis.spi.core.remoting.TopologyResponseHandler;
import org.apache.activemq.artemis.utils.VersionLoader;
import org.jboss.logging.Logger;
/**
* This class will return specific packets for different types of actions happening on a messaging protocol.
*
* This is trying to unify the Core client into multiple protocols.
*
* Returning null in certain packets means no action is taken on this specific protocol.
*
* Semantic properties could also be added to this implementation.
*
* Implementations of this class need to be stateless.
*/
public class ActiveMQClientProtocolManager implements ClientProtocolManager {

   private static final Logger logger = Logger.getLogger(ActiveMQClientProtocolManager.class);

   // Magic string written first on a raw connection so the broker's protocol
   // handler can identify the core protocol (see sendHandshake).
   private static final String handshake = "ARTEMIS";

   private final int versionID = VersionLoader.getVersion().getIncrementingVersion();

   private ClientSessionFactoryInternal factoryInternal;

   /**
    * Guards assignments to {@link #inCreateSession} and {@link #inCreateSessionLatch}
    */
   private final Object inCreateSessionGuard = new Object();

   /**
    * Flag that tells whether we are trying to create a session.
    */
   private boolean inCreateSession;

   /**
    * Used to wait for the creation of a session.
    */
   private CountDownLatch inCreateSessionLatch;

   protected volatile RemotingConnectionImpl connection;

   protected TopologyResponseHandler topologyResponseHandler;

   /**
    * Flag that signals that the communication is closing. Causes many processes to exit.
    */
   private volatile boolean alive = true;

   // Released by stop(); waitOnLatch() blocks on it while the manager is alive.
   private final CountDownLatch waitLatch = new CountDownLatch(1);

   public ActiveMQClientProtocolManager() {
   }

   @Override
   public String getName() {
      return ActiveMQClient.DEFAULT_CORE_PROTOCOL;
   }

   @Override
   public void setSessionFactory(ClientSessionFactory factory) {
      this.factoryInternal = (ClientSessionFactoryInternal) factory;
   }

   @Override
   public ClientSessionFactory getSessionFactory() {
      return this.factoryInternal;
   }

   @Override
   public void addChannelHandlers(ChannelPipeline pipeline) {
      pipeline.addLast("activemq-decoder", new ActiveMQFrameDecoder2());
   }

   @Override
   public boolean waitOnLatch(long milliseconds) throws InterruptedException {
      return waitLatch.await(milliseconds, TimeUnit.MILLISECONDS);
   }

   /**
    * @return the ping channel (channel 0) of the current connection, or null when not connected
    */
   public Channel getChannel0() {
      if (connection == null) {
         return null;
      } else {
         return connection.getChannel(ChannelImpl.CHANNEL_ID.PING.id, -1);
      }
   }

   @Override
   public RemotingConnection getCurrentConnection() {
      return connection;
   }

   /**
    * @return the session-management channel (channel 1), or null when not connected
    */
   public Channel getChannel1() {
      if (connection == null) {
         return null;
      } else {
         return connection.getChannel(1, -1);
      }
   }

   /**
    * Acquires the channel-1 lock while holding the factory failover lock,
    * so that session creation cannot interleave with a failover.
    *
    * @return the held lock (caller must unlock it), or null when not connected
    *         or the calling thread was interrupted
    */
   @Override
   public Lock lockSessionCreation() {
      try {
         Lock localFailoverLock = factoryInternal.lockFailover();
         try {
            if (connection == null) {
               return null;
            }

            Lock lock = getChannel1().getLock();

            // Lock it - this must be done while the failoverLock is held.
            // Poll with a timeout (instead of a blocking lock()) so that
            // stop() can make this loop exit via isAlive().
            while (isAlive() && !lock.tryLock(100, TimeUnit.MILLISECONDS)) {
            }

            return lock;
         } finally {
            localFailoverLock.unlock();
         }
         // We can now release the failoverLock
      } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
         return null;
      }
   }

   @Override
   public void stop() {
      alive = false;

      // Unblock any thread currently waiting inside createSessionContext().
      synchronized (inCreateSessionGuard) {
         if (inCreateSessionLatch != null)
            inCreateSessionLatch.countDown();
      }

      Channel channel1 = getChannel1();
      if (channel1 != null) {
         channel1.returnBlocking();
      }

      waitLatch.countDown();
   }

   @Override
   public boolean isAlive() {
      return alive;
   }

   /**
    * Sends a Ping packet on channel 0 and flushes the connection.
    *
    * @param connectionTTL time-to-live the server should apply to this connection
    */
   @Override
   public void ping(long connectionTTL) {
      Channel channel = connection.getChannel(ChannelImpl.CHANNEL_ID.PING.id, -1);

      Ping ping = new Ping(connectionTTL);

      channel.send(ping);

      connection.flush();
   }

   @Override
   public void sendSubscribeTopology(final boolean isServer) {
      getChannel0().send(new SubscribeClusterTopologyUpdatesMessageV2(isServer, VersionLoader.getVersion().getIncrementingVersion()));
   }

   /**
    * Creates a session, retrying with each known client version until the
    * server accepts one; destroys the connection and fails when no version
    * is compatible.
    */
   @Override
   public SessionContext createSessionContext(String name,
                                              String username,
                                              String password,
                                              boolean xa,
                                              boolean autoCommitSends,
                                              boolean autoCommitAcks,
                                              boolean preAcknowledge,
                                              int minLargeMessageSize,
                                              int confirmationWindowSize) throws ActiveMQException {
      for (Version clientVersion : VersionLoader.getClientVersions()) {
         try {
            return createSessionContext(clientVersion, name, username, password, xa, autoCommitSends, autoCommitAcks, preAcknowledge, minLargeMessageSize, confirmationWindowSize);
         } catch (ActiveMQException e) {
            // Only an incompatible-version failure moves on to the next version.
            if (e.getType() != ActiveMQExceptionType.INCOMPATIBLE_CLIENT_SERVER_VERSIONS) {
               throw e;
            }
         }
      }
      connection.destroy();
      throw new ActiveMQException(ActiveMQExceptionType.INCOMPATIBLE_CLIENT_SERVER_VERSIONS);
   }

   /**
    * Creates a session for one specific client version. Blocks on channel 1;
    * retries the whole exchange when the blocking call is unblocked by a
    * failover (ActiveMQExceptionType.UNBLOCKED).
    */
   public SessionContext createSessionContext(Version clientVersion,
                                              String name,
                                              String username,
                                              String password,
                                              boolean xa,
                                              boolean autoCommitSends,
                                              boolean autoCommitAcks,
                                              boolean preAcknowledge,
                                              int minLargeMessageSize,
                                              int confirmationWindowSize) throws ActiveMQException {
      if (!isAlive())
         throw ActiveMQClientMessageBundle.BUNDLE.clientSessionClosed();

      Channel sessionChannel = null;
      CreateSessionResponseMessage response = null;

      boolean retry;
      do {
         retry = false;

         Lock lock = null;

         try {
            lock = lockSessionCreation();

            // We now set a flag saying createSession is executing
            synchronized (inCreateSessionGuard) {
               if (!isAlive())
                  throw ActiveMQClientMessageBundle.BUNDLE.clientSessionClosed();
               inCreateSession = true;
               inCreateSessionLatch = new CountDownLatch(1);
            }

            long sessionChannelID = connection.generateChannelID();

            Packet request = newCreateSessionPacket(clientVersion, name, username, password, xa, autoCommitSends, autoCommitAcks, preAcknowledge, minLargeMessageSize, confirmationWindowSize, sessionChannelID);

            try {
               // channel1 reference here has to go away
               response = (CreateSessionResponseMessage) getChannel1().sendBlocking(request, PacketImpl.CREATESESSION_RESP);
            } catch (ActiveMQException cause) {
               if (!isAlive())
                  throw cause;

               if (cause.getType() == ActiveMQExceptionType.UNBLOCKED) {
                  // This means the thread was blocked on create session and failover unblocked it
                  // so failover could occur
                  retry = true;

                  continue;
               } else {
                  throw cause;
               }
            }

            sessionChannel = connection.getChannel(sessionChannelID, confirmationWindowSize);
         } catch (Throwable t) {
            if (lock != null) {
               lock.unlock();
               lock = null;  // prevent the finally block from unlocking twice
            }

            if (t instanceof ActiveMQException) {
               throw (ActiveMQException) t;
            } else {
               throw ActiveMQClientMessageBundle.BUNDLE.failedToCreateSession(t);
            }
         } finally {
            if (lock != null) {
               lock.unlock();
            }

            // Execution has finished so notify any failover thread that may be waiting for us to be done
            inCreateSession = false;
            inCreateSessionLatch.countDown();
         }
      } while (retry);

      return newSessionContext(name, confirmationWindowSize, sessionChannel, response);
   }

   protected Packet newCreateSessionPacket(Version clientVersion,
                                           String name,
                                           String username,
                                           String password,
                                           boolean xa,
                                           boolean autoCommitSends,
                                           boolean autoCommitAcks,
                                           boolean preAcknowledge,
                                           int minLargeMessageSize,
                                           int confirmationWindowSize,
                                           long sessionChannelID) {
      return new CreateSessionMessage(name, sessionChannelID, clientVersion.getIncrementingVersion(), username, password, minLargeMessageSize, xa, autoCommitSends, autoCommitAcks, preAcknowledge, confirmationWindowSize, null);
   }

   protected SessionContext newSessionContext(String name,
                                              int confirmationWindowSize,
                                              Channel sessionChannel,
                                              CreateSessionResponseMessage response) {
      // these objects won't be null, otherwise it would keep retrying on the previous loop
      return new ActiveMQSessionContext(name, connection, sessionChannel, response.getServerVersion(), confirmationWindowSize);
   }

   /**
    * Interrupts an in-flight createSession (if any) and waits for it to finish,
    * so a failover can proceed safely.
    *
    * @return false when the session-creation lock could not be obtained
    */
   @Override
   public boolean cleanupBeforeFailover(ActiveMQException cause) {

      boolean needToInterrupt;

      CountDownLatch exitLockLatch;
      Lock lock = lockSessionCreation();

      if (lock == null) {
         return false;
      }

      try {
         synchronized (inCreateSessionGuard) {
            needToInterrupt = inCreateSession;
            exitLockLatch = inCreateSessionLatch;
         }
      } finally {
         lock.unlock();
      }

      if (needToInterrupt) {
         forceReturnChannel1(cause);

         // Now we need to make sure that the thread has actually exited and returned it's
         // connections
         // before failover occurs
         while (inCreateSession && isAlive()) {
            try {
               if (exitLockLatch != null) {
                  exitLockLatch.await(500, TimeUnit.MILLISECONDS);
               }
            } catch (InterruptedException e1) {
               throw new ActiveMQInterruptedException(e1);
            }
         }
      }

      return true;
   }

   @Override
   public boolean checkForFailover(String liveNodeID) throws ActiveMQException {
      CheckFailoverMessage packet = new CheckFailoverMessage(liveNodeID);
      CheckFailoverReplyMessage message = (CheckFailoverReplyMessage) getChannel1().sendBlocking(packet, PacketImpl.CHECK_FOR_FAILOVER_REPLY);
      return message.isOkToFailover();
   }

   @Override
   public RemotingConnection connect(Connection transportConnection,
                                     long callTimeout,
                                     long callFailoverTimeout,
                                     List<Interceptor> incomingInterceptors,
                                     List<Interceptor> outgoingInterceptors,
                                     TopologyResponseHandler topologyResponseHandler) {
      this.connection = new RemotingConnectionImpl(getPacketDecoder(), transportConnection, callTimeout, callFailoverTimeout, incomingInterceptors, outgoingInterceptors);

      this.topologyResponseHandler = topologyResponseHandler;

      getChannel0().setHandler(new Channel0Handler(connection));

      sendHandshake(transportConnection);

      return connection;
   }

   protected void sendHandshake(Connection transportConnection) {
      if (transportConnection.isUsingProtocolHandling()) {
         // no need to send handshake on inVM as inVM is not using the NettyProtocolHandling
         ActiveMQBuffer amqbuffer = connection.createTransportBuffer(handshake.length());
         amqbuffer.writeBytes(handshake.getBytes());
         transportConnection.write(amqbuffer);
      }
   }

   /**
    * Hook allowing subclasses to rewrite the transport configuration carried by
    * a topology message before it is propagated; the default is a pass-through.
    */
   protected ClusterTopologyChangeMessage updateTransportConfiguration(final ClusterTopologyChangeMessage topMessage) {
      return topMessage;
   }

   /**
    * Handler for packets arriving on channel 0: disconnect notifications and
    * cluster topology updates.
    */
   private class Channel0Handler implements ChannelHandler {

      private final CoreRemotingConnection conn;

      private Channel0Handler(final CoreRemotingConnection conn) {
         this.conn = conn;
      }

      @Override
      public void handlePacket(final Packet packet) {
         final byte type = packet.getType();
         if (type == PacketImpl.DISCONNECT || type == PacketImpl.DISCONNECT_V2) {
            final DisconnectMessage msg = (DisconnectMessage) packet;
            String scaleDownTargetNodeID = null;

            SimpleString nodeID = msg.getNodeID();

            if (packet instanceof DisconnectMessage_V2) {
               final DisconnectMessage_V2 msg_v2 = (DisconnectMessage_V2) packet;
               scaleDownTargetNodeID = msg_v2.getScaleDownNodeID() == null ? null : msg_v2.getScaleDownNodeID().toString();
            }

            if (topologyResponseHandler != null)
               topologyResponseHandler.nodeDisconnected(conn, nodeID == null ? null : nodeID.toString(), scaleDownTargetNodeID);
         } else if (type == PacketImpl.CLUSTER_TOPOLOGY || type == PacketImpl.CLUSTER_TOPOLOGY_V2 || type == PacketImpl.CLUSTER_TOPOLOGY_V3) {
            // All topology message versions share the base type;
            // notifyTopologyChange downcasts to V2/V3 as needed.
            // (Previously three branches duplicated this identical call, and the
            // combined-condition branch was reachable only for V3 because the
            // earlier branches already consumed CLUSTER_TOPOLOGY and V2.)
            ClusterTopologyChangeMessage topMessage = (ClusterTopologyChangeMessage) packet;
            notifyTopologyChange(updateTransportConfiguration(topMessage));
         } else if (type == PacketImpl.CHECK_FOR_FAILOVER_REPLY) {
            // NOTE(review): looks like leftover debug output - kept for behavior
            // compatibility, but consider removing or routing through the logger.
            System.out.println("Channel0Handler.handlePacket");
         }
      }

      /**
       * Extracts the version-specific fields from a topology message and
       * forwards node-up/node-down events to the topologyResponseHandler.
       *
       * @param topMessage topology change received on channel 0
       */
      protected void notifyTopologyChange(final ClusterTopologyChangeMessage topMessage) {
         final long eventUID;
         final String backupGroupName;
         final String scaleDownGroupName;
         if (topMessage instanceof ClusterTopologyChangeMessage_V3) {
            eventUID = ((ClusterTopologyChangeMessage_V3) topMessage).getUniqueEventID();
            backupGroupName = ((ClusterTopologyChangeMessage_V3) topMessage).getBackupGroupName();
            scaleDownGroupName = ((ClusterTopologyChangeMessage_V3) topMessage).getScaleDownGroupName();
         } else if (topMessage instanceof ClusterTopologyChangeMessage_V2) {
            eventUID = ((ClusterTopologyChangeMessage_V2) topMessage).getUniqueEventID();
            backupGroupName = ((ClusterTopologyChangeMessage_V2) topMessage).getBackupGroupName();
            scaleDownGroupName = null;
         } else {
            // V1 messages carry no event id; fall back to the local clock.
            eventUID = System.currentTimeMillis();
            backupGroupName = null;
            scaleDownGroupName = null;
         }

         if (topMessage.isExit()) {
            if (logger.isDebugEnabled()) {
               logger.debug("Notifying " + topMessage.getNodeID() + " going down");
            }

            if (topologyResponseHandler != null) {
               topologyResponseHandler.notifyNodeDown(eventUID, topMessage.getNodeID());
            }
         } else {
            Pair<TransportConfiguration, TransportConfiguration> transportConfig = topMessage.getPair();
            if (transportConfig.getA() == null && transportConfig.getB() == null) {
               // Empty pair means "the connector I am talking through".
               transportConfig = new Pair<>(conn.getTransportConnection().getConnectorConfig(), null);
            }

            if (topologyResponseHandler != null) {
               topologyResponseHandler.notifyNodeUp(eventUID, topMessage.getNodeID(), backupGroupName, scaleDownGroupName, transportConfig, topMessage.isLast());
            }
         }
      }
   }

   protected PacketDecoder getPacketDecoder() {
      return ClientPacketDecoder.INSTANCE;
   }

   /**
    * Forces any call blocked on channel 1 to return with the given cause,
    * used to unblock an in-flight createSession before failover.
    */
   private void forceReturnChannel1(ActiveMQException cause) {
      if (connection != null) {
         Channel channel1 = connection.getChannel(1, -1);

         if (channel1 != null) {
            channel1.returnBlocking(cause);
         }
      }
   }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.runners.python.scalar;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.typeutils.runtime.RowSerializer;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import org.apache.flink.fnexecution.v1.FlinkFnApi;
import org.apache.flink.python.env.ProcessPythonEnvironmentManager;
import org.apache.flink.python.env.PythonDependencyInfo;
import org.apache.flink.python.env.PythonEnvironmentManager;
import org.apache.flink.table.functions.python.PythonFunctionInfo;
import org.apache.flink.table.runtime.typeutils.PythonTypeUtils;
import org.apache.flink.table.runtime.utils.PassThroughPythonScalarFunctionRunner;
import org.apache.flink.table.runtime.utils.PythonTestUtils;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.types.Row;
import org.apache.beam.runners.fnexecution.control.JobBundleFactory;
import org.apache.beam.runners.fnexecution.control.RemoteBundle;
import org.apache.beam.runners.fnexecution.control.StageBundleFactory;
import org.apache.beam.sdk.fn.data.FnDataReceiver;
import org.apache.beam.sdk.util.WindowedValue;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Tests for {@link PythonScalarFunctionRunner}. These test that:
*
* <ul>
* <li>The input data type and output data type are properly constructed</li>
* <li>The UDF proto is properly constructed</li>
* </ul>
*/
public class PythonScalarFunctionRunnerTest extends AbstractPythonScalarFunctionRunnerTest<Row> {
	// A single one-argument UDF: the runner's input row must carry exactly one field.
	@Test
	public void testInputOutputDataTypeConstructedProperlyForSingleUDF() throws Exception {
		final PythonScalarFunctionRunner runner = (PythonScalarFunctionRunner) createSingleUDFRunner();
		// check input TypeSerializer
		TypeSerializer inputTypeSerializer = runner.getInputTypeSerializer();
		assertEquals(1, ((RowSerializer) inputTypeSerializer).getArity());
	}
	// Multiple UDFs side by side: the input row arity grows to cover all referenced offsets (3 here).
	@Test
	public void testInputOutputDataTypeConstructedProperlyForMultipleUDFs() throws Exception {
		final PythonScalarFunctionRunner runner = (PythonScalarFunctionRunner) createMultipleUDFRunner();
		// check input TypeSerializer
		TypeSerializer inputTypeSerializer = runner.getInputTypeSerializer();
		assertEquals(3, ((RowSerializer) inputTypeSerializer).getArity());
	}
	// Chained (nested) UDFs: offsets up to 4 are referenced, so the input row has arity 5.
	@Test
	public void testInputOutputDataTypeConstructedProperlyForChainedUDFs() throws Exception {
		final PythonScalarFunctionRunner runner = (PythonScalarFunctionRunner) createChainedUDFRunner();
		// check input TypeSerializer
		TypeSerializer inputTypeSerializer = runner.getInputTypeSerializer();
		assertEquals(5, ((RowSerializer) inputTypeSerializer).getArity());
	}
	// Verifies the serialized proto for one UDF: one entry, one input, reading offset 0.
	@Test
	public void testUDFProtoConstructedProperlyForSingleUDF() throws Exception {
		final AbstractPythonScalarFunctionRunner<Row> runner = createSingleUDFRunner();
		FlinkFnApi.UserDefinedFunctions udfs = runner.getUserDefinedFunctionsProto();
		assertEquals(1, udfs.getUdfsCount());
		FlinkFnApi.UserDefinedFunction udf = udfs.getUdfs(0);
		assertEquals(1, udf.getInputsCount());
		assertEquals(0, udf.getInputs(0).getInputOffset());
	}
	// Two flat UDFs: first reads offsets (0,1), second reads offsets (0,2); no nesting expected.
	@Test
	public void testUDFProtoConstructedProperlyForMultipleUDFs() throws Exception {
		final AbstractPythonScalarFunctionRunner<Row> runner = createMultipleUDFRunner();
		FlinkFnApi.UserDefinedFunctions udfs = runner.getUserDefinedFunctionsProto();
		assertEquals(2, udfs.getUdfsCount());
		FlinkFnApi.UserDefinedFunction udf = udfs.getUdfs(0);
		assertEquals(2, udf.getInputsCount());
		assertEquals(0, udf.getInputs(0).getInputOffset());
		assertEquals(1, udf.getInputs(1).getInputOffset());
		udf = udfs.getUdfs(1);
		assertEquals(2, udf.getInputsCount());
		assertEquals(0, udf.getInputs(0).getInputOffset());
		assertEquals(2, udf.getInputs(1).getInputOffset());
	}
	// Chained UDFs: inner UDFs appear as nested `udf` inputs of the outer entries rather
	// than as plain column offsets; checks both the nesting and the leaf offsets.
	@Test
	public void testUDFProtoConstructedProperlyForChainedUDFs() throws Exception {
		final PythonScalarFunctionRunner runner = (PythonScalarFunctionRunner) createChainedUDFRunner();
		FlinkFnApi.UserDefinedFunctions udfs = runner.getUserDefinedFunctionsProto();
		assertEquals(3, udfs.getUdfsCount());
		// first UDF: flat, reads offsets (0,1)
		FlinkFnApi.UserDefinedFunction udf = udfs.getUdfs(0);
		assertEquals(2, udf.getInputsCount());
		assertEquals(0, udf.getInputs(0).getInputOffset());
		assertEquals(1, udf.getInputs(1).getInputOffset());
		// second UDF: offset 0 plus one nested UDF over offsets (1,2)
		udf = udfs.getUdfs(1);
		assertEquals(2, udf.getInputsCount());
		assertEquals(0, udf.getInputs(0).getInputOffset());
		FlinkFnApi.UserDefinedFunction chainedUdf = udf.getInputs(1).getUdf();
		assertEquals(2, chainedUdf.getInputsCount());
		assertEquals(1, chainedUdf.getInputs(0).getInputOffset());
		assertEquals(2, chainedUdf.getInputs(1).getInputOffset());
		// third UDF: both inputs are nested UDFs, over offsets (1,3) and (3,4)
		udf = udfs.getUdfs(2);
		chainedUdf = udf.getInputs(0).getUdf();
		assertEquals(2, chainedUdf.getInputsCount());
		assertEquals(1, chainedUdf.getInputs(0).getInputOffset());
		assertEquals(3, chainedUdf.getInputs(1).getInputOffset());
		chainedUdf = udf.getInputs(1).getUdf();
		assertEquals(2, chainedUdf.getInputsCount());
		assertEquals(3, chainedUdf.getInputs(0).getInputOffset());
		assertEquals(4, chainedUdf.getInputs(1).getInputOffset());
	}
	// End-to-end lifecycle test against mocked Beam portability classes: verifies that
	// open() acquires the stage bundle factory, startBundle() opens a remote bundle,
	// finishBundle() closes it, and the processed element reaches the result receiver
	// in its serialized form.
	@Test
	public void testPythonScalarFunctionRunner() throws Exception {
		JobBundleFactory jobBundleFactorySpy = spy(JobBundleFactory.class);
		FnDataReceiver<byte[]> resultReceiverSpy = spy(FnDataReceiver.class);
		final AbstractGeneralPythonScalarFunctionRunner<Row> runner =
			createUDFRunner(jobBundleFactorySpy, resultReceiverSpy);
		// wire the mocks: jobBundleFactory -> stageBundleFactory -> remoteBundle -> input receiver
		StageBundleFactory stageBundleFactorySpy = spy(StageBundleFactory.class);
		when(jobBundleFactorySpy.forStage(any())).thenReturn(stageBundleFactorySpy);
		RemoteBundle remoteBundleSpy = spy(RemoteBundle.class);
		when(stageBundleFactorySpy.getBundle(any(), any(), any())).thenReturn(remoteBundleSpy);
		Map<String, FnDataReceiver> inputReceivers = new HashMap<>();
		FnDataReceiver<WindowedValue<?>> windowedValueReceiverSpy = spy(FnDataReceiver.class);
		inputReceivers.put("input", windowedValueReceiverSpy);
		when(remoteBundleSpy.getInputReceivers()).thenReturn(inputReceivers);
		runner.open();
		verify(jobBundleFactorySpy, times(1)).forStage(any());
		// verify stageBundleFactory.getBundle is called during startBundle
		verify(stageBundleFactorySpy, times(0)).getBundle(any(), any(), any());
		runner.startBundle();
		verify(stageBundleFactorySpy, times(1)).getBundle(any(), any(), any());
		runner.processElement(Row.of(1L));
		// serialize the same row independently to know the exact bytes the receiver should see
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		runner.getInputTypeSerializer().serialize(Row.of(1L), new DataOutputViewStreamWrapper(baos));
		// verify remoteBundle.close() is called during finishBundle
		verify(remoteBundleSpy, times(0)).close();
		runner.finishBundle();
		verify(remoteBundleSpy, times(1)).close();
		// verify the input element is processed and the result is hand over to the result receiver.
		verify(resultReceiverSpy, times(1)).accept(argThat(
			value -> Objects.deepEquals(value, baos.toByteArray())));
	}
	/**
	 * Factory used by the inherited proto/serializer tests. Builds a real
	 * {@link PythonScalarFunctionRunner} backed by a process-based Python environment
	 * manager; execution results are discarded.
	 */
	@Override
	public AbstractGeneralPythonScalarFunctionRunner<Row> createPythonScalarFunctionRunner(
		final PythonFunctionInfo[] pythonFunctionInfos,
		RowType inputType,
		RowType outputType) {
		final FnDataReceiver<byte[]> dummyReceiver = input -> {
			// ignore the execution results
		};
		final PythonEnvironmentManager environmentManager =
			new ProcessPythonEnvironmentManager(
				new PythonDependencyInfo(new HashMap<>(), null, null, new HashMap<>(), "python"),
				new String[] {System.getProperty("java.io.tmpdir")},
				new HashMap<>());
		return new PythonScalarFunctionRunner(
			"testPythonRunner",
			dummyReceiver,
			pythonFunctionInfos,
			environmentManager,
			inputType,
			outputType,
			Collections.emptyMap(),
			PythonTestUtils.createMockFlinkMetricContainer());
	}
	/**
	 * Builds a pass-through runner over a single one-field (BIGINT) row type, driven by
	 * the supplied (mock) {@code jobBundleFactory} so the lifecycle test can observe the
	 * Beam interactions without launching a real Python process.
	 */
	private AbstractGeneralPythonScalarFunctionRunner<Row> createUDFRunner(
		JobBundleFactory jobBundleFactory, FnDataReceiver<byte[]> receiver) {
		PythonFunctionInfo[] pythonFunctionInfos = new PythonFunctionInfo[] {
			new PythonFunctionInfo(
				DummyPythonFunction.INSTANCE,
				new Integer[]{0})
		};
		RowType rowType = new RowType(Collections.singletonList(new RowType.RowField("f1", new BigIntType())));
		final PythonEnvironmentManager environmentManager =
			new ProcessPythonEnvironmentManager(
				new PythonDependencyInfo(new HashMap<>(), null, null, new HashMap<>(), "python"),
				new String[] {System.getProperty("java.io.tmpdir")},
				new HashMap<>());
		return new PassThroughPythonScalarFunctionRunner<Row>(
			"testPythonRunner",
			receiver,
			pythonFunctionInfos,
			environmentManager,
			rowType,
			rowType,
			Collections.emptyMap(),
			jobBundleFactory,
			PythonTestUtils.createMockFlinkMetricContainer()) {
			@Override
			public TypeSerializer<Row> getInputTypeSerializer() {
				return (RowSerializer) PythonTypeUtils.toFlinkTypeSerializer(getInputType());
			}
		};
	}
}
| |
package graph;
import java.awt.*;
import java.applet.*;
import java.util.*;
import java.lang.*;
/*
**************************************************************************
**
** Class DataSet
**
**************************************************************************
** Copyright (C) 1995, 1996 Leigh Brookshaw
**
** This program is free software; you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation; either version 2 of the License, or
** (at your option) any later version.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with this program; if not, write to the Free Software
** Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
**************************************************************************
**
** This class is designed to be used in conjunction with
** the Graph2D class and Axis class for plotting 2D graphs.
**
*************************************************************************/
/**
* This class is designed to hold the data to be plotted.
* It is to be used in conjunction with the Graph2D class and Axis
* class for plotting 2D graphs.
*
* @version $Revision: 1.2 $, $Date: 2004/10/07 19:59:24 $.
* @author Leigh Brookshaw
* Author's original version: Revision: 1.15, Date: 1996/09/24 05:23:41
*/
public class DataSet extends Object {

/*
***************************
** Public Static Values
**************************/

      /**
       * A constant value flag used to specify no straight line segment
       * is to join the data points
       */
      public final static int NOLINE = 0;
      /**
       * A constant value flag used to specify that a straight line segment
       * is to join the data points.
       */
      public final static int LINE = 1;

/*
***********************
** Public Variables
**********************/

      /**
       * The Graphics canvas that is driving the whole show.
       * @see graph.Graph2D
       */
      public Graph2D g2d;
      /**
       * The linestyle to employ when joining the data points with
       * straight line segments. Currently only solid and no line
       * are supported.
       */
      public int linestyle = LINE;
      /**
       * The color of the straight line segments
       */
      public Color linecolor = null;
      /**
       * The index of the marker to use at the data points.
       * @see graph.Markers
       */
      public int marker = 0;
      /**
       * The marker color
       */
      public Color markercolor = null;
      /**
       * The scaling factor for the marker. Default value is 1.
       */
      public double markerscale = 1.0;
      /**
       * The Axis object the X data is attached to. From the Axis object
       * the scaling for the data can be derived.
       * @see graph.Axis
       */
      public Axis xaxis;
      /**
       * The Axis object the Y data is attached to.
       * @see graph.Axis
       */
      public Axis yaxis;
      /**
       * The current plottable X maximum of the data.
       * This can be very different from
       * true data X maximum. The data is clipped when plotted.
       */
      public double xmax;
      /**
       * The current plottable X minimum of the data.
       * This can be very different from
       * true data X minimum. The data is clipped when plotted.
       */
      public double xmin;
      /**
       * The current plottable Y maximum of the data.
       * This can be very different from
       * true data Y maximum. The data is clipped when plotted.
       */
      public double ymax;
      /**
       * The current plottable Y minimum of the data.
       * This can be very different from
       * true data Y minimum. The data is clipped when plotted.
       */
      public double ymin;
      /**
       * Boolean to control clipping of the data window.
       * Default value is <em>true</em>, clip the data window.
       */
      public boolean clipping = true;

/*
*********************
** Protected Variables
**********************/

      /**
       * The data X maximum.
       * Once the data is loaded this will never change.
       */
      protected double dxmax;
      /**
       * The data X minimum.
       * Once the data is loaded this will never change.
       */
      protected double dxmin;
      /**
       * The data Y maximum.
       * Once the data is loaded this will never change.
       */
      protected double dymax;
      /**
       * The data Y minimum.
       * Once the data is loaded this will never change.
       */
      protected double dymin;
      /**
       * The array containing the actual data. Point i occupies slots
       * [i*stride, (i+1)*stride), with X at i*stride and Y at i*stride+1.
       */
      protected double data[];
      /**
       * The number of data values (not points) currently stored in the
       * data array; always a multiple of the stride.
       */
      protected int length;
      /**
       * The X range of the clipped data
       */
      protected double xrange;
      /**
       * The Y range of the clipped data
       */
      protected double yrange;
      /**
       * The length of the example line in the data legend.
       */
      protected int legend_length = 20;
      /**
       * The legend text
       */
      protected TextLine legend_text = null;
      /**
       * The X pixel position of the data legend
       */
      protected int legend_ix;
      /**
       * The Y pixel position of the data legend
       */
      protected int legend_iy;
      /**
       * The X data position of the data legend
       */
      protected double legend_dx;
      /**
       * The Y data position of the data legend
       */
      protected double legend_dy;
      /**
       * The amount to increment the data array when the append method is being
       * used.
       */
      protected int increment = 100;
      /**
       * The stride of the data. For data pairs (x,y) the stride is 2
       */
      protected int stride = 2;

/*
*********************
** Constructors
********************/

      /**
       * Instantiate an empty data set.
       */
      public DataSet ( ) {
            length = 0;
            range(stride);
      }
      /**
       * Instantiate an empty data set.
       * @param stride the stride of the data set. The default stride is 2.
       * @exception Exception if the stride is less than 2.
       */
      public DataSet (int stride ) throws Exception {
            if( stride < 2 ) throw
                             new Exception("Invalid stride parameter!");
            this.stride = stride;
            length = 0;
            range(stride);
      }
      /**
       * Instantiate a DataSet with the parsed data. Default stride is 2.
       * The double array contains the data. The X data is expected in
       * the even indices, the y data in the odd. The integer n is the
       * number of data Points. This means that the length of the data
       * array is 2*n.
       * @param d Array containing the (x,y) data pairs.
       * @param n Number of (x,y) data pairs in the array.
       * @exception Exception
       *            A Generic exception if it fails to load the
       *            parsed array into the class.
       */
      public DataSet ( double d[], int n ) throws Exception {
            length = 0;
            // Validate up front: previously an undersized array slipped through
            // and System.arraycopy threw an unchecked IndexOutOfBoundsException.
            if ( d == null || d.length == 0 || n <= 0 || d.length < n*stride ) {
                  throw new Exception("DataSet: Error in parsed data!");
            }
            // Copy the data locally.
            data = new double[n*stride];
            length = n*stride;
            System.arraycopy(d, 0, data, 0, length);
            // Calculate the data range.
            range(stride);
      }
      /**
       * Instantiate a DataSet with the parsed data.
       * The double array contains the data. The X data is expected to be in
       * indices i*stride where i=0,1,... The Y data is expected to be found
       * in indices i*stride+1 where i=0,1,2...
       * The integer n is the
       * number of data Points. This means that the length of the data
       * array is n*stride.
       * @param d Array containing the (x,y) data pairs.
       * @param n Number of (x,y) data pairs in the array.
       * @param s The stride of the data.
       * @exception Exception
       *            A Generic exception if it fails to load the
       *            parsed array into the class.
       */
      public DataSet ( double d[], int n, int s ) throws Exception {
            if( s < 2 ) throw
                        new Exception("Invalid stride parameter!");
            length = 0;
            // Same up-front validation as the stride-2 constructor.
            if ( d == null || d.length == 0 || n <= 0 || d.length < n*s ) {
                  throw new Exception("DataSet: Error in parsed data!");
            }
            this.stride = s;
            // Copy the data locally.
            data = new double[n*stride];
            length = n*stride;
            System.arraycopy(d, 0, data, 0, length);
            // Calculate the data range.
            range(stride);
      }

/*
*******************
** Public Methods
******************/

      /**
       * Append data to the data set.
       * @param d Array containing (x,y) pairs to append
       * @param n Number of (x,y) data pairs in the array.
       * @exception Exception
       *            A generic exception if it fails to load the
       *            parsed array into the class.
       */
      public void append( double d[], int n ) throws Exception {
            double tmp[];
            int ln = n*stride;
            // Validate before touching the buffer, including the undersized-array
            // case that previously surfaced as an unchecked exception.
            if ( d == null || d.length == 0 || n <= 0 || d.length < ln ) {
                  throw new Exception("DataSet: Error in append data!");
            }
            if(data == null) data = new double[increment];
            // Copy the data locally. An exact fit (ln+length == data.length) is
            // fine; the original used '<' and reallocated needlessly in that case.
            if( ln+length <= data.length ) {
                  System.arraycopy(d, 0, data, length, ln);
                  length += ln;
            } else {
                  // Grow the buffer with headroom, preserving existing data.
                  tmp = new double[ln+length+increment];
                  if( length != 0 ) {
                        System.arraycopy(data, 0, tmp, 0, length);
                  }
                  System.arraycopy(d, 0, tmp, length, ln);
                  length += ln;
                  data = tmp;
            }
            // Calculate the data range.
            range(stride);
            // Update the range on Axis that this data is attached to
            if(xaxis != null) xaxis.resetRange();
            if(yaxis != null) yaxis.resetRange();
      }
      /**
       * Delete data from the data set (start and end are inclusive).
       * The first (x,y) pair in the data set start at index 0.
       * Out-of-range requests are clipped to the stored data; a request
       * that selects nothing is a no-op.
       * @param start The start (x,y) pair index.
       * @param end   The end   (x,y) pair index.
       */
      public void delete( int start, int end ) {
            int End   = stride*end;
            int Start = stride*start;
            if(length <= 0) return;
            if( End   < Start )         return;
            if( Start < 0 )             Start = 0;
            if( End   > length-stride ) End   = length-stride;
            // BUG FIX: clamping End can drop it below Start (e.g. start beyond
            // the stored data). The original fell through and *increased*
            // length past the array bounds; bail out instead.
            if( End < Start ) return;
            if( End < length-stride ) {
                  System.arraycopy(data, End+stride,
                                   data, Start, length - End - stride);
            }
            length -= End+stride-Start;
            // Calculate the data range.
            range(stride);
      }
      /**
       * Delete all the data from the data set.
       */
      public void deleteData( ) {
            length = 0;
            data = null;
            range(stride);
      }
      /**
       * Draw the straight line segments and/or the markers at the
       * data points.
       * If this data has been attached to an Axis then scale the data
       * based on the axis maximum/minimum otherwise scale using
       * the data's maximum/minimum
       * @param g Graphics state
       * @param bounds The data window to draw into
       */
      public void draw_data(Graphics g, Rectangle bounds) {
            Color c;
            if ( xaxis != null ) {
                  xmax = xaxis.maximum;
                  xmin = xaxis.minimum;
            }
            if ( yaxis != null ) {
                  ymax = yaxis.maximum;
                  ymin = yaxis.minimum;
            }
            xrange = xmax - xmin;
            yrange = ymax - ymin;
            /*
            ** draw the legend before we clip the data window
            */
            draw_legend(g,bounds);
            /*
            ** Clip the data window
            */
            if(clipping) g.clipRect(bounds.x, bounds.y,
                                    bounds.width, bounds.height);
            // Preserve the caller's color; restored at the end.
            c = g.getColor();
            if( linestyle != DataSet.NOLINE ) {
                  if ( linecolor != null) g.setColor(linecolor);
                  else                    g.setColor(c);
                  draw_lines(g,bounds);
            }
            if( marker > 0 ) {
                  if(markercolor != null) g.setColor(markercolor);
                  else                    g.setColor(c);
                  draw_markers(g,bounds);
            }
            g.setColor(c);
      }
      /**
       * return the data X maximum.
       */
      public double getXmax() { return dxmax; }
      /**
       * return the data X minimum.
       */
      public double getXmin() { return dxmin; }
      /**
       * return the data Y maximum.
       */
      public double getYmax() { return dymax; }
      /**
       * return the data Y minimum.
       */
      public double getYmin() { return dymin; }
      /**
       * Define a data legend in the graph window
       * @param x pixel position of the legend.
       * @param y pixel position of the legend.
       * @param text text to display in the legend
       */
      public void legend(int x, int y, String text) {
            if(text == null)        { legend_text = null; return; }
            if(legend_text == null)   legend_text = new TextLine(text);
            else                      legend_text.setText(text);
            legend_text.setJustification(TextLine.LEFT);
            legend_ix = x;
            legend_iy = y;
            // Zero the data-space position so draw_legend uses the pixel one.
            legend_dx = 0.0;
            legend_dy = 0.0;
      }
      /**
       * Define a data legend in the graph window
       * @param x data position of the legend.
       * @param y data position of the legend.
       * @param text text to display in the legend
       */
      public void legend(double x, double y, String text) {
            if(text == null)        { legend_text = null; return; }
            if(legend_text == null)   legend_text = new TextLine(text);
            else                      legend_text.setText(text);
            legend_text.setJustification(TextLine.LEFT);
            legend_dx = x;
            legend_dy = y;
            // Zero the pixel position so draw_legend derives it from the data one.
            legend_ix = 0;
            legend_iy = 0;
      }
      /**
       * Set the font to be used in the legend
       * @param f font
       */
      public void legendFont(Font f) {
            if(f == null) return;
            if(legend_text == null) legend_text = new TextLine();
            legend_text.setFont(f);
      }
      /**
       * Set the color for the legend text
       * @param c color
       */
      public void legendColor(Color c) {
            if(c == null) return;
            if(legend_text == null) legend_text = new TextLine();
            legend_text.setColor(c);
      }
      /**
       * Return the number of data points in the DataSet
       * @return number of (x,y) points.
       */
      public int dataPoints() { return length/stride; }
      /**
       * get the data point at the parsed index. The first (x,y) pair
       * is at index 0.
       * @param index Data point index
       * @return array containing the (x,y) pair, or null if the index
       *         is out of range.
       */
      public double[] getPoint(int index) {
            double point[] = new double[stride];
            int i = index*stride;
            if( index < 0 || i > length-stride ) return null;
            for(int j=0; j<stride; j++) point[j] = data[i+j];
            return point;
      }
      /**
       * Return the data point that is closest to the parsed (x,y) position
       * @param x
       * @param y (x,y) position in data space.
       * @return array containing the closest data point as
       *         {x, y, squared-distance}, or null if the data set is empty.
       */
      public double[] getClosestPoint(double x, double y) {
            // BUG FIX: the original dereferenced data[0]/data[1] unconditionally
            // and crashed on an empty (or freshly deleteData'd) data set.
            if( data == null || length < stride ) return null;
            double point[] = {0.0, 0.0, 0.0};
            int i;
            double xdiff, ydiff, dist2;
            // Seed with the first point, then keep the minimum squared distance.
            xdiff = data[0] - x;
            ydiff = data[1] - y;
            point[0] = data[0];
            point[1] = data[1];
            point[2] = xdiff*xdiff + ydiff*ydiff;
            for(i=stride; i<length-1; i+=stride) {
                xdiff = data[i  ] - x;
                ydiff = data[i+1] - y;
                dist2 = xdiff*xdiff + ydiff*ydiff;
                if(dist2 < point[2]) {
                   point[0] = data[i  ];
                   point[1] = data[i+1];
                   point[2] = dist2;
                }
            }
            return point;
      }

/*
*********************
** Protected Methods
*********************/

      /**
       * Draw into the data window the straight line segments joining the
       * data points.
       * @param g Graphics context
       * @param w Data window
       */
      protected void draw_lines(Graphics g, Rectangle w) {
            int i;
            boolean inside0 = false;
            boolean inside1 = false;
            int x0 = 0 , y0 = 0;
            int x1 = 0 , y1 = 0;
            // Calculate the clipping rectangle
            Rectangle clip = g.getClipBounds();
            int xcmin = clip.x;
            int xcmax = clip.x + clip.width;
            int ycmin = clip.y;
            int ycmax = clip.y + clip.height;
            // Is there any data to draw? Sometimes the draw command will
            // will be called before any data has been placed in the class.
            if( data == null || data.length < stride ) return;
            // Is the first point inside the drawing region ?
            if( (inside0 = contains(data[0], data[1])) ) {
                  x0 = (int)(w.x + ((data[0]-xmin)/xrange)*w.width);
                  y0 = (int)(w.y + (1.0 - (data[1]-ymin)/yrange)*w.height);
                  if( x0 < xcmin || x0 > xcmax ||
                      y0 < ycmin || y0 > ycmax)  inside0 = false;
            }
            for(i=stride; i<length; i+=stride) {
                // Is this point inside the drawing region?
                inside1 = contains( data[i], data[i+1]);
                // If one point is inside the drawing region calculate the second point
                if ( inside1 || inside0 ) {
                     x1 = (int)(w.x + ((data[i]-xmin)/xrange)*w.width);
                     y1 = (int)(w.y + (1.0 - (data[i+1]-ymin)/yrange)*w.height);
                     if( x1 < xcmin || x1 > xcmax ||
                         y1 < ycmin || y1 > ycmax)  inside1 = false;
                }
                // If the second point is inside calculate the first point if it
                // was outside
                if ( !inside0 && inside1 ) {
                     x0 = (int)(w.x + ((data[i-stride]-xmin)/xrange)*w.width);
                     y0 = (int)(w.y + (1.0 - (data[i-stride+1]-ymin)/yrange)*w.height);
                }
                // If either point is inside draw the segment
                if ( inside0 || inside1 ) {
                     g.drawLine(x0,y0,x1,y1);
                }
                /*
                ** The reason for the convolution above is to avoid calculating
                ** the points over and over. Now just copy the second point to the
                ** first and grab the next point
                */
                inside0 = inside1;
                x0 = x1;
                y0 = y1;
            }
      }
      /**
       * Return true if the point (x,y) is inside the allowed data range.
       */
      protected boolean contains(double x, double y) {
            return x >= xmin && x <= xmax &&
                   y >= ymin && y <= ymax;
      }
      /**
       * Draw the markers.
       * Only markers inside the specified range will be drawn. Also markers
       * close the edge of the clipping region will be clipped.
       * @param g Graphics context
       * @param w data window
       * @see graph.Markers
       */
      protected void draw_markers(Graphics g, Rectangle w) {
            int x1,y1;
            int i;
            // Calculate the clipping rectangle
            Rectangle clip = g.getClipBounds();
            int xcmin = clip.x;
            int xcmax = clip.x + clip.width;
            int ycmin = clip.y;
            int ycmax = clip.y + clip.height;
            /*
            ** Load the marker specified for this data
            */
            Markers m = g2d.getMarkers();
            if( m == null) return;
            for(i=0; i<length; i+=stride) {
                if( contains( data[i], data[i+1]) ) {
                    x1 = (int)(w.x + ((data[i]-xmin)/xrange)*w.width);
                    y1 = (int)(w.y + (1.0 - (data[i+1]-ymin)/yrange)*w.height);
                    if( x1 >= xcmin && x1 <= xcmax &&
                        y1 >= ycmin && y1 <= ycmax )
                        m.draw(g, marker, markerscale, x1, y1);
                }
            }
      }
      /**
       * Draw a legend for this data set
       * @param g Graphics context
       * @param w Data Window
       */
      protected void draw_legend(Graphics g, Rectangle w) {
            Color c = g.getColor();
            Markers m = null;
            if( legend_text == null) return;
            if( legend_text.isNull() ) return;
            // A (0,0) pixel position means the legend was given in data space;
            // convert (and cache) the pixel position from the data position.
            if( legend_ix == 0 && legend_iy == 0 ) {
                  legend_ix = (int)(w.x + ((legend_dx-xmin)/xrange)*w.width);
                  legend_iy = (int)(w.y + (1.0 - (legend_dy-ymin)/yrange)*w.height);
            }
            if( linestyle != DataSet.NOLINE ) {
                  if ( linecolor != null) g.setColor(linecolor);
                  g.drawLine(legend_ix,legend_iy,legend_ix+legend_length,legend_iy);
            }
            if( marker > 0 ) {
                  m = g2d.getMarkers();
                  if( m != null) {
                      if(markercolor != null) g.setColor(markercolor);
                      else                    g.setColor(c);
                      m.draw(g,marker,1.0, legend_ix+legend_length/2, legend_iy);
                  }
            }
            legend_text.draw( g,
                           legend_ix+legend_length+legend_text.charWidth(g,' '),
                           legend_iy+legend_text.getAscent(g)/3);
            g.setColor(c);
      }
      /**
       * Calculate the range of the data. This modifies dxmin,dxmax,dymin,dymax
       * and xmin,xmax,ymin,ymax
       */
      protected void range(int stride) {
            int i;
            if( length >= stride ) {
                 dxmax = data[0];
                 dymax = data[1];
                 dxmin = dxmax;
                 dymin = dymax;
            } else {
                 dxmin = 0.0;
                 dxmax = 0.0;
                 dymin = 0.0;
                 dymax = 0.0;
            }
            for(i=stride; i<length; i+=stride ) {
                if( dxmax < data[i] )   { dxmax = data[i]; }
                else
                if( dxmin > data[i] )   { dxmin = data[i]; }
                if( dymax < data[i+1] ) { dymax = data[i+1]; }
                else
                if( dymin > data[i+1] ) { dymin = data[i+1]; }
            }
            // Only adopt the data range for the plottable range when no axis
            // is driving the scaling.
            if( xaxis == null) {
                xmin = dxmin;
                xmax = dxmax;
            }
            if( yaxis == null) {
                ymin = dymin;
                ymax = dymax;
            }
      }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.fms.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Violation detail for Network Firewall for a firewall policy that has a different
* <a>NetworkFirewallPolicyDescription</a> than is required by the Firewall Manager policy.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/fms-2018-01-01/NetworkFirewallPolicyModifiedViolation"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class NetworkFirewallPolicyModifiedViolation implements Serializable, Cloneable, StructuredPojo {

    /** The ID of the Network Firewall or VPC resource that's in violation. */
    private String violationTarget;

    /** The policy that's currently in use in the individual account. */
    private NetworkFirewallPolicyDescription currentPolicyDescription;

    /** The policy that should be in use in the individual account in order to be compliant. */
    private NetworkFirewallPolicyDescription expectedPolicyDescription;

    /**
     * Sets the ID of the Network Firewall or VPC resource that's in violation.
     *
     * @param violationTarget
     *        The ID of the Network Firewall or VPC resource that's in violation.
     */
    public void setViolationTarget(String violationTarget) {
        this.violationTarget = violationTarget;
    }

    /**
     * Returns the ID of the Network Firewall or VPC resource that's in violation.
     *
     * @return The ID of the Network Firewall or VPC resource that's in violation.
     */
    public String getViolationTarget() {
        return this.violationTarget;
    }

    /**
     * Fluent variant of {@link #setViolationTarget(String)}.
     *
     * @param violationTarget
     *        The ID of the Network Firewall or VPC resource that's in violation.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public NetworkFirewallPolicyModifiedViolation withViolationTarget(String violationTarget) {
        setViolationTarget(violationTarget);
        return this;
    }

    /**
     * Sets the policy that's currently in use in the individual account.
     *
     * @param currentPolicyDescription
     *        The policy that's currently in use in the individual account.
     */
    public void setCurrentPolicyDescription(NetworkFirewallPolicyDescription currentPolicyDescription) {
        this.currentPolicyDescription = currentPolicyDescription;
    }

    /**
     * Returns the policy that's currently in use in the individual account.
     *
     * @return The policy that's currently in use in the individual account.
     */
    public NetworkFirewallPolicyDescription getCurrentPolicyDescription() {
        return this.currentPolicyDescription;
    }

    /**
     * Fluent variant of {@link #setCurrentPolicyDescription(NetworkFirewallPolicyDescription)}.
     *
     * @param currentPolicyDescription
     *        The policy that's currently in use in the individual account.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public NetworkFirewallPolicyModifiedViolation withCurrentPolicyDescription(NetworkFirewallPolicyDescription currentPolicyDescription) {
        setCurrentPolicyDescription(currentPolicyDescription);
        return this;
    }

    /**
     * Sets the policy that should be in use in the individual account in order to be compliant.
     *
     * @param expectedPolicyDescription
     *        The policy that should be in use in the individual account in order to be compliant.
     */
    public void setExpectedPolicyDescription(NetworkFirewallPolicyDescription expectedPolicyDescription) {
        this.expectedPolicyDescription = expectedPolicyDescription;
    }

    /**
     * Returns the policy that should be in use in the individual account in order to be compliant.
     *
     * @return The policy that should be in use in the individual account in order to be compliant.
     */
    public NetworkFirewallPolicyDescription getExpectedPolicyDescription() {
        return this.expectedPolicyDescription;
    }

    /**
     * Fluent variant of {@link #setExpectedPolicyDescription(NetworkFirewallPolicyDescription)}.
     *
     * @param expectedPolicyDescription
     *        The policy that should be in use in the individual account in order to be compliant.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public NetworkFirewallPolicyModifiedViolation withExpectedPolicyDescription(NetworkFirewallPolicyDescription expectedPolicyDescription) {
        setExpectedPolicyDescription(expectedPolicyDescription);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format is fixed (consumed by tests/logs): "{Name: value,...}".
        final StringBuilder buf = new StringBuilder("{");
        if (getViolationTarget() != null) {
            buf.append("ViolationTarget: ").append(getViolationTarget()).append(",");
        }
        if (getCurrentPolicyDescription() != null) {
            buf.append("CurrentPolicyDescription: ").append(getCurrentPolicyDescription()).append(",");
        }
        if (getExpectedPolicyDescription() != null) {
            buf.append("ExpectedPolicyDescription: ").append(getExpectedPolicyDescription());
        }
        return buf.append("}").toString();
    }

    /**
     * Two violations are equal when all three properties are equal (null-safe).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Covers the null case as well: null is never an instance.
        if (!(obj instanceof NetworkFirewallPolicyModifiedViolation)) {
            return false;
        }
        NetworkFirewallPolicyModifiedViolation that = (NetworkFirewallPolicyModifiedViolation) obj;
        return java.util.Objects.equals(getViolationTarget(), that.getViolationTarget())
                && java.util.Objects.equals(getCurrentPolicyDescription(), that.getCurrentPolicyDescription())
                && java.util.Objects.equals(getExpectedPolicyDescription(), that.getExpectedPolicyDescription());
    }

    /**
     * Hash code consistent with {@link #equals(Object)}. Objects.hash produces
     * the same 31-based accumulation as the hand-rolled prime loop it replaces.
     */
    @Override
    public int hashCode() {
        return java.util.Objects.hash(getViolationTarget(), getCurrentPolicyDescription(), getExpectedPolicyDescription());
    }

    /** Shallow copy via {@link Object#clone()}. */
    @Override
    public NetworkFirewallPolicyModifiedViolation clone() {
        try {
            return (NetworkFirewallPolicyModifiedViolation) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    /** Marshalls this object using the generated protocol marshaller. */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.fms.model.transform.NetworkFirewallPolicyModifiedViolationMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
package com.example.knitknit;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.ActionMode;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MenuInflater;
import android.view.ViewGroup;
/**
 * Activity that displays a project's counters and lets the user increase,
 * decrease, add, delete, and edit them. The project id is taken from the
 * saved instance state or, failing that, from the launching intent's extras.
 */
public class CountingLand extends Activity {
    private static final String TAG = "knitknit-CountingLand";

    // NOTE(review): process-global mutable state; presumably how the selected
    // counter is handed to CounterEditor (which is started without extras
    // below) — confirm before refactoring.
    public static Counter selectedCounter = null;

    private DataWrangler mDataWrangler;
    private Project mProject;
    private ActionMode mActionMode;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getActionBar().setDisplayHomeAsUpEnabled(true);

        // Recover projectID from the saved instance state, if present.
        Long projectID = (savedInstanceState == null ?
                null :
                (Long) savedInstanceState.getSerializable(DataWrangler.PROJECT_KEY_ID));

        // Fall back to the launching intent's extras. Guard with containsKey():
        // Bundle.getLong() returns 0 for a missing key, which would otherwise
        // masquerade as a (nonexistent) project id of 0 instead of null.
        if (projectID == null) {
            Bundle extras = getIntent().getExtras();
            if (extras != null && extras.containsKey(DataWrangler.PROJECT_KEY_ID)) {
                projectID = extras.getLong(DataWrangler.PROJECT_KEY_ID);
            }
        }

        if (projectID == null) {
            // NOTE(review): we still fall through to retrieveProject(null),
            // matching the original flow — verify DataWrangler tolerates a
            // null id.
            Log.w(TAG, "projectID is null");
        } else {
            Log.w(TAG, "projectID: " + projectID);
        }

        // Load the project and bind it to this activity.
        mDataWrangler = new DataWrangler(this);
        mDataWrangler.open();
        mProject = mDataWrangler.retrieveProject(projectID);
        mProject.setActivity(this);

        // Update the dateOpened on the project.
        mDataWrangler.touchProject(mProject);

        // DEBUG
        Log.w(TAG, "this project has " + mProject.getCounters().size() + " counters");

        // Set the action bar title to the name of the project.
        getActionBar().setTitle(mProject.getName());

        // Attach the project's view hierarchy to this activity's content view.
        ViewGroup root = (ViewGroup) findViewById(android.R.id.content);
        mProject.inflate(root, getResources().getConfiguration().orientation);
        root.addView(mProject.getWrapper());
    }

    // Lifecycle Management Methods

    //@Override
    //protected void onDestroy() {
    //    super.onDestroy();
    //    mDataWrangler.close();
    //}

    @Override
    protected void onPause() {
        super.onPause();
        // Persist counter values whenever the activity leaves the foreground.
        mDataWrangler.saveProject(mProject);
    }

    @Override
    public void onWindowFocusChanged(boolean hasFocus) {
        Log.w(TAG, "in onWindowFocusChanged()");
        super.onWindowFocusChanged(hasFocus);
        if (hasFocus) {
            mProject.refreshViews();
        }
    }

    // Options Menu (Action Bar) Methods

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.countingland, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                // "Up" navigation: return to the project list, clearing
                // anything above it on the back stack.
                Intent intent = new Intent(this, ProjectList.class);
                intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                startActivity(intent);
                return true;
            case R.id.new_counter:
                // Create the counter in the database, mirror it in the
                // in-memory project, then refresh the display.
                long id = mDataWrangler.createCounter(mProject.getId());
                Counter counter = mDataWrangler.retrieveCounter(id);
                mProject.addCounter(counter);
                mProject.refreshViews();
                return true;
            default:
                return false;
        }
    }

    // Other Methods

    public Project getProject() {
        return mProject;
    }

    // NOTE(review): hard-coded off; looks like a placeholder for a future
    // "zero mode" preference — confirm intent.
    public static boolean getZeroMode() {
        return false;
    }

    /**
     * Enters the contextual action mode for the given counter, highlighting
     * it and suspending normal touch handling until the mode ends.
     */
    public void startActionModeForCounter(Counter counter) {
        if (mActionMode == null) {
            // Tell the ProjectWrapper to ignore touch events while we are in action mode.
            ((ProjectWrapper) findViewById(R.id.project_wrapper)).setRespondToTouch(false);
            mActionMode = startActionMode(mActionModeCallback);
            mActionMode.setTag(counter);
            // If the selected counter has a name, show it as the mode title.
            if (counter.getName() != null) {
                mActionMode.setTitle(counter.getName());
            }
            // Highlight the selected counter.
            counter.getWrapper().setSelected(true);
            counter.refreshViews();
            selectedCounter = counter;
        }
    }

    // Contextual Action Mode Callbacks
    private ActionMode.Callback mActionModeCallback = new ActionMode.Callback() {
        @Override
        public boolean onCreateActionMode(ActionMode mode, Menu menu) {
            // Inflate a menu resource providing context menu items.
            MenuInflater inflater = mode.getMenuInflater();
            inflater.inflate(R.menu.counter_context, menu);
            // Hide "delete" when it would remove the project's last counter.
            if (mProject.getCounters().size() < 2) {
                MenuItem deleteItem = menu.findItem(R.id.delete_counter);
                deleteItem.setVisible(false);
            }
            return true;
        }

        // Called each time the action mode is shown. Always called after onCreateActionMode, but
        // may be called multiple times if the mode is invalidated.
        @Override
        public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
            return false; // Return false if nothing is done
        }

        // Called when the user selects a contextual menu item.
        @Override
        public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
            Counter counter = (Counter) mode.getTag();
            switch (item.getItemId()) {
                case R.id.decrease_counter:
                    counter.decrease();
                    return true;
                case R.id.delete_counter:
                    mProject.deleteCounter(counter);
                    // Close the Contextual Action Bar.
                    mode.finish();
                    return true;
                case R.id.increase_counter:
                    counter.increase();
                    return true;
                case R.id.edit_counter:
                    // Start the CounterEditor activity; it presumably reads
                    // the static selectedCounter, since no extras are passed.
                    Intent intent = new Intent(CountingLand.this, CounterEditor.class);
                    startActivity(intent);
                    // Close the Contextual Action Bar.
                    mode.finish();
                    return true;
                default:
                    return false;
            }
        }

        // Called when the user exits the action mode.
        @Override
        public void onDestroyActionMode(ActionMode mode) {
            mActionMode = null;
            // Un-highlight the selected counter.
            Counter counter = (Counter) mode.getTag();
            counter.getWrapper().setSelected(false);
            mProject.refreshViews();
            // Tell the ProjectWrapper to stop ignoring touch events.
            ((ProjectWrapper) findViewById(R.id.project_wrapper)).setRespondToTouch(true);
        }
    };
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.config.ClientSecurityConfig;
import com.hazelcast.client.spi.impl.ClientExecutionServiceImpl;
import com.hazelcast.client.spi.properties.ClientProperty;
import com.hazelcast.client.test.TestHazelcastFactory;
import com.hazelcast.config.Config;
import com.hazelcast.config.EvictionConfig;
import com.hazelcast.config.ListenerConfig;
import com.hazelcast.config.NearCacheConfig;
import com.hazelcast.config.SerializationConfig;
import com.hazelcast.core.DistributedObject;
import com.hazelcast.core.EntryAdapter;
import com.hazelcast.core.EntryEvent;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IExecutorService;
import com.hazelcast.core.ILock;
import com.hazelcast.core.IMap;
import com.hazelcast.core.LifecycleEvent;
import com.hazelcast.core.LifecycleListener;
import com.hazelcast.logging.Logger;
import com.hazelcast.map.MapInterceptor;
import com.hazelcast.map.listener.MapListener;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.nio.serialization.DataSerializable;
import com.hazelcast.nio.serialization.Portable;
import com.hazelcast.nio.serialization.PortableFactory;
import com.hazelcast.nio.serialization.PortableReader;
import com.hazelcast.nio.serialization.PortableWriter;
import com.hazelcast.security.UsernamePasswordCredentials;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.NightlyTest;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedList;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.config.EvictionConfig.MaxSizePolicy.ENTRY_COUNT;
import static com.hazelcast.core.LifecycleEvent.LifecycleState;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class ClientRegressionWithMockNetworkTest extends HazelcastTestSupport {
private final TestHazelcastFactory hazelcastFactory = new TestHazelcastFactory();
@After
public void cleanup() {
hazelcastFactory.terminateAll();
}
/**
* Test for issues #267 and #493
*/
@Test
public void testIssue493() {
final HazelcastInstance hz1 = hazelcastFactory.newHazelcastInstance();
hazelcastFactory.newHazelcastInstance();
ClientConfig clientConfig = new ClientConfig();
clientConfig.getNetworkConfig().setRedoOperation(true);
HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
final ILock lock = client.getLock("lock");
for (int k = 0; k < 10; k++) {
lock.lock();
try {
sleepMillis(100);
} finally {
lock.unlock();
}
}
lock.lock();
hz1.shutdown();
lock.unlock();
}
@Test(timeout = 60000)
public void testOperationRedo() {
final HazelcastInstance hz1 = hazelcastFactory.newHazelcastInstance();
hazelcastFactory.newHazelcastInstance();
ClientConfig clientConfig = new ClientConfig();
clientConfig.getNetworkConfig().setRedoOperation(true);
HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
final Thread thread = new Thread() {
public void run() {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
hz1.getLifecycleService().shutdown();
}
};
final IMap<Integer, String> map = client.getMap("m");
thread.start();
int expected = 1000;
for (int i = 0; i < expected; i++) {
map.put(i, "item" + i);
}
assertJoinable(thread);
assertEquals(expected, map.size());
}
@Test
public void testOperationRedo_smartRoutingDisabled() {
final HazelcastInstance hz1 = hazelcastFactory.newHazelcastInstance();
hazelcastFactory.newHazelcastInstance();
ClientConfig clientConfig = new ClientConfig();
clientConfig.setRedoOperation(true);
clientConfig.setSmartRouting(false);
HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
final Thread thread = new Thread() {
public void run() {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
hz1.getLifecycleService().shutdown();
}
};
final IMap<Integer, Integer> map = client.getMap("m");
thread.start();
int expected = 1000;
for (int i = 0; i < expected; i++) {
map.put(i, i);
}
assertJoinable(thread);
assertEquals(expected, map.size());
}
@Test
public void testGetDistributedObjectsIssue678() {
final HazelcastInstance hz = hazelcastFactory.newHazelcastInstance();
hz.getQueue("queue");
hz.getMap("map");
hz.getSemaphore("s");
final HazelcastInstance instance = hazelcastFactory.newHazelcastClient();
final Collection<DistributedObject> distributedObjects = instance.getDistributedObjects();
assertEquals(3, distributedObjects.size());
}
@Test
public void testMapDestroyIssue764() {
HazelcastInstance server = hazelcastFactory.newHazelcastInstance();
HazelcastInstance client = hazelcastFactory.newHazelcastClient();
assertNoOfDistributedObject("Initially the server should have %d distributed objects, but had %d", 0, server.getDistributedObjects());
assertNoOfDistributedObject("Initially the client should have %d distributed objects, but had %d", 0, client.getDistributedObjects());
IMap map = client.getMap("mapToDestroy");
assertNoOfDistributedObject("After getMap() the server should have %d distributed objects, but had %d", 1, server.getDistributedObjects());
assertNoOfDistributedObject("After getMap() the client should have %d distributed objects, but had %d", 1, client.getDistributedObjects());
map.destroy();
// Get the distributed objects as fast as possible to catch a race condition more likely
Collection<DistributedObject> serverDistributedObjects = server.getDistributedObjects();
Collection<DistributedObject> clientDistributedObjects = client.getDistributedObjects();
assertNoOfDistributedObject("After destroy() the server should should have %d distributed objects, but had %d", 0, serverDistributedObjects);
assertNoOfDistributedObject("After destroy() the client should should have %d distributed objects, but had %d", 0, clientDistributedObjects);
}
/**
 * Asserts the collection holds exactly {@code expected} distributed objects,
 * prefixing the failure message with a listing of every object present so a
 * mismatch shows exactly what existed at the time of the check.
 */
private void assertNoOfDistributedObject(String message, int expected, Collection<DistributedObject> distributedObjects) {
    StringBuilder description = new StringBuilder(message + "\n");
    for (DistributedObject object : distributedObjects) {
        description.append("Name: ").append(object.getName())
                .append(", Service: ").append(object.getServiceName())
                .append(", PartitionKey: ").append(object.getPartitionKey())
                .append("\n");
    }
    assertEqualsStringFormat(description.toString(), expected, distributedObjects.size());
}
/**
* Client hangs at map.get after shutdown
*/
@Test
public void testIssue821() {
final HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient();
final IMap<Object, Object> map = client.getMap("default");
map.put("key1", "value1");
instance.shutdown();
try {
map.get("key1");
fail();
} catch (Exception ignored) {
}
assertFalse(instance.getLifecycleService().isRunning());
}
@Test
public void testClientConnectionEvents() {
final LinkedList<LifecycleState> list = new LinkedList<LifecycleState>();
list.offer(LifecycleState.STARTING);
list.offer(LifecycleState.STARTED);
list.offer(LifecycleState.CLIENT_CONNECTED);
list.offer(LifecycleState.CLIENT_DISCONNECTED);
list.offer(LifecycleState.CLIENT_CONNECTED);
list.offer(LifecycleState.CLIENT_DISCONNECTED);
list.offer(LifecycleState.SHUTTING_DOWN);
list.offer(LifecycleState.SHUTDOWN);
hazelcastFactory.newHazelcastInstance();
final CountDownLatch latch = new CountDownLatch(list.size());
final CountDownLatch connectedLatch = new CountDownLatch(2);
final CountDownLatch disconnectedLatch = new CountDownLatch(2);
LifecycleListener listener = new LifecycleListener() {
public void stateChanged(LifecycleEvent event) {
Logger.getLogger(getClass()).info("stateChanged: " + event);
final LifecycleState state = list.poll();
LifecycleState eventState = event.getState();
if (state != null && state.equals(eventState)) {
latch.countDown();
}
if (LifecycleState.CLIENT_CONNECTED.equals(eventState)) {
connectedLatch.countDown();
}
if (LifecycleState.CLIENT_DISCONNECTED.equals(eventState)) {
disconnectedLatch.countDown();
}
}
};
final ListenerConfig listenerConfig = new ListenerConfig(listener);
final ClientConfig clientConfig = new ClientConfig();
clientConfig.addListenerConfig(listenerConfig);
clientConfig.getNetworkConfig().setConnectionAttemptLimit(100);
HazelcastInstance hazelcastClient = hazelcastFactory.newHazelcastClient(clientConfig);
hazelcastFactory.shutdownAllMembers();
hazelcastFactory.newHazelcastInstance();
assertOpenEventually("LifecycleState failed. Expected two CLIENT_CONNECTED events!", connectedLatch);
hazelcastFactory.shutdownAllMembers();
//wait for disconnect then call client.shutdown(). Otherwise shutdown could prevent firing DISCONNECTED event
assertOpenEventually("LifecycleState failed. Expected two CLIENT_DISCONNECTED events!", disconnectedLatch);
hazelcastClient.shutdown();
assertOpenEventually("LifecycleState failed", latch);
}
@Test
public void testInterceptor() {
hazelcastFactory.newHazelcastInstance();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient();
final IMap<Object, Object> map = client.getMap("map");
final MapInterceptorImpl interceptor = new MapInterceptorImpl();
final String id = map.addInterceptor(interceptor);
assertNotNull(id);
map.put("key1", "value");
assertEquals("value", map.get("key1"));
map.put("key1", "value1");
assertEquals("getIntercepted", map.get("key1"));
assertFalse(map.replace("key1", "getIntercepted", "val"));
assertTrue(map.replace("key1", "value1", "val"));
assertEquals("val", map.get("key1"));
map.put("key2", "oldValue");
assertEquals("oldValue", map.get("key2"));
map.put("key2", "newValue");
assertEquals("putIntercepted", map.get("key2"));
map.put("key3", "value2");
assertEquals("value2", map.get("key3"));
assertEquals("removeIntercepted", map.remove("key3"));
}
// Interceptor used by testInterceptor(): rewrites specific sentinel values on
// get/put/remove so the test can observe that interception ran even though
// the interceptor was registered from the client side.
private static class MapInterceptorImpl implements MapInterceptor {
    MapInterceptorImpl() {
    }

    // Replaces the returned value only when the stored value is "value1";
    // a null return appears to leave the value untouched (see the
    // testInterceptor() assertions) — semantics defined by MapInterceptor.
    public Object interceptGet(Object value) {
        if ("value1".equals(value)) {
            return "getIntercepted";
        }
        return null;
    }

    public void afterGet(Object value) {
    }

    // Substitutes the stored value only for the specific
    // oldValue="oldValue" / newValue="newValue" transition.
    public Object interceptPut(Object oldValue, Object newValue) {
        if ("oldValue".equals(oldValue) && "newValue".equals(newValue)) {
            return "putIntercepted";
        }
        return null;
    }

    public void afterPut(Object value) {
    }

    // Substitutes the value reported by remove() only when "value2" is removed.
    public Object interceptRemove(Object removedValue) {
        if ("value2".equals(removedValue)) {
            return "removeIntercepted";
        }
        return null;
    }

    public void afterRemove(Object value) {
    }
}
@Test
public void testClientPortableWithoutRegisteringToNode() {
hazelcastFactory.newHazelcastInstance();
final SerializationConfig serializationConfig = new SerializationConfig();
serializationConfig.addPortableFactory(5, new PortableFactory() {
public Portable create(int classId) {
return new SamplePortable();
}
});
final ClientConfig clientConfig = new ClientConfig();
clientConfig.setSerializationConfig(serializationConfig);
final HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
final IMap<Integer, SamplePortable> sampleMap = client.getMap(randomString());
sampleMap.put(1, new SamplePortable(666));
final SamplePortable samplePortable = sampleMap.get(1);
assertEquals(666, samplePortable.a);
}
@Test
public void testCredentials() {
final Config config = new Config();
config.getGroupConfig().setName("foo").setPassword("bar");
hazelcastFactory.newHazelcastInstance(config);
final ClientConfig clientConfig = new ClientConfig();
final ClientSecurityConfig securityConfig = clientConfig.getSecurityConfig();
securityConfig.setCredentialsClassname(MyCredentials.class.getName());
hazelcastFactory.newHazelcastClient(clientConfig);
}
public static class MyCredentials extends UsernamePasswordCredentials {
public MyCredentials() {
super("foo", "bar");
}
}
/**
 * Regression test: an entry listener registered through the client must keep
 * receiving events after the member it was registered against shuts down and
 * the client fails over to another member.
 * NOTE(review): this method was missing {@code @Test}, so JUnit never ran it.
 */
@Test
public void testListenerReconnect() {
    final HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
    final HazelcastInstance client = hazelcastFactory.newHazelcastClient();
    final CountDownLatch latch = new CountDownLatch(2);
    final IMap<Object, Object> m = client.getMap("m");
    final String id = m.addEntryListener(new EntryAdapter() {
        public void entryAdded(EntryEvent event) {
            latch.countDown();
        }

        @Override
        public void entryUpdated(EntryEvent event) {
            latch.countDown();
        }
    }, true);
    m.put("key1", "value1");
    // Bring up a second member, then shut down the original one so the
    // client must re-register its listener on the surviving member.
    hazelcastFactory.newHazelcastInstance();
    instance.shutdown();
    // Keep producing updates until the latch confirms events still arrive.
    final Thread thread = new Thread() {
        @Override
        public void run() {
            while (!isInterrupted()) {
                m.put("key2", "value2");
                try {
                    Thread.sleep(100);
                } catch (InterruptedException ignored) {
                }
            }
        }
    };
    thread.start();
    assertOpenEventually(latch, 10);
    thread.interrupt();
    assertJoinable(thread);
    assertTrue(m.removeEntryListener(id));
    assertFalse(m.removeEntryListener("foo"));
}
// Minimal Portable used by testClientPortableWithoutRegisteringToNode() to
// verify that registering the portable factory on the client alone is enough
// — the member never sees this class.
static class SamplePortable implements Portable {
    public int a;

    public SamplePortable(int a) {
        this.a = a;
    }

    // No-arg constructor used by the portable factory during deserialization.
    public SamplePortable() {
    }

    // Must match the factory id registered in the test's SerializationConfig.
    public int getFactoryId() {
        return 5;
    }

    public int getClassId() {
        return 6;
    }

    public void writePortable(PortableWriter writer) throws IOException {
        writer.writeInt("a", a);
    }

    public void readPortable(PortableReader reader) throws IOException {
        a = reader.readInt("a");
    }
}
@Test
public void testNearCache_WhenRegisteredNodeIsDead() {
final HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
final ClientConfig clientConfig = new ClientConfig();
clientConfig.getNetworkConfig().setConnectionAttemptLimit(Integer.MAX_VALUE);
final String mapName = randomMapName();
NearCacheConfig nearCacheConfig = new NearCacheConfig();
nearCacheConfig.setName(mapName);
nearCacheConfig.setInvalidateOnChange(true);
clientConfig.addNearCacheConfig(nearCacheConfig);
final HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
final IMap<Object, Object> map = client.getMap(mapName);
map.put("a", "b");
// populate Near Cache
map.get("a");
instance.shutdown();
hazelcastFactory.newHazelcastInstance();
assertTrueEventually(new AssertTask() {
@Override
public void run() throws Exception {
assertNull(map.get("a"));
}
});
}
@Category(NightlyTest.class)
@Test
public void testLock_WhenDummyClientAndOwnerNodeDiesTogether() throws Exception {
testLock_WhenClientAndOwnerNodeDiesTogether(false);
}
@Category(NightlyTest.class)
@Test
public void testLock_WhenSmartClientAndOwnerNodeDiesTogether() throws Exception {
testLock_WhenClientAndOwnerNodeDiesTogether(true);
}
private void testLock_WhenClientAndOwnerNodeDiesTogether(boolean smart) throws Exception {
hazelcastFactory.newHazelcastInstance();
final ClientConfig clientConfig = new ClientConfig();
clientConfig.getNetworkConfig().setSmartRouting(smart);
final int tryCount = 5;
for (int i = 0; i < tryCount; i++) {
final HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
final ILock lock = client.getLock("lock");
assertTrue(lock.tryLock(1, TimeUnit.MINUTES));
client.getLifecycleService().terminate(); //with client is dead, lock should be released.
instance.getLifecycleService().terminate();
}
}
@Test
public void testDeadlock_WhenDoingOperationFromListeners() {
hazelcastFactory.newHazelcastInstance();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient(new ClientConfig().setExecutorPoolSize(1));
final int putCount = 1000;
final CountDownLatch latch = new CountDownLatch(putCount);
final IMap<Object, Object> map1 = client.getMap(randomMapName());
final IMap<Object, Object> map2 = client.getMap(randomMapName());
map1.addEntryListener(new EntryAdapter<Object, Object>() {
@Override
public void entryAdded(EntryEvent<Object, Object> event) {
map2.put(1, 1);
latch.countDown();
}
}, false);
for (int i = 0; i < putCount; i++) {
map1.put(i, i);
}
assertOpenEventually(latch);
}
@Test
public void testDeadlock_WhenDoingOperationFromLifecycleListener() {
HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
final ClientConfig clientConfig = new ClientConfig();
HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig.setExecutorPoolSize(1));
hazelcastFactory.newHazelcastInstance();
final CountDownLatch latch = new CountDownLatch(1);
final IMap<Object, Object> map = client.getMap(randomMapName());
client.getLifecycleService().addLifecycleListener(new LifecycleListener() {
@Override
public void stateChanged(LifecycleEvent event) {
if (event.getState() == LifecycleState.CLIENT_DISCONNECTED) {
map.get(1);
latch.countDown();
}
}
});
instance.shutdown();
assertOpenEventually(latch);
}
@Test
public void testDeadlock_WhenDoingOperationFromLifecycleListenerWithInitialPartitionTable() {
HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
final ClientConfig clientConfig = new ClientConfig();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig.setExecutorPoolSize(1));
hazelcastFactory.newHazelcastInstance();
final CountDownLatch latch = new CountDownLatch(1);
final IMap<Object, Object> map = client.getMap(randomMapName());
// Let the partition table retrieved the first time
map.get(1);
client.getLifecycleService().addLifecycleListener(new LifecycleListener() {
@Override
public void stateChanged(LifecycleEvent event) {
if (event.getState() == LifecycleState.CLIENT_DISCONNECTED) {
for (int i = 0; i < 1000; i++) {
map.get(i);
}
latch.countDown();
}
}
});
instance.shutdown();
assertOpenEventually(latch);
}
@Test
public void testDeadlock_whenDoingOperationFromLifecycleListener_withNearCache() {
String mapName = randomMapName();
EvictionConfig evictionConfig = new EvictionConfig()
.setMaximumSizePolicy(ENTRY_COUNT)
.setSize(1);
NearCacheConfig nearCacheConfig = new NearCacheConfig()
.setName(mapName)
.setEvictionConfig(evictionConfig);
ClientConfig clientConfig = new ClientConfig()
.addNearCacheConfig(nearCacheConfig)
.setExecutorPoolSize(1);
HazelcastInstance instance = hazelcastFactory.newHazelcastInstance();
HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
hazelcastFactory.newHazelcastInstance();
final CountDownLatch latch = new CountDownLatch(1);
final IMap<Object, Object> map = client.getMap(mapName);
client.getLifecycleService().addLifecycleListener(new LifecycleListener() {
@Override
public void stateChanged(LifecycleEvent event) {
if (event.getState() == LifecycleState.CLIENT_DISCONNECTED) {
map.get(1);
map.get(2);
latch.countDown();
}
}
});
instance.shutdown();
assertOpenEventually(latch);
}
@Test(expected = ExecutionException.class, timeout = 120000)
public void testGithubIssue3557() throws Exception {
HazelcastInstance hz = hazelcastFactory.newHazelcastInstance();
HazelcastInstance client = hazelcastFactory.newHazelcastClient();
UnDeserializable unDeserializable = new UnDeserializable(1);
IExecutorService executorService = client.getExecutorService("default");
Issue2509Runnable task = new Issue2509Runnable(unDeserializable);
Future<?> future = executorService.submitToMember(task, hz.getCluster().getLocalMember());
future.get();
}
// Callable used by testGithubIssue3557: its payload is written with
// writeObject(), and UnDeserializable cannot be reconstructed on the member
// (it has no no-arg constructor), so submitting this task should surface an
// ExecutionException rather than hang.
public static class Issue2509Runnable implements Callable<Integer>, DataSerializable {
    private UnDeserializable unDeserializable;

    // No-arg constructor required for DataSerializable deserialization.
    public Issue2509Runnable() {
    }

    public Issue2509Runnable(UnDeserializable unDeserializable) {
        this.unDeserializable = unDeserializable;
    }

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        out.writeObject(unDeserializable);
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        unDeserializable = in.readObject();
    }

    @Override
    public Integer call() {
        return unDeserializable.foo;
    }
}
// DataSerializable that deliberately lacks a no-arg constructor, so the
// member cannot instantiate it during deserialization — the failure mode
// exercised by testGithubIssue3557.
public static class UnDeserializable implements DataSerializable {
    private int foo;

    public UnDeserializable(int foo) {
        this.foo = foo;
    }

    @Override
    public void writeData(ObjectDataOutput out) throws IOException {
        out.writeInt(foo);
    }

    @Override
    public void readData(ObjectDataInput in) throws IOException {
        foo = in.readInt();
    }
}
@Test
public void testNearCache_shutdownClient() {
final ClientConfig clientConfig = new ClientConfig();
NearCacheConfig invalidateConfig = new NearCacheConfig();
final String mapName = randomMapName();
invalidateConfig.setName(mapName);
invalidateConfig.setInvalidateOnChange(true);
clientConfig.addNearCacheConfig(invalidateConfig);
hazelcastFactory.newHazelcastInstance();
final HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
final IMap<Integer, Integer> map = client.getMap(mapName);
map.get(1);
//test should finish without throwing any exception.
client.shutdown();
}
@Test
public void testClientReconnect_thenCheckRequestsAreRetriedWithoutException() {
final HazelcastInstance hazelcastInstance = hazelcastFactory.newHazelcastInstance();
final CountDownLatch clientStartedDoingRequests = new CountDownLatch(1);
new Thread(new Runnable() {
@Override
public void run() {
try {
clientStartedDoingRequests.await();
} catch (InterruptedException ignored) {
}
hazelcastInstance.shutdown();
hazelcastFactory.newHazelcastInstance();
}
}).start();
ClientConfig clientConfig = new ClientConfig();
//Retry all requests
clientConfig.getNetworkConfig().setRedoOperation(true);
//retry to connect to cluster forever(never shutdown the client)
clientConfig.getNetworkConfig().setConnectionAttemptLimit(Integer.MAX_VALUE);
//Retry all requests forever(until client is shutdown)
clientConfig.setProperty(ClientProperty.INVOCATION_TIMEOUT_SECONDS.getName(), String.valueOf(Integer.MAX_VALUE));
HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
IMap<Object, Object> map = client.getMap(randomMapName());
int mapSize = 1000;
for (int i = 0; i < mapSize; i++) {
if (i == mapSize / 4) {
clientStartedDoingRequests.countDown();
}
try {
map.put(i, i);
} catch (Exception e) {
assertTrue("Requests should not throw exception with this configuration. Last put key: " + i, false);
}
}
}
@Test
public void testClusterShutdown_thenCheckOperationsNotHanging() throws Exception {
HazelcastInstance hazelcastInstance = hazelcastFactory.newHazelcastInstance();
ClientConfig clientConfig = new ClientConfig();
//Retry all requests
clientConfig.getNetworkConfig().setRedoOperation(true);
//Retry all requests forever(until client is shutdown)
clientConfig.setProperty(ClientProperty.INVOCATION_TIMEOUT_SECONDS.getName(), String.valueOf(Integer.MAX_VALUE));
HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
final IMap<Object, Object> map = client.getMap(randomMapName());
final int mapSize = 1000;
final CountDownLatch clientStartedDoingRequests = new CountDownLatch(1);
int threadCount = 100;
final CountDownLatch testFinishedLatch = new CountDownLatch(threadCount);
for (int i = 0; i < threadCount; i++) {
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try {
for (int i = 0; i < mapSize; i++) {
if (i == mapSize / 4) {
clientStartedDoingRequests.countDown();
}
map.put(i, i);
}
} catch (Throwable ignored) {
} finally {
testFinishedLatch.countDown();
}
}
});
thread.start();
}
assertTrue(clientStartedDoingRequests.await(30, TimeUnit.SECONDS));
hazelcastInstance.shutdown();
assertOpenEventually("Put operations should not hang.", testFinishedLatch);
}
@Test(timeout = 120000)
public void testMemberAddedWithListeners_thenCheckOperationsNotHanging() {
hazelcastFactory.newHazelcastInstance();
ClientConfig clientConfig = new ClientConfig();
clientConfig.setProperty(ClientExecutionServiceImpl.INTERNAL_EXECUTOR_POOL_SIZE.getName(), "1");
HazelcastInstance client = hazelcastFactory.newHazelcastClient(clientConfig);
IMap map = client.getMap("map");
map.addEntryListener(mock(MapListener.class), true);
HazelcastInstance h2 = hazelcastFactory.newHazelcastInstance();
String key = generateKeyOwnedBy(h2);
map.get(key);
}
}
| |
/*
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.cas.services;
import org.jasig.cas.authentication.principal.Principal;
import org.jasig.cas.authentication.principal.Service;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
/**
*
* @author battags
* @since 3.0.0
*
*/
public class DefaultServicesManagerImplTests {
private DefaultServicesManagerImpl defaultServicesManagerImpl;
@Before
public void setUp() throws Exception {
    // Seed the in-memory registry with exactly one service so every test
    // starts from the same known baseline.
    final RegisteredServiceImpl seedService = new RegisteredServiceImpl();
    seedService.setId(2500);
    seedService.setServiceId("serviceId");
    seedService.setName("serviceName");
    seedService.setEvaluationOrder(1000);

    final List<RegisteredService> seedServices = new ArrayList<>();
    seedServices.add(seedService);

    final InMemoryServiceRegistryDaoImpl registryDao = new InMemoryServiceRegistryDaoImpl();
    registryDao.setRegisteredServices(seedServices);
    this.defaultServicesManagerImpl = new DefaultServicesManagerImpl(registryDao);
}
@Test
public void verifySaveAndGet() {
final RegisteredServiceImpl r = new RegisteredServiceImpl();
r.setId(1000);
r.setName("test");
r.setServiceId("test");
this.defaultServicesManagerImpl.save(r);
assertNotNull(this.defaultServicesManagerImpl.findServiceBy(1000));
}
@Test
public void verifyMultiServicesBySameName() {
RegisteredServiceImpl r = new RegisteredServiceImpl();
r.setId(666);
r.setName("testServiceName");
r.setServiceId("testServiceA");
this.defaultServicesManagerImpl.save(r);
r = new RegisteredServiceImpl();
r.setId(999);
r.setName("testServiceName");
r.setServiceId("testServiceB");
this.defaultServicesManagerImpl.save(r);
/** Added 2 above, plus another that is added during @Setup **/
assertEquals(3, this.defaultServicesManagerImpl.getAllServices().size());
}
@Test
public void verifySaveWithReturnedPersistedInstance() {
final RegisteredServiceImpl r = new RegisteredServiceImpl();
r.setId(1000L);
r.setName("test");
r.setServiceId("test");
final RegisteredService persistedRs = this.defaultServicesManagerImpl.save(r);
assertNotNull(persistedRs);
assertEquals(1000L, persistedRs.getId());
}
@Test
public void verifyDeleteAndGet() {
final RegisteredServiceImpl r = new RegisteredServiceImpl();
r.setId(1000);
r.setName("test");
r.setServiceId("test");
this.defaultServicesManagerImpl.save(r);
assertEquals(r, this.defaultServicesManagerImpl.findServiceBy(r.getId()));
this.defaultServicesManagerImpl.delete(r.getId());
assertNull(this.defaultServicesManagerImpl.findServiceBy(r.getId()));
}
@Test
public void verifyDeleteNotExistentService() {
assertNull(this.defaultServicesManagerImpl.delete(1500));
}
@Test
public void verifyMatchesExistingService() {
final RegisteredServiceImpl r = new RegisteredServiceImpl();
r.setId(1000);
r.setName("test");
r.setServiceId("test");
final Service service = new SimpleService("test");
final Service service2 = new SimpleService("fdfa");
this.defaultServicesManagerImpl.save(r);
assertTrue(this.defaultServicesManagerImpl.matchesExistingService(service));
assertEquals(r, this.defaultServicesManagerImpl.findServiceBy(service));
assertNull(this.defaultServicesManagerImpl.findServiceBy(service2));
}
@Test
public void verifyAllService() {
final RegisteredServiceImpl r = new RegisteredServiceImpl();
r.setId(1000);
r.setName("test");
r.setServiceId("test");
r.setEvaluationOrder(2);
this.defaultServicesManagerImpl.save(r);
assertEquals(2, this.defaultServicesManagerImpl.getAllServices().size());
assertTrue(this.defaultServicesManagerImpl.getAllServices().contains(r));
}
@Test
public void verifyRegexService() {
final RegexRegisteredService r = new RegexRegisteredService();
r.setId(10000);
r.setName("regex test");
r.setServiceId("^http://www.test.edu.+");
r.setEvaluationOrder(10000);
this.defaultServicesManagerImpl.save(r);
final SimpleService service = new SimpleService("HTTP://www.TEST.edu/param=hello");
assertEquals(r, this.defaultServicesManagerImpl.findServiceBy(service));
}
@Test
public void verifyEmptyServicesRegistry() {
final SimpleService s = new SimpleService("http://www.google.com");
for (final RegisteredService svc : defaultServicesManagerImpl.getAllServices()) {
defaultServicesManagerImpl.delete(svc.getId());
}
assertTrue(this.defaultServicesManagerImpl.getAllServices().size() == 0);
assertNull(this.defaultServicesManagerImpl.findServiceBy(s));
assertNull(this.defaultServicesManagerImpl.findServiceBy(1000));
}
@Test
public void verifyEvaluationOrderOfServices() {
final RegisteredServiceImpl r = new RegisteredServiceImpl();
r.setId(100);
r.setName("test");
r.setServiceId("test");
r.setEvaluationOrder(200);
final RegisteredServiceImpl r2 = new RegisteredServiceImpl();
r2.setId(101);
r2.setName("test");
r2.setServiceId("test");
r2.setEvaluationOrder(80);
final RegisteredServiceImpl r3 = new RegisteredServiceImpl();
r3.setId(102);
r3.setName("Sample test service");
r3.setServiceId("test");
r3.setEvaluationOrder(80);
this.defaultServicesManagerImpl.save(r);
this.defaultServicesManagerImpl.save(r3);
this.defaultServicesManagerImpl.save(r2);
final List<RegisteredService> allServices = new ArrayList<>(
this.defaultServicesManagerImpl.getAllServices());
//We expect the 3 newly added services, plus the one added in setUp()
assertEquals(4, allServices.size());
assertEquals(allServices.get(0).getId(), r3.getId());
assertEquals(allServices.get(1).getId(), r2.getId());
assertEquals(allServices.get(2).getId(), r.getId());
}
private static class SimpleService implements Service {
/**
* Comment for <code>serialVersionUID</code>.
*/
private static final long serialVersionUID = 6572142033945243669L;
private final String id;
protected SimpleService(final String id) {
this.id = id;
}
public Map<String, Object> getAttributes() {
return null;
}
public String getId() {
return this.id;
}
public void setPrincipal(final Principal principal) {
// nothing to do
}
public boolean matches(final Service service) {
return true;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.modules.session.installer;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.apache.geode.internal.ExitCode;
import org.apache.geode.modules.session.installer.args.Argument;
import org.apache.geode.modules.session.installer.args.ArgumentProcessor;
import org.apache.geode.modules.session.installer.args.ArgumentValues;
import org.apache.geode.modules.session.installer.args.UsageException;
/**
 * Command-line tool that injects the Geode session-caching servlet filter
 * into a web.xml file. Reads the file named by {@code -w}, adds a
 * {@code <filter>} and {@code <filter-mapping>} element, and writes the
 * modified document to stdout (diagnostics go to stderr).
 */
public class Installer {

  /** Fully qualified class name of the session-caching servlet filter to install. */
  private static final String GEMFIRE_FILTER_CLASS =
      "org.apache.geode.modules.session.filter.SessionCachingFilter";

  /** Parsed command-line options; assigned once by the constructor. */
  private ArgumentValues argValues;

  private static final Argument ARG_HELP =
      new Argument("-h", false).setDescription("Displays this help message.");

  private static final Argument ARG_GEMFIRE_PARAMETERS = new Argument("-p", false, "param=value")
      .setDescription("Specific parameter for inclusion into the "
          + "session filter definition as a regular " + "init-param. Can be given multiple times.");

  private static final Argument ARG_CACHE_TYPE = new Argument("-t", false, "cache-type")
      .setDescription("Type of cache. Must be one of 'peer-to-peer' or "
          + "'client-server'. Default is peer-to-peer.")
      .setDefaults("peer-to-peer");

  private static final Argument ARG_WEB_XML_FILE =
      new Argument("-w", true, "web.xml file").setDescription("The web.xml file to be modified.");

  /**
   * Class main method
   *
   * @param args Arguments passed in via the command line
   * @throws Exception in the event of any errors
   */
  public static void main(final String[] args) throws Exception {
    new Installer(args).process();
  }

  /** Logs to stderr so the rewritten web.xml on stdout stays clean. */
  private static void log(String message) {
    System.err.println(message);
  }

  /**
   * Parses the command line. On a usage error (or explicit {@code -h}),
   * prints the usage text and terminates the JVM with
   * {@code ExitCode.INSTALL_FAILURE}.
   *
   * @param args raw command-line arguments
   */
  public Installer(String[] args) {
    final ArgumentProcessor processor = new ArgumentProcessor("Installer");
    argValues = null;
    try {
      // These are ordered so as to keep the options alphabetical
      processor.addArgument(ARG_HELP);
      processor.addArgument(ARG_GEMFIRE_PARAMETERS);
      processor.addArgument(ARG_CACHE_TYPE);
      processor.addArgument(ARG_WEB_XML_FILE);

      processor.setUnknownArgumentHandler((form, params) -> {
        log("Unknown argument being ignored: " + form + " (" + params.length + " params)");
        log("Use '-h' argument to display usage");
      });
      argValues = processor.process(args);

      if (argValues.isDefined(ARG_HELP)) {
        // Help is modeled as a usage "error" so it shares the reporting path below.
        final UsageException usageException = new UsageException("Usage requested by user");
        usageException.setUsage(processor.getUsage());
        throw usageException;
      }
    } catch (UsageException ux) {
      final StringBuilder error = new StringBuilder();
      error.append("\nERROR: ");
      error.append(ux.getMessage());
      error.append("\n");
      if (ux.getUsage() != null) {
        error.append(ux.getUsage());
      }
      log(error.toString());
      ExitCode.INSTALL_FAILURE.doSystemExit();
    }
  }

  /**
   * The main entry point for processing: parses the web.xml named by -w,
   * injects the session filter, and prints the result to stdout.
   *
   * @throws Exception if any errors occur.
   */
  private void process() throws Exception {
    String argInputFile = argValues.getFirstResult(ARG_WEB_XML_FILE);
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    // try-with-resources: the original leaked the stream when processing threw.
    try (InputStream input = new FileInputStream(argInputFile)) {
      processWebXml(input, output);
    }
    // NOTE(review): toString() decodes with the platform default charset while the
    // transformer encodes with its own (typically UTF-8) — confirm these agree.
    System.out.println(output.toString());
  }

  /**
   * Parses the given web.xml stream, injects the session filter definition and
   * mapping, and serializes the modified document to {@code out}.
   *
   * @param webXml source web.xml content
   * @param out destination for the rewritten document
   * @throws Exception on parse or transform errors
   */
  public void processWebXml(final InputStream webXml, final OutputStream out) throws Exception {
    Document doc = createWebXmlDoc(webXml);
    mangleWebXml(doc);
    streamXML(doc, out);
  }

  /** Parses the stream into a DOM document. */
  private Document createWebXmlDoc(final InputStream webXml) throws Exception {
    // NOTE(review): DTD/external-entity processing is left at factory defaults (XXE
    // exposure); acceptable only if the web.xml input is always trusted — confirm.
    final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    final DocumentBuilder builder = factory.newDocumentBuilder();
    return builder.parse(webXml);
  }

  /**
   * Inserts the gemfire-session-filter {@code <filter>} element before any
   * existing filter (falling back to just after {@code <display-name>}, then to
   * the first child), and inserts the {@code <filter-mapping>} right after the
   * last {@code <filter>} element.
   *
   * @param doc parsed web.xml document, modified in place
   * @return the same document, for chaining
   */
  private Document mangleWebXml(final Document doc) {
    final Element docElement = doc.getDocumentElement();
    final NodeList nodelist = docElement.getChildNodes();
    Node firstFilter = null;
    Node displayElement = null;
    Node afterDisplayElement = null;

    // Locate the insertion anchors in a single scan over the top-level children.
    for (int i = 0; i < nodelist.getLength(); i++) {
      final Node node = nodelist.item(i);
      final String name = node.getNodeName();
      if ("display-name".equals(name)) {
        displayElement = node;
      } else {
        if ("filter".equals(name)) {
          if (firstFilter == null) {
            firstFilter = node;
          }
        }
        if (displayElement != null && afterDisplayElement == null) {
          afterDisplayElement = node;
        }
      }
    }

    Node initParam;
    final Element filter = doc.createElement("filter");
    append(doc, filter, "filter-name", "gemfire-session-filter");
    append(doc, filter, "filter-class", GEMFIRE_FILTER_CLASS);

    // Set the type of cache
    initParam = append(doc, filter, "init-param", null);
    append(doc, initParam, "param-name", "cache-type");
    append(doc, initParam, "param-value", argValues.getFirstResult(ARG_CACHE_TYPE));

    // Copy any user-supplied -p param=value pairs into init-param elements.
    if (argValues.isDefined(ARG_GEMFIRE_PARAMETERS)) {
      for (String[] val : argValues.getAllResults(ARG_GEMFIRE_PARAMETERS)) {
        String gfParam = val[0];
        int idx = gfParam.indexOf("=");
        initParam = append(doc, filter, "init-param", null);
        append(doc, initParam, "param-name", gfParam.substring(0, idx));
        append(doc, initParam, "param-value", gfParam.substring(idx + 1));
      }
    }

    // Anchor preference: first existing <filter>, then after <display-name>, then first child.
    Node first = firstFilter;
    if (first == null) {
      if (afterDisplayElement != null) {
        first = afterDisplayElement;
      }
    }
    if (first == null) {
      first = docElement.getFirstChild();
    }
    docElement.insertBefore(filter, first);

    final Element filterMapping = doc.createElement("filter-mapping");
    append(doc, filterMapping, "filter-name", "gemfire-session-filter");
    append(doc, filterMapping, "url-pattern", "/*");
    docElement.insertBefore(filterMapping, after(docElement, "filter"));
    return doc;
  }

  /**
   * Returns the sibling immediately following the LAST child named
   * {@code nodeName}, or null when no such child (or following sibling) exists.
   */
  private Node after(final Node parent, final String nodeName) {
    final NodeList nodelist = parent.getChildNodes();
    int index = -1;
    for (int i = 0; i < nodelist.getLength(); i++) {
      final Node node = nodelist.item(i);
      final String name = node.getNodeName();
      if (nodeName.equals(name)) {
        index = i; // keep scanning: we want the last occurrence
      }
    }
    if (index == -1) {
      return null;
    }
    if (nodelist.getLength() > (index + 1)) {
      return nodelist.item(index + 1);
    }
    return null;
  }

  /**
   * Creates an element named {@code element} (with optional text content) and
   * appends it to {@code parent}.
   *
   * @return the newly created child element
   */
  private Node append(final Document doc, final Node parent, final String element,
      final String value) {
    final Element child = doc.createElement(element);
    if (value != null) {
      child.setTextContent(value);
    }
    parent.appendChild(child);
    return child;
  }

  /**
   * Serializes the document to the stream with 4-space indenting, preserving
   * any DOCTYPE. Failures are reported to stderr but deliberately not
   * propagated (best-effort output).
   */
  private void streamXML(final Document doc, final OutputStream out) {
    try { // Use a Transformer for output
      final TransformerFactory tFactory = TransformerFactory.newInstance();
      final Transformer transformer = tFactory.newTransformer();
      if (doc.getDoctype() != null) {
        final String systemId = doc.getDoctype().getSystemId();
        final String publicId = doc.getDoctype().getPublicId();
        // A DOCTYPE may carry only one of the two ids, and setOutputProperty
        // rejects null values — the original crashed on e.g.
        // <!DOCTYPE web-app SYSTEM "...">. Only set the ids that are present.
        if (publicId != null) {
          transformer.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC, publicId);
        }
        if (systemId != null) {
          transformer.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM, systemId);
        }
      }
      transformer.setOutputProperty(OutputKeys.INDENT, "yes");
      transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");

      final DOMSource source = new DOMSource(doc);
      final StreamResult result = new StreamResult(out);
      transformer.transform(source, result);
    } catch (final Exception e) {
      e.printStackTrace();
    }
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql;
import com.facebook.presto.sql.planner.DependencyExtractor;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.IsNullPredicate;
import com.facebook.presto.sql.tree.LogicalBinaryExpression;
import com.facebook.presto.sql.tree.QualifiedNameReference;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import static com.facebook.presto.sql.planner.DeterminismEvaluator.deterministic;
import static com.facebook.presto.sql.tree.BooleanLiteral.FALSE_LITERAL;
import static com.facebook.presto.sql.tree.BooleanLiteral.TRUE_LITERAL;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.Iterables.filter;
/**
 * Static helpers for building, flattening, and simplifying boolean
 * {@link Expression} trees (AND/OR conjunct and disjunct manipulation).
 */
public final class ExpressionUtils
{
    private ExpressionUtils() {}

    /**
     * Flattens a tree of AND expressions into the list of its conjuncts.
     * Any non-AND expression is its own single conjunct.
     */
    public static List<Expression> extractConjuncts(Expression expression)
    {
        if (expression instanceof LogicalBinaryExpression) {
            LogicalBinaryExpression binary = (LogicalBinaryExpression) expression;
            if (binary.getType() == LogicalBinaryExpression.Type.AND) {
                ImmutableList.Builder<Expression> conjuncts = ImmutableList.builder();
                conjuncts.addAll(extractConjuncts(binary.getLeft()));
                conjuncts.addAll(extractConjuncts(binary.getRight()));
                return conjuncts.build();
            }
        }
        return ImmutableList.of(expression);
    }

    /**
     * Flattens a tree of OR expressions into the list of its disjuncts.
     * Any non-OR expression is its own single disjunct.
     */
    public static List<Expression> extractDisjuncts(Expression expression)
    {
        if (expression instanceof LogicalBinaryExpression) {
            LogicalBinaryExpression binary = (LogicalBinaryExpression) expression;
            if (binary.getType() == LogicalBinaryExpression.Type.OR) {
                ImmutableList.Builder<Expression> disjuncts = ImmutableList.builder();
                disjuncts.addAll(extractDisjuncts(binary.getLeft()));
                disjuncts.addAll(extractDisjuncts(binary.getRight()));
                return disjuncts.build();
            }
        }
        return ImmutableList.of(expression);
    }

    /** ANDs the given expressions together, left-associatively. */
    public static Expression and(Expression... expressions)
    {
        return and(Arrays.asList(expressions));
    }

    public static Expression and(Iterable<Expression> expressions)
    {
        return binaryExpression(LogicalBinaryExpression.Type.AND, expressions);
    }

    /** ORs the given expressions together, left-associatively. */
    public static Expression or(Expression... expressions)
    {
        return or(Arrays.asList(expressions));
    }

    public static Expression or(Iterable<Expression> expressions)
    {
        return binaryExpression(LogicalBinaryExpression.Type.OR, expressions);
    }

    /**
     * Folds the (non-empty) expressions into a single left-deep binary
     * expression of the given type.
     *
     * @throws IllegalArgumentException if {@code expressions} is empty
     */
    public static Expression binaryExpression(LogicalBinaryExpression.Type type, Iterable<Expression> expressions)
    {
        Preconditions.checkNotNull(type, "type is null");
        Preconditions.checkNotNull(expressions, "expressions is null");
        Preconditions.checkArgument(!Iterables.isEmpty(expressions), "expressions is empty");

        Iterator<Expression> remaining = expressions.iterator();
        Expression combined = remaining.next();
        while (remaining.hasNext()) {
            combined = new LogicalBinaryExpression(type, combined, remaining.next());
        }
        return combined;
    }

    public static Expression combineConjuncts(Expression... expressions)
    {
        return combineConjuncts(Arrays.asList(expressions));
    }

    /** ANDs the expressions together, defaulting to TRUE when nothing remains. */
    public static Expression combineConjuncts(Iterable<Expression> expressions)
    {
        return combineConjunctsWithDefault(expressions, TRUE_LITERAL);
    }

    /**
     * Flattens nested ANDs, drops TRUE literals (the identity of AND),
     * de-dupes deterministic conjuncts, and ANDs whatever is left; returns
     * {@code emptyDefault} when nothing remains.
     */
    public static Expression combineConjunctsWithDefault(Iterable<Expression> expressions, Expression emptyDefault)
    {
        Preconditions.checkNotNull(expressions, "expressions is null");

        // Flatten all the expressions into their component conjuncts
        Iterable<Expression> conjuncts = Iterables.concat(Iterables.transform(expressions, new Function<Expression, Iterable<Expression>>()
        {
            @Override
            public Iterable<Expression> apply(Expression input)
            {
                return extractConjuncts(input);
            }
        }));

        // Strip out all true literal conjuncts
        Iterable<Expression> significant = Iterables.filter(conjuncts, not(Predicates.<Expression>equalTo(TRUE_LITERAL)));
        Iterable<Expression> deduped = removeDuplicates(significant);
        return Iterables.isEmpty(deduped) ? emptyDefault : and(deduped);
    }

    public static Expression combineDisjuncts(Expression... expressions)
    {
        return combineDisjuncts(Arrays.asList(expressions));
    }

    /** ORs the expressions together, defaulting to FALSE when nothing remains. */
    public static Expression combineDisjuncts(Iterable<Expression> expressions)
    {
        return combineDisjunctsWithDefault(expressions, FALSE_LITERAL);
    }

    /**
     * Flattens nested ORs, drops FALSE literals (the identity of OR),
     * de-dupes deterministic disjuncts, and ORs whatever is left; returns
     * {@code emptyDefault} when nothing remains.
     */
    public static Expression combineDisjunctsWithDefault(Iterable<Expression> expressions, Expression emptyDefault)
    {
        Preconditions.checkNotNull(expressions, "expressions is null");

        // Flatten all the expressions into their component disjuncts
        Iterable<Expression> disjuncts = Iterables.concat(Iterables.transform(expressions, new Function<Expression, Iterable<Expression>>()
        {
            @Override
            public Iterable<Expression> apply(Expression input)
            {
                return extractDisjuncts(input);
            }
        }));

        // Strip out all false literal disjuncts
        Iterable<Expression> significant = Iterables.filter(disjuncts, not(Predicates.<Expression>equalTo(FALSE_LITERAL)));
        Iterable<Expression> deduped = removeDuplicates(significant);
        return Iterables.isEmpty(deduped) ? emptyDefault : or(deduped);
    }

    /** Function view of the Symbol-to-QualifiedNameReference conversion. */
    public static Function<Symbol, QualifiedNameReference> symbolToQualifiedNameReference()
    {
        return new Function<Symbol, QualifiedNameReference>()
        {
            @Override
            public QualifiedNameReference apply(Symbol input)
            {
                return new QualifiedNameReference(input.toQualifiedName());
            }
        };
    }

    /** Drops every non-deterministic conjunct from the expression. */
    public static Expression stripNonDeterministicConjuncts(Expression expression)
    {
        return combineConjuncts(filter(extractConjuncts(expression), deterministic()));
    }

    /**
     * Returns a function rewriting {@code expr} as
     * {@code expr OR (s1 IS NULL AND s2 IS NULL ...)} for every symbol of
     * {@code expr} accepted by {@code nullSymbolScope}; expressions referencing
     * no in-scope symbol pass through unchanged.
     */
    public static Function<Expression, Expression> expressionOrNullSymbols(final Predicate<Symbol> nullSymbolScope)
    {
        return new Function<Expression, Expression>()
        {
            @Override
            public Expression apply(Expression expression)
            {
                Iterable<Symbol> scopedSymbols = filter(DependencyExtractor.extractUnique(expression), nullSymbolScope);
                if (Iterables.isEmpty(scopedSymbols)) {
                    return expression;
                }

                ImmutableList.Builder<Expression> isNullChecks = ImmutableList.builder();
                for (Symbol symbol : scopedSymbols) {
                    isNullChecks.add(new IsNullPredicate(new QualifiedNameReference(symbol.toQualifiedName())));
                }
                return or(expression, and(isNullChecks.build()));
            }
        };
    }

    /**
     * De-dupes the deterministic predicates while preserving every
     * non-deterministic one; non-deterministic predicates come first.
     */
    private static Iterable<Expression> removeDuplicates(Iterable<Expression> expressions)
    {
        // Capture all non-deterministic predicates
        Iterable<Expression> nonDeterministic = Iterables.filter(expressions, not(deterministic()));

        // Capture and de-dupe all deterministic predicates
        Iterable<Expression> dedupedDeterministic = ImmutableSet.copyOf(Iterables.filter(expressions, deterministic()));

        return Iterables.concat(nonDeterministic, dedupedDeterministic);
    }
}
| |
/*
This file is licensed to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.xmlunit.matchers;
import static org.hamcrest.CoreMatchers.both;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.isEmptyString;
import static org.junit.Assert.assertThat;
import static org.xmlunit.TestResources.TEST_RESOURCE_DIR;
import static org.xmlunit.matchers.CompareMatcher.isIdenticalTo;
import static org.xmlunit.matchers.CompareMatcher.isSimilarTo;
import org.xmlunit.XMLUnitException;
import org.xmlunit.builder.Input;
import org.xmlunit.builder.Input.Builder;
import org.xmlunit.diff.Comparison;
import org.xmlunit.diff.Comparison.Detail;
import org.xmlunit.diff.ComparisonFormatter;
import org.xmlunit.diff.ComparisonListener;
import org.xmlunit.diff.ComparisonResult;
import org.xmlunit.diff.ComparisonType;
import org.xmlunit.diff.DefaultNodeMatcher;
import org.xmlunit.diff.DifferenceEvaluator;
import org.xmlunit.diff.ElementSelectors;
import org.xmlunit.util.Predicate;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matchers;
import org.hamcrest.StringDescription;
import org.junit.Assert;
import org.junit.ComparisonFailure;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.Mockito;
import org.w3c.dom.Attr;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.stream.StreamResult;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
public class CompareMatcherTest {
    // JUnit rule capturing the exception type/message each test expects.
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    /** Set this to true for manual review of the error messages, to see how they really look in your IDE. */
    private final boolean letExceptionTestFail = false;
    /** An attribute value difference must produce a readable message naming value, XPath, and source snippet. */
    @Test
    public void testIsIdenticalTo_withAssertionErrorForAttributes_throwsReadableMessage() {
        // Expected Exception
        expect(AssertionError.class);
        expectMessage("Expected attribute value 'xy' but was 'xyz'");
        expectMessage("at /Element[1]/@attr2");
        expectMessage("attr2=\"xyz\"");

        // run test:
        assertThat("<Element attr2=\"xyz\" attr1=\"12\"/>", isIdenticalTo("<Element attr1=\"12\" attr2=\"xy\"/>"));
    }
    /** Reordered elements fail an *identical* comparison even when a name-and-text matcher pairs them up. */
    @Test
    public void testIsIdenticalTo_withAssertionErrorForElementOrder_throwsReadableMessage() {
        // Expected Exception
        expect(AssertionError.class);
        expectMessage("Expected child nodelist sequence '0' but was '1'");
        expectMessage("comparing <b...> at /a[1]/b[1] to <b...> at /a[1]/b[1]");

        // run test:
        assertThat("<a><c/><b/></a>", isIdenticalTo("<a><b/><c/></a>")
                .withNodeMatcher(new DefaultNodeMatcher(ElementSelectors.byNameAndText)));
    }
    /** Whitespace-only text nodes count as children, so the comparison fails with both documents shown. */
    @Test
    public void testIsIdenticalTo_withAssertionErrorForWhitespaces_throwsReadableMessage() {
        // Expected Exception
        expect(AssertionError.class);
        expectMessage("Expected child nodelist length '1' but was '3'");
        expectMessage("<a>" + getLineSeparator() + "  <b/>" + getLineSeparator() + "</a>");
        expectMessage("<a><b/></a>");

        // run test:
        assertThat("<a>\n  <b/>\n</a>", isIdenticalTo("<a><b/></a>"));
    }
    /** throwComparisonFailure() reports via JUnit's ComparisonFailure with expected/actual markers. */
    @Test
    public void testIsIdenticalTo_withComparisonFailureForWhitespaces_throwsReadableMessage() {
        // Expected Exception
        expect(ComparisonFailure.class);
        expectMessage("Expected child nodelist length '1' but was '3'");
        expectMessage("expected:<<a>[<b/>]</a>> but was:<<a>[" + getLineSeparator() + "  <b/>" + getLineSeparator() + "]</a>>");

        // run test:
        assertThat("<a>\n  <b/>\n</a>", isIdenticalTo("<a><b/></a>").throwComparisonFailure());
    }
    /** ignoreWhitespace() makes documents differing only in whitespace identical. */
    @Test
    public void testIsIdenticalTo_withIgnoreWhitespaces_shouldSucceed() {
        // run test:
        assertThat("<a>\n  <b/>\n</a>", isIdenticalTo("<a><b/></a>").ignoreWhitespace());
    }
    /** ignoreElementContentWhitespace() drops whitespace-only text nodes between elements. */
    @Test
    public void testIsIdenticalTo_withIgnoreElementContentWhitespaces_shouldSucceed() {
        // run test:
        assertThat("<a>\n  <b/>\n</a>", isIdenticalTo("<a><b/></a>").ignoreElementContentWhitespace());
    }
    /** ignoreComments() removes comment nodes before comparing. */
    @Test
    public void testIsIdenticalTo_withIgnoreComments_shouldSucceed() {
        // run test:
        assertThat("<a><!-- test --></a>", isIdenticalTo("<a></a>").ignoreComments());
    }
    /** Comment removal also works when pinned to XSLT version 1.0. */
    @Test
    public void testIsIdenticalTo_withIgnoreComments1_0_shouldSucceed() {
        // run test:
        assertThat("<a><!-- test --></a>", isIdenticalTo("<a></a>").ignoreCommentsUsingXSLTVersion("1.0"));
    }
    /** Comment removal also works when pinned to XSLT version 2.0. */
    @Test
    public void testIsIdenticalTo_withIgnoreComments2_0_shouldSucceed() {
        // run test:
        assertThat("<a><!-- test --></a>", isIdenticalTo("<a></a>").ignoreCommentsUsingXSLTVersion("2.0"));
    }
    /** normalizeWhitespace() collapses internal whitespace runs so the text values match. */
    @Test
    public void testIsIdenticalTo_withNormalizeWhitespace_shouldSucceed() {
        // run test:
        assertThat("<a>\n  <b>\n  Test\n  Node\n  </b>\n</a>", isIdenticalTo("<a><b>Test Node</b></a>")
                .normalizeWhitespace());
    }
    /** normalizeWhitespace() collapses — but does not delete — whitespace, so 'Test Node' != 'TestNode'. */
    @Test
    public void testIsIdenticalTo_withNormalizeWhitespace_shouldFail() {
        expect(AssertionError.class);
        expectMessage("Expected text value 'TestNode' but was 'Test Node'");

        // run test:
        assertThat("<a>\n  <b>\n  Test\n  Node\n  </b>\n</a>", isIdenticalTo("<a><b>TestNode</b></a>")
                .normalizeWhitespace());
    }
    /** A *similar* comparison tolerates reordered elements when a name-and-text matcher pairs them. */
    @Test
    public void testIsSimilarTo_withSwappedElements_shouldSucceed() {
        // run test:
        assertThat("<a><c/><b/></a>", isSimilarTo("<a><b/><c/></a>")
                .withNodeMatcher(new DefaultNodeMatcher(ElementSelectors.byNameAndText)));
    }
    /** File-based comparison failures identify the source file in the message. */
    @Test
    public void testIsSimilarTo_withFileInput() {
        expect(AssertionError.class);
        expectMessage("In Source");
        expectMessage("test2.xml");

        // run test:
        assertThat(new File(TEST_RESOURCE_DIR, "test1.xml"),
                isSimilarTo(new File(TEST_RESOURCE_DIR, "test2.xml")));
    }
    /** A custom DifferenceEvaluator can downgrade a chosen attribute difference to EQUAL. */
    @Test
    public void testIsSimilarTo_withDifferenceEvaluator_shouldSucceed() {
        // prepare testData
        final String control = "<a><b attr=\"abc\"></b></a>";
        final String test = "<a><b attr=\"xyz\"></b></a>";

        // run test
        assertThat(test, isSimilarTo(control).withDifferenceEvaluator(new IgnoreAttributeDifferenceEvaluator("attr")));
    }
    /** A custom ComparisonFormatter's description/detail markers appear in the failure message. */
    @Test
    public void testIsSimilarTo_withComparisonFormatter_shouldFailWithCustomMessage() {
        // prepare testData
        expect(AssertionError.class);
        expectMessage("DESCRIPTION");
        expectMessage("DETAIL-abc");
        expectMessage("DETAIL-xyz");

        final String control = "<a><b attr=\"abc\"></b></a>";
        final String test = "<a><b attr=\"xyz\"></b></a>";

        // run test
        assertThat(test, isSimilarTo(control).withComparisonFormatter(new DummyComparisonFormatter()));
    }
    /** Custom formatter output also flows into the ComparisonFailure expected/actual brackets. */
    @Test
    public void testIsSimilarTo_withComparisonFormatterAndThrowComparisonFailure_shouldFailWithCustomMessage() {
        // prepare testData
        expect(ComparisonFailure.class);
        expectMessage("DESCRIPTION");
        expectMessage("DETAIL-[abc]");
        expectMessage("DETAIL-[xyz]");

        final String control = "<a><b attr=\"abc\"></b></a>";
        final String test = "<a><b attr=\"xyz\"></b></a>";

        // run test
        assertThat(test, isSimilarTo(control).withComparisonFormatter(new DummyComparisonFormatter()).throwComparisonFailure());
    }
    /** Comparison listeners receive DIFFERENT, SIMILAR, and EQUAL callbacks during the comparison. */
    @Test
    public void testIsSimilarTo_withComparisonListener_shouldCollectChanges() {
        CounterComparisonListener comparisonListener = new CounterComparisonListener();
        String controlXml = "<a><b>Test Value</b><c>ABC</c></a>";
        String testXml = "<a><b><![CDATA[Test Value]]></b><c>XYZ</c></a>";

        // run test:
        // No Assert.fail() here — if the AssertionError were NOT thrown, the counter
        // assertions below would still fail (differents would be 0).
        try {
            assertThat(testXml, isSimilarTo(controlXml).withComparisonListeners(comparisonListener));
        } catch (AssertionError e) {
            assertThat(e.getMessage(), containsString("Expected text value 'ABC' but was 'XYZ'"));
        }

        // validate result
        assertThat(comparisonListener.differents, is(1));
        assertThat(comparisonListener.similars, is(1));
        assertThat(comparisonListener.equals, is(Matchers.greaterThan(10)));
    }
    /** Difference listeners are invoked only for DIFFERENT/SIMILAR outcomes, never for EQUAL. */
    @Test
    public void testIsSimilarTo_withDifferenceListener_shouldCollectChanges() {
        CounterComparisonListener comparisonListener = new CounterComparisonListener();
        String controlXml = "<a><b>Test Value</b><c>ABC</c></a>";
        String testXml = "<a><b><![CDATA[Test Value]]></b><c>XYZ</c></a>";

        // run test:
        try {
            assertThat(testXml, isSimilarTo(controlXml).withDifferenceListeners(comparisonListener));
            Assert.fail("Should throw AssertionError");
        } catch (AssertionError e) {
            assertThat(e.getMessage(), containsString("Expected text value 'ABC' but was 'XYZ'"));
        }

        // validate result
        assertThat(comparisonListener.differents, is(1));
        assertThat(comparisonListener.similars, is(1));
        assertThat(comparisonListener.equals, is(0));
    }
    /**
     * A wrapping matcher writes the failing test input to a file.
     * NOTE(review): relies on TestCompareMatcherWrapper and getTestResultFolder(),
     * which are defined elsewhere in this class/file — presumably the wrapper
     * persists the test document on failure; verify there.
     */
    @Test
    public void testCompareMatcherWrapper_shouldWriteFailedTestInput() {
        final String control = "<a><b attr=\"abc\"></b></a>";
        final String test = "<a><b attr=\"xyz\"></b></a>";

        // run test
        final String fileName = "testCompareMatcherWrapper.xml";
        try {
            assertThat(test, TestCompareMatcherWrapper.isSimilarTo(control).withTestFileName(fileName));
            Assert.fail("Should throw AssertionError");
        } catch (AssertionError e) {
            assertThat(e.getMessage(), containsString("Expected attribute value 'abc' but was 'xyz'"));
        }

        // validate that the written File contains the right data:
        assertThat(new File(getTestResultFolder(), fileName), isSimilarTo(test));
    }
    /** An attribute filter excludes attr1 from the comparison, turning a failure into a pass. */
    @Test
    public void testDiff_withAttributeDifferences() {
        final String control = "<a><b attr1=\"abc\" attr2=\"def\"></b></a>";
        final String test = "<a><b attr1=\"xyz\" attr2=\"def\"></b></a>";

        // Without the filter the attr1 difference is reported.
        try {
            assertThat(test, isSimilarTo(control));
            Assert.fail("Should throw AssertionError");
        } catch (AssertionError e) {
            assertThat(e.getMessage(), containsString("Expected attribute value 'abc' but was 'xyz'"));
        }

        // With the filter attr1 is ignored, so the documents compare as similar.
        assertThat(test, isSimilarTo(control)
                .withAttributeFilter(new Predicate<Attr>() {
                    @Override
                    public boolean test(Attr a) {
                        return !"attr1".equals(a.getName());
                    }
                }));
    }
@Test
public void testDiff_withExtraNodes() {
String control = "<a><b></b><c/></a>";
String test = "<a><b></b><c/><d/></a>";
try {
assertThat(test, isSimilarTo(control));
} catch (AssertionError e) {
assertThat(e.getMessage(), containsString("Expected child nodelist length '2' but was '3'"));
}
assertThat(test,
isSimilarTo(control)
.withNodeFilter(new Predicate<Node>() {
@Override
public boolean test(Node n) {
return !"d".equals(n.getNodeName());
}
}));
}
    /**
     * Really only tests there is no NPE.
     * @see "https://github.com/xmlunit/xmlunit/issues/81"
     */
    @Test(expected = AssertionError.class)
    public void canBeCombinedWithFailingMatcher() {
        // The leading isEmptyString() fails first; CompareMatcher must survive describeTo.
        assertThat("not empty", both(isEmptyString()).and(isIdenticalTo("")));
    }
    /** CompareMatcher composes cleanly with other Hamcrest matchers when both pass. */
    @Test
    public void canBeCombinedWithPassingMatcher() {
        assertThat("<a><c/><b/></a>", both(not(isEmptyString()))
                .and(isSimilarTo("<a><b/><c/></a>")
                        .withNodeMatcher(new DefaultNodeMatcher(ElementSelectors.byNameAndText))));
    }
@Test
public void usesDocumentBuilderFactory() throws Exception {
DocumentBuilderFactory dFac = Mockito.mock(DocumentBuilderFactory.class);
DocumentBuilder b = Mockito.mock(DocumentBuilder.class);
Mockito.when(dFac.newDocumentBuilder()).thenReturn(b);
Mockito.doThrow(new IOException())
.when(b).parse(Mockito.any(InputSource.class));
String control = "<a><b></b><c/></a>";
String test = "<a><b></b><c/><d/></a>";
try {
assertThat("<a><b></b><c/></a>",
not(isSimilarTo("<a><b></b><c/><d/></a>")
.withDocumentBuilderFactory(dFac)));
Assert.fail("Expected exception");
} catch (XMLUnitException ex) {
Mockito.verify(b).parse(Mockito.any(InputSource.class));
}
}
    /** Setting a ComparisonController on the matcher is unsupported and must be rejected. */
    @Test(expected = UnsupportedOperationException.class)
    public void cantSetComparisonController() {
        isSimilarTo("<foo/>").withComparisonController(null);
    }
    /**
     * describeTo must not blow up when the comparison found no differences.
     * @see "https://github.com/xmlunit/xmlunit/issues/107"
     */
    @Test
    public void describeToWorksWhenThereAreNoDifferences() throws Exception {
        CompareMatcher m = isIdenticalTo("<foo/>");
        // A successful match leaves no difference to describe.
        Assert.assertTrue(m.matches("<foo/>"));
        StringDescription sd = new StringDescription();
        m.describeTo(sd);
        assertThat(sd.toString(), is(" is equal to the control document"));
    }
    /** Wrapping the matcher in not() must still yield a readable failure message. */
    @Test
    public void createsAUsefulMessageWhenFailingCombinedWithNot() throws Exception {
        expect(AssertionError.class);
        expectMessage("not is similar to the control document");
        // Identical documents => isSimilarTo passes => not(...) fails.
        assertThat("<a><b></b><c/></a>", not(isSimilarTo("<a><b></b><c/></a>")));
    }
    /**
     * Registers the expected exception type on the ExpectedException rule,
     * unless the test class is configured to let exception tests fail through.
     *
     * @param type exception type the current test is expected to throw
     */
    public void expect(Class<? extends Throwable> type) {
        if (letExceptionTestFail) return;
        thrown.expect(type);
    }
    /**
     * Registers an expected exception-message substring on the
     * ExpectedException rule, unless exception tests are set to fail through.
     *
     * @param substring text the thrown exception's message must contain
     */
    public void expectMessage(String substring) {
        if (letExceptionTestFail) return;
        thrown.expectMessage(substring);
    }
private String getLineSeparator() {
return System.getProperty("line.separator");
}
    /**
     * ComparisonFormatter stub producing fixed, easily recognizable strings
     * so tests can assert that a custom formatter is actually consulted.
     */
    private final class DummyComparisonFormatter implements ComparisonFormatter {

        @Override
        public String getDetails(Detail details, ComparisonType type, boolean formatXml) {
            // Echo the value so assertions can identify which detail was formatted.
            return "DETAIL-" + details.getValue();
        }

        @Override
        public String getDescription(Comparison difference) {
            return "DESCRIPTION";
        }
    }
private final class IgnoreAttributeDifferenceEvaluator implements DifferenceEvaluator {
private final String attributeName;
public IgnoreAttributeDifferenceEvaluator(String attributeName) {
this.attributeName = attributeName;
}
@Override
public ComparisonResult evaluate(Comparison comparison, ComparisonResult outcome) {
final Node controlNode = comparison.getControlDetails().getTarget();
if (controlNode instanceof Attr) {
Attr attr = (Attr) controlNode;
if (attr.getName().equals(attributeName)) {
return ComparisonResult.EQUAL;
}
}
return outcome;
}
}
private final class CounterComparisonListener implements ComparisonListener {
private int equals;
private int similars;
private int differents;
@Override
public void comparisonPerformed(Comparison comparison, ComparisonResult outcome) {
switch (outcome) {
case EQUAL:
equals++;
break;
case SIMILAR:
similars++;
break;
case DIFFERENT:
differents++;
break;
default:
break;
}
}
}
    /**
     * Example Wrapper for {@link CompareMatcher}.
     * <p>
     * This example will write the Test-Input into the Files System.<br>
     * This could be useful for manual reviews or as template for a control-File.
     */
    private static class TestCompareMatcherWrapper extends BaseMatcher<Object> {

        private final CompareMatcher compareMatcher;

        // Optional: when set, the test input is first written to this file
        // under the test-result folder and the file is matched instead.
        private String fileName;

        protected TestCompareMatcherWrapper(CompareMatcher compareMatcher) {
            this.compareMatcher = compareMatcher;
        }

        /** Enables writing the test input to the given file name (fluent setter). */
        public TestCompareMatcherWrapper withTestFileName(String fileName) {
            this.fileName = fileName;
            return this;
        }

        /** Factory mirroring {@link CompareMatcher#isSimilarTo(Object)}. */
        public static TestCompareMatcherWrapper isSimilarTo(final Object control) {
            return new TestCompareMatcherWrapper(CompareMatcher.isSimilarTo(control));
        }

        @Override
        public boolean matches(Object testItem) {
            if (fileName == null) {
                return compareMatcher.matches(testItem);
            }
            // do something with your Test-Source
            final Builder builder = Input.from(testItem);
            // e.g.: write the testItem into the FilesSystem. So it can be used as template for a new control-File.
            final File testFile = writeIntoTestResultFolder(builder.build());
            return compareMatcher.matches(testFile);
        }

        // Serializes the source to <testResultFolder>/<fileName> and returns the file.
        private File writeIntoTestResultFolder(final Source source) throws TransformerFactoryConfigurationError {
            File file = new File(getTestResultFolder(), this.fileName);
            try {
                // NOTE(review): createNewFile() is likely redundant here —
                // FileOutputStream creates the file itself; confirm before removing.
                if (!file.exists()) {
                    file.createNewFile();
                }
                try (FileOutputStream fop = new FileOutputStream(file)) {
                    marshal(source, fop);
                    fop.flush();
                }
            } catch (RuntimeException e) {
                throw e;
            } catch (Exception e) {
                // wrap checked exceptions so matches() keeps its simple signature
                throw new RuntimeException(e);
            }
            return file;
        }

        // Copies the Source to the output stream via an identity XSLT transform.
        private void marshal(final Source source, FileOutputStream fop)
            throws TransformerFactoryConfigurationError, TransformerConfigurationException, TransformerException {
            StreamResult r = new StreamResult(fop);
            TransformerFactory fac = TransformerFactory.newInstance();
            Transformer t = fac.newTransformer();
            t.transform(source, r);
        }

        @Override
        public void describeTo(Description description) {
            compareMatcher.describeTo(description);
        }

        @Override
        public void describeMismatch(Object item, Description description) {
            compareMatcher.describeMismatch(item, description);
        }
    }
private static File getTestResultFolder() {
final File folder = new File(//
String.format("./target/testResults/%s", CompareMatcherTest.class.getSimpleName()));
if (!folder.exists()) {
folder.mkdirs();
}
return folder;
}
}
| |
package com.seandbeach.stockticker;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.widget.ShareActionProvider;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import java.text.DecimalFormat;
/**
 * Fragment showing the full detail view for a single stock quote, loaded via
 * a CursorLoader from the content provider, with a share action for the
 * currently displayed quote.
 */
public class DetailActivityFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor> {

    public static final String YAHOO_FINANCE_URL_BASE = "http://finance.yahoo.com/q?s=";
    public static final String GOOGLE_PLAY_BITLINK = "http://bit.ly/1GtB1Ns";

    private static final String LOG_TAG = DetailActivityFragment.class.getSimpleName();

    private static final int DETAIL_LOADER = 0;

    /** Key under which the detail URI is passed in the fragment arguments. */
    static final String DETAIL_URI = "URI";

    /** Share text; built in onLoadFinished() once the quote data is available. */
    private String mShareString;
    private ShareActionProvider mShareActionProvider;

    /** URI of the stock row to display, or null if no arguments were supplied. */
    private Uri mUri;

    private TextView mSymbolView;
    private TextView mNameView;
    private TextView mPriceView;
    private TextView mChangeView;
    private TextView mChangePercentView;
    private TextView mOpenView;
    private TextView mPreviousCloseView;
    private TextView mDayLowView;
    private TextView mDayHighView;
    private TextView mYearLowView;
    private TextView mYearLowChangeView;
    private TextView mYearLowChangePercentView;
    private TextView mYearHighView;
    private TextView mYearHighChangeView;
    private TextView mYearHighChangePercentView;

    public DetailActivityFragment() {
        // Required so onCreateOptionsMenu() runs and the share action is wired up.
        setHasOptionsMenu(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        Bundle arguments = getArguments();
        if (arguments != null) {
            mUri = arguments.getParcelable(DETAIL_URI);
        }

        View rootView = inflater.inflate(R.layout.fragment_detail, container, false);
        mSymbolView = (TextView) rootView.findViewById(R.id.detail_symbol);
        mNameView = (TextView) rootView.findViewById(R.id.detail_name);
        mPriceView = (TextView) rootView.findViewById(R.id.detail_price);
        mChangeView = (TextView) rootView.findViewById(R.id.detail_change);
        mChangePercentView = (TextView) rootView.findViewById(R.id.detail_change_percent);
        mOpenView = (TextView) rootView.findViewById(R.id.detail_open);
        mPreviousCloseView = (TextView) rootView.findViewById(R.id.detail_previous_close);
        mDayLowView = (TextView) rootView.findViewById(R.id.detail_day_low);
        mDayHighView = (TextView) rootView.findViewById(R.id.detail_day_high);
        mYearLowView = (TextView) rootView.findViewById(R.id.detail_year_low);
        mYearLowChangeView = (TextView) rootView.findViewById(R.id.detail_year_low_change);
        mYearLowChangePercentView = (TextView) rootView.findViewById(R.id.detail_year_low_change_percent);
        mYearHighView = (TextView) rootView.findViewById(R.id.detail_year_high);
        mYearHighChangeView = (TextView) rootView.findViewById(R.id.detail_year_high_change);
        mYearHighChangePercentView = (TextView) rootView.findViewById(R.id.detail_year_high_change_percent);
        return rootView;
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // Inflate the menu; this adds items to the action bar if it is present.
        inflater.inflate(R.menu.menu_detail_fragment, menu);
        // Retrieve the share menu item
        MenuItem menuItem = menu.findItem(R.id.action_share);
        // Get the provider and hold onto it to set/change the share intent.
        mShareActionProvider = (ShareActionProvider) MenuItemCompat.getActionProvider(menuItem);
        // Attach an intent to this ShareActionProvider. You can update this at any time,
        // like when the user selects a new piece of data they might like to share.
        if (mShareString != null) {
            mShareActionProvider.setShareIntent(createShareQuoteIntent());
        } else {
            // BUG FIX: previous message claimed the provider was null; it is the
            // share text that is not ready until the loader delivers data.
            Log.d(LOG_TAG, "Share text not ready yet; intent will be set in onLoadFinished");
        }
    }

    /** Builds a plain-text SEND intent carrying the current share string. */
    private Intent createShareQuoteIntent() {
        Intent shareIntent = new Intent(Intent.ACTION_SEND);
        // NOTE(review): FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET is deprecated on
        // API 21+; kept as-is for behavior parity with the original code.
        shareIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
        shareIntent.setType("text/plain");
        shareIntent.putExtra(Intent.EXTRA_TEXT, mShareString);
        return shareIntent;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        getLoaderManager().initLoader(DETAIL_LOADER, null, this);
        super.onActivityCreated(savedInstanceState);
    }

    @Override
    public Loader<Cursor> onCreateLoader(int i, Bundle bundle) {
        if (mUri != null) {
            // The query part of the URI carries the selection for the provider.
            return new CursorLoader(getActivity(),
                    mUri,
                    StockQuoteFragment.STOCK_COLUMNS,
                    mUri.getQuery(),
                    null,
                    null);
        }
        // No URI was supplied -> nothing to load.
        return null;
    }

    @Override
    public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor) {
        if (cursor == null || !cursor.moveToFirst()) {
            return;
        }

        String name = cursor.getString(StockQuoteFragment.COL_STOCK_NAME);
        // The quote feed returns the literal string "null" when the name is unknown.
        if (name != null && !name.isEmpty() && !name.equals("null")) {
            mNameView.setText(name);
        } else {
            mNameView.setVisibility(View.GONE);
        }

        String symbol = cursor.getString(StockQuoteFragment.COL_STOCK_SYMBOL);
        mSymbolView.setText(symbol);

        // \u00A4 is the locale-specific currency symbol placeholder.
        String pricePattern = "\u00A4#,##0.00";
        String changePattern = "#,##0.00";
        String changePercentPattern = "#,##0.00%";
        DecimalFormat fmt = new DecimalFormat(pricePattern);

        // Currency-formatted absolute values.
        double price = cursor.getDouble(StockQuoteFragment.COL_STOCK_PRICE);
        mPriceView.setText(fmt.format(price));
        double previousClose = cursor.getDouble(StockQuoteFragment.COL_STOCK_PREVIOUS_CLOSE);
        mPreviousCloseView.setText(fmt.format(previousClose));
        double open = cursor.getDouble(StockQuoteFragment.COL_STOCK_OPEN);
        mOpenView.setText(fmt.format(open));
        double low = cursor.getDouble(StockQuoteFragment.COL_STOCK_DAY_LOW);
        mDayLowView.setText(fmt.format(low));
        double high = cursor.getDouble(StockQuoteFragment.COL_STOCK_DAY_HIGH);
        mDayHighView.setText(fmt.format(high));
        double yearLow = cursor.getDouble(StockQuoteFragment.COL_STOCK_YEAR_LOW);
        mYearLowView.setText(fmt.format(yearLow));
        double yearHigh = cursor.getDouble(StockQuoteFragment.COL_STOCK_YEAR_HIGH);
        mYearHighView.setText(fmt.format(yearHigh));

        // Plain numeric deltas.
        fmt.applyPattern(changePattern);
        double change = cursor.getDouble(StockQuoteFragment.COL_STOCK_CHANGE);
        mChangeView.setText(fmt.format(change));
        double yearLowChange = cursor.getDouble(StockQuoteFragment.COL_STOCK_YEAR_LOW_CHANGE);
        mYearLowChangeView.setText(fmt.format(yearLowChange));
        double yearHighChange = cursor.getDouble(StockQuoteFragment.COL_STOCK_YEAR_HIGH_CHANGE);
        mYearHighChangeView.setText(fmt.format(yearHighChange));

        // Percent values arrive as e.g. 1.23 (meaning 1.23%); divide by 100
        // because the '%' pattern multiplies by 100 again when formatting.
        fmt.applyPattern(changePercentPattern);
        double quoteChangePercent = cursor.getDouble(StockQuoteFragment.COL_STOCK_CHANGE_PERCENT);
        mChangePercentView.setText("(" + fmt.format(quoteChangePercent / 100) + ")");
        double yearLowChangePercent = cursor.getDouble(StockQuoteFragment.COL_STOCK_YEAR_LOW_CHANGE_PERCENT);
        mYearLowChangePercentView.setText("(" + fmt.format(yearLowChangePercent / 100) + ")");
        double yearHighChangePercent = cursor.getDouble(StockQuoteFragment.COL_STOCK_YEAR_HIGH_CHANGE_PERCENT);
        mYearHighChangePercentView.setText("(" + fmt.format(yearHighChangePercent / 100) + ")");

        // CLEANUP: the triplicated color logic is now a single helper.
        applyChangeColors(change, mChangeView, mChangePercentView);
        applyChangeColors(yearLowChange, mYearLowChangeView, mYearLowChangePercentView);
        applyChangeColors(yearHighChange, mYearHighChangeView, mYearHighChangePercentView);

        fmt.applyPattern(pricePattern);
        // NOTE(review): a change of exactly 0 currently reads as "falls";
        // confirm whether a neutral wording is wanted.
        mShareString = symbol + " stock"
                + (change > 0 ? " rises " : " falls ")
                + fmt.format(Math.abs(change))
                + "; now " + fmt.format(price) + "\n"
                + YAHOO_FINANCE_URL_BASE + symbol
                + "\nGet Stock Ticker for Android:\n" + GOOGLE_PLAY_BITLINK;

        // If onCreateOptionsMenu has already happened, update the share intent now.
        if (mShareActionProvider != null) {
            mShareActionProvider.setShareIntent(createShareQuoteIntent());
        }
    }

    /**
     * Colors a change value and its companion percent view: positive color
     * for a non-negative change, negative color otherwise.
     */
    private void applyChangeColors(double change, TextView valueView, TextView percentView) {
        int colorRes = change >= 0 ? R.color.stock_change_positive : R.color.stock_change_negative;
        int color = getResources().getColor(colorRes);
        valueView.setTextColor(color);
        percentView.setTextColor(color);
    }

    @Override
    public void onLoaderReset(Loader<Cursor> cursorLoader) { }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mf.org.apache.xerces.parsers;
import mf.org.apache.xerces.impl.Constants;
import mf.org.apache.xerces.impl.xs.XMLSchemaValidator;
import mf.org.apache.xerces.impl.xs.XSMessageFormatter;
import mf.org.apache.xerces.util.SymbolTable;
import mf.org.apache.xerces.xni.grammars.XMLGrammarPool;
import mf.org.apache.xerces.xni.parser.XMLComponentManager;
import mf.org.apache.xerces.xni.parser.XMLConfigurationException;
/**
* This is the "standard" parser configuration. It extends the DTD
* configuration with the standard set of parser components.
* The standard set of parser components include those needed
* to parse and validate with DTD's, and those needed for XML
 * Schema.
* <p>
* In addition to the features and properties recognized by the base
* parser configuration, this class recognizes these additional
* features and properties:
* <ul>
* <li>Features
* <ul>
* <li>http://apache.org/xml/features/validation/schema</li>
* <li>http://apache.org/xml/features/validation/schema-full-checking</li>
* <li>http://apache.org/xml/features/validation/schema/normalized-value</li>
* <li>http://apache.org/xml/features/validation/schema/element-default</li>
* </ul>
* <li>Properties
* <ul>
* <li>http://apache.org/xml/properties/internal/error-reporter</li>
* <li>http://apache.org/xml/properties/internal/entity-manager</li>
* <li>http://apache.org/xml/properties/internal/document-scanner</li>
* <li>http://apache.org/xml/properties/internal/dtd-scanner</li>
* <li>http://apache.org/xml/properties/internal/grammar-pool</li>
* <li>http://apache.org/xml/properties/internal/validator/dtd</li>
* <li>http://apache.org/xml/properties/internal/datatype-validator-factory</li>
* </ul>
* </ul>
*
* @author Arnaud Le Hors, IBM
* @author Andy Clark, IBM
*
* @version $Id: StandardParserConfiguration.java 805582 2009-08-18 21:13:20Z sandygao $
*/
public class StandardParserConfiguration
    extends DTDConfiguration {

    //
    // Constants
    //

    // feature identifiers

    /** Feature identifier: expose schema normalized value */
    protected static final String NORMALIZE_DATA =
        Constants.XERCES_FEATURE_PREFIX + Constants.SCHEMA_NORMALIZED_VALUE;

    /** Feature identifier: send element default value via characters() */
    protected static final String SCHEMA_ELEMENT_DEFAULT =
        Constants.XERCES_FEATURE_PREFIX + Constants.SCHEMA_ELEMENT_DEFAULT;

    /** Feature identifier: augment PSVI */
    protected static final String SCHEMA_AUGMENT_PSVI =
        Constants.XERCES_FEATURE_PREFIX + Constants.SCHEMA_AUGMENT_PSVI;

    /** feature identifier: XML Schema validation */
    protected static final String XMLSCHEMA_VALIDATION =
        Constants.XERCES_FEATURE_PREFIX + Constants.SCHEMA_VALIDATION_FEATURE;

    /** feature identifier: XML Schema validation -- full checking */
    protected static final String XMLSCHEMA_FULL_CHECKING =
        Constants.XERCES_FEATURE_PREFIX + Constants.SCHEMA_FULL_CHECKING;

    /** Feature: generate synthetic annotations */
    protected static final String GENERATE_SYNTHETIC_ANNOTATIONS =
        Constants.XERCES_FEATURE_PREFIX + Constants.GENERATE_SYNTHETIC_ANNOTATIONS_FEATURE;

    /** Feature identifier: validate annotations */
    protected static final String VALIDATE_ANNOTATIONS =
        Constants.XERCES_FEATURE_PREFIX + Constants.VALIDATE_ANNOTATIONS_FEATURE;

    /** Feature identifier: honour all schemaLocations */
    protected static final String HONOUR_ALL_SCHEMALOCATIONS =
        Constants.XERCES_FEATURE_PREFIX + Constants.HONOUR_ALL_SCHEMALOCATIONS_FEATURE;

    /** Feature identifier: namespace growth */
    protected static final String NAMESPACE_GROWTH =
        Constants.XERCES_FEATURE_PREFIX + Constants.NAMESPACE_GROWTH_FEATURE;

    /** Feature identifier: tolerate duplicates */
    protected static final String TOLERATE_DUPLICATES =
        Constants.XERCES_FEATURE_PREFIX + Constants.TOLERATE_DUPLICATES_FEATURE;

    /** Feature identifier: whether to ignore xsi:type attributes until a global element declaration is encountered */
    protected static final String IGNORE_XSI_TYPE =
        Constants.XERCES_FEATURE_PREFIX + Constants.IGNORE_XSI_TYPE_FEATURE;

    /** Feature identifier: whether to ignore ID/IDREF errors */
    protected static final String ID_IDREF_CHECKING =
        Constants.XERCES_FEATURE_PREFIX + Constants.ID_IDREF_CHECKING_FEATURE;

    /** Feature identifier: whether to ignore unparsed entity errors */
    protected static final String UNPARSED_ENTITY_CHECKING =
        Constants.XERCES_FEATURE_PREFIX + Constants.UNPARSED_ENTITY_CHECKING_FEATURE;

    /** Feature identifier: whether to ignore identity constraint errors */
    protected static final String IDENTITY_CONSTRAINT_CHECKING =
        Constants.XERCES_FEATURE_PREFIX + Constants.IDC_CHECKING_FEATURE;

    // property identifiers

    /** Property identifier: XML Schema validator. */
    protected static final String SCHEMA_VALIDATOR =
        Constants.XERCES_PROPERTY_PREFIX + Constants.SCHEMA_VALIDATOR_PROPERTY;

    /** Property identifier: schema location. */
    protected static final String SCHEMA_LOCATION =
        Constants.XERCES_PROPERTY_PREFIX + Constants.SCHEMA_LOCATION;

    /** Property identifier: no namespace schema location. */
    protected static final String SCHEMA_NONS_LOCATION =
        Constants.XERCES_PROPERTY_PREFIX + Constants.SCHEMA_NONS_LOCATION;

    /** Property identifier: root type definition. */
    protected static final String ROOT_TYPE_DEF =
        Constants.XERCES_PROPERTY_PREFIX + Constants.ROOT_TYPE_DEFINITION_PROPERTY;

    /** Property identifier: root element declaration. */
    protected static final String ROOT_ELEMENT_DECL =
        Constants.XERCES_PROPERTY_PREFIX + Constants.ROOT_ELEMENT_DECLARATION_PROPERTY;

    /** Property identifier: Schema DV Factory */
    protected static final String SCHEMA_DV_FACTORY =
        Constants.XERCES_PROPERTY_PREFIX + Constants.SCHEMA_DV_FACTORY_PROPERTY;

    //
    // Data
    //

    // components (non-configurable)

    /** XML Schema Validator. Created lazily in configurePipeline(). */
    protected XMLSchemaValidator fSchemaValidator;

    //
    // Constructors
    //

    /** Default constructor. */
    public StandardParserConfiguration() {
        this(null, null, null);
    } // <init>()

    /**
     * Constructs a parser configuration using the specified symbol table.
     *
     * @param symbolTable The symbol table to use.
     */
    public StandardParserConfiguration(SymbolTable symbolTable) {
        this(symbolTable, null, null);
    } // <init>(SymbolTable)

    /**
     * Constructs a parser configuration using the specified symbol table and
     * grammar pool.
     * <p>
     * <strong>REVISIT:</strong>
     * Grammar pool will be updated when the new validation engine is
     * implemented.
     *
     * @param symbolTable The symbol table to use.
     * @param grammarPool The grammar pool to use.
     */
    public StandardParserConfiguration(SymbolTable symbolTable,
                                       XMLGrammarPool grammarPool) {
        this(symbolTable, grammarPool, null);
    } // <init>(SymbolTable,XMLGrammarPool)

    /**
     * Constructs a parser configuration using the specified symbol table,
     * grammar pool, and parent settings.
     * <p>
     * <strong>REVISIT:</strong>
     * Grammar pool will be updated when the new validation engine is
     * implemented.
     *
     * @param symbolTable The symbol table to use.
     * @param grammarPool The grammar pool to use.
     * @param parentSettings The parent settings.
     */
    public StandardParserConfiguration(SymbolTable symbolTable,
                                       XMLGrammarPool grammarPool,
                                       XMLComponentManager parentSettings) {
        super(symbolTable, grammarPool, parentSettings);

        // add default recognized features
        final String[] recognizedFeatures = {
            NORMALIZE_DATA,
            SCHEMA_ELEMENT_DEFAULT,
            SCHEMA_AUGMENT_PSVI,
            GENERATE_SYNTHETIC_ANNOTATIONS,
            VALIDATE_ANNOTATIONS,
            HONOUR_ALL_SCHEMALOCATIONS,
            NAMESPACE_GROWTH,
            TOLERATE_DUPLICATES,
            // NOTE: These shouldn't really be here but since the XML Schema
            //       validator is constructed dynamically, its recognized
            //       features might not have been set and it would cause a
            //       not-recognized exception to be thrown. -Ac
            XMLSCHEMA_VALIDATION,
            XMLSCHEMA_FULL_CHECKING,
            IGNORE_XSI_TYPE,
            ID_IDREF_CHECKING,
            IDENTITY_CONSTRAINT_CHECKING,
            UNPARSED_ENTITY_CHECKING,
        };
        addRecognizedFeatures(recognizedFeatures);

        // set state for default features
        // (features must be registered above before defaults can be set)
        setFeature(SCHEMA_ELEMENT_DEFAULT, true);
        setFeature(NORMALIZE_DATA, true);
        setFeature(SCHEMA_AUGMENT_PSVI, true);
        setFeature(GENERATE_SYNTHETIC_ANNOTATIONS, false);
        setFeature(VALIDATE_ANNOTATIONS, false);
        setFeature(HONOUR_ALL_SCHEMALOCATIONS, false);
        setFeature(NAMESPACE_GROWTH, false);
        setFeature(TOLERATE_DUPLICATES, false);
        setFeature(IGNORE_XSI_TYPE, false);
        setFeature(ID_IDREF_CHECKING, true);
        setFeature(IDENTITY_CONSTRAINT_CHECKING, true);
        setFeature(UNPARSED_ENTITY_CHECKING, true);

        // add default recognized properties
        final String[] recognizedProperties = {
            // NOTE: These shouldn't really be here but since the XML Schema
            //       validator is constructed dynamically, its recognized
            //       properties might not have been set and it would cause a
            //       not-recognized exception to be thrown. -Ac
            SCHEMA_LOCATION,
            SCHEMA_NONS_LOCATION,
            ROOT_TYPE_DEF,
            ROOT_ELEMENT_DECL,
            SCHEMA_DV_FACTORY,
        };
        addRecognizedProperties(recognizedProperties);

    } // <init>(SymbolTable,XMLGrammarPool,XMLComponentManager)

    //
    // Public methods
    //

    /**
     * Configures the pipeline. If XML Schema validation is enabled, the
     * schema validator is created on first use, registered as a component,
     * and inserted at the end of the pipeline after the namespace binder.
     */
    protected void configurePipeline() {
        super.configurePipeline();
        if ( getFeature(XMLSCHEMA_VALIDATION )) {
            // If schema validator was not in the pipeline insert it.
            if (fSchemaValidator == null) {
                fSchemaValidator = new XMLSchemaValidator();

                // add schema component
                fProperties.put(SCHEMA_VALIDATOR, fSchemaValidator);
                addComponent(fSchemaValidator);
                // add schema message formatter (registered only once)
                if (fErrorReporter.getMessageFormatter(XSMessageFormatter.SCHEMA_DOMAIN) == null) {
                    XSMessageFormatter xmft = new XSMessageFormatter();
                    fErrorReporter.putMessageFormatter(XSMessageFormatter.SCHEMA_DOMAIN, xmft);
                }

            }
            // Wire the validator in as the last pipeline stage:
            // namespace binder -> schema validator -> document handler.
            fLastComponent = fSchemaValidator;
            fNamespaceBinder.setDocumentHandler(fSchemaValidator);

            fSchemaValidator.setDocumentHandler(fDocumentHandler);
            fSchemaValidator.setDocumentSource(fNamespaceBinder);
        }

    } // configurePipeline()

    // features and properties

    /**
     * Check a feature. If feature is know and supported, this method simply
     * returns. Otherwise, the appropriate exception is thrown.
     *
     * @param featureId The unique identifier (URI) of the feature.
     *
     * @throws XMLConfigurationException Thrown for configuration error.
     *                                   In general, components should
     *                                   only throw this exception if
     *                                   it is <strong>really</strong>
     *                                   a critical error.
     */
    protected void checkFeature(String featureId)
        throws XMLConfigurationException {

        //
        // Xerces Features
        //

        if (featureId.startsWith(Constants.XERCES_FEATURE_PREFIX)) {
            // Suffix-length comparison is a cheap pre-filter before the
            // endsWith() string comparison.
            final int suffixLength = featureId.length() - Constants.XERCES_FEATURE_PREFIX.length();

            //
            // http://apache.org/xml/features/validation/schema
            //   Lets the user turn Schema validation support on/off.
            //
            if (suffixLength == Constants.SCHEMA_VALIDATION_FEATURE.length() &&
                featureId.endsWith(Constants.SCHEMA_VALIDATION_FEATURE)) {
                return;
            }
            // activate full schema checking
            if (suffixLength == Constants.SCHEMA_FULL_CHECKING.length() &&
                featureId.endsWith(Constants.SCHEMA_FULL_CHECKING)) {
                return;
            }
            // Feature identifier: expose schema normalized value
            //  http://apache.org/xml/features/validation/schema/normalized-value
            if (suffixLength == Constants.SCHEMA_NORMALIZED_VALUE.length() &&
                featureId.endsWith(Constants.SCHEMA_NORMALIZED_VALUE)) {
                return;
            }
            // Feature identifier: send element default value via characters()
            // http://apache.org/xml/features/validation/schema/element-default
            if (suffixLength == Constants.SCHEMA_ELEMENT_DEFAULT.length() &&
                featureId.endsWith(Constants.SCHEMA_ELEMENT_DEFAULT)) {
                return;
            }
        }

        //
        // Not recognized
        //

        super.checkFeature(featureId);

    } // checkFeature(String)

    /**
     * Check a property. If the property is know and supported, this method
     * simply returns. Otherwise, the appropriate exception is thrown.
     *
     * @param propertyId The unique identifier (URI) of the property
     *                   being set.
     *
     * @throws XMLConfigurationException Thrown for configuration error.
     *                                   In general, components should
     *                                   only throw this exception if
     *                                   it is <strong>really</strong>
     *                                   a critical error.
     */
    protected void checkProperty(String propertyId)
        throws XMLConfigurationException {

        //
        // Xerces Properties
        //

        if (propertyId.startsWith(Constants.XERCES_PROPERTY_PREFIX)) {
            final int suffixLength = propertyId.length() - Constants.XERCES_PROPERTY_PREFIX.length();

            if (suffixLength == Constants.SCHEMA_LOCATION.length() &&
                propertyId.endsWith(Constants.SCHEMA_LOCATION)) {
                return;
            }
            if (suffixLength == Constants.SCHEMA_NONS_LOCATION.length() &&
                propertyId.endsWith(Constants.SCHEMA_NONS_LOCATION)) {
                return;
            }
        }

        // JAXP schema-source property is also accepted.
        if (propertyId.startsWith(Constants.JAXP_PROPERTY_PREFIX)) {
            final int suffixLength = propertyId.length() - Constants.JAXP_PROPERTY_PREFIX.length();

            if (suffixLength == Constants.SCHEMA_SOURCE.length() &&
                propertyId.endsWith(Constants.SCHEMA_SOURCE)) {
                return;
            }
        }

        //
        // Not recognized
        //

        super.checkProperty(propertyId);

    } // checkProperty(String)

} // class StandardParserConfiguration
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.wal;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType;
import org.apache.hadoop.hbase.regionserver.SequenceId;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.ByteString;
// imports for things that haven't moved from regionserver.wal yet.
import org.apache.hadoop.hbase.regionserver.wal.CompressionContext;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
/**
* A Key for an entry in the change log.
*
* The log intermingles edits to many tables and rows, so each log entry
* identifies the appropriate table and row. Within a table and row, they're
* also sorted.
*
* <p>Some Transactional edits (START, COMMIT, ABORT) will not have an
* associated row.
*
* Note that protected members marked @InterfaceAudience.Private are only protected
* to support the legacy HLogKey class, which is in a different package.
*/
// TODO: Key and WALEdit are never used separately, or in one-to-many relation, for practical
// purposes. They need to be merged into WALEntry.
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.REPLICATION)
public class WALKey implements SequenceId, Comparable<WALKey> {
private static final Log LOG = LogFactory.getLog(WALKey.class);
// should be < 0 (@see HLogKey#readFields(DataInput))
// version 2 supports WAL compression
// public members here are only public because of HLogKey
@InterfaceAudience.Private
protected enum Version {
UNVERSIONED(0),
// Initial number we put on WALKey when we introduced versioning.
INITIAL(-1),
// Version -2 introduced a dictionary compression facility. Only this
// dictionary-based compression is available in version -2.
COMPRESSED(-2);
public final int code;
static final Version[] byCode;
static {
byCode = Version.values();
for (int i = 0; i < byCode.length; i++) {
if (byCode[i].code != -1 * i) {
throw new AssertionError("Values in this enum should be descending by one");
}
}
}
Version(int code) {
this.code = code;
}
public boolean atLeast(Version other) {
return code <= other.code;
}
public static Version fromCode(int code) {
return byCode[code * -1];
}
}
  /*
   * This is used for reading the log entries created by the previous releases
   * (0.94.11) which write the clusters information to the scopes of WALEdit.
   */
  private static final String PREFIX_CLUSTER_KEY = ".";


  // visible for deprecated HLogKey
  @InterfaceAudience.Private
  protected static final Version VERSION = Version.COMPRESSED;

  /** Used to represent when a particular wal key doesn't know/care about the sequence ordering. */
  public static final long NO_SEQUENCE_ID = -1;

  // visible for deprecated HLogKey
  @InterfaceAudience.Private
  protected byte [] encodedRegionName;

  // visible for deprecated HLogKey
  @InterfaceAudience.Private
  protected TableName tablename;

  // visible for deprecated HLogKey
  @InterfaceAudience.Private
  protected long logSeqNum;

  // NOTE(review): presumably the sequence number this key carried in the
  // original WAL (e.g. during replay); 0 until explicitly set — confirm.
  private long origLogSeqNum = 0;

  // Released once logSeqNum has been assigned (single-count latch).
  private CountDownLatch seqNumAssignedLatch = new CountDownLatch(1);

  // Time at which this edit was written.
  // visible for deprecated HLogKey
  @InterfaceAudience.Private
  protected long writeTime;

  // The first element in the list is the cluster id on which the change has originated
  // visible for deprecated HLogKey
  @InterfaceAudience.Private
  protected List<UUID> clusterIds;

  // NOTE(review): presumably maps column-family name to replication scope
  // (see FamilyScope/ScopeType imports) — confirm against writers.
  private NavigableMap<byte[], Integer> scopes;

  // Nonce group and nonce for operation de-duplication; default to NO_NONCE.
  private long nonceGroup = HConstants.NO_NONCE;
  private long nonce = HConstants.NO_NONCE;

  // Shared immutable empty list for keys that carry no cluster ids.
  static final List<UUID> EMPTY_UUIDS = Collections.unmodifiableList(new ArrayList<UUID>());

  // visible for deprecated HLogKey
  @InterfaceAudience.Private
  protected CompressionContext compressionContext;
public WALKey() {
init(null, null, 0L, HConstants.LATEST_TIMESTAMP,
new ArrayList<UUID>(), HConstants.NO_NONCE, HConstants.NO_NONCE);
}
@VisibleForTesting
public WALKey(final byte[] encodedRegionName, final TableName tablename, long logSeqNum,
final long now, UUID clusterId) {
List<UUID> clusterIds = new ArrayList<UUID>();
clusterIds.add(clusterId);
init(encodedRegionName, tablename, logSeqNum, now, clusterIds,
HConstants.NO_NONCE, HConstants.NO_NONCE);
}
public WALKey(final byte[] encodedRegionName, final TableName tablename) {
this(encodedRegionName, tablename, System.currentTimeMillis());
}
public WALKey(final byte[] encodedRegionName, final TableName tablename, final long now) {
init(encodedRegionName, tablename, NO_SEQUENCE_ID, now,
EMPTY_UUIDS, HConstants.NO_NONCE, HConstants.NO_NONCE);
}
/**
* Create the log key for writing to somewhere.
* We maintain the tablename mainly for debugging purposes.
* A regionName is always a sub-table object.
* <p>Used by log splitting and snapshots.
*
* @param encodedRegionName Encoded name of the region as returned by
* <code>HRegionInfo#getEncodedNameAsBytes()</code>.
* @param tablename - name of table
* @param logSeqNum - log sequence number
* @param now Time at which this edit was written.
* @param clusterIds the clusters that have consumed the change(used in Replication)
*/
public WALKey(final byte [] encodedRegionName, final TableName tablename,
long logSeqNum, final long now, List<UUID> clusterIds, long nonceGroup, long nonce) {
init(encodedRegionName, tablename, logSeqNum, now, clusterIds, nonceGroup, nonce);
}
/**
* Create the log key for writing to somewhere.
* We maintain the tablename mainly for debugging purposes.
* A regionName is always a sub-table object.
*
* @param encodedRegionName Encoded name of the region as returned by
* <code>HRegionInfo#getEncodedNameAsBytes()</code>.
* @param tablename
* @param now Time at which this edit was written.
* @param clusterIds the clusters that have consumed the change(used in Replication)
* @param nonceGroup
* @param nonce
*/
public WALKey(final byte [] encodedRegionName, final TableName tablename,
final long now, List<UUID> clusterIds, long nonceGroup, long nonce) {
init(encodedRegionName, tablename, NO_SEQUENCE_ID, now, clusterIds,
nonceGroup, nonce);
}
/**
* Create the log key for writing to somewhere.
* We maintain the tablename mainly for debugging purposes.
* A regionName is always a sub-table object.
*
* @param encodedRegionName Encoded name of the region as returned by
* <code>HRegionInfo#getEncodedNameAsBytes()</code>.
* @param tablename
* @param logSeqNum
* @param nonceGroup
* @param nonce
*/
public WALKey(final byte [] encodedRegionName, final TableName tablename, long logSeqNum,
long nonceGroup, long nonce) {
init(encodedRegionName, tablename, logSeqNum, EnvironmentEdgeManager.currentTime(),
EMPTY_UUIDS, nonceGroup, nonce);
}
@InterfaceAudience.Private
protected void init(final byte [] encodedRegionName, final TableName tablename,
long logSeqNum, final long now, List<UUID> clusterIds, long nonceGroup, long nonce) {
this.logSeqNum = logSeqNum;
this.writeTime = now;
this.clusterIds = clusterIds;
this.encodedRegionName = encodedRegionName;
this.tablename = tablename;
this.nonceGroup = nonceGroup;
this.nonce = nonce;
}
/**
* @param compressionContext Compression context to use
*/
public void setCompressionContext(CompressionContext compressionContext) {
this.compressionContext = compressionContext;
}
/** @return encoded region name */
public byte [] getEncodedRegionName() {
return encodedRegionName;
}
/** @return table name */
public TableName getTablename() {
return tablename;
}
/** @return log sequence number */
public long getLogSeqNum() {
return this.logSeqNum;
}
/**
* Allow that the log sequence id to be set post-construction and release all waiters on assigned
* sequence number.
* Only public for org.apache.hadoop.hbase.regionserver.wal.FSWALEntry
* @param sequence
*/
@InterfaceAudience.Private
public void setLogSeqNum(final long sequence) {
this.logSeqNum = sequence;
this.seqNumAssignedLatch.countDown();
}
/**
* Used to set original seq Id for WALKey during wal replay
* @param seqId
*/
public void setOrigLogSeqNum(final long seqId) {
this.origLogSeqNum = seqId;
}
/**
* Return a positive long if current WALKey is created from a replay edit
* @return original sequence number of the WALEdit
*/
public long getOrigLogSeqNum() {
return this.origLogSeqNum;
}
/**
* Wait for sequence number is assigned & return the assigned value
* @return long the new assigned sequence number
* @throws IOException
*/
@Override
public long getSequenceId() throws IOException {
return getSequenceId(-1);
}
/**
* Wait for sequence number is assigned & return the assigned value
* @param maxWaitForSeqId maximum duration, in milliseconds, to wait for seq number to be assigned
* @return long the new assigned sequence number
* @throws IOException
*/
public long getSequenceId(int maxWaitForSeqId) throws IOException {
try {
if (maxWaitForSeqId < 0) {
this.seqNumAssignedLatch.await();
} else {
if (!this.seqNumAssignedLatch.await(maxWaitForSeqId, TimeUnit.MILLISECONDS)) {
throw new IOException("Timed out waiting for seq number to be assigned");
}
}
} catch (InterruptedException ie) {
LOG.warn("Thread interrupted waiting for next log sequence number");
InterruptedIOException iie = new InterruptedIOException();
iie.initCause(ie);
throw iie;
}
return this.logSeqNum;
}
/**
* @return the write time
*/
public long getWriteTime() {
return this.writeTime;
}
public NavigableMap<byte[], Integer> getScopes() {
return scopes;
}
/** @return The nonce group */
public long getNonceGroup() {
return nonceGroup;
}
/** @return The nonce */
public long getNonce() {
return nonce;
}
public void setScopes(NavigableMap<byte[], Integer> scopes) {
this.scopes = scopes;
}
public void readOlderScopes(NavigableMap<byte[], Integer> scopes) {
if (scopes != null) {
Iterator<Map.Entry<byte[], Integer>> iterator = scopes.entrySet()
.iterator();
while (iterator.hasNext()) {
Map.Entry<byte[], Integer> scope = iterator.next();
String key = Bytes.toString(scope.getKey());
if (key.startsWith(PREFIX_CLUSTER_KEY)) {
addClusterId(UUID.fromString(key.substring(PREFIX_CLUSTER_KEY
.length())));
iterator.remove();
}
}
if (scopes.size() > 0) {
this.scopes = scopes;
}
}
}
/**
* Marks that the cluster with the given clusterId has consumed the change
*/
public void addClusterId(UUID clusterId) {
if (!clusterIds.contains(clusterId)) {
clusterIds.add(clusterId);
}
}
/**
* @return the set of cluster Ids that have consumed the change
*/
public List<UUID> getClusterIds() {
return clusterIds;
}
/**
* @return the cluster id on which the change has originated. It there is no such cluster, it
* returns DEFAULT_CLUSTER_ID (cases where replication is not enabled)
*/
public UUID getOriginatingClusterId(){
return clusterIds.isEmpty() ? HConstants.DEFAULT_CLUSTER_ID : clusterIds.get(0);
}
@Override
public String toString() {
return tablename + "/" + Bytes.toString(encodedRegionName) + "/" +
logSeqNum;
}
/**
* Produces a string map for this key. Useful for programmatic use and
* manipulation of the data stored in an WALKey, for example, printing
* as JSON.
*
* @return a Map containing data from this key
*/
public Map<String, Object> toStringMap() {
Map<String, Object> stringMap = new HashMap<String, Object>();
stringMap.put("table", tablename);
stringMap.put("region", Bytes.toStringBinary(encodedRegionName));
stringMap.put("sequence", logSeqNum);
return stringMap;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
return compareTo((WALKey)obj) == 0;
}
@Override
public int hashCode() {
int result = Bytes.hashCode(this.encodedRegionName);
result ^= this.logSeqNum;
result ^= this.writeTime;
return result;
}
@Override
public int compareTo(WALKey o) {
int result = Bytes.compareTo(this.encodedRegionName, o.encodedRegionName);
if (result == 0) {
if (this.logSeqNum < o.logSeqNum) {
result = -1;
} else if (this.logSeqNum > o.logSeqNum) {
result = 1;
}
if (result == 0) {
if (this.writeTime < o.writeTime) {
result = -1;
} else if (this.writeTime > o.writeTime) {
return 1;
}
}
}
// why isn't cluster id accounted for?
return result;
}
/**
* Drop this instance's tablename byte array and instead
* hold a reference to the provided tablename. This is not
* meant to be a general purpose setter - it's only used
* to collapse references to conserve memory.
*/
void internTableName(TableName tablename) {
// We should not use this as a setter - only to swap
// in a new reference to the same table name.
assert tablename.equals(this.tablename);
this.tablename = tablename;
}
/**
* Drop this instance's region name byte array and instead
* hold a reference to the provided region name. This is not
* meant to be a general purpose setter - it's only used
* to collapse references to conserve memory.
*/
void internEncodedRegionName(byte []encodedRegionName) {
// We should not use this as a setter - only to swap
// in a new reference to the same table name.
assert Bytes.equals(this.encodedRegionName, encodedRegionName);
this.encodedRegionName = encodedRegionName;
}
public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder getBuilder(WALCellCodec.ByteStringCompressor compressor)
throws IOException {
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder builder = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder();
if (compressionContext == null) {
builder.setEncodedRegionName(ByteStringer.wrap(this.encodedRegionName));
builder.setTableName(ByteStringer.wrap(this.tablename.getName()));
} else {
builder.setEncodedRegionName(compressor.compress(this.encodedRegionName,
compressionContext.regionDict));
builder.setTableName(compressor.compress(this.tablename.getName(),
compressionContext.tableDict));
}
builder.setLogSequenceNumber(this.logSeqNum);
builder.setWriteTime(writeTime);
if(this.origLogSeqNum > 0) {
builder.setOrigSequenceNumber(this.origLogSeqNum);
}
if (this.nonce != HConstants.NO_NONCE) {
builder.setNonce(nonce);
}
if (this.nonceGroup != HConstants.NO_NONCE) {
builder.setNonceGroup(nonceGroup);
}
HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
for (UUID clusterId : clusterIds) {
uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits());
uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits());
builder.addClusterIds(uuidBuilder.build());
}
if (scopes != null) {
for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
ByteString family = (compressionContext == null) ? ByteStringer.wrap(e.getKey())
: compressor.compress(e.getKey(), compressionContext.familyDict);
builder.addScopes(FamilyScope.newBuilder()
.setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
}
}
return builder;
}
public void readFieldsFromPb(
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey walKey, WALCellCodec.ByteStringUncompressor uncompressor) throws IOException {
if (this.compressionContext != null) {
this.encodedRegionName = uncompressor.uncompress(
walKey.getEncodedRegionName(), compressionContext.regionDict);
byte[] tablenameBytes = uncompressor.uncompress(
walKey.getTableName(), compressionContext.tableDict);
this.tablename = TableName.valueOf(tablenameBytes);
} else {
this.encodedRegionName = walKey.getEncodedRegionName().toByteArray();
this.tablename = TableName.valueOf(walKey.getTableName().toByteArray());
}
clusterIds.clear();
if (walKey.hasClusterId()) {
//When we are reading the older log (0.95.1 release)
//This is definitely the originating cluster
clusterIds.add(new UUID(walKey.getClusterId().getMostSigBits(), walKey.getClusterId()
.getLeastSigBits()));
}
for (HBaseProtos.UUID clusterId : walKey.getClusterIdsList()) {
clusterIds.add(new UUID(clusterId.getMostSigBits(), clusterId.getLeastSigBits()));
}
if (walKey.hasNonceGroup()) {
this.nonceGroup = walKey.getNonceGroup();
}
if (walKey.hasNonce()) {
this.nonce = walKey.getNonce();
}
this.scopes = null;
if (walKey.getScopesCount() > 0) {
this.scopes = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);
for (FamilyScope scope : walKey.getScopesList()) {
byte[] family = (compressionContext == null) ? scope.getFamily().toByteArray() :
uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict);
this.scopes.put(family, scope.getScopeType().getNumber());
}
}
this.logSeqNum = walKey.getLogSequenceNumber();
this.writeTime = walKey.getWriteTime();
if(walKey.hasOrigSequenceNumber()) {
this.origLogSeqNum = walKey.getOrigSequenceNumber();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import org.junit.Test;
/**
 * End-to-end tests for the {@code LAST_VALUE(...) WITHIN GROUP (ORDER BY ...)} aggregate
 * function across different column types, sort directions, multi-group queries and
 * null handling.
 *
 * Fixes over the previous revision: every {@code Connection} is now closed via
 * try-with-resources (previously they leaked, especially when an assertion failed
 * mid-test), and {@code assertEquals} arguments are in the conventional
 * (expected, actual) order so failure messages read correctly.
 */
public class LastValueFunctionIT extends BaseHBaseManagedTimeIT {

    @Test
    public void unsignedLong() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_value_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                    + " date DATE, \"value\" UNSIGNED_LONG)";
            conn.createStatement().execute(ddl);
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") "
                    + "VALUES (1, 8, TO_DATE('2013-01-01 00:00:00'), 300)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") "
                    + "VALUES (2, 8, TO_DATE('2013-01-01 00:01:00'), 7)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") "
                    + "VALUES (3, 8, TO_DATE('2013-01-01 00:02:00'), 9)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") "
                    + "VALUES (4, 8, TO_DATE('2013-01-01 00:03:00'), 4)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") "
                    + "VALUES (5, 8, TO_DATE('2013-01-01 00:04:00'), 2)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") "
                    + "VALUES (6, 8, TO_DATE('2013-01-01 00:05:00'), 150)");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(\"value\") WITHIN GROUP (ORDER BY date ASC) FROM last_value_table GROUP BY page_id");
            assertTrue(rs.next());
            // 150 carries the greatest date, so it is "last" in ascending order.
            assertEquals(150, rs.getLong(1));
            assertFalse(rs.next());
        }
    }

    @Test
    public void signedInteger() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_test_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG, date INTEGER, \"value\" INTEGER)";
            conn.createStatement().execute(ddl);
            // Rows upserted out of date order on purpose; LAST_VALUE must sort by date, not insertion order.
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (5, 8, 5, -255)");
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (1, 8, 1, 3)");
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (2, 8, 2, 7)");
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (3, 8, 3, 9)");
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (4, 8, 4, 4)");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(\"value\") WITHIN GROUP (ORDER BY date ASC) FROM last_test_table GROUP BY page_id"
            );
            assertTrue(rs.next());
            // Negative values must round-trip correctly through the aggregate.
            assertEquals(-255, rs.getInt(1));
            assertFalse(rs.next());
        }
    }

    @Test
    public void unsignedInteger() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_test_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                    + " date UNSIGNED_INT, \"value\" UNSIGNED_INT)";
            conn.createStatement().execute(ddl);
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (1, 8, 1, 3)");
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (2, 8, 2, 7)");
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (3, 8, 3, 9)");
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (5, 8, 4, 2)");
            conn.createStatement().execute("UPSERT INTO last_test_table (id, page_id, date, \"value\") VALUES (4, 8, 5, 4)");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(\"value\") WITHIN GROUP (ORDER BY date ASC) FROM last_test_table GROUP BY page_id"
            );
            assertTrue(rs.next());
            assertEquals(4, rs.getInt(1));
            assertFalse(rs.next());
        }
    }

    @Test
    public void simpleTestDescOrder() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_value_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                    + " dates INTEGER, val INTEGER)";
            conn.createStatement().execute(ddl);
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (1, 8, 0, 300)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (2, 8, 1, 7)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (3, 8, 2, 9)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (4, 8, 3, 4)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (5, 8, 4, 2)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (6, 8, 5, 150)");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(val) WITHIN GROUP (ORDER BY dates DESC) FROM last_value_table GROUP BY page_id");
            assertTrue(rs.next());
            // Descending order makes the smallest date (0, value 300) the "last" row.
            assertEquals(300, rs.getInt(1));
            assertFalse(rs.next());
        }
    }

    @Test
    public void simpleTestAscOrder() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_value_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                    + " dates INTEGER, val INTEGER)";
            conn.createStatement().execute(ddl);
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (1, 8, 0, 300)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (2, 8, 1, 7)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (3, 8, 2, 9)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (4, 8, 3, 4)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (5, 8, 4, 2)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, dates, val) VALUES (6, 8, 5, 150)");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(val) WITHIN GROUP (ORDER BY dates ASC) FROM last_value_table GROUP BY page_id");
            assertTrue(rs.next());
            assertEquals(150, rs.getInt(1));
            assertFalse(rs.next());
        }
    }

    @Test
    public void charDatatype() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_value_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG, "
                    + "date CHAR(3), \"value\" CHAR(3))";
            conn.createStatement().execute(ddl);
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (1, 8, '1', '300')");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (2, 8, '2', '7')");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (3, 8, '3', '9')");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (5, 8, '4', '2')");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (4, 8, '5', '400')");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(\"value\") WITHIN GROUP (ORDER BY date ASC) FROM last_value_table GROUP BY page_id");
            assertTrue(rs.next());
            assertEquals("400", rs.getString(1));
            assertFalse(rs.next());
        }
    }

    @Test
    public void varcharVariableLenghtDatatype() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_value_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                    + " date VARCHAR, \"value\" VARCHAR)";
            conn.createStatement().execute(ddl);
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (1, 8, '1', '3')");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (2, 8, '2', '7')");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (3, 8, '3', '9')");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (5, 8, '4', '2')");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (4, 8, '5', '4')");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(\"value\") WITHIN GROUP (ORDER BY date ASC) FROM last_value_table GROUP BY page_id");
            assertTrue(rs.next());
            assertEquals("4", rs.getString(1));
            assertFalse(rs.next());
        }
    }

    @Test
    public void groupMultipleValues() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_value_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                    + " date UNSIGNED_INT, \"value\" UNSIGNED_INT)";
            conn.createStatement().execute(ddl);
            //first page_id
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (1, 8, 1, 3)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (2, 8, 2, 7)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (3, 8, 3, 9)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (5, 8, 4, 2)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (4, 8, 5, 4)");
            //second page_id
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (11, 9, 1, 3)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (12, 9, 2, 7)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (13, 9, 3, 9)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (15, 9, 4, 2)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (14, 9, 5, 40)");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(\"value\") WITHIN GROUP (ORDER BY date ASC) FROM last_value_table GROUP BY page_id");
            // One row per page_id group, in page_id order.
            assertTrue(rs.next());
            assertEquals(4, rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(40, rs.getInt(1));
            assertFalse(rs.next());
        }
    }

    @Test
    public void nullValuesInAggregatingColumns() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_value_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                    + " date UNSIGNED_INT, \"value\" UNSIGNED_INT)";
            conn.createStatement().execute(ddl);
            // "value" column is never populated, so the aggregate must yield SQL NULL.
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (1, 8, 1)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (2, 8, 2)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (3, 8, 3)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (5, 8, 4)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (4, 8, 5)");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(\"value\") WITHIN GROUP (ORDER BY date ASC) FROM last_value_table GROUP BY page_id");
            assertTrue(rs.next());
            byte[] nothing = rs.getBytes(1);
            assertTrue("expected SQL NULL when every aggregated value is null", nothing == null);
        }
    }

    @Test
    public void nullValuesInAggregatingColumnsSecond() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_value_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                    + " date UNSIGNED_INT, \"value\" UNSIGNED_INT)";
            conn.createStatement().execute(ddl);
            //first page_id
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (1, 8, 1)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (2, 8, 2)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (3, 8, 3)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (5, 8, 4)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date) VALUES (4, 8, 5)");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(\"value\") WITHIN GROUP (ORDER BY date ASC) FROM last_value_table GROUP BY page_id");
            assertTrue(rs.next());
            byte[] nothing = rs.getBytes(1);
            assertTrue("expected SQL NULL when every aggregated value is null", nothing == null);
        }
    }

    @Test
    public void inOrderByClausule() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String ddl = "CREATE TABLE IF NOT EXISTS last_value_table "
                    + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_INT,"
                    + " date UNSIGNED_INT, \"value\" UNSIGNED_INT)";
            conn.createStatement().execute(ddl);
            //first page
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (1, 8, 1, 3)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (2, 8, 2, 7)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (3, 8, 3, 9)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (5, 8, 4, 2)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (4, 8, 5, 5)");
            //second page
            // NOTE(review): id 5 is reused below, so the (5, 8, 4, 2) row above is overwritten.
            // The expected results already account for this, but it looks unintentional — confirm.
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (5, 2, 1, 3)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (6, 2, 2, 7)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (7, 2, 3, 9)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (8, 2, 4, 2)");
            conn.createStatement().execute("UPSERT INTO last_value_table (id, page_id, date, \"value\") VALUES (9, 2, 5, 4)");
            conn.commit();
            ResultSet rs = conn.createStatement().executeQuery(
                    "SELECT LAST_VALUE(\"value\") WITHIN GROUP (ORDER BY date ASC) AS val "
                    + "FROM last_value_table GROUP BY page_id ORDER BY val DESC");
            // Outer ORDER BY sorts the aggregated results descending: 5 (page 8), then 4 (page 2).
            assertTrue(rs.next());
            assertEquals(5, rs.getInt(1));
            assertTrue(rs.next());
            assertEquals(4, rs.getInt(1));
            assertFalse(rs.next());
        }
    }
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.clouddirectory.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/clouddirectory-2016-05-10/ListAttachedIndices" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListAttachedIndicesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
 * <p>
 * The indices attached to the specified object.
 * </p>
 */
// Defensively copied on set (see setIndexAttachments); remains null until populated.
private java.util.List<IndexAttachment> indexAttachments;
/**
 * <p>
 * The pagination token.
 * </p>
 */
// NOTE(review): presumably null when there are no further pages — confirm against the API docs.
private String nextToken;
/**
 * Returns the indices attached to the specified object.
 *
 * @return the attached indices, or {@code null} if none have been set
 */
public java.util.List<IndexAttachment> getIndexAttachments() {
    return this.indexAttachments;
}
/**
 * Sets the indices attached to the specified object.
 * A {@code null} argument clears the field; otherwise the supplied collection is
 * defensively copied so later caller-side mutation does not leak into this object.
 *
 * @param indexAttachments
 *        The indices attached to the specified object.
 */
public void setIndexAttachments(java.util.Collection<IndexAttachment> indexAttachments) {
    this.indexAttachments = (indexAttachments == null)
            ? null
            : new java.util.ArrayList<IndexAttachment>(indexAttachments);
}
/**
 * Appends the given indices to the existing list (creating it on first use).
 * <p>
 * <b>NOTE:</b> this method appends rather than replaces. Use
 * {@link #setIndexAttachments(java.util.Collection)} or
 * {@link #withIndexAttachments(java.util.Collection)} to overwrite the existing values.
 * </p>
 *
 * @param indexAttachments
 *        The indices attached to the specified object.
 * @return this object, to allow method-call chaining.
 */
public ListAttachedIndicesResult withIndexAttachments(IndexAttachment... indexAttachments) {
    if (this.indexAttachments == null) {
        // Presize to the incoming count; contents are added just below.
        this.indexAttachments = new java.util.ArrayList<IndexAttachment>(indexAttachments.length);
    }
    java.util.Collections.addAll(this.indexAttachments, indexAttachments);
    return this;
}
/**
 * Replaces the attached indices with the given collection (defensively copied;
 * {@code null} clears the field) and returns this object for chaining.
 *
 * @param indexAttachments
 *        The indices attached to the specified object.
 * @return this object, to allow method-call chaining.
 */
public ListAttachedIndicesResult withIndexAttachments(java.util.Collection<IndexAttachment> indexAttachments) {
    this.indexAttachments = (indexAttachments == null)
            ? null
            : new java.util.ArrayList<IndexAttachment>(indexAttachments);
    return this;
}
/**
* <p>
* The pagination token.
* </p>
*
* @param nextToken
* The pagination token.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* The pagination token.
* </p>
*
* @return The pagination token.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* The pagination token.
* </p>
*
* @param nextToken
* The pagination token.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListAttachedIndicesResult withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getIndexAttachments() != null)
sb.append("IndexAttachments: ").append(getIndexAttachments()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListAttachedIndicesResult == false)
return false;
ListAttachedIndicesResult other = (ListAttachedIndicesResult) obj;
if (other.getIndexAttachments() == null ^ this.getIndexAttachments() == null)
return false;
if (other.getIndexAttachments() != null && other.getIndexAttachments().equals(this.getIndexAttachments()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getIndexAttachments() == null) ? 0 : getIndexAttachments().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
return hashCode;
}
@Override
public ListAttachedIndicesResult clone() {
try {
return (ListAttachedIndicesResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| |
package org.apache.commons.fileupload.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.commons.fileupload.util.mime.MimeUtility;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
/**
* A simple parser intended to parse sequences of name/value pairs.
* <p>
* Parameter values are expected to be enclosed in quotes if they
* contain unsafe characters, such as '=' characters or separators.
* Parameter values are optional and can be omitted.
* </p>
* <p>
* <code>param1 = value; param2 = "anything goes; really"; param3</code>
* </p>
*/
/**
 * A simple parser intended to parse sequences of name/value pairs.
 * <p>
 * Parameter values are expected to be enclosed in quotes if they
 * contain unsafe characters, such as '=' characters or separators.
 * Parameter values are optional and can be omitted.
 * </p>
 * <p>
 * <code>param1 = value; param2 = "anything goes; really"; param3</code>
 * </p>
 * <p>
 * NOTE(review): instances are stateful — the parse position is kept in instance
 * fields — so a single instance is not safe for concurrent use.
 * </p>
 */
public class ParameterParser {
    /**
     * String to be parsed, held as a character array.
     */
    private char[] chars = null;
    /**
     * Current position in the string.
     */
    private int pos = 0;
    /**
     * Maximum position in the string (exclusive upper bound for {@link #pos}).
     */
    private int len = 0;
    /**
     * Start of a token (inclusive index into {@link #chars}).
     */
    private int i1 = 0;
    /**
     * End of a token (exclusive index into {@link #chars}).
     */
    private int i2 = 0;
    /**
     * Whether names stored in the map should be converted to lower case.
     */
    private boolean lowerCaseNames = false;
    /**
     * Are there any characters left to parse?
     *
     * @return {@code true} if there are unparsed characters,
     *         {@code false} otherwise.
     */
    private boolean hasChar() {
        return this.pos < this.len;
    }
    /**
     * A helper method to process the parsed token. This method removes
     * leading and trailing blanks as well as enclosing quotation marks,
     * when necessary.
     *
     * @param quoted {@code true} if quotation marks are expected,
     *               {@code false} otherwise.
     * @return the token, or {@code null} if the trimmed token is empty
     */
    private String getToken(boolean quoted) {
        // Trim leading white spaces
        while ((i1 < i2) && (Character.isWhitespace(chars[i1]))) {
            i1++;
        }
        // Trim trailing white spaces
        while ((i2 > i1) && (Character.isWhitespace(chars[i2 - 1]))) {
            i2--;
        }
        // Strip away quotation marks if necessary (only when the token is fully
        // enclosed, i.e. both the first and last remaining chars are '"')
        if (quoted
            && ((i2 - i1) >= 2)
            && (chars[i1] == '"')
            && (chars[i2 - 1] == '"')) {
            i1++;
            i2--;
        }
        String result = null;
        if (i2 > i1) {
            result = new String(chars, i1, i2 - i1);
        }
        return result;
    }
    /**
     * Tests if the given character is present in the array of characters.
     *
     * @param ch the character to test for presence in the array of characters
     * @param charray the array of characters to test against
     * @return {@code true} if the character is present in the array of
     *         characters, {@code false} otherwise.
     */
    private boolean isOneOf(char ch, final char[] charray) {
        boolean result = false;
        for (char element : charray) {
            if (ch == element) {
                result = true;
                break;
            }
        }
        return result;
    }
    /**
     * Parses out a token until any of the given terminators
     * is encountered. Advances {@link #pos} and records the token
     * bounds in {@link #i1}/{@link #i2}.
     *
     * @param terminators the array of terminating characters. Any of these
     *        characters when encountered signify the end of the token
     * @return the token
     */
    private String parseToken(final char[] terminators) {
        char ch;
        i1 = pos;
        i2 = pos;
        while (hasChar()) {
            ch = chars[pos];
            if (isOneOf(ch, terminators)) {
                break;
            }
            i2++;
            pos++;
        }
        return getToken(false);
    }
    /**
     * Parses out a token until any of the given terminators
     * is encountered outside the quotation marks.
     *
     * @param terminators the array of terminating characters. Any of these
     *        characters when encountered outside the quotation marks signify the end
     *        of the token
     * @return the token
     */
    private String parseQuotedToken(final char[] terminators) {
        char ch;
        i1 = pos;
        i2 = pos;
        boolean quoted = false;
        boolean charEscaped = false;
        while (hasChar()) {
            ch = chars[pos];
            // terminators only end the token when we are outside quotes
            if (!quoted && isOneOf(ch, terminators)) {
                break;
            }
            // an unescaped quote toggles quoted mode
            if (!charEscaped && ch == '"') {
                quoted = !quoted;
            }
            // a backslash escapes exactly the next character
            charEscaped = (!charEscaped && ch == '\\');
            i2++;
            pos++;
        }
        return getToken(true);
    }
    /**
     * Sets the flag if parameter names are to be converted to lower case when
     * name/value pairs are parsed. Once set, the flag cannot be cleared.
     */
    public void setLowerCaseNames() {
        this.lowerCaseNames = true;
    }
    /**
     * Extracts a map of name/value pairs from the given string. Names are
     * expected to be unique. Multiple separators may be specified and
     * the earliest found in the input string is used.
     *
     * @param str the string that contains a sequence of name/value pairs
     * @param separators the name/value pairs separators
     * @return a map of name/value pairs; empty when {@code separators} is
     *         {@code null} or empty
     */
    public Map<String, String> parse(final String str, char[] separators) {
        if (separators == null || separators.length == 0) return new HashMap<>();
        char separator = separators[0];
        if (str != null) {
            // pick the separator that occurs earliest in the input
            int idx = str.length();
            for (char separator2 : separators) {
                int tmp = str.indexOf(separator2);
                if (tmp > -1 && tmp < idx) {
                    idx = tmp;
                    separator = separator2;
                }
            }
        }
        return parse(str, separator);
    }
    /**
     * Extracts a map of name/value pairs from the given string. Names are
     * expected to be unique.
     *
     * @param str the string that contains a sequence of name/value pairs
     * @param separator the name/value pairs separator
     * @return a map of name/value pairs; empty when {@code str} is {@code null}
     */
    public Map<String, String> parse(final String str, char separator) {
        if (str == null) return new HashMap<>();
        return parse(str.toCharArray(), separator);
    }
    /**
     * Extracts a map of name/value pairs from the given array of
     * characters. Names are expected to be unique.
     *
     * @param charArray the array of characters that contains a sequence of
     *        name/value pairs
     * @param separator the name/value pairs separator
     * @return a map of name/value pairs
     */
    private Map<String, String> parse(final char[] charArray, char separator) {
        if (charArray == null) return new HashMap<>();
        return parse(charArray, charArray.length, separator);
    }
    /**
     * Extracts a map of name/value pairs from the given array of
     * characters. Names are expected to be unique.
     *
     * @param charArray the array of characters that contains a sequence of
     *        name/value pairs
     * @param length - the length.
     * @param separator the name/value pairs separator
     * @return a map of name/value pairs
     */
    private Map<String, String> parse(
        final char[] charArray,
        int length,
        char separator) {
        if (charArray == null) {
            return new HashMap<>();
        }
        HashMap<String, String> params = new HashMap<>();
        // reset the parse state for this run
        this.chars = charArray;
        this.pos = 0;
        this.len = length;
        while (hasChar()) {
            // name ends at '=' (value follows) or at the separator (no value)
            String paramName = parseToken(new char[]{'=', separator});
            String paramValue = null;
            if (hasChar() && (charArray[pos] == '=')) {
                pos++; // skip '='
                paramValue = parseQuotedToken(new char[]{separator});
                if (paramValue != null)
                    try {
                        // decode RFC 2047 encoded-words, e.g. "=?UTF-8?Q?...?="
                        paramValue = MimeUtility.decodeText(paramValue);
                    } catch (UnsupportedEncodingException e) {
                        // let's keep the original value in this case
                    }
            }
            if (hasChar() && (charArray[pos] == separator)) {
                pos++; // skip separator
            }
            if ((paramName != null) && (paramName.length() > 0)) {
                if (this.lowerCaseNames) paramName = paramName.toLowerCase(Locale.ENGLISH);
                // duplicate names overwrite earlier entries (last one wins)
                params.put(paramName, paramValue);
            }
        }
        return params;
    }
}
| |
package org.nd4j.jita.allocator.impl;
import lombok.Getter;
import lombok.NonNull;
import org.apache.commons.lang3.RandomUtils;
import org.bytedeco.javacpp.Pointer;
import org.nd4j.jita.allocator.Allocator;
import org.nd4j.jita.allocator.context.ContextPool;
import org.nd4j.jita.allocator.context.ExternalContext;
import org.nd4j.jita.allocator.enums.Aggressiveness;
import org.nd4j.jita.allocator.enums.AllocationStatus;
import org.nd4j.jita.allocator.garbage.GarbageReference;
import org.nd4j.jita.allocator.pointers.PointersPair;
import org.nd4j.jita.allocator.time.Ring;
import org.nd4j.jita.allocator.time.rings.LockedRing;
import org.nd4j.jita.allocator.utils.AllocationUtils;
import org.nd4j.jita.conf.Configuration;
import org.nd4j.jita.conf.CudaEnvironment;
import org.nd4j.linalg.cache.ConstantHandler;
import org.nd4j.jita.constant.CudaConstantHandler;
import org.nd4j.jita.flow.FlowController;
import org.nd4j.jita.handler.MemoryHandler;
import org.nd4j.jita.handler.impl.CudaZeroHandler;
import org.nd4j.linalg.api.buffer.BaseDataBuffer;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.jcublas.buffer.CudaIntDataBuffer;
import org.nd4j.linalg.jcublas.context.CudaContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.ref.ReferenceQueue;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
* Just-in-Time Allocator for CUDA
*
* This method is a basement for pre-allocated memory management for cuda.
* Basically that's sophisticated garbage collector for both zero-copy memory, and multiple device memory.
*
* There's multiple possible data movement directions, but general path is:
* host memory (issued on JVM side) ->
* zero-copy pinned memory (which is allocated for everything out there) ->
* device memory (where data gets moved from zero-copy, if used actively enough)
*
* And the backward movement, if memory isn't used anymore (like if originating INDArray was trashed by JVM GC), or it's not popular enough to hold in device memory
*
* Mechanism is as lock-free, as possible. This achieved using three-state memory state signalling: Tick/Tack/Toe.
* Tick: memory chunk (or its part) is accessed on on device
* Tack: memory chink (or its part) device access session was finished
* Toe: memory chunk is locked for some reason. Possible reasons:
* Memory synchronization is ongoing, host->gpu or gpu->host
* Memory relocation is ongoing, zero->gpu, or gpu->zero, or gpu->host
* Memory removal is ongoing.
*
* So, basically memory being used for internal calculations, not interfered with manual changes (aka putRow etc), are always available without locks
*
* // TODO: compare, if referenceQueue-based garbage collection would be more efficient
* @author raver119@gmail.com
*/
public class AtomicAllocator implements Allocator {
    // Eagerly-created process-wide singleton; see getInstance().
    private static final AtomicAllocator INSTANCE = new AtomicAllocator();
    // Active configuration; replaced wholesale by applyConfiguration().
    private Configuration configuration = CudaEnvironment.getInstance().getConfiguration();
    // Backend that performs the actual host/device allocations and transfers.
    @Getter private transient MemoryHandler memoryHandler;
    // Monotonic counters; allocationsCounter is currently unused in visible code.
    private AtomicLong allocationsCounter = new AtomicLong(0);
    // Source of unique tracking IDs handed out by allocateMemory().
    private AtomicLong objectsTracker = new AtomicLong(0);
    // we have single tracking point for allocation points, since we're not going to cycle through it it any time soon
    private Map<Long, AllocationPoint> allocationsMap = new ConcurrentHashMap<>();
    private static Logger log = LoggerFactory.getLogger(AtomicAllocator.class);
    /*
        locks for internal resources
    */
    private ReentrantReadWriteLock globalLock = new ReentrantReadWriteLock();
    // NOTE(review): externalsLock is declared but not used in the visible code.
    private ReentrantReadWriteLock externalsLock = new ReentrantReadWriteLock();
    /*
        here we have handles for garbage collector threads
        ThreadId, GarbageCollector
    */
    private Map<Long, ZeroGarbageCollectorThread> collectorsZero = new ConcurrentHashMap<>();
    private Map<Integer, DeviceGarbageCollectorThread> collectorsDevice = new ConcurrentHashMap<>();
    private Map<Integer, UnifiedGarbageCollectorThread> collectorsUnified = new ConcurrentHashMap<>();
    private final AtomicBoolean shouldStop = new AtomicBoolean(false);
    // NOTE(review): checked by applyConfiguration() but never set anywhere visible — confirm.
    private final AtomicBoolean wasInitialised = new AtomicBoolean(false);
    // Rolling access-frequency windows used by the seekUnused* heuristics.
    private final Ring deviceLong = new LockedRing(30);
    private final Ring deviceShort = new LockedRing(30);
    private final Ring zeroLong = new LockedRing(30);
    private final Ring zeroShort = new LockedRing(30);
    // One ReferenceQueue per GC bucket; populated by initHostCollectors().
    private final Map<Integer, ReferenceQueue<BaseDataBuffer>> queueMap = new ConcurrentHashMap<>();
    private ConstantHandler constantHandler = new CudaConstantHandler();
    /** Returns the process-wide singleton allocator instance. */
    public static AtomicAllocator getInstance() {
        return INSTANCE;
    }
    /**
     * Singleton constructor: wires up the CUDA zero-copy memory handler and
     * starts the garbage-collector threads.
     * Order matters: the handler must be fully initialized before the
     * collector threads (which call back into this allocator) are started.
     */
    protected AtomicAllocator() {
        this.memoryHandler = new CudaZeroHandler();
        this.memoryHandler.init(configuration, this);
        initDeviceCollectors();
        initHostCollectors();
    }
/**
* This method executes preconfigured number of host memory garbage collectors
*/
protected void initHostCollectors() {
for (int i = 0; i < configuration.getNumberOfGcThreads(); i++) {
ReferenceQueue<BaseDataBuffer> queue = new ReferenceQueue<>();
UnifiedGarbageCollectorThread uThread = new UnifiedGarbageCollectorThread(i, queue);
queueMap.put(i, queue);
uThread.start();
collectorsUnified.put(i, uThread);
/*
ZeroGarbageCollectorThread zThread = new ZeroGarbageCollectorThread((long) i, shouldStop);
zThread.start();
collectorsZero.put((long) i, zThread);
*/
}
}
    /**
     * This method executes garbage collectors for each special device (i.e. CUDA GPUs) present in system.
     *
     * NOTE(review): currently a no-op — the per-device collector startup below is
     * commented out, so device garbage is handled by the unified collectors instead.
     */
    protected void initDeviceCollectors() {
        /*
        for (Integer deviceId : this.memoryHandler.getAvailableDevices()) {
            DeviceGarbageCollectorThread dThread = new DeviceGarbageCollectorThread(deviceId, shouldStop);
            dThread.start();
            collectorsDevice.put(deviceId, dThread);
        }
        */
    }
    /**
     * Returns the device context associated with the current thread,
     * as provided by the active memory handler.
     *
     * @return external (wrapped) device context for the calling thread
     */
    @Override
    public ExternalContext getDeviceContext() {
        // FIXME: proper lock avoidance required here
        return memoryHandler.getDeviceContext();
    }
/**
* This method specifies Mover implementation to be used internally
* @param memoryHandler
*/
public void setMemoryHandler(@NonNull MemoryHandler memoryHandler) {
globalLock.writeLock().lock();
this.memoryHandler = memoryHandler;
this.memoryHandler.init(configuration, this);
globalLock.writeLock().unlock();
}
/**
* Consume and apply configuration passed in as argument
*
* PLEASE NOTE: This method should only be used BEFORE any calculations were started.
*
* @param configuration configuration bean to be applied
*/
@Override
public void applyConfiguration(@NonNull Configuration configuration) {
if (!wasInitialised.get()) {
globalLock.writeLock().lock();
this.configuration = configuration;
globalLock.writeLock().unlock();
}
}
/**
* Returns current Allocator configuration
*
* @return current configuration
*/
@Override
public Configuration getConfiguration() {
try {
globalLock.readLock().lock();
return configuration;
} finally {
globalLock.readLock().unlock();
}
}
    /**
     * Returns the actual device pointer valid for the given buffer in the
     * given CUDA context. Delegates to the active memory handler, which may
     * trigger host-to-device movement.
     *
     * @param buffer buffer to resolve
     * @param context CUDA context the pointer must be valid in
     */
    @Override
    public Pointer getPointer(DataBuffer buffer, CudaContext context) {
        return memoryHandler.getDevicePointer(buffer, context);
    }
    /**
     * Returns the actual device pointer valid for the specified shape of the
     * current object.
     *
     * @param buffer buffer to resolve
     * @param shape ignored — the whole buffer's pointer is returned
     * @param isView ignored
     * @deprecated shape/view arguments are not honored; use
     *             {@link #getPointer(DataBuffer, CudaContext)} instead
     */
    @Override
    @Deprecated
    public Pointer getPointer(DataBuffer buffer, AllocationShape shape, boolean isView, CudaContext context) {
        return memoryHandler.getDevicePointer(buffer, context);
    }
    /**
     * Returns the actual device pointer valid for the specified INDArray,
     * resolved from the array's backing data buffer.
     *
     * @param array array whose data pointer is requested
     */
    @Override
    public Pointer getPointer(INDArray array, CudaContext context) {
        // DataBuffer buffer = array.data().originalDataBuffer() == null ? array.data() : array.data().originalDataBuffer();
        return memoryHandler.getDevicePointer(array.data(), context);
    }
    /**
     * Returns the actual host pointer valid for the given array, first making
     * sure the host-side copy is up to date.
     *
     * @param array array whose host pointer is requested
     */
    @Override
    public Pointer getHostPointer(INDArray array) {
        synchronizeHostData(array);
        return memoryHandler.getHostPointer(array.data());
    }
    /**
     * Returns the actual host pointer valid for the given buffer.
     * NOTE: unlike {@link #getHostPointer(INDArray)}, this overload does NOT
     * synchronize host data first.
     *
     * @param buffer buffer whose host pointer is requested
     */
    @Override
    public Pointer getHostPointer(DataBuffer buffer) {
        return memoryHandler.getHostPointer(buffer);
    }
/**
* This method should be called to make sure that data on host side is actualized
*
* @param array
*/
@Override
public void synchronizeHostData(INDArray array) {
DataBuffer buffer = array.data().originalDataBuffer() == null ? array.data() : array.data().originalDataBuffer();
synchronizeHostData(buffer);
}
/**
* This method should be called to make sure that data on host side is actualized
*
* @param buffer
*/
@Override
public void synchronizeHostData(DataBuffer buffer) {
// we don't synchronize constant buffers, since we assume they are always valid on host side
if (buffer.isConstant()) {
//log.info("Skipping synchronization due to constant. " + AllocationUtils.buildAllocationShape(buffer));
// log.info("Constant buffer: " + Arrays.toString(buffer.asFloat()));
return;
//AllocationPoint point = getAllocationPoint(buffer.getTrackingPoint());
//log.info("Constant Buffer readiness: {}",point.isActualOnHostSide());
}
// we actually need synchronization only in device-dependant environment. no-op otherwise
if (memoryHandler.isDeviceDependant()) {
AllocationPoint point = getAllocationPoint(buffer.getTrackingPoint());
if (point == null)
throw new RuntimeException("AllocationPoint is NULL");
memoryHandler.synchronizeThreadDevice(Thread.currentThread().getId(), memoryHandler.getDeviceId(), point);
}
}
    /**
     * Returns the CUDA deviceId for the specified buffer.
     * NOTE(review): the {@code array} argument is currently ignored — the
     * handler's device id for the calling thread is returned instead.
     *
     * @param array array (unused in the current implementation)
     * @return device id for the calling thread
     */
    public Integer getDeviceId(INDArray array) {
        return memoryHandler.getDeviceId();
    }
/**
* This method allocates required chunk of memory
*
* @param requiredMemory
*/
@Override
public AllocationPoint allocateMemory(DataBuffer buffer,AllocationShape requiredMemory) {
// by default we allocate on initial location
AllocationPoint point = null;
// TODO: size limitation should be rised in final release to something more sensible
// if (buffer instanceof CudaIntDataBuffer || AllocationUtils.getRequiredMemory(requiredMemory) / requiredMemory.getLength() <= 2) {
// point = allocateMemory(buffer, requiredMemory, AllocationStatus.HOST);
// } else {
point = allocateMemory(buffer, requiredMemory, memoryHandler.getInitialLocation());
// }
return point;
}
    /**
     * This method allocates required chunk of memory in specific location
     * <p>
     * PLEASE NOTE: Do not use this method, unless you're 100% sure what you're doing
     *
     * @param buffer buffer the allocation backs
     * @param requiredMemory shape describing the required memory
     * @param location target location (host / device / zero-copy)
     * @return the tracking point for the new allocation
     */
    @Override
    public AllocationPoint allocateMemory(DataBuffer buffer,AllocationShape requiredMemory, AllocationStatus location) {
        AllocationPoint point = new AllocationPoint();
        // we use these longs as tracking codes for memory tracking
        Long allocId = objectsTracker.getAndIncrement();
        //point.attachBuffer(buffer);
        point.setObjectId(allocId);
        point.setShape(requiredMemory);
        // int buffers are treated as constants (e.g. shape info) and never synchronized back
        if (buffer instanceof CudaIntDataBuffer) {
            buffer.setConstant(true);
            point.setConstant(true);
        }
        // pick a random GC bucket so dereferenced buffers spread evenly across collector threads
        int numBuckets = configuration.getNumberOfGcThreads();
        int bucketId = RandomUtils.nextInt(0, numBuckets);
        // phantom-style reference: lets the collector reclaim native memory after JVM GC drops the buffer
        GarbageReference reference = new GarbageReference((BaseDataBuffer) buffer, queueMap.get(bucketId), point);
        point.attachReference(reference);
        point.setDeviceId(getDeviceId());
        // we stay naive on PointersPair, we just don't know on this level, which pointers are set. MemoryHandler will be used for that
        PointersPair pair = memoryHandler.alloc(location, point, requiredMemory);
        point.setPointers(pair);
        allocationsMap.put(allocId, point);
        return point;
    }
    /**
     * This method returns AllocationPoint POJO for specified tracking ID.
     *
     * @param objectId tracking id issued by allocateMemory()
     * @return the allocation point, or {@code null} if the id is unknown
     *         (e.g. the allocation was already purged)
     */
    protected AllocationPoint getAllocationPoint(Long objectId) {
        return allocationsMap.get(objectId);
    }
    /**
     * This method frees native system memory referenced by specified tracking id/AllocationPoint.
     * The allocation is removed from the tracking map before the handler releases it.
     *
     * @param bucketId GC bucket serving this allocation
     * @param objectId tracking id of the allocation
     * @param point allocation point to release
     * @param copyback whether device data should be copied back before release
     */
    protected void purgeZeroObject(Long bucketId, Long objectId, AllocationPoint point, boolean copyback) {
        allocationsMap.remove(objectId);
        memoryHandler.purgeZeroObject(bucketId, objectId, point, copyback);
    }
    /**
     * This method frees native device memory referenced by specified tracking id/AllocationPoint.
     * NOTE: unlike {@link #purgeZeroObject}, this does NOT remove the allocation
     * from the tracking map — callers typically purge the host side afterwards.
     *
     * @param threadId id of the owning thread
     * @param deviceId id of the device holding the memory
     * @param objectId tracking id of the allocation
     * @param point allocation point to release
     * @param copyback whether device data should be copied back before release
     */
    protected void purgeDeviceObject(Long threadId, Integer deviceId, Long objectId, AllocationPoint point, boolean copyback) {
        memoryHandler.purgeDeviceObject(threadId, deviceId, objectId, point, copyback);
        // since we can't allow java object without native memory, we explicitly specify that memory is handled using HOST memory only, after device memory is released
        //point.setAllocationStatus(AllocationStatus.HOST);
        //memoryHandler.purgeZeroObject(point.getBucketId(), point.getObjectId(), point, copyback);
    }
    /**
     * This method seeks for unused zero-copy memory allocations and releases
     * those whose backing Java buffer has already been garbage-collected.
     *
     * @param bucketId Id of the bucket, serving allocations
     * @param aggressiveness how eagerly to deallocate (affects thresholds)
     * @return size of memory that was deallocated
     */
    protected synchronized long seekUnusedZero(Long bucketId, Aggressiveness aggressiveness) {
        AtomicLong freeSpace = new AtomicLong(0);
        int totalElements = (int) memoryHandler.getAllocatedHostObjects(bucketId);
        // these 2 variables will contain jvm-wise memory access frequencies
        float shortAverage = zeroShort.getAverage();
        float longAverage = zeroLong.getAverage();
        // threshold is calculated based on agressiveness specified via configuration
        // NOTE(review): the thresholds are computed but not consulted in the current
        // code path — only dereferenced buffers are purged below.
        float shortThreshold = shortAverage / (Aggressiveness.values().length - aggressiveness.ordinal());
        float longThreshold = longAverage / (Aggressiveness.values().length - aggressiveness.ordinal());
        // simple counter for dereferenced objects
        AtomicInteger elementsDropped = new AtomicInteger(0);
        AtomicInteger elementsSurvived = new AtomicInteger(0);
        for (Long object: memoryHandler.getHostTrackingPoints(bucketId)) {
            AllocationPoint point = getAllocationPoint(object);
            // point can be null, if memory was promoted to device and was deleted there
            if (point == null)
                continue;
            if (point.getAllocationStatus() == AllocationStatus.HOST) {
                //point.getAccessState().isToeAvailable()
                //point.getAccessState().requestToe();
                /*
                    Check if memory points to non-existant buffer, using externals.
                    If externals don't have specified buffer - delete reference.
                */
                if (point.getBuffer() == null ) {
                    // JVM already collected the Java-side buffer: release the native chunk
                    purgeZeroObject(bucketId, object, point, false);
                    freeSpace.addAndGet(AllocationUtils.getRequiredMemory(point.getShape()));
                    elementsDropped.incrementAndGet();
                    continue;
                } else {
                    elementsSurvived.incrementAndGet();
                }
                //point.getAccessState().releaseToe();
            } else {
                //  log.warn("SKIPPING :(");
            }
        }
        //log.debug("Short average: ["+shortAverage+"], Long average: [" + longAverage + "]");
        //log.debug("Aggressiveness: ["+ aggressiveness+"]; Short threshold: ["+shortThreshold+"]; Long threshold: [" + longThreshold + "]");
        log.debug("Zero {} elements checked: [{}], deleted: {}, survived: {}", bucketId, totalElements, elementsDropped.get(), elementsSurvived.get());
        return freeSpace.get();
    }
    /**
     * This method seeks for unused device memory allocations, for specified thread and device,
     * and releases (device + host side) those whose backing Java buffer was garbage-collected.
     *
     * @param threadId Id of the thread, retrieved via Thread.currentThread().getId()
     * @param deviceId Id of the device
     * @param aggressiveness how eagerly to deallocate (affects thresholds)
     * @return size of memory that was deallocated
     */
    protected long seekUnusedDevice(Long threadId, Integer deviceId, Aggressiveness aggressiveness) {
        AtomicLong freeSpace = new AtomicLong(0);
        //  int initialSize = allocations.size();
        // these 2 variables will contain jvm-wise memory access frequencies
        float shortAverage = deviceShort.getAverage();
        float longAverage = deviceLong.getAverage();
        // threshold is calculated based on agressiveness specified via configuration
        // NOTE(review): thresholds are computed but the TTL-based relocation that
        // used them is commented out below — only dereferenced buffers are purged.
        float shortThreshold = shortAverage / (Aggressiveness.values().length - aggressiveness.ordinal());
        float longThreshold = longAverage / (Aggressiveness.values().length - aggressiveness.ordinal());
        AtomicInteger elementsDropped = new AtomicInteger(0);
        AtomicInteger elementsMoved = new AtomicInteger(0);
        AtomicInteger elementsSurvived = new AtomicInteger(0);
        for (Long object: memoryHandler.getDeviceTrackingPoints(deviceId)) {
            AllocationPoint point = getAllocationPoint(object);
            //     if (point.getAccessState().isToeAvailable()) {
            //     point.getAccessState().requestToe();
            /*
                Check if memory points to non-existant buffer, using externals.
                If externals don't have specified buffer - delete reference.
            */
            if (point.getBuffer() == null ) {
                if (point.getAllocationStatus() == AllocationStatus.DEVICE) {
                    // we deallocate device memory
                    purgeDeviceObject(threadId, deviceId, object, point, false);
                    freeSpace.addAndGet(AllocationUtils.getRequiredMemory(point.getShape()));
                    // and we deallocate host memory, since object is dereferenced
                    purgeZeroObject(point.getBucketId(), object, point, false);
                    elementsDropped.incrementAndGet();
                    continue;
                };
            } else {
                elementsSurvived.incrementAndGet();
            }
            /*
                Check, if memory can be removed from allocation.
                To check it, we just compare average rates for few tens of latest calls
            */
            /*
            long millisecondsTTL = configuration.getMinimumTTLMilliseconds();
            if (point.getRealDeviceAccessTime() < System.currentTimeMillis() - millisecondsTTL) {
                // we could remove device allocation ONLY if it's older then minimum TTL
                if (point.getTimerLong().getFrequencyOfEvents() < longThreshold && point.getTimerShort().getFrequencyOfEvents() < shortThreshold) {
                    //log.info("Removing object: " + object);
                    purgeDeviceObject(threadId, deviceId, object, point, true);
                    freeSpace.addAndGet(AllocationUtils.getRequiredMemory(point.getShape()));
                    elementsMoved.incrementAndGet();
                    //purgeDeviceObject(threadId, deviceId, object, point, true);
                }
            }
            */
            //   point.getAccessState().releaseToe();
            //}
        }
        log.debug("Thread/Device ["+ threadId+"/"+deviceId+"] elements purged: [" + elementsDropped.get()+"]; Relocated: ["+ elementsMoved.get()+"]; Survivors: ["+elementsSurvived.get()+"]");
        return freeSpace.get();
    }
private class UnifiedGarbageCollectorThread extends Thread implements Runnable {
private final ReferenceQueue<BaseDataBuffer> queue;
private int threadId;
public UnifiedGarbageCollectorThread(Integer threadId, @NonNull ReferenceQueue<BaseDataBuffer> queue) {
this.queue = queue;
this.setDaemon(true);
this.setName("UniGC thread " + threadId);
this.threadId = threadId;
}
@Override
public void run() {
while (true) {
GarbageReference reference = (GarbageReference) queue.poll();
if (reference != null) {
AllocationPoint point = reference.getPoint();
if (point.getAllocationStatus() == AllocationStatus.HOST) {
purgeZeroObject(point.getBucketId(), point.getObjectId(), point, false);
} else if (point.getAllocationStatus() == AllocationStatus.DEVICE) {
purgeDeviceObject(0L, point.getDeviceId(), point.getObjectId(), point, false);
// and we deallocate host memory, since object is dereferenced
purgeZeroObject(point.getBucketId(), point.getObjectId(), point, false);
}
} else {
try {
if (threadId == 0) {
System.gc();
Thread.sleep(2000);
} else Thread.sleep(500);
} catch (Exception e) {
}
}
}
}
}
    /**
     * This class implements garbage collector for memory allocated on host system.
     *
     * There's only 1 possible reason of deallocation event: object that reference some memory chunk was removed by JVM gc.
     *
     * NOTE(review): these threads are not started in the visible code (see the
     * commented-out block in initHostCollectors); the unified collectors are used instead.
     */
    private class ZeroGarbageCollectorThread extends Thread implements Runnable {

        private final Long bucketId;
        private final AtomicBoolean terminate;

        public ZeroGarbageCollectorThread(Long bucketId, AtomicBoolean terminate) {
            this.bucketId = bucketId;
            this.terminate = terminate;
            this.setName("zero gc thread " + bucketId);
            this.setDaemon(true);
        }

        @Override
        public void run() {
            log.debug("Starting zero GC for thread: " + bucketId);
            long lastCheck = System.currentTimeMillis();
            while (!terminate.get()) {

                /*
                    Check for zero-copy garbage
                 */
                //   log.info("ZeroGC started...");
                /*
                    We want allocations to take in account multiple things:
                    1. average access rates for last X objects
                    2. total number of currently allocated objects
                    3. total allocated memory size
                    4. desired aggressiveness
                */
                try {
                    Thread.sleep(Math.max(configuration.getMinimumTTLMilliseconds(), 10000));
                    if (bucketId == 0)
                        System.gc();
                } catch (Exception e) {
                    // we can have interruption here, to force gc
                    // (deliberate: an interrupt is used as an early-wakeup signal, not a stop request)
                }

                Aggressiveness aggressiveness = configuration.getHostDeallocAggressiveness();

                // if we have too much objects, or total allocated memory has met 75% of max allocation - use urgent mode
                if ((memoryHandler.getAllocatedHostObjects(bucketId) > 500000 || memoryHandler.getAllocatedHostMemory() > (configuration.getMaximumZeroAllocation() * 0.75)) && aggressiveness.ordinal() < Aggressiveness.URGENT.ordinal())
                    aggressiveness = Aggressiveness.URGENT;

                if (memoryHandler.getAllocatedHostMemory()> (configuration.getMaximumZeroAllocation() * 0.85))
                    aggressiveness = Aggressiveness.IMMEDIATE;

                if (memoryHandler.getAllocatedHostMemory() < (configuration.getMaximumZeroAllocation() * 0.25) && (memoryHandler.getAllocatedHostObjects(bucketId) < 5000) && lastCheck > System.currentTimeMillis() - 30000) {
                    ; // i don't want deallocation to be fired on lower thresholds. just no sense locking stuff
                    //log.debug("Skipping zero GC round: ["+zeroUseCounter.get()+"/" +zeroAllocations.get(threadId).size() + "]");
                } else {
                    seekUnusedZero(bucketId, aggressiveness);
                    lastCheck = System.currentTimeMillis();
                }
            }
        }
    }
    /**
     * This class implements garbage collection for memory regions allocated on devices.
     * For each device 1 thread is launched.
     *
     * There's 2 basic reasons for deallocation:
     *  1. Memory isn't used anymore. I.e. INDArray object referencing specific memory chunk was removed by JVM gc.
     *  2. Memory wasn't used for quite some time.
     *
     * NOTE(review): these threads are not started in the visible code (see the
     * commented-out block in initDeviceCollectors).
     */
    private class DeviceGarbageCollectorThread extends Thread implements Runnable {

        private final Integer deviceId;
        private final AtomicBoolean terminate;

        public DeviceGarbageCollectorThread(Integer deviceId, AtomicBoolean terminate) {
            this.deviceId = deviceId;
            this.terminate = terminate;
            this.setName("device gc thread ["+ deviceId +"]");
            this.setDaemon(true);
        }

        @Override
        public void run() {
            log.info("Starting device GC for device: " + deviceId);
            long lastCheck = System.currentTimeMillis();
            while (!terminate.get()) {
                /*
                    Check for device garbage
                 */

                try {
                    Thread.sleep(Math.max(configuration.getMinimumTTLMilliseconds(), 5000));
                } catch (Exception e) {
                    // we can have interruption here, to force gc
                    // (deliberate: an interrupt is used as an early-wakeup signal, not a stop request)
                }

                //log.info("DeviceGC started...");
                Aggressiveness aggressiveness = configuration.getGpuDeallocAggressiveness();

                // if we have too much objects, or total allocated memory has met 75% of max allocation - use urgent mode
                if ((memoryHandler.getAllocatedDeviceObjects(deviceId) > 100000 || memoryHandler.getAllocatedDeviceMemory(deviceId)> (configuration.getMaximumDeviceAllocation() * 0.75)) && aggressiveness.ordinal() < Aggressiveness.URGENT.ordinal())
                    aggressiveness = Aggressiveness.URGENT;

                if (memoryHandler.getAllocatedDeviceMemory(deviceId) > (configuration.getMaximumDeviceAllocation() * 0.85))
                    aggressiveness = Aggressiveness.IMMEDIATE;

                if (memoryHandler.getAllocatedDeviceMemory(deviceId)< (configuration.getMaximumDeviceAllocation() * 0.25) && (memoryHandler.getAllocatedDeviceObjects(deviceId) < 500) && lastCheck > System.currentTimeMillis() - 30000) {
                    // i don't want deallocation to be fired on lower thresholds. just no sense locking stuff
                } else {
                    seekUnusedDevice(0L, this.deviceId, aggressiveness);
                    lastCheck = System.currentTimeMillis();
                }
            }
        }
    }
    /**
     * This method returns the number of tracked zero-copy allocations.
     *
     * NOTE: currently a stub — always returns 0; the real statistics lookup is commented out.
     *
     * @return always 0L in the current implementation
     */
    public long getTotalAllocatedHostMemory() {
        return 0L; // memoryHandler.getAllocationStatistics().row(AllocationStatus.HOST).get(0);
    }
    /**
     * This method returns the number of all tracked memory chunks.
     *
     * @return current size of the allocations map (number of tracking points)
     */
    protected int getTotalTrackingPoints() {
        return allocationsMap.size();
    }
    /**
     * This method returns total amount of memory allocated on specified device.
     *
     * NOTE: currently a stub — always returns 0; the real statistics lookup is commented out.
     *
     * @param deviceId device to query (ignored by the current stub)
     * @return always 0L in the current implementation
     */
    public long getTotalAllocatedDeviceMemory(Integer deviceId) {
        return 0L;//; memoryHandler.getAllocationStatistics().row(AllocationStatus.DEVICE).get(deviceId);
    }
    /**
     * This method implements asynchronous memcpy, if that's available on current hardware.
     * Delegates directly to the memory handler; the constant-buffer special case is disabled.
     *
     * @param dstBuffer destination buffer
     * @param srcPointer source pointer
     * @param length number of bytes to copy
     * @param dstOffset offset into the destination buffer
     */
    @Override
    public void memcpyAsync(DataBuffer dstBuffer, Pointer srcPointer, long length, long dstOffset) {
        // if (dstBuffer.isConstant()) {
        // this.memoryHandler.memcpySpecial(dstBuffer, srcPointer, length, dstOffset);
        // } else
        this.memoryHandler.memcpyAsync(dstBuffer, srcPointer, length, dstOffset);
    }
    /** Delegates a "special" memcpy (see memory handler) for the given buffer region. */
    @Override
    public void memcpySpecial(DataBuffer dstBuffer, Pointer srcPointer, long length, long dstOffset) {
        this.memoryHandler.memcpySpecial(dstBuffer, srcPointer, length, dstOffset);
    }
    /** Delegates a device-side memcpy executed within the given CUDA context. */
    @Override
    public void memcpyDevice(DataBuffer dstBuffer, Pointer srcPointer, long length, long dstOffset, CudaContext context) {
        this.memoryHandler.memcpyDevice(dstBuffer, srcPointer, length, dstOffset, context);
    }
    /**
     * This method implements blocking memcpy; delegates to the memory handler.
     *
     * @param dstBuffer destination buffer
     * @param srcPointer source pointer
     * @param length number of bytes to copy
     * @param dstOffset offset into the destination buffer
     */
    @Override
    public void memcpyBlocking(DataBuffer dstBuffer, Pointer srcPointer, long length, long dstOffset) {
        this.memoryHandler.memcpyBlocking(dstBuffer, srcPointer, length, dstOffset);
    }
    /**
     * This method implements buffer-to-buffer memcpy; delegates to the memory handler.
     *
     * @param dstBuffer destination buffer
     * @param srcBuffer source buffer
     */
    @Override
    public void memcpy(DataBuffer dstBuffer, DataBuffer srcBuffer) {
        this.memoryHandler.memcpy(dstBuffer, srcBuffer);
    }
    /**
     * This method returns deviceId for current thread.
     * All values >= 0 are considered valid device IDs, all values < 0 are considered stubs.
     *
     * @return device id associated with the calling thread, as reported by the memory handler
     */
    @Override
    public Integer getDeviceId() {
        return memoryHandler.getDeviceId();
    }
    /** Marks the buffer's host-side copy as modified, by resolving its allocation point via its tracking id. */
    @Override
    public void tickHostWrite(DataBuffer buffer) {
        AllocationPoint point = getAllocationPoint(buffer.getTrackingPoint());
        point.tickHostWrite();
    }
@Override
public void tickHostWrite(INDArray array) {
DataBuffer buffer = array.data().originalDataBuffer() == null ? array.data() : array.data().originalDataBuffer();
tickHostWrite(buffer);
}
@Override
public void tickDeviceWrite(INDArray array) {
DataBuffer buffer = array.data().originalDataBuffer() == null ? array.data() : array.data().originalDataBuffer();
AllocationPoint point = getAllocationPoint(buffer.getTrackingPoint());
point.tickDeviceWrite();
}
@Override
public AllocationPoint getAllocationPoint(INDArray array) {
DataBuffer buffer = array.data().originalDataBuffer() == null ? array.data() : array.data().originalDataBuffer();
return getAllocationPoint(buffer);
}
    /** Returns the allocation point for the buffer, looked up by its tracking id. */
    @Override
    public AllocationPoint getAllocationPoint(DataBuffer buffer) {
        return getAllocationPoint(buffer.getTrackingPoint());
    }
    /** Registers an executed operation (result + operands) with the memory handler's flow tracking. */
    @Override
    public void registerAction(CudaContext context, INDArray result, INDArray... operands) {
        memoryHandler.registerAction(context, result, operands);
    }
    /** Returns the memory handler's flow controller. */
    @Override
    public FlowController getFlowController() {
        return memoryHandler.getFlowController();
    }
    /** Returns the memory handler's CUDA context pool. */
    @Override
    public ContextPool getContextPool() {
        return memoryHandler.getContextPool();
    }
    /** Returns a constant-space buffer for the given int array, via the constant handler. */
    @Override
    public DataBuffer getConstantBuffer(int[] array) {
        return constantHandler.getConstantBuffer(array);
    }
    /** Returns a constant-space buffer for the given float array, via the constant handler. */
    @Override
    public DataBuffer getConstantBuffer(float[] array) {
        return constantHandler.getConstantBuffer(array);
    }
    /** Returns a constant-space buffer for the given double array, via the constant handler. */
    @Override
    public DataBuffer getConstantBuffer(double[] array) {
        return constantHandler.getConstantBuffer(array);
    }
    /**
     * Moves the given buffer into constant memory space (in place) and returns the same instance.
     */
    @Override
    public DataBuffer moveToConstant(DataBuffer dataBuffer) {
        constantHandler.moveToConstantSpace(dataBuffer);
        return dataBuffer;
    }
}
| |
package com.braintreegateway.integrationtest;
import com.braintreegateway.*;
import com.braintreegateway.testhelpers.TestHelper;
import org.junit.Before;
import org.junit.Test;
import java.util.*;
import java.net.URL;
import static org.junit.Assert.*;
/**
 * Integration tests for the Braintree OAuth gateway: creating tokens from
 * authorization codes and refresh tokens, building connect URLs, and
 * computing request signatures.
 *
 * Requires the local Braintree development server (host "localhost") with the
 * integration client credentials used below.
 */
public class OAuthIT {
    private BraintreeGateway gateway;
    @Before
    public void createGateway() {
        // Development-only integration credentials understood by the local test server.
        this.gateway = new BraintreeGateway("client_id$development$integration_client_id", "client_secret$development$integration_client_secret");
    }
    /** A valid grant code exchanges for a full set of bearer credentials. */
    @Test
    public void createTokenFromCodeReturnsOAuthCredentials() {
        String code = TestHelper.createOAuthGrant(gateway, "integration_merchant_id", "read_write");
        OAuthCredentialsRequest oauthCredentials = new OAuthCredentialsRequest().
            code(code).
            scope("read_write");
        Result<OAuthCredentials> result = gateway.oauth().createTokenFromCode(oauthCredentials);
        assertTrue(result.isSuccess());
        assertTrue(result.getTarget().getAccessToken().startsWith("access_token"));
        assertTrue(result.getTarget().getExpiresAt().after(Calendar.getInstance()));
        assertTrue(result.getTarget().getRefreshToken().startsWith("refresh_token"));
        assertEquals("bearer", result.getTarget().getTokenType());
    }
    /** An invalid grant code fails with OAUTH_INVALID_GRANT on credentials.code. */
    @Test
    public void createTokenFromBadCodeReturnsOAuthCredentials() {
        OAuthCredentialsRequest oauthCredentials = new OAuthCredentialsRequest().
            code("bad_code").
            scope("read_write");
        Result<OAuthCredentials> result = gateway.oauth().createTokenFromCode(oauthCredentials);
        ValidationErrors errors = result.getErrors();
        assertFalse(result.isSuccess());
        assertEquals(ValidationErrorCode.OAUTH_INVALID_GRANT, errors.forObject("credentials").onField("code").get(0).getCode());
        assertEquals("Invalid grant: code not found", errors.forObject("credentials").onField("code").get(0).getMessage());
    }
    /** A refresh token obtained from a code exchange can itself be exchanged for new credentials. */
    @Test
    public void createTokenFromRefreshToken() {
        String code = TestHelper.createOAuthGrant(gateway, "integration_merchant_id", "read_write");
        OAuthCredentialsRequest oauthCredentials = new OAuthCredentialsRequest().
            code(code).
            scope("read_write");
        Result<OAuthCredentials> result = gateway.oauth().createTokenFromCode(oauthCredentials);
        OAuthCredentialsRequest refreshTokenRequest = new OAuthCredentialsRequest().
            refreshToken(result.getTarget().getRefreshToken()).
            scope("read_write");
        Result<OAuthCredentials> refreshTokenResult = gateway.oauth().createTokenFromRefreshToken(refreshTokenRequest);
        assertTrue(refreshTokenResult.isSuccess());
        assertNotNull(refreshTokenResult.getTarget().getAccessToken());
        assertNotNull(refreshTokenResult.getTarget().getRefreshToken());
        assertNotNull(refreshTokenResult.getTarget().getExpiresAt());
        assertEquals("bearer", refreshTokenResult.getTarget().getTokenType());
    }
    /** Every user/business field supplied in the request appears in the connect URL's query string. */
    @Test
    public void connectUrlReturnsCorrectUrl() {
        OAuthConnectUrlRequest request = new OAuthConnectUrlRequest().
            merchantId("integration_merchant_id").
            redirectUri("http://bar.example.com").
            scope("read_write").
            state("baz_state").
            user().
                country("USA").
                email("foo@example.com").
                firstName("Bob").
                lastName("Jones").
                phone("555-555-5555").
                dobYear("1970").
                dobMonth("01").
                dobDay("01").
                streetAddress("222 W Merchandise Mart").
                locality("Chicago").
                region("IL").
                postalCode("60606").
                done().
            business().
                name("14 Ladders").
                registeredAs("14.0 Ladders").
                industry("Ladders").
                description("We sell the best ladders").
                streetAddress("111 N Canal").
                locality("Chicago").
                region("IL").
                postalCode("60606").
                country("USA").
                annualVolumeAmount("1000000").
                averageTransactionAmount("100").
                maximumTransactionAmount("10000").
                shipPhysicalGoods(true).
                fulfillmentCompletedIn(7).
                currency("USD").
                website("http://example.com").
                done();
        String urlString = gateway.oauth().connectUrl(request);
        URL url;
        try {
            url = new URL(urlString);
            assertEquals("localhost", url.getHost());
            assertEquals("/oauth/connect", url.getPath());
            Map<String, String> query = TestHelper.splitQuery(url);
            assertEquals("integration_merchant_id", query.get("merchant_id"));
            assertEquals("client_id$development$integration_client_id", query.get("client_id"));
            assertEquals("http://bar.example.com", query.get("redirect_uri"));
            assertEquals("read_write", query.get("scope"));
            assertEquals("baz_state", query.get("state"));
            // BUGFIX: this assertion was duplicated on the following line; one copy removed.
            assertEquals("USA", query.get("user[country]"));
            assertEquals("foo@example.com", query.get("user[email]"));
            assertEquals("Bob", query.get("user[first_name]"));
            assertEquals("Jones", query.get("user[last_name]"));
            assertEquals("555-555-5555", query.get("user[phone]"));
            assertEquals("1970", query.get("user[dob_year]"));
            assertEquals("01", query.get("user[dob_month]"));
            assertEquals("01", query.get("user[dob_day]"));
            assertEquals("222 W Merchandise Mart", query.get("user[street_address]"));
            assertEquals("Chicago", query.get("user[locality]"));
            assertEquals("IL", query.get("user[region]"));
            assertEquals("60606", query.get("user[postal_code]"));
            assertEquals("14 Ladders", query.get("business[name]"));
            assertEquals("14.0 Ladders", query.get("business[registered_as]"));
            assertEquals("Ladders", query.get("business[industry]"));
            assertEquals("We sell the best ladders", query.get("business[description]"));
            assertEquals("111 N Canal", query.get("business[street_address]"));
            assertEquals("Chicago", query.get("business[locality]"));
            assertEquals("IL", query.get("business[region]"));
            assertEquals("60606", query.get("business[postal_code]"));
            assertEquals("USA", query.get("business[country]"));
            assertEquals("1000000", query.get("business[annual_volume_amount]"));
            assertEquals("100", query.get("business[average_transaction_amount]"));
            assertEquals("10000", query.get("business[maximum_transaction_amount]"));
            assertEquals("true", query.get("business[ship_physical_goods]"));
            assertEquals("7", query.get("business[fulfillment_completed_in]"));
            assertEquals("USD", query.get("business[currency]"));
            assertEquals("http://example.com", query.get("business[website]"));
            // SHA-256 hex digest: 64 lowercase hex characters.
            assertEquals(64, query.get("signature").length());
            assertTrue(query.get("signature").matches("^[a-f0-9]+$"));
            assertEquals("SHA256", query.get("algorithm"));
        } catch (java.io.UnsupportedEncodingException e) {
            fail("unsupported encoding");
        } catch (java.net.MalformedURLException e) {
            fail("malformed url");
        }
    }
    /** Optional parameters left unset must not appear in the generated URL. */
    @Test
    public void connectUrlReturnsCorrectUrlWithoutOptionalParams() {
        OAuthConnectUrlRequest request = new OAuthConnectUrlRequest();
        String urlString = gateway.oauth().connectUrl(request);
        URL url;
        try {
            url = new URL(urlString);
            Map<String, String> query = TestHelper.splitQuery(url);
            assertNull(query.get("redirect_uri"));
        } catch (java.io.UnsupportedEncodingException e) {
            fail("unsupported encoding");
        } catch (java.net.MalformedURLException e) {
            fail("malformed url");
        }
    }
    /** Multiple payment methods are serialized into the payment_methods[] parameter. */
    @Test
    public void connectUrlReturnsCorrectPaymentMethods() {
        OAuthConnectUrlRequest request = new OAuthConnectUrlRequest().
            paymentMethods(new String[] {"credit_card", "paypal"});
        String urlString = gateway.oauth().connectUrl(request);
        URL url;
        try {
            url = new URL(urlString);
            Map<String, String> query = TestHelper.splitQuery(url);
            assertNull(query.get("redirect_uri"));
            assertEquals("credit_card, paypal", query.get("payment_methods[]"));
        } catch (java.io.UnsupportedEncodingException e) {
            fail("unsupported encoding");
        } catch (java.net.MalformedURLException e) {
            fail("malformed url");
        }
    }
    /** Signature computation over a fixed URL is deterministic; pin the expected digest. */
    @Test
    public void computeSignatureReturnsCorrectSignature() {
        String url = "http://localhost:3000/oauth/connect?business%5Bname%5D=We+Like+Spaces&client_id=client_id%24development%24integration_client_id";
        String signature = gateway.oauth().computeSignature(url);
        assertEquals("a36bcf10dd982e2e47e0d6a2cb930aea47ade73f954b7d59c58dae6167894d41", signature);
    }
}
| |
package robertbosch.middleware.benchmarking;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.UUID;
//import java.io.IOException;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeoutException;
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.MqttCallback;
import org.eclipse.paho.client.mqttv3.MqttClient;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.json.simple.JSONObject;
import com.google.protobuf.util.JsonFormat;
//import com.protoTest.smartcity.Pollut;
//import com.protoTest.smartcity.Sensed;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import robertbosch.utils.RobertBoschUtils;
/**
 * Benchmarking data simulator for smart-city sensors (street lights, energy
 * meters). Generates random JSON payloads and can publish them to a RabbitMQ
 * queue or an MQTT broker; publish timestamps are logged to a local file so
 * end-to-end latency can be measured.
 *
 * Thread-safety: the static channel/writer fields are initialized once from
 * main() and used from a single thread.
 */
public class SmartcityDataSimulator implements MqttCallback {
    // Scratch JSON object reused by the payload builders.
    JSONObject data = null;
    // RabbitMQ channel used for publishing (initialized in main()).
    static Channel channel;
    // Writer recording "timestamp,msgid" lines for latency measurement.
    static BufferedWriter publish;
    static MqttMessage mqttpub = new MqttMessage();
    /**
     * Builds a random street-light telemetry packet.
     *
     * @return JSON string of the form {"msgid": "streetlight-<uuid>", "data": {...}}
     */
    public String jsonstreetLight() {
        boolean[] state = {true, false};
        int luxOutput = ThreadLocalRandom.current().nextInt(100, 1001);
        int powerconsumption = ThreadLocalRandom.current().nextInt(0, 101);
        int casetemperature = ThreadLocalRandom.current().nextInt(1, 101);
        int ambientlux = ThreadLocalRandom.current().nextInt(100, 1001);
        boolean slaveAlive = state[ThreadLocalRandom.current().nextInt(0,2)];
        int batterylevel = ThreadLocalRandom.current().nextInt(0, 5001);
        int dataSamplingInstant = ThreadLocalRandom.current().nextInt(10000000, 99999999);
        data = new JSONObject();
        data.put("luxOutput", luxOutput);
        data.put("powerConsumption", powerconsumption);
        data.put("caseTemperature", casetemperature);
        data.put("ambientLux", ambientlux);
        data.put("slaveAlive", slaveAlive);
        data.put("batteryLevel", batterylevel);
        data.put("dataSamplingInstant", dataSamplingInstant);
        // Message id doubles as the device identifier for latency correlation.
        String msgId = "streetlight-" + UUID.randomUUID().toString();
        JSONObject finalobj = new JSONObject();
        finalobj.put("msgid", msgId);
        finalobj.put("data", data);
        String packet = finalobj.toJSONString();
        return packet;
    }
    /**
     * Disabled stub for a protobuf-based pollution payload; the original
     * serialization code was commented out upstream, so this is a no-op.
     */
    private void protopollution() {
        // Intentionally empty: protobuf Pollut/Sensed messages are disabled in this build.
    }
    /**
     * Builds a random three-phase energy-meter telemetry packet.
     *
     * @return JSON string of the form {"msgid": "energy-<uuid>", "data": {...}}
     */
    public String jsonenergyMeter() {
        double YPhaseReactivePower = ThreadLocalRandom.current().nextDouble(5, 30);
        double YPhaseApparentPower = ThreadLocalRandom.current().nextDouble(10, 30);
        double YPhaseActivePower = ThreadLocalRandom.current().nextDouble(-10, 10);
        double BPhaseVoltage = ThreadLocalRandom.current().nextDouble(100, 400);
        double RPhasePowerFactor = ThreadLocalRandom.current().nextDouble(-1, 1);
        double BPhaseActivePower = ThreadLocalRandom.current().nextDouble(10, 30);
        double EnergyReactive = ThreadLocalRandom.current().nextDouble(10000, 30000);
        double BPhaseCurrent = ThreadLocalRandom.current().nextDouble(0, 1);
        double RPhaseApparentPower = ThreadLocalRandom.current().nextDouble(1000, 5000);
        double RPhaseReactivePower = ThreadLocalRandom.current().nextDouble(100, 1000);
        double YPhasePowerFactor = ThreadLocalRandom.current().nextDouble(-1, 1);
        double RPhaseVoltage = ThreadLocalRandom.current().nextDouble(100, 400);
        double BPhaseReactivePower = ThreadLocalRandom.current().nextDouble(5, 30);
        double BPhasePowerFactor = ThreadLocalRandom.current().nextDouble(-1, 1);
        double RPhaseActivePower = ThreadLocalRandom.current().nextDouble(100, 1500);
        double YPhaseCurrent = ThreadLocalRandom.current().nextDouble(0, 1);
        double YPhaseVoltage = ThreadLocalRandom.current().nextDouble(100, 400);
        double RPhaseCurrent = ThreadLocalRandom.current().nextDouble(1, 10);
        double BPhaseApparentPower = ThreadLocalRandom.current().nextDouble(10, 30);
        int dataSamplingInstant = ThreadLocalRandom.current().nextInt(10000000, 99999999);
        double EnergyActive = ThreadLocalRandom.current().nextDouble(10000, 30000);
        data = new JSONObject();
        data.put("YPhaseReactivePower", YPhaseReactivePower);
        data.put("YPhaseApparentPower", YPhaseApparentPower);
        data.put("YPhaseActivePower", YPhaseActivePower);
        data.put("BPhaseVoltage", BPhaseVoltage);
        data.put("RPhasePowerFactor", RPhasePowerFactor);
        data.put("BPhaseActivePower", BPhaseActivePower);
        data.put("EnergyReactive", EnergyReactive);
        data.put("BPhaseCurrent", BPhaseCurrent);
        data.put("RPhaseApparentPower", RPhaseApparentPower);
        data.put("RPhaseReactivePower", RPhaseReactivePower);
        data.put("YPhasePowerFactor", YPhasePowerFactor);
        data.put("RPhaseVoltage", RPhaseVoltage);
        data.put("BPhaseReactivePower", BPhaseReactivePower);
        data.put("BPhasePowerFactor", BPhasePowerFactor);
        data.put("RPhaseActivePower", RPhaseActivePower);
        data.put("YPhaseCurrent", YPhaseCurrent);
        data.put("YPhaseVoltage", YPhaseVoltage);
        data.put("RPhaseCurrent", RPhaseCurrent);
        data.put("BPhaseApparentPower", BPhaseApparentPower);
        data.put("dataSamplingInstant", dataSamplingInstant);
        data.put("EnergyActive", EnergyActive);
        String msgid = "energy-" + UUID.randomUUID().toString();
        JSONObject finalobj = new JSONObject();
        finalobj.put("msgid", msgid);
        finalobj.put("data", data);
        String packet = finalobj.toJSONString();
        return packet;
    }
    /**
     * Opens a RabbitMQ channel and declares a non-durable queue named after the device.
     *
     * SECURITY NOTE(review): broker host and credentials are hardcoded below;
     * they should be externalized to configuration and rotated.
     *
     * @param deviceId queue name to declare
     * @return the created channel, or null if connecting failed
     */
    private Channel createbrokerChannel(String deviceId) {
        ConnectionFactory connfac = new ConnectionFactory();
        //connfac.setHost("10.156.14.6");
        connfac.setHost("18.219.118.74");
        //connfac.setPort(5672);
        connfac.setPort(12082);
        connfac.setUsername("rbccps");
        connfac.setPassword("rbccps@123");
        Channel channel = null;
        try {
            Connection conn = connfac.newConnection();
            channel = conn.createChannel();
            channel.queueDeclare(deviceId, false, false, false, null);
        } catch(IOException e) {
            e.printStackTrace();
        } catch(TimeoutException t) {
            t.printStackTrace();
        }
        return channel;
    }
    /**
     * Publishes a payload to the LoRa network server over MQTT (QoS 2).
     *
     * NOTE(review): a new client is connected per call and never disconnected —
     * consider reusing/closing the client if this is called repeatedly.
     *
     * @param data raw payload bytes
     */
    private void publishToNetworkServer(byte[] data) {
        MqttConnectOptions connection = new MqttConnectOptions();
        connection.setAutomaticReconnect(true);
        connection.setCleanSession(false);
        connection.setConnectionTimeout(30);
        connection.setUserName("loraserver");
        connection.setPassword("loraserver".toCharArray());
        try {
            MqttClient client = new MqttClient("tcp://gateways.rbccps.org:1883", MqttClient.generateClientId());
            client.setCallback(this);
            client.connect(connection);
            mqttpub = new MqttMessage();
            mqttpub.setQos(2);
            mqttpub.setPayload(data);
            client.publish("sahil1", mqttpub);
        } catch(MqttException m) {
            m.printStackTrace();
        }
    }
    /**
     * Entry point: generates packets of the requested type in a loop.
     *
     * @param args [0] iterations, [1] sleep between packets (ms), [2] payload type: "light" or "energy"
     */
    public static void main(String[] args) throws Exception {
        //simulator emitting a single device
        SmartcityDataSimulator obj = new SmartcityDataSimulator();
        channel = obj.createbrokerChannel("sahil");
        String publishfile = "/Users/sahiltyagi/Desktop/publish.txt";
        //String publishfile = "/home/ubuntu/publish.txt";
        publish = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(publishfile)));
        int iterations = Integer.parseInt(args[0]);
        long sleeptime = Long.parseLong(args[1]);
        String param = args[2];
        int index = 0;
        while(true) {
            if(param.equals("light")) {
                obj.jsonstreetLight();
            } else if(param.equals("energy")) {
                obj.jsonenergyMeter();
            }
            index++;
            Thread.sleep(sleeptime);
            // BUGFIX: was `iterations == index + 1`, which ran one round short and
            // never terminated for iterations == 1. Run exactly `iterations` rounds.
            if(index >= iterations) {
                publish.close();
                break;
            }
        }
        System.out.println("complete.");
    }
    @Override
    public void connectionLost(Throwable arg0) {
        // No reconnection handling needed for the benchmark; Paho auto-reconnect is enabled.
    }
    @Override
    public void deliveryComplete(IMqttDeliveryToken arg0) {
        // Delivery timestamps are not tracked on the publisher side.
    }
    @Override
    public void messageArrived(String arg0, MqttMessage arg1) throws Exception {
        // This simulator only publishes; inbound messages are ignored.
    }
}
| |
package de.tu_darmstadt.elc.olw.api.converter;
import java.io.File;
import java.io.IOException;
import java.util.Vector;
import java.util.zip.ZipException;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import de.tu_darmstadt.elc.olw.api.constant.EndProduct;
import de.tu_darmstadt.elc.olw.api.constant.MaterialProfile;
import de.tu_darmstadt.elc.olw.api.constant.MaterialProfile.Profile;
import de.tu_darmstadt.elc.olw.api.constant.MaterialType;
import de.tu_darmstadt.elc.olw.api.constant.ProductName;
import de.tu_darmstadt.elc.olw.api.document.PDFConverter;
import de.tu_darmstadt.elc.olw.api.media.Converter;
import de.tu_darmstadt.elc.olw.api.media.XMLFlash;
import de.tu_darmstadt.elc.olw.api.media.audio.MP3Converter;
import de.tu_darmstadt.elc.olw.api.media.audio.MP3XMLFlash;
import de.tu_darmstadt.elc.olw.api.media.video.camrec.CamRecConverter;
import de.tu_darmstadt.elc.olw.api.media.video.camrec.CamRecExtractor;
import de.tu_darmstadt.elc.olw.api.media.video.camrec.CamRecXMLFlash;
import de.tu_darmstadt.elc.olw.api.media.video.lecturnity.LPDContainer;
import de.tu_darmstadt.elc.olw.api.media.video.lecturnity.LPDConverter;
import de.tu_darmstadt.elc.olw.api.media.video.lecturnity.LecturnityXMLFlash;
import de.tu_darmstadt.elc.olw.api.media.video.mp4.MP4Converter;
import de.tu_darmstadt.elc.olw.api.media.video.mp4.MP4XMLFlash;
import de.tu_darmstadt.elc.olw.api.misc.MaterialFileExplorer;
import de.tu_darmstadt.elc.olw.api.misc.UUIDGenerator;
import de.tu_darmstadt.elc.olw.api.misc.execution.ExecutionException;
import de.tu_darmstadt.elc.olw.api.misc.io.FileExtractor;
public class OLWConverter {
private static Logger logger = Logger.getLogger(OLWConverter.class);
private static final String STREAM_URL = "";
private static final String XML_CONFIG = "100.xml";
private String ffmpegPath = "";
private File materialFolder = null;
private File tmpFolder = null;
private MaterialType materialType;
/**
* @param ffmpegPath
* @param materialFolder
* @param tmpFolder
*/
public OLWConverter(String ffmpegPath, File materialFolder, File tmpFolder) {
super();
this.ffmpegPath = ffmpegPath;
this.materialFolder = materialFolder;
this.tmpFolder = tmpFolder;
this.materialType = MaterialType.RAW;
}
public String getMaterialType() {
return materialType.toString();
}
/**
*
* @param materialFile
* @param outputFolder
* @throws ZipException
* @throws IOException
*/
private void exploreMaterialFile(File materialFile, File outputFolder) throws ZipException, IOException {
MaterialFileExplorer explorer = new MaterialFileExplorer(materialFile,
tmpFolder, ffmpegPath);
materialType = explorer.getMaterialType();
materialFolder = explorer.getMaterialFolder();
if (materialFolder == null)
materialFolder = materialFile;
if (!outputFolder.exists())
outputFolder.mkdirs();
if (explorer.getMetadataFile() != null && explorer.getMetadataFile().length() > 0)
FileUtils.copyFileToDirectory(explorer.getMetadataFile(),
outputFolder);
}
/**
* converts material
*
* @param materialFile
* @param outputFolder
* @throws ZipException
* @throws ExecutionException
* @throws IOException
*/
public void convertMaterial(File materialFile, File outputFolder,
String uuid, File logFile) throws ZipException, IOException, ExecutionException {
exploreMaterialFile(materialFile, outputFolder);
logger.info("Material Type: " + materialType);
switch (materialType) {
case MP3:
if (materialFolder.isDirectory())
materialFile = FileExtractor.findFileWithSuffix(materialFolder, "mp3");
convertMP3(materialFile, outputFolder, uuid, logFile);
break;
case MP4_LQ:
case MP4_HQ:
case MP4_HD:
if (materialFolder.isDirectory())
materialFile = FileExtractor.findFileWithSuffix(materialFolder, "mp4");
convertMP4(materialFile, outputFolder, uuid, logFile);
break;
case LPD_VIDEO_LQ:
case LPD_VIDEO_HQ:
case LPD_AUDIO:
if (materialFolder.isDirectory())
materialFile = FileExtractor.findFileWithSuffix(materialFolder, "lpd");
convertLPD(materialFile, outputFolder, uuid, logFile);
break;
case CAM_VIDEO_LQ:
case CAM_VIDEO_HQ:
case CAM_AUDIO:
convertCAM(materialFolder,outputFolder, uuid, logFile);
break;
case PDF:
if (materialFolder.isDirectory())
materialFile = FileExtractor.findFileWithSuffix(materialFolder, "pdf");
convertPDF(materialFile,outputFolder, logFile);
break;
default:
FileUtils.copyFileToDirectory(materialFile, outputFolder);
break;
}
}
/**
*
* @param materialFile
* @param uuid
* @throws ExecutionException
* @throws ZipException
* @throws IOException
*/
public void convertMaterial(File materialFile, String uuid, File logFile)
throws ExecutionException, ZipException, IOException {
File uuidFolder = new File(materialFolder,
UUIDGenerator.getPathFromUUID(uuid));
if (uuidFolder.exists())
uuidFolder.mkdirs();
convertMaterial(materialFile, uuidFolder, uuid, logFile);
}
private void convertMedia(File mediaFile, File destFolder,
Vector<Profile> mediaProfile, Converter converter, File logFile)
throws ExecutionException {
for (Profile profile : mediaProfile) {
File outputFolder = new File(destFolder,
profile.getOutputFolderName());
if (outputFolder.exists())
outputFolder.mkdirs();
File outputMedia = new File(outputFolder, profile.getProductName());
converter.convertMedia(outputMedia, profile.getFfmpegSettings(),
EndProduct.toEndProduct(profile.toString()), logFile);
}
}
private void convertMP3(File materialFile, File outputFolder, String uuid, File logFile) throws ExecutionException, IOException {
Vector<Profile> profile = MaterialProfile
.getMaterialProfile(materialType);
MP3Converter mp3Converter = new MP3Converter(materialFile,
ffmpegPath);
MP3XMLFlash mp3XML = new MP3XMLFlash();
convertMedia(materialFolder, outputFolder, profile, mp3Converter,logFile);
createXMLFlash(mp3XML, outputFolder, uuid, materialType);
}
private void convertMP4(File materialFile, File outputFolder, String uuid, File logFile)
throws ExecutionException, IOException {
Vector<Profile> profile = MaterialProfile
.getMaterialProfile(materialType);
MP4Converter mp4Converter = new MP4Converter(materialFile,
ffmpegPath);
MP4XMLFlash mp4XML = new MP4XMLFlash();
convertMedia(materialFolder, outputFolder, profile, mp4Converter, logFile);
createXMLFlash(mp4XML, outputFolder, uuid, materialType);
}
private void convertLPD(File materialFile, File outputFolder, String uuid, File logFile) throws ExecutionException, IOException {
Vector<Profile> profile = MaterialProfile
.getMaterialProfile(materialType);
LPDContainer container = new LPDContainer(materialFile, tmpFolder,
ffmpegPath);
container.prepareLPD();
LPDConverter lpdConverter = new LPDConverter(container, ffmpegPath);
convertMedia(materialFolder, outputFolder, profile, lpdConverter, logFile);
lpdConverter.createPresentationZipFile(new File(outputFolder,
"red5"));
LecturnityXMLFlash lpdXML = new LecturnityXMLFlash(
container.getLmdFile(), container.getEvqFile());
logger.info("Creating flash xml");
createXMLFlash(lpdXML, outputFolder, uuid, materialType);
}
private void convertCAM(File materialFile, File outputFolder, String uuid, File logFile) throws IOException, ExecutionException {
Vector<Profile> profile = MaterialProfile
.getMaterialProfile(materialType);
CamRecExtractor extractor = new CamRecExtractor(materialFolder);
extractor.prepareCamtasia(ffmpegPath);
CamRecConverter camConverter = new CamRecConverter(
extractor.getUnzipFolder(), ffmpegPath);
convertMedia(materialFile, outputFolder, profile, camConverter, logFile);
CamRecXMLFlash camXML = new CamRecXMLFlash(
extractor.getXmlConfigFile(), extractor.hasManyParts());
logger.info("Creating flash xml ...");
createXMLFlash(camXML, outputFolder, uuid, materialType);
}
/**
 * Renders a PDF source into page images and copies the original document
 * into the output folder under its canonical product name.
 *
 * @param materialFile the uploaded PDF file
 * @param outputFolder destination folder for the rendered pages and the copy
 * @param logFile conversion log target
 * @throws IOException on file-system errors
 */
private void convertPDF(File materialFile, File outputFolder, File logFile) throws IOException {
    // Page images are rendered into the fixed "11" subfolder.
    File imageFolder = new File(outputFolder, "11");
    PDFConverter converter = new PDFConverter(materialFile, ffmpegPath);
    converter.convertPDF(imageFolder, logFile);
    // Keep the original document available for download alongside the images.
    File targetDocument = new File(outputFolder, ProductName.getPDF_DocumentName());
    FileUtils.copyFile(materialFile, targetDocument);
}
/**
 * Writes the XML configuration file consumed by the flash player.
 * The lecture (and, for Camtasia, slides-video) streaming URLs are chosen
 * per material type; unknown types leave both URLs empty.
 *
 * @param xml the type-specific XML writer
 * @param outputFolder output folder; the file goes into its "red5" subfolder
 * @param uuid material identifier used to derive the streaming path
 * @param type material type deciding which rendition URLs are referenced
 * @throws IOException if the XML file cannot be written
 */
private void createXMLFlash(XMLFlash xml, File outputFolder, String uuid,
        MaterialType type) throws IOException {
    logger.info("Creating xml config file for flash");
    String lectureURL = "";
    String slidesVideoURL = "";
    // Identical URL schemes are grouped via fall-through.
    switch (type) {
        case MP3:
            lectureURL = UUIDGenerator.getPathFromUUID(uuid) + "/red5/7.flv";
            break;
        case MP4_LQ:
        case LPD_VIDEO_LQ:
        case LPD_AUDIO:
            lectureURL = UUIDGenerator.getPathFromUUID(uuid) + "/red5/5.flv";
            break;
        case MP4_HQ:
        case MP4_HD:
        case LPD_VIDEO_HQ:
            lectureURL = UUIDGenerator.getPathFromUUID(uuid) + "/red5/6.flv";
            break;
        case CAM_VIDEO_LQ:
        case CAM_AUDIO:
            // Camtasia additionally streams a separate slides video.
            lectureURL = UUIDGenerator.getPathFromUUID(uuid) + "/red5/5.flv";
            slidesVideoURL = UUIDGenerator.getPathFromUUID(uuid)
                    + "/red5/25.flv";
            break;
        case CAM_VIDEO_HQ:
            lectureURL = UUIDGenerator.getPathFromUUID(uuid) + "/red5/6.flv";
            slidesVideoURL = UUIDGenerator.getPathFromUUID(uuid)
                    + "/red5/26.flv";
            break;
        default:
            break;
    }
    xml.setLectureURL(lectureURL);
    xml.setSlidesVideoURL(slidesVideoURL);
    xml.setStreamURL(STREAM_URL);
    File red5Folder = new File(outputFolder, "red5");
    File xmlFile = new File(red5Folder, XML_CONFIG);
    xml.createXMLFlash(xmlFile);
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.identity;
import java.util.List;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.identity.Group;
import org.camunda.bpm.engine.identity.GroupQuery;
import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase;
/**
* @author Joram Barrez
*/
public class GroupQueryTest extends PluggableProcessEngineTestCase {

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Fixture: three groups of type "user" plus one of type "security".
    // The counts and memberships below are what every test asserts against.
    createGroup("muppets", "Muppet show characters", "user");
    createGroup("frogs", "Famous frogs", "user");
    createGroup("mammals", "Famous mammals from eighties", "user");
    createGroup("admin", "Administrators", "security");
    identityService.saveUser(identityService.newUser("kermit"));
    identityService.saveUser(identityService.newUser("fozzie"));
    identityService.saveUser(identityService.newUser("mispiggy"));
    identityService.saveTenant(identityService.newTenant("tenant"));
    // kermit: muppets, frogs, admin; fozzie: muppets, mammals;
    // mispiggy: muppets, mammals.
    identityService.createMembership("kermit", "muppets");
    identityService.createMembership("fozzie", "muppets");
    identityService.createMembership("mispiggy", "muppets");
    identityService.createMembership("kermit", "frogs");
    identityService.createMembership("fozzie", "mammals");
    identityService.createMembership("mispiggy", "mammals");
    identityService.createMembership("kermit", "admin");
    // Only "frogs" belongs to the tenant.
    identityService.createTenantGroupMembership("tenant", "frogs");
  }

  // Helper: creates and persists a group with the given id, name, and type.
  private Group createGroup(String id, String name, String type) {
    Group group = identityService.newGroup(id);
    group.setName(name);
    group.setType(type);
    identityService.saveGroup(group);
    return group;
  }

  @Override
  protected void tearDown() throws Exception {
    // Remove all fixture entities so subsequent tests start from a clean DB.
    identityService.deleteUser("kermit");
    identityService.deleteUser("fozzie");
    identityService.deleteUser("mispiggy");
    identityService.deleteGroup("muppets");
    identityService.deleteGroup("mammals");
    identityService.deleteGroup("frogs");
    identityService.deleteGroup("admin");
    identityService.deleteTenant("tenant");
    super.tearDown();
  }

  public void testQueryById() {
    GroupQuery query = identityService.createGroupQuery().groupId("muppets");
    verifyQueryResults(query, 1);
  }

  public void testQueryByInvalidId() {
    GroupQuery query = identityService.createGroupQuery().groupId("invalid");
    verifyQueryResults(query, 0);

    // A null id is rejected with an exception rather than returning no rows.
    try {
      identityService.createGroupQuery().groupId(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByIdIn() {
    // empty list
    assertTrue(identityService.createGroupQuery().groupIdIn("a", "b").list().isEmpty());

    // collect all ids
    List<Group> list = identityService.createGroupQuery().list();
    String[] ids = new String[list.size()];
    for (int i = 0; i < ids.length; i++) {
      ids[i] = list.get(i).getId();
    }

    // Querying by all known ids must return exactly the same set of groups.
    List<Group> idInList = identityService.createGroupQuery().groupIdIn(ids).list();
    assertEquals(list.size(), idInList.size());
    for (Group group : idInList) {
      boolean found = false;
      for (Group otherGroup : list) {
        if(otherGroup.getId().equals(group.getId())) {
          found = true; break;
        }
      }
      if(!found) {
        fail("Expected to find group " + group);
      }
    }
  }

  public void testQueryByName() {
    GroupQuery query = identityService.createGroupQuery().groupName("Muppet show characters");
    verifyQueryResults(query, 1);

    query = identityService.createGroupQuery().groupName("Famous frogs");
    verifyQueryResults(query, 1);
  }

  public void testQueryByInvalidName() {
    GroupQuery query = identityService.createGroupQuery().groupName("invalid");
    verifyQueryResults(query, 0);

    try {
      identityService.createGroupQuery().groupName(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByNameLike() {
    // "Famous frogs" and "Famous mammals from eighties" match %Famous%.
    GroupQuery query = identityService.createGroupQuery().groupNameLike("%Famous%");
    verifyQueryResults(query, 2);

    query = identityService.createGroupQuery().groupNameLike("Famous%");
    verifyQueryResults(query, 2);

    query = identityService.createGroupQuery().groupNameLike("%show%");
    verifyQueryResults(query, 1);
  }

  public void testQueryByInvalidNameLike() {
    GroupQuery query = identityService.createGroupQuery().groupNameLike("%invalid%");
    verifyQueryResults(query, 0);

    try {
      identityService.createGroupQuery().groupNameLike(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByType() {
    GroupQuery query = identityService.createGroupQuery().groupType("user");
    verifyQueryResults(query, 3);

    // "admin" is a group id, not a type — the admin group's type is "security".
    query = identityService.createGroupQuery().groupType("admin");
    verifyQueryResults(query, 0);
  }

  public void testQueryByInvalidType() {
    GroupQuery query = identityService.createGroupQuery().groupType("invalid");
    verifyQueryResults(query, 0);

    try {
      identityService.createGroupQuery().groupType(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByMember() {
    GroupQuery query = identityService.createGroupQuery().groupMember("fozzie");
    verifyQueryResults(query, 2);

    query = identityService.createGroupQuery().groupMember("kermit");
    verifyQueryResults(query, 3);

    // Ordered by id: admin < frogs < muppets.
    query = query.orderByGroupId().asc();
    List<Group> groups = query.list();
    assertEquals(3, groups.size());
    assertEquals("admin", groups.get(0).getId());
    assertEquals("frogs", groups.get(1).getId());
    assertEquals("muppets", groups.get(2).getId());

    // Further restricting by type drops the "security" admin group.
    query = query.groupType("user");
    groups = query.list();
    assertEquals(2, groups.size());
    assertEquals("frogs", groups.get(0).getId());
    assertEquals("muppets", groups.get(1).getId());
  }

  public void testQueryByInvalidMember() {
    GroupQuery query = identityService.createGroupQuery().groupMember("invalid");
    verifyQueryResults(query, 0);

    try {
      identityService.createGroupQuery().groupMember(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByMemberOfTenant() {
    GroupQuery query = identityService.createGroupQuery().memberOfTenant("nonExisting");
    verifyQueryResults(query, 0);

    query = identityService.createGroupQuery().memberOfTenant("tenant");
    verifyQueryResults(query, 1);
    Group group = query.singleResult();
    assertEquals("frogs", group.getId());
  }

  public void testQuerySorting() {
    // asc
    assertEquals(4, identityService.createGroupQuery().orderByGroupId().asc().count());
    assertEquals(4, identityService.createGroupQuery().orderByGroupName().asc().count());
    assertEquals(4, identityService.createGroupQuery().orderByGroupType().asc().count());

    // desc
    assertEquals(4, identityService.createGroupQuery().orderByGroupId().desc().count());
    assertEquals(4, identityService.createGroupQuery().orderByGroupName().desc().count());
    assertEquals(4, identityService.createGroupQuery().orderByGroupType().desc().count());

    // Multiple sortings
    // Primary: type ascending ("security" < "user"); secondary: name descending.
    GroupQuery query = identityService.createGroupQuery().orderByGroupType().asc().orderByGroupName().desc();
    List<Group> groups = query.list();
    assertEquals(4, query.count());

    assertEquals("security", groups.get(0).getType());
    assertEquals("user", groups.get(1).getType());
    assertEquals("user", groups.get(2).getType());
    assertEquals("user", groups.get(3).getType());

    assertEquals("admin", groups.get(0).getId());
    assertEquals("muppets", groups.get(1).getId());
    assertEquals("mammals", groups.get(2).getId());
    assertEquals("frogs", groups.get(3).getId());
  }

  public void testQueryInvalidSortingUsage() {
    // Calling list() with a pending order-by but no asc()/desc() must fail.
    try {
      identityService.createGroupQuery().orderByGroupId().list();
      fail();
    } catch (ProcessEngineException e) {}

    try {
      identityService.createGroupQuery().orderByGroupId().orderByGroupName().list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  // Asserts list size and count agree with the expectation, and that
  // singleResult() behaves consistently (null for 0, value for 1, throws for >1).
  private void verifyQueryResults(GroupQuery query, int countExpected) {
    assertEquals(countExpected, query.list().size());
    assertEquals(countExpected, query.count());

    if (countExpected == 1) {
      assertNotNull(query.singleResult());
    } else if (countExpected > 1){
      verifySingleResultFails(query);
    } else if (countExpected == 0) {
      assertNull(query.singleResult());
    }
  }

  private void verifySingleResultFails(GroupQuery query) {
    try {
      query.singleResult();
      fail();
    } catch (ProcessEngineException e) {}
  }

}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.ui;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.datavec.image.loader.LFWLoader;
import org.deeplearning4j.datasets.iterator.impl.LFWDataSetIterator;
import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator;
import org.deeplearning4j.eval.Evaluation;
import org.deeplearning4j.models.embeddings.reader.impl.BasicModelUtils;
import org.deeplearning4j.models.word2vec.VocabWord;
import org.deeplearning4j.models.word2vec.Word2Vec;
import org.deeplearning4j.nn.conf.GradientNormalization;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.ConvolutionLayer;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.conf.layers.SubsamplingLayer;
import org.deeplearning4j.nn.conf.layers.misc.FrozenLayer;
import org.deeplearning4j.nn.conf.weightnoise.DropConnect;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.deeplearning4j.plot.BarnesHutTsne;
import org.deeplearning4j.text.sentenceiterator.BasicLineIterator;
import org.deeplearning4j.text.sentenceiterator.SentenceIterator;
import org.deeplearning4j.text.tokenization.tokenizer.preprocessor.CommonPreprocessor;
import org.deeplearning4j.text.tokenization.tokenizerfactory.DefaultTokenizerFactory;
import org.deeplearning4j.text.tokenization.tokenizerfactory.TokenizerFactory;
import org.deeplearning4j.ui.api.UIServer;
import org.deeplearning4j.ui.weights.ConvolutionalIterationListener;
import org.junit.Ignore;
import org.junit.Test;
import org.nd4j.common.io.ClassPathResource;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.buffer.util.DataTypeUtil;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.SplitTestAndTrain;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.nd4j.linalg.learning.config.AdaGrad;
import org.nd4j.linalg.learning.config.Nesterovs;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.nd4j.common.resources.Resources;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import static org.junit.Assert.fail;
/**
* Test environment for building/debugging UI.
*
* Please, do NOT remove @Ignore annotation
*
* @author raver119@gmail.com
*/
@Ignore
@Slf4j
public class ManualTests {

    // NOTE: @Slf4j would generate a 'log' field; Lombok skips generation when an
    // explicit field named 'log' already exists, so this declaration wins.
    private static Logger log = LoggerFactory.getLogger(ManualTests.class);

    /** Keeps the JVM alive so the UI can be inspected manually in a browser. */
    @Test
    public void testLaunch() throws Exception {

        // UiServer server = UiServer.getInstance();
        //
        // System.out.println("http://localhost:" + server.getPort()+ "/");

        Thread.sleep(10000000000L);

        new ScoreIterationListener(100);
        // FIX: message typo was "not implemneted"
        fail("not implemented");
    }

    /** Runs Barnes-Hut t-SNE on a 100-row MNIST slice and saves the embedding. */
    @Test(timeout = 300000)
    public void testTsne() throws Exception {
        DataTypeUtil.setDTypeForContext(DataType.DOUBLE);
        Nd4j.getRandom().setSeed(123);
        BarnesHutTsne b = new BarnesHutTsne.Builder().stopLyingIteration(10).setMaxIter(10).theta(0.5).learningRate(500)
                        .useAdaGrad(true).build();

        File f = Resources.asFile("/deeplearning4j-core/mnist2500_X.txt");
        INDArray data = Nd4j.readNumpy(f.getAbsolutePath(), " ").get(NDArrayIndex.interval(0, 100),
                        NDArrayIndex.interval(0, 784));

        ClassPathResource labels = new ClassPathResource("mnist2500_labels.txt");
        List<String> labelsList = IOUtils.readLines(labels.getInputStream()).subList(0, 100);
        b.fit(data);
        File save = new File(System.getProperty("java.io.tmpdir"), "labels-" + UUID.randomUUID().toString());
        System.out.println("Saved to " + save.getAbsolutePath());
        save.deleteOnExit();
        b.saveAsFile(labelsList, save.getAbsolutePath());

        INDArray output = b.getData();

        System.out.println("Coordinates");

        // Start the UI server so the result can be inspected, then block.
        UIServer server = UIServer.getInstance();

        Thread.sleep(10000000000L);
    }

    /**
     * This test is for manual execution only, since it's here just to get working CNN and visualize it's layers
     *
     * @throws Exception
     */
    @Test
    public void testCNNActivationsVisualization() throws Exception {
        final int numRows = 40;
        final int numColumns = 40;
        int nChannels = 3;
        int outputNum = LFWLoader.NUM_LABELS;
        int numSamples = LFWLoader.NUM_IMAGES;
        boolean useSubset = false;
        int batchSize = 200;// numSamples/10;
        int iterations = 5;
        int splitTrainNum = (int) (batchSize * .8);
        int seed = 123;
        int listenerFreq = iterations / 5;
        DataSet lfwNext;
        SplitTestAndTrain trainTest;
        DataSet trainInput;
        List<INDArray> testInput = new ArrayList<>();
        List<INDArray> testLabels = new ArrayList<>();

        log.info("Load data....");
        DataSetIterator lfw = new LFWDataSetIterator(batchSize, numSamples, new int[] {numRows, numColumns, nChannels},
                        outputNum, useSubset, true, 1.0, new Random(seed));

        log.info("Build model....");
        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                        .activation(Activation.RELU).weightInit(WeightInit.XAVIER)
                        .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
                        .updater(new AdaGrad(0.01)).weightNoise(new DropConnect(0.5)).list()
                        .layer(0, new ConvolutionLayer.Builder(4, 4).name("cnn1").nIn(nChannels).stride(1, 1).nOut(20)
                                        .build())
                        .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                        .name("pool1").build())
                        .layer(2, new ConvolutionLayer.Builder(3, 3).name("cnn2").stride(1, 1).nOut(40).build())
                        .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                        .name("pool2").build())
                        .layer(4, new ConvolutionLayer.Builder(3, 3).name("cnn3").stride(1, 1).nOut(60).build())
                        .layer(5, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2})
                                        .name("pool3").build())
                        .layer(6, new ConvolutionLayer.Builder(2, 2).name("cnn3").stride(1, 1).nOut(80).build())
                        .layer(7, new DenseLayer.Builder().name("ffn1").nOut(160).dropOut(0.5).build())
                        .layer(8, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                        .nOut(outputNum).activation(Activation.SOFTMAX).build())
                        .setInputType(InputType.convolutional(numRows, numColumns, nChannels));

        MultiLayerNetwork model = new MultiLayerNetwork(builder.build());
        model.init();

        log.info("Train model....");
        model.setListeners(new ScoreIterationListener(listenerFreq), new ConvolutionalIterationListener(listenerFreq));

        while (lfw.hasNext()) {
            lfwNext = lfw.next();
            lfwNext.scale();
            trainTest = lfwNext.splitTestAndTrain(splitTrainNum, new Random(seed)); // train set that is the result
            trainInput = trainTest.getTrain(); // get feature matrix and labels for training
            testInput.add(trainTest.getTest().getFeatures());
            testLabels.add(trainTest.getTest().getLabels());
            model.fit(trainInput);
        }

        log.info("Evaluate model....");
        Evaluation eval = new Evaluation(lfw.getLabels());
        for (int i = 0; i < testInput.size(); i++) {
            INDArray output = model.output(testInput.get(i));
            eval.eval(testLabels.get(i), output);
        }
        // FIX: removed a redundant re-evaluation of testInput.get(0) after the
        // loop, which double-counted the first test batch in the statistics.
        log.info(eval.stats());
        log.info("****************Example finished********************");
    }

    /** Trains word2vec on the raw-sentences corpus; plotting hook is disabled. */
    @Test(timeout = 300000)
    public void testWord2VecPlot() throws Exception {
        File inputFile = Resources.asFile("big/raw_sentences.txt");
        SentenceIterator iter = new BasicLineIterator(inputFile.getAbsolutePath());

        TokenizerFactory t = new DefaultTokenizerFactory();
        t.setTokenPreProcessor(new CommonPreprocessor());

        Word2Vec vec = new Word2Vec.Builder().minWordFrequency(5).iterations(2).batchSize(1000).learningRate(0.025)
                        .layerSize(100).seed(42).sampling(0).negativeSample(0).windowSize(5)
                        .modelUtils(new BasicModelUtils<VocabWord>()).useAdaGrad(false).iterate(iter).workers(10)
                        .tokenizerFactory(t).build();

        vec.fit();

        // UiConnectionInfo connectionInfo = UiServer.getInstance().getConnectionInfo();

        // vec.getLookupTable().plotVocab(100, connectionInfo);

        Thread.sleep(10000000000L);

        fail("Not implemented");
    }

    /** Writes a small gradient matrix to test.png via writeImage. */
    @Test
    public void testImage() throws Exception {
        INDArray array = Nd4j.create(11, 13);
        for (int i = 0; i < array.rows(); i++) {
            array.putRow(i, Nd4j.create(new double[] {0.0f, 0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f, 1.0f,
                            1.2f, 1.3f}));
        }
        writeImage(array, new File("test.png"));
    }

    // Renders a 2-D array as a grayscale PNG, one pixel per cell.
    private void writeImage(INDArray array, File file) {
        // BufferedImage image = ImageLoader.toImage(array);

        log.info("Array.rank(): " + array.rank());
        log.info("Size(-1): " + array.size(-1));
        log.info("Size(-2): " + array.size(-2));
        BufferedImage imageToRender = new BufferedImage(array.columns(), array.rows(), BufferedImage.TYPE_BYTE_GRAY);

        for (int x = 0; x < array.columns(); x++) {
            for (int y = 0; y < array.rows(); y++) {
                log.info("x: " + (x) + " y: " + y);
                imageToRender.getRaster().setSample(x, y, 0, (int) (255 * array.getRow(y).getDouble(x)));
            }
        }

        try {
            ImageIO.write(imageToRender, "png", file);
        } catch (IOException e) {
            log.error("",e);
        }
    }

    /** Trains LeNet-style CNN on MNIST with the convolutional UI listener attached. */
    @Test
    public void testCNNActivations2() throws Exception {

        int nChannels = 1;
        int outputNum = 10;
        int batchSize = 64;
        int nEpochs = 10;
        int seed = 123;

        log.info("Load data....");
        DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, 12345);
        DataSetIterator mnistTest = new MnistDataSetIterator(batchSize, false, 12345);

        log.info("Build model....");
        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                        .l2(0.0005)
                        .weightInit(WeightInit.XAVIER)
                        .updater(new Nesterovs(0.01, 0.9)).list()
                        .layer(0, new ConvolutionLayer.Builder(5, 5)
                                        //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                                        .nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build())
                        .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                        .stride(2, 2).build())
                        .layer(2, new ConvolutionLayer.Builder(5, 5)
                                        //Note that nIn needed be specified in later layers
                                        .stride(1, 1).nOut(50).activation(Activation.IDENTITY).build())
                        .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                        .stride(2, 2).build())
                        .layer(4, new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build())
                        .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                        .nOut(outputNum).activation(Activation.SOFTMAX).build())
                        .setInputType(InputType.convolutional(28, 28, nChannels));

        MultiLayerConfiguration conf = builder.build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        /*
        ParallelWrapper wrapper = new ParallelWrapper.Builder(model)
            .averagingFrequency(1)
            .prefetchBuffer(12)
            .workers(2)
            .reportScoreAfterAveraging(false)
            .useLegacyAveraging(false)
            .build();
        */

        log.info("Train model....");
        model.setListeners(new ConvolutionalIterationListener(1));

        //((NativeOpExecutioner) Nd4j.getExecutioner()).getLoop().setOmpNumThreads(8);

        long timeX = System.currentTimeMillis();
        //  nEpochs = 2;

        for (int i = 0; i < nEpochs; i++) {
            long time1 = System.currentTimeMillis();
            model.fit(mnistTrain);
            //wrapper.fit(mnistTrain);
            long time2 = System.currentTimeMillis();
            log.info("*** Completed epoch {}, Time elapsed: {} ***", i, (time2 - time1));
        }
        long timeY = System.currentTimeMillis();

        log.info("Evaluate model....");
        Evaluation eval = new Evaluation(outputNum);
        while (mnistTest.hasNext()) {
            DataSet ds = mnistTest.next();
            INDArray output = model.output(ds.getFeatures(), false);
            eval.eval(ds.getLabels(), output);
        }
        log.info(eval.stats());
        mnistTest.reset();

        log.info("****************Example finished********************");
    }

    /** Same MNIST setup but with the lower layers frozen (no weight updates). */
    @Test
    public void testCNNActivationsFrozen() throws Exception {

        int nChannels = 1;
        int outputNum = 10;
        int batchSize = 64;
        int nEpochs = 10;
        int seed = 123;

        log.info("Load data....");
        DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, 12345);

        log.info("Build model....");
        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                        .l2(0.0005)
                        .weightInit(WeightInit.XAVIER)
                        .updater(new Nesterovs(0.01, 0.9)).list()
                        .layer(0, new FrozenLayer(new ConvolutionLayer.Builder(5, 5)
                                        //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                                        .nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build()))
                        .layer(1, new FrozenLayer(new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                        .stride(2, 2).build()))
                        .layer(2, new FrozenLayer(new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build()))
                        .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                                        .nOut(outputNum).activation(Activation.SOFTMAX).build())
                        .setInputType(InputType.convolutionalFlat(28, 28, nChannels));

        MultiLayerConfiguration conf = builder.build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();

        log.info("Train model....");
        model.setListeners(new ConvolutionalIterationListener(1));

        for (int i = 0; i < nEpochs; i++) {
            model.fit(mnistTrain);
        }
    }
}
| |
/*
* Copyright 2011 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.j2objc.translate;
import com.google.devtools.j2objc.GenerationTest;
import java.io.IOException;
/**
* Unit tests for {@link InitializationNormalization} phase.
*
* @author Tom Ball
*/
public class InitializationNormalizerTest extends GenerationTest {
  // TODO(tball): update bug id in comments to public issue numbers when
  // issue tracking is sync'd.

  // Instance under test; recreated for every test in setUp().
  InitializationNormalizer instance;

  @Override
  protected void setUp() throws IOException {
    super.setUp();
    instance = new InitializationNormalizer();
  }

  /**
   * Verify that for a constructor that calls another constructor and has
   * other statements, the "this-constructor" statement is used to
   * initialize self, rather than a super constructor call.
   */
  public void testThisConstructorCallInlined() throws IOException {
    String source = "class Test {"
        + "boolean b1; boolean b2;"
        + "Test() { this(true); b2 = true; }"
        + "Test(boolean b) { b1 = b; }}";
    String translation = translateSourceFile(source, "Test", "Test.m");
    assertTranslatedLines(translation,
        "void Test_init(Test *self) {",
        "  Test_initWithBoolean_(self, YES);",
        "  self->b2_ = YES;",
        "}");
  }

  /**
   * Regression test (b/5822974): translation fails with an
   * ArrayIndexOutOfBoundsException in JDT, due to a syntax error in the
   * vertices initializer after initialization normalization.
   * @throws IOException
   */
  public void testFieldArrayInitializer() throws IOException {
    String source = "public class Distance {"
        + "private class SimplexVertex {}"
        + "private class Simplex {"
        + "  public final SimplexVertex vertices[] = {"
        + "    new SimplexVertex() "
        + "  }; }}";
    String translation = translateSourceFile(source, "Distance", "Distance.m");
    assertTranslation(translation,
        "[IOSObjectArray newArrayWithObjects:(id[]){ "
        + "[new_Distance_SimplexVertex_initWithDistance_(outer$) autorelease] } "
        + "count:1 type:Distance_SimplexVertex_class_()]");
  }

  // Static field initializers must move from the declaration into +initialize.
  public void testStaticVarInitialization() throws IOException {
    String translation = translateSourceFile(
        "class Test { static java.util.Date date = new java.util.Date(); }", "Test", "Test.m");
    // test that initializer was stripped from the declaration
    assertTranslation(translation, "JavaUtilDate *Test_date_;");
    // test that initializer was moved to new initialize method
    assertTranslatedLines(translation,
        "+ (void)initialize {",
        "if (self == [Test class]) {",
        "JreStrongAssignAndConsume(&Test_date_, nil, new_JavaUtilDate_init());");
  }

  // Instance field initializers move into a generated default constructor.
  public void testFieldInitializer() throws IOException {
    String translation = translateSourceFile(
        "class Test { java.util.Date date = new java.util.Date(); }", "Test", "Test.m");
    // Test that a default constructor was created and the initializer statement
    // moved to the constructor.
    assertTranslatedLines(translation,
        "void Test_init(Test *self) {",
        "  NSObject_init(self);",
        "  Test_setAndConsume_date_(self, new_JavaUtilDate_init());",
        "}");
  }

  // Instance initializer blocks are moved into the constructor, kept as blocks.
  public void testInitializationBlock() throws IOException {
    String translation = translateSourceFile(
        "class Test { java.util.Date date; { date = new java.util.Date(); } }", "Test", "Test.m");
    // Test that a default constructor was created and the initializer statement
    // moved to the constructor.
    assertTranslatedLines(translation,
        "void Test_init(Test *self) {",
        "  NSObject_init(self);",
        "  {",
        "  Test_setAndConsume_date_(self, new_JavaUtilDate_init());",
        "  }",
        "}");
  }

  // Static initializer blocks become the body of the generated +initialize.
  public void testStaticInitializerBlock() throws IOException {
    String translation = translateSourceFile(
        "class Test { static { System.out.println(\"foo\"); } }", "Test", "Test.m");
    // test that a static initialize() method was created and that it contains
    // the block's statement.
    assertTranslatedLines(translation,
        "+ (void)initialize {",
        "if (self == [Test class]) {",
        "{",
        "[((JavaIoPrintStream *) nil_chk(JavaLangSystem_get_out_())) "
        + "printlnWithNSString:@\"foo\"];");
  }

  // Initializers only go into designated constructors, not ones that call this().
  public void testInitializerMovedToDesignatedConstructor() throws IOException {
    String translation = translateSourceFile(
        "class Test { java.util.Date date; { date = new java.util.Date(); } "
        + "public Test() { this(2); } public Test(int i) { System.out.println(i); } }",
        "Test", "Test.m");
    // test that default constructor was untouched, since it calls self()
    assertTranslatedLines(translation,
        "void Test_init(Test *self) {",
        "  Test_initWithInt_(self, 2);",
        "}");
    // test that initializer statement was added to second constructor
    assertTranslatedLines(translation,
        "void Test_initWithInt_(Test *self, jint i) {",
        "  NSObject_init(self);",
        "  {",
        "  Test_setAndConsume_date_(self, new_JavaUtilDate_init());",
        "  }",
        "  [((JavaIoPrintStream *) nil_chk(JavaLangSystem_get_out_())) printlnWithInt:i];",
        "}");
  }

  // An explicit empty constructor still receives the field initializer.
  public void testInitializerMovedToEmptyConstructor() throws IOException {
    String translation = translateSourceFile(
        "class Test { java.util.Date date = new java.util.Date(); public Test() {} }",
        "Test", "Test.m");
    assertTranslatedLines(translation,
        "void Test_init(Test *self) {",
        "  NSObject_init(self);",
        "  Test_setAndConsume_date_(self, new_JavaUtilDate_init());",
        "}");
  }

  /**
   * Regression test (b/5861660): translation fails with an NPE when
   * an interface has a constant defined.
   */
  public void testInterfaceConstantsIgnored() throws IOException {
    String source = "public interface Mouse { int BUTTON_LEFT = 0; }";
    String translation = translateSourceFile(source, "Mouse", "Mouse.h");
    assertTranslation(translation, "#define Mouse_BUTTON_LEFT 0");
  }

  // Strings with characters invalid in C literals are built at runtime instead.
  public void testStringWithInvalidCppCharacters() throws IOException {
    String source = "class Test { static final String foo = \"\\uffff\"; }";
    String translation = translateSourceFile(source, "Test", "Test.m");
    assertTranslation(translation, "NSString *Test_foo_;");
    assertTranslation(translation,
        "JreStrongAssign(&Test_foo_, nil, [NSString stringWithCharacters:(jchar[]) { "
        + "(int) 0xffff } length:1]);");
  }

  // Same as above, but for a compile-time concatenation containing such a char.
  public void testStringConcatWithInvalidCppCharacters() throws IOException {
    String source = "class Test { static final String foo = \"hello\" + \"\\uffff\"; }";
    String translation = translateSourceFile(source, "Test", "Test.m");
    assertTranslation(translation, "NSString *Test_foo_;");
    assertTranslation(translation,
        "JreStrongAssign(&Test_foo_, nil, JreStrcat(\"$$\", @\"hello\", "
        + "[NSString stringWithCharacters:(jchar[]) { (int) 0xffff } length:1]));");
  }

  // Inner-class field initializers referencing the outer instance must run
  // after the outer reference (this$0) has been assigned.
  public void testInitializersPlacedAfterOuterAssignments() throws IOException {
    String source = "class Test { "
        + " int outerVar = 1; "
        + " class Inner { int innerVar = outerVar; void test() { outerVar++; } } }";
    String translation = translateSourceFile(source, "Test", "Test.m");
    assertTranslation(translation, "Test_Inner_set_this$0_(self, outer$);");
    assertTranslation(translation, "innerVar_ = outer$->outerVar_;");
    assertTrue(translation.indexOf("Test_Inner_set_this$0_(self, outer$);")
        < translation.indexOf("innerVar_ = outer$->outerVar_;"));
  }

  // Static field initializers and static blocks must keep source order.
  public void testStaticInitializersKeptInOrder() throws IOException {
    String source =
        "public class Test { "
        + "  public static final int I = 1; "
        + "  public static final java.util.Set<Integer> iSet = new java.util.HashSet<Integer>(); "
        + "  static { iSet.add(I); } "
        + "  public static final int iSetSize = iSet.size(); }";
    String translation = translateSourceFile(source, "Test", "Test.m");
    String setInit = "JreStrongAssignAndConsume(&Test_iSet_, nil, new_JavaUtilHashSet_init())";
    String setAdd = "[Test_iSet_ addWithId:JavaLangInteger_valueOfWithInt_(Test_I)]";
    String setSize = "Test_iSetSize_ = [Test_iSet_ size]";
    assertTranslation(translation, setInit);
    assertTranslation(translation, setAdd);
    assertTranslation(translation, setSize);
    assertTrue(translation.indexOf(setInit) < translation.indexOf(setAdd));
    assertTrue(translation.indexOf(setAdd) < translation.indexOf(setSize));
  }

  // A constant String chain folds to its compile-time value in the translation.
  public void testStaticFinalStringAssignedToStaticFinalString() throws IOException {
    String translation = translateSourceFile(
        "class Test { static final String FOO = Inner.BAR; "
        + "class Inner { static final String BAR = \"bar\"; } }", "Test", "Test.m");
    assertTranslation(translation, "NSString *Test_FOO_ = @\"bar\";");
  }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.apple;
import static com.google.devtools.build.lib.packages.Attribute.ConfigurationTransition.HOST;
import static com.google.devtools.build.lib.packages.Attribute.attr;
import static com.google.devtools.build.lib.packages.BuildType.LABEL;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.analysis.RuleDefinition;
import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.packages.Attribute.LateBoundLabel;
import com.google.devtools.build.lib.packages.AttributeMap;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.RuleClass.Builder;
import com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.xcode.xcodegen.proto.XcodeGenProtos.XcodeprojBuildSetting;
/**
* Utility class for resolving items for the Apple toolchain (such as common tool flags, and paths).
*/
@SkylarkModule(
name = "apple_toolchain",
doc = "Utilities for resolving items from the Apple toolchain."
)
public class AppleToolchain {
  // These next two strings are shared secrets with the xcrunwrapper.sh to allow
  // expansion of DeveloperDir and SDKRoot at runtime, since they aren't known
  // until compile time on any given build machine.
  @VisibleForTesting public static final String DEVELOPER_DIR = "__BAZEL_XCODE_DEVELOPER_DIR__";
  @VisibleForTesting public static final String SDKROOT_DIR = "__BAZEL_XCODE_SDKROOT__";

  // These two paths are framework paths relative to SDKROOT.
  @VisibleForTesting
  public static final String DEVELOPER_FRAMEWORK_PATH = "/Developer/Library/Frameworks";
  @VisibleForTesting
  public static final String SYSTEM_FRAMEWORK_PATH = "/System/Library/Frameworks";

  // There is a handy reference to many clang warning flags at
  // http://nshipster.com/clang-diagnostics/
  // There is also a useful narrative for many Xcode settings at
  // http://www.xs-labs.com/en/blog/2011/02/04/xcode-build-settings/
  /** Map from Xcode warning build-setting name to the clang flag it corresponds to. */
  public static final ImmutableMap<String, String> DEFAULT_WARNINGS =
      new ImmutableMap.Builder<String, String>()
          .put("GCC_WARN_64_TO_32_BIT_CONVERSION", "-Wshorten-64-to-32")
          .put("CLANG_WARN_BOOL_CONVERSION", "-Wbool-conversion")
          .put("CLANG_WARN_CONSTANT_CONVERSION", "-Wconstant-conversion")
          // Double-underscores are intentional - thanks Xcode.
          .put("CLANG_WARN__DUPLICATE_METHOD_MATCH", "-Wduplicate-method-match")
          .put("CLANG_WARN_EMPTY_BODY", "-Wempty-body")
          .put("CLANG_WARN_ENUM_CONVERSION", "-Wenum-conversion")
          .put("CLANG_WARN_INT_CONVERSION", "-Wint-conversion")
          .put("CLANG_WARN_UNREACHABLE_CODE", "-Wunreachable-code")
          .put("GCC_WARN_ABOUT_RETURN_TYPE", "-Wmismatched-return-types")
          .put("GCC_WARN_UNDECLARED_SELECTOR", "-Wundeclared-selector")
          .put("GCC_WARN_UNINITIALIZED_AUTOS", "-Wuninitialized")
          .put("GCC_WARN_UNUSED_FUNCTION", "-Wunused-function")
          .put("GCC_WARN_UNUSED_VARIABLE", "-Wunused-variable")
          .build();

  /**
   * Returns the platform directory inside of Xcode for a platform name.
   *
   * @param platformName the platform bundle name as it appears under {@code Platforms/},
   *     e.g. {@code "iPhoneOS"}
   */
  public static String platformDir(String platformName) {
    return DEVELOPER_DIR + "/Platforms/" + platformName + ".platform";
  }

  /**
   * Returns the SDK root directory (SDKROOT) placeholder, which xcrunwrapper.sh expands to the
   * actual SDK path at execution time.
   *
   * <p>NOTE(review): the Skylark {@code doc} string below says "platform directory" but this
   * method returns SDKROOT; the string is user-visible so it is left unchanged here — confirm
   * before correcting it.
   */
  @SkylarkCallable(
    name = "sdk_dir",
    doc = "Returns the platform directory inside of Xcode for a given configuration."
  )
  public static String sdkDir() {
    return SDKROOT_DIR;
  }

  /**
   * Returns the platform frameworks directory inside of Xcode for a given configuration.
   */
  @SkylarkCallable(
    name = "platform_developer_framework_dir",
    doc = "Returns the platform frameworks directory inside of Xcode for a given configuration."
  )
  public static String platformDeveloperFrameworkDir(AppleConfiguration configuration) {
    String platformDir = platformDir(configuration.getSingleArchPlatform().getNameInPlist());
    // Reuse the shared constant (previously a duplicated string literal) so this path cannot
    // drift out of sync with sdkFrameworkDir() below.
    return platformDir + DEVELOPER_FRAMEWORK_PATH;
  }

  /**
   * Returns the SDK frameworks directory inside of Xcode for a given configuration.
   *
   * @throws IllegalArgumentException if {@code targetPlatform} is not handled here
   */
  public static String sdkFrameworkDir(Platform targetPlatform,
      AppleConfiguration configuration) {
    String relativePath;
    switch (targetPlatform) {
      case IOS_DEVICE:
      case IOS_SIMULATOR:
        // Starting with the iOS 9 SDK, the developer frameworks moved under /System.
        if (configuration.getSdkVersionForPlatform(targetPlatform)
            .compareTo(DottedVersion.fromString("9.0")) >= 0) {
          relativePath = SYSTEM_FRAMEWORK_PATH;
        } else {
          relativePath = DEVELOPER_FRAMEWORK_PATH;
        }
        break;
      case MACOS_X:
        relativePath = DEVELOPER_FRAMEWORK_PATH;
        break;
      case WATCHOS_DEVICE:
      case WATCHOS_SIMULATOR:
      case TVOS_DEVICE:
      case TVOS_SIMULATOR:
        relativePath = SYSTEM_FRAMEWORK_PATH;
        break;
      default:
        throw new IllegalArgumentException("Unhandled platform " + targetPlatform);
    }
    return sdkDir() + relativePath;
  }

  /** Returns the swift libraries path inside the default toolchain for the given platform. */
  public static String swiftLibDir(Platform platform) {
    return DEVELOPER_DIR
        + "/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift/"
        + platform.getLowerCaseNameInPlist();
  }

  /**
   * Returns a series of xcode build settings which configure compilation warnings to
   * "recommended settings". Without these settings, compilation might result in some spurious
   * warnings, and xcode would complain that the settings be changed to these values.
   */
  public static Iterable<? extends XcodeprojBuildSetting> defaultWarningsForXcode() {
    return Iterables.transform(DEFAULT_WARNINGS.keySet(),
        new Function<String, XcodeprojBuildSetting>() {
          @Override
          public XcodeprojBuildSetting apply(String key) {
            // Each warning setting is a boolean toggle in the generated project; the
            // corresponding clang flag in DEFAULT_WARNINGS is consumed elsewhere.
            return XcodeprojBuildSetting.newBuilder().setName(key).setValue("YES").build();
          }
        });
  }

  /**
   * The default label of the build-wide {@code xcode_config} configuration rule.
   *
   * <p>Late-bound so the label can be redirected by the {@link AppleConfiguration} fragment of
   * the current configuration.
   */
  @Immutable
  public static final class XcodeConfigLabel extends LateBoundLabel<BuildConfiguration> {
    public XcodeConfigLabel(String toolsRepository) {
      super(toolsRepository + AppleCommandLineOptions.DEFAULT_XCODE_VERSION_CONFIG_LABEL,
          AppleConfiguration.class);
    }

    @Override
    public Label resolve(Rule rule, AttributeMap attributes, BuildConfiguration configuration) {
      return configuration.getFragment(AppleConfiguration.class).getXcodeConfigLabel();
    }
  }

  /**
   * Base rule definition to be ancestor for rules which may require an xcode toolchain.
   */
  public static class RequiresXcodeConfigRule implements RuleDefinition {
    private final String toolsRepository;

    public RequiresXcodeConfigRule(String toolsRepository) {
      this.toolsRepository = toolsRepository;
    }

    @Override
    public RuleClass build(Builder builder, RuleDefinitionEnvironment env) {
      return builder
          .add(attr(":xcode_config", LABEL)
              .allowedRuleClasses("xcode_config")
              .checkConstraints()
              .direct_compile_time_input()
              .cfg(HOST)
              .value(new XcodeConfigLabel(toolsRepository)))
          .build();
    }

    @Override
    public Metadata getMetadata() {
      return RuleDefinition.Metadata.builder()
          .name("$requires_xcode_config")
          .type(RuleClassType.ABSTRACT)
          .build();
    }
  }
}
| |
package org.openxava.test.tests;
import java.math.*;
import org.openxava.jpa.*;
import org.openxava.test.model.*;
import org.openxava.tests.*;
/**
* @author Javier Paniza
*/
public class Product2Test extends ModuleTestBase {
  // Binds this test case to the "Product2" module of the OpenXavaTest application.
  public Product2Test(String testName) {
    super(testName, "Product2");
  }
  /**
   * Custom report dialog: it must open and close correctly from both detail and list mode,
   * and generateExcel must leave the dialog open (unlike generatePdf — see assertCustomDialog).
   */
  public void testCustomDialog() throws Exception {
    // In detail mode
    execute("CRUD.new");
    assertCustomDialog();
    // In list mode
    execute("Mode.list");
    assertCustomDialog();
    // Using generateExcel that does not hide the dialog
    assertNoDialog();
    assertAction("Product2.reportBySubfamily");
    assertNoAction("FamilyProductsReport.generateExcel");
    execute("Product2.reportBySubfamily");
    assertDialog();
    assertNoAction("Product2.reportBySubfamily");
    assertAction("FamilyProductsReport.generateExcel");
    setValue("subfamily.number", "2");
    // The next line fails on WebSphere Portal 6.1 because of the
    // WebSphere6-1/PortalServer/lwo/prereq.odc/shared/app/jakarta-poi.jar file.
    // Just remove this file and this test will pass. Changing classloading policy of war does not work
    execute("FamilyProductsReport.generateExcel");
    assertNoErrors();
    assertContentTypeForPopup("application/vnd.ms-excel");
    // Unlike generatePdf, the Excel action keeps the dialog open.
    assertDialog();
    assertNoAction("Product2.reportBySubfamily");
    assertAction("FamilyProductsReport.generateExcel");
  }
  /**
   * Opens the report-by-subfamily dialog, checks required-field validation on generatePdf,
   * and verifies the dialog closes after a successful PDF generation.
   */
  private void assertCustomDialog() throws Exception {
    assertNoDialog();
    assertAction("Product2.reportBySubfamily");
    assertNoAction("FamilyProductsReport.generatePdf");
    execute("Product2.reportBySubfamily");
    assertDialog();
    assertNoAction("Product2.reportBySubfamily");
    assertAction("FamilyProductsReport.generatePdf");
    // Submitting without the required subfamily must produce a validation error.
    execute("FamilyProductsReport.generatePdf");
    assertError("Value for Subfamily in Filter by subfamily is required");
    setValue("subfamily.number", "2");
    execute("FamilyProductsReport.generatePdf");
    assertNoErrors();
    assertContentTypeForPopup("application/pdf");
    // Success closes the dialog and restores the original action set.
    assertNoDialog();
    assertAction("Product2.reportBySubfamily");
    assertNoAction("FamilyProductsReport.generatePdf");
  }
  /**
   * A calculated (formula) property must show in list and detail, be filterable in list mode,
   * and be read-only in detail mode.
   */
  public void testFormula() throws Exception {
    assertValueInList(0, "unitPrice", "11.00");
    assertValueInList(0, "unitPriceWithTax", "12.76");
    assertListRowCount(7); // We rely in that there are 7 products,
    // you can adapt this number if needed
    setConditionValues(new String [] {"", "", "", "", "", "12.76"});
    execute("List.filter");
    assertListRowCount(2); // We rely in that there are 2 products
                            // with 11.00 as price, you can adapt this number if needed
    assertValueInList(0, "unitPriceWithTax", "12.76");
    assertValueInList(1, "unitPriceWithTax", "12.76");
    execute("Mode.detailAndFirst");
    assertValue("unitPrice", "11.00");
    assertValue("unitPriceWithTax", "12.76");
    assertEditable("unitPrice");
    assertNoEditable("unitPriceWithTax");
  }
  /**
   * The color reference must be rendered with the custom editor declared in editors.xml
   * (radio buttons) instead of the default frame editor.
   */
  public void testEditorForReferenceInEditorsXML() throws Exception {
    execute("Mode.detailAndFirst");
    setValue("color.number", "1");
    execute("CRUD.save");
    assertNoErrors();
    assertValue("description", "");
    execute("Mode.list");
    execute("Mode.detailAndFirst");
    assertValue("color.number", "1");
    setValue("color.number", "28");
    execute("CRUD.save");
    assertNoErrors();
    assertValue("description", "");
    execute("Mode.list");
    execute("Mode.detailAndFirst");
    assertValue("color.number", "28");
    // The custom editor renders the reference as radio inputs...
    assertTrue(
      getHtml().indexOf(
        "<input name=\"ox_OpenXavaTest_Product2__color___number\" value=\"0\" type=\"radio\"")
        >= 0
    );
    // ...and the default frame editor must not appear.
    assertTrue(getHtml().indexOf("Color Frame Editor:") < 0);
  }
  /**
   * Full life cycle of the images gallery editor: add (with cancel), view, maximize/minimize,
   * read-only mode, and remove — verifying the image count at each step.
   */
  public void testImagesGallery() throws Exception {
    // We remove the stored gallery oid from product 1 in order to test that images gallery works well in the first attempt.
    Product2.findByNumber(1).setPhotos("");
    XPersistence.commit();
    // Verifying product 1 has no images
    assertTrue("At least 2 products are required to run this test", getListRowCount() >= 2);
    execute("Mode.detailAndFirst");
    assertValue("number", "1");
    execute("Gallery.edit", "galleryProperty=photos");
    assertNoErrors();
    assertMessage("No images");
    assertNoAction("Gallery.maximizeImage");
    assertNoAction("Gallery.minimizeImage");
    assertNoAction("Gallery.removeImage");
    assertEquals("Images count does not match", 0, getForm().getInputsByName("xava.GALLERY.images").size());
    // Canceling the adding of an image
    execute("Gallery.addImage");
    assertNoErrors();
    String imageUrl = System.getProperty("user.dir") + "/test-images/foto_javi.jpg";
    setFileValue("newImage", imageUrl);
    execute("LoadImageIntoGallery.cancel");
    assertNoErrors();
    assertNoAction("Gallery.maximizeImage");
    assertNoAction("Gallery.minimizeImage");
    assertNoAction("Gallery.removeImage");
    assertEquals("Images count does not match", 0, getForm().getInputsByName("xava.GALLERY.images").size());
    // Adding one image
    execute("Gallery.addImage");
    assertNoErrors();
    imageUrl = System.getProperty("user.dir") + "/test-images/foto_javi.jpg";
    setFileValue("newImage", imageUrl);
    execute("LoadImageIntoGallery.loadImage");
    assertNoErrors();
    assertMessage("Image added to the gallery");
    assertAction("Gallery.maximizeImage");
    assertNoAction("Gallery.minimizeImage");
    assertAction("Gallery.removeImage");
    assertEquals("Images count does not match", 1, getForm().getInputsByName("xava.GALLERY.images").size());
    // Returning to the main entity
    execute("Gallery.close");
    //execute("CRUD.save"); It's not needed explicit saving of the main entity
    assertNoErrors();
    // Verifying that product 2 has no images
    execute("Navigation.next");
    assertValue("number", "2");
    execute("Gallery.edit", "galleryProperty=photos");
    assertNoErrors();
    assertMessage("No images");
    assertNoAction("Gallery.maximizeImage");
    assertNoAction("Gallery.minimizeImage");
    assertNoAction("Gallery.removeImage");
    assertEquals("Images count does not match", 0, getForm().getInputsByName("xava.GALLERY.images").size());
    execute("Gallery.close");
    // Verifying that product 1 has the added image
    execute("CRUD.new");
    setValue("number", "1");
    execute("CRUD.refresh");
    assertNoErrors();
    execute("Gallery.edit", "galleryProperty=photos");
    assertNoErrors();
    assertNoMessages();
    assertAction("Gallery.maximizeImage");
    assertNoAction("Gallery.minimizeImage");
    assertAction("Gallery.removeImage");
    assertEquals("Images count does not match", 1, getForm().getInputsByName("xava.GALLERY.images").size());
    String imageOid = getForm().getInputByName("xava.GALLERY.images").getValueAttribute();
    // Maximizing the image
    execute("Gallery.maximizeImage", "oid="+imageOid);
    assertNoErrors();
    assertNoAction("Gallery.maximizeImage");
    assertAction("Gallery.minimizeImage");
    assertNoAction("Gallery.removeImage");
    // Minimizing the image
    execute("Gallery.minimizeImage");
    assertNoErrors();
    assertAction("Gallery.maximizeImage");
    assertNoAction("Gallery.minimizeImage");
    assertAction("Gallery.removeImage");
    assertEquals("Images count does not match", 1, getForm().getInputsByName("xava.GALLERY.images").size());
    // Verifying read-only
    execute("Gallery.close");
    execute("EditableOnOff.setOff");
    execute("Gallery.edit", "galleryProperty=photos");
    assertNoErrors();
    assertNoMessages();
    // In read-only mode only viewing (maximize) is allowed, not add/remove.
    assertNoAction("Gallery.addImage");
    assertAction("Gallery.maximizeImage");
    assertNoAction("Gallery.minimizeImage");
    assertNoAction("Gallery.removeImage");
    assertEquals("Images count does not match", 1, getForm().getInputsByName("xava.GALLERY.images").size());
    execute("Close.close");
    execute("EditableOnOff.setOn");
    // Removing the image
    execute("Gallery.edit", "galleryProperty=photos");
    assertEquals("Images count does not match", 1, getForm().getInputsByName("xava.GALLERY.images").size());
    execute("Gallery.removeImage", "oid="+imageOid);
    assertNoErrors();
    assertNoAction("Gallery.maximizeImage");
    assertNoAction("Gallery.minimizeImage");
    assertNoAction("Gallery.removeImage");
    assertEquals("Images count does not match", 0, getForm().getInputsByName("xava.GALLERY.images").size());
    // Verifying that product 1 has no images
    execute("Gallery.close");
    execute("CRUD.new");
    setValue("number", "1");
    execute("CRUD.refresh");
    assertNoErrors();
    execute("Gallery.edit", "galleryProperty=photos");
    assertNoErrors();
    assertMessage("No images");
    assertEquals("Images count does not match", 0, getForm().getInputsByName("xava.GALLERY.images").size());
  }
  /**
   * The warehouse descriptions list must honor the filter (zone 1 only) and ordering of the
   * default tab after Product2.changeLimitZone is executed.
   */
  public void testReferencesAsDescriptionListUsesFilterOfDefaultTab() throws Exception {
    execute("CRUD.new");
    execute("Product2.changeLimitZone");
    Warehouse key1 = new Warehouse();
    key1.setZoneNumber(1);
    key1.setNumber(1);
    Warehouse key2 = new Warehouse();
    key2.setZoneNumber(1);
    key2.setNumber(2);
    Warehouse key3 = new Warehouse();
    key3.setZoneNumber(1);
    key3.setNumber(3);
    // Expected entries ordered by description, not by key — relies on the demo data.
    String [][] warehouses = {
      { "", "" },
      { toKeyString(key1), "CENTRAL VALENCIA" },
      { toKeyString(key3), "VALENCIA NORTE" },
      { toKeyString(key2), "VALENCIA SURETE" }
    };
    assertValidValues("warehouse.KEY", warehouses);
  }
  /**
   * Default-value calculators for references must fire on CRUD.new, and list column labels
   * must come from the generic i18n entries for tabs.
   */
  public void testDefaultValueCalculatorForReferences_genericI18nForTabs() throws Exception {
    assertLabelInList(2, "Family");
    assertLabelInList(3, "Subfamily");
    execute("CRUD.new");
    assertValue("family.number", "2");
    assertValue("warehouse.KEY", "[.4.4.]");
  }
  /** After choosing a family, focus must move to the dependent subfamily combo. */
  public void testFocusMoveToReferenceAsDescriptionsList() throws Exception {
    execute("CRUD.new");
    setValue("family.number", "1");
    assertFocusOn("subfamily.number");
  }
  /*
  // Since 2.2.1 "Mode.list" is not available when navigating to
  // another view than main one, then this case is impossible
  public void testListToDetailAlwaysMainView() throws Exception {
    execute("CRUD.new");
    assertExists("unitPrice");
    execute("Reference.createNew", "model=Family2,keyProperty=xava.Product2.family.number");
    assertNotExists("unitPrice");
    execute("Mode.list"); // Since 2.2.1 this link is not here
    execute("CRUD.new");
    assertExists("unitPrice");
  }
  */
  /** An action must be able to toggle the editable state of a descriptions-list reference. */
  public void testSetEditableOnReferencesAsDescriptionsList() throws Exception {
    execute("CRUD.new");
    assertEditable("family");
    execute("Product2.deactivateType");
    assertNoEditable("family");
  }
  /**
   * Changing a multiple-key descriptions-list reference must trigger its on-change action,
   * which shows/hides the zoneOne member depending on the selected warehouse zone.
   */
  public void testOnChangeDescriptionsListReferenceMultipleKey() throws Exception {
    execute("CRUD.new");
    assertNotExists("zoneOne");
    Warehouse warehouseKeyZone1 = new Warehouse();
    warehouseKeyZone1.setNumber(1);
    warehouseKeyZone1.setZoneNumber(1);
    setValue("warehouse.KEY", toKeyString(warehouseKeyZone1));
    assertExists("zoneOne");
    Warehouse warehouseKeyZone2 = new Warehouse();
    warehouseKeyZone2.setNumber(1);
    warehouseKeyZone2.setZoneNumber(2);
    setValue("warehouse.KEY", toKeyString(warehouseKeyZone2));
    assertNotExists("zoneOne");
    // The on-change must also fire when the warehouse arrives via CRUD.refresh.
    createProduct(66, "JUNIT ZONE 1", 1);
    createProduct(67, "JUNIT ZONE 2", 2);
    setValue("number", "66");
    execute("CRUD.refresh");
    assertNoErrors();
    assertValue("description", "JUNIT ZONE 1");
    assertExists("zoneOne");
    execute("CRUD.new");
    setValue("number", "67");
    execute("CRUD.refresh");
    assertNoErrors();
    assertValue("description", "JUNIT ZONE 2");
    assertNotExists("zoneOne");
    setValue("warehouse.KEY", "");
    assertValue("warehouse.KEY", "");
    // Clean up the fixtures created above.
    deleteProduct(66);
    deleteProduct(67);
  }
  /**
   * The subfamily combo depends on the selected family: its valid values must be emptied or
   * repopulated whenever the family changes.
   */
  public void testDescriptionsListReferenceDependents() throws Exception {
    execute("CRUD.new");
    // Verifying initial state
    String [][] familyValues = {
      { "", "" },
      { "1", "SOFTWARE" },
      { "2", "HARDWARE" },
      { "3", "SERVICIOS" }
    };
    assertValue("family.number", "2"); // 2 is the default value
    assertValidValues("family.number", familyValues);
    setValue("family.number", "");
    String [][] voidValues = {
      { "", "" }
    };
    assertValue("subfamily.number", "");
    assertValidValues("subfamily.number", voidValues);
    // Change value
    setValue("family.number", "2");
    String [][] hardwareValues = {
      { "", ""},
      { "12", "PC"},
      { "13", "PERIFERICOS"},
      { "11", "SERVIDORES"}
    };
    assertValue("subfamily.number", "");
    assertValidValues("subfamily.number", hardwareValues);
    // Changing the value again
    setValue("family.number", "1");
    String [][] softwareValues = {
      { "", ""},
      { "1", "DESARROLLO"},
      { "2", "GESTION"},
      { "3", "SISTEMA"}
    };
    assertValue("subfamily.number", "");
    assertValidValues("subfamily.number", softwareValues);
  }
  /** Navigating between records must keep dependent family/subfamily combos consistent. */
  public void testNavigationWithDescriptionsListReferenceDependents() throws Exception {
    execute("Mode.detailAndFirst");
    assertValue("number", "1");
    assertValue("family.number", "1");
    assertValue("subfamily.number", "2");
    execute("Navigation.next");
    assertValue("number", "2");
    assertValue("family.number", "2");
    assertValue("subfamily.number", "11");
    execute("Navigation.next");
    assertValue("number", "3");
    assertValue("family.number", "1");
    assertValue("subfamily.number", "1");
  }
  /** Complete create/read/update/delete cycle using descriptions-list references. */
  public void testCreateModifyAndReadWithDescriptionsListReference() throws Exception {
    // Create
    execute("CRUD.new");
    setValue("number", "66");
    setValue("description", "JUNIT PRODUCT");
    setValue("family.number", "2");
    assertNoErrors();
    setValue("subfamily.number", "12");
    Warehouse warehouseKey = new Warehouse();
    warehouseKey.setNumber(1);
    warehouseKey.setZoneNumber(2);
    setValue("warehouse.KEY", toKeyString(warehouseKey));
    setValue("unitPrice", "125.66");
    assertNoErrors();
    assertNoEditable("unitPriceInPesetas");
    execute("CRUD.save");
    assertNoErrors();
    // Search for verify
    setValue("number", "66");
    execute("CRUD.refresh");
    assertNoErrors();
    assertValue("number", "66");
    assertValue("description", "JUNIT PRODUCT");
    assertValue("family.number", "2");
    assertValue("subfamily.number", "12");
    assertValue("warehouse.KEY", toKeyString(warehouseKey));
    assertValue("unitPrice", "125.66");
    // Modify
    setValue("subfamily.number", "13");
    execute("CRUD.save");
    assertNoErrors();
    assertValue("number", "");
    assertValue("description", "");
    // Verifying just modified
    setValue("number", "66");
    execute("CRUD.refresh");
    assertNoErrors();
    assertValue("number", "66");
    assertValue("description", "JUNIT PRODUCT");
    assertValue("family.number", "2");
    assertValue("subfamily.number", "13");
    // Delete
    execute("CRUD.delete");
    assertMessage("Product deleted successfully");
  }
  /** Reference properties (family/subfamily descriptions) must be shown as list columns. */
  public void testReferencesInListMode() throws Exception {
    assertValueInList(1, "number", "2");
    assertValueInList(1, "family.description", "HARDWARE");
    assertValueInList(1, "subfamily.description", "SERVIDORES");
  }
  /**
   * Creating a referenced entity inline from a descriptions list: cancel, validation errors,
   * duplicate-key detection, and automatic selection of the newly created value.
   */
  public void testCreateReferencesFromDescriptionsList() throws Exception {
    execute("CRUD.new");
    // Verifying initial state
    String [][] familyValues = {
      { "", "" },
      { "1", "SOFTWARE" },
      { "2", "HARDWARE" },
      { "3", "SERVICIOS" }
    };
    assertValidValues("family.number", familyValues);
    execute("Reference.createNew", "model=Family2,keyProperty=xava.Product2.family.number");
    assertAction("NewCreation.saveNew");
    assertAction("NewCreation.cancel");
    execute("NewCreation.cancel");
    execute("Reference.createNew", "model=Family2,keyProperty=xava.Product2.family.number");
    assertAction("NewCreation.saveNew");
    assertAction("NewCreation.cancel");
    execute("NewCreation.saveNew");
    assertError("Value for Number in Family is required");
    assertError("Value for Description in Family is required");
    setValue("Family2", "number", "1");
    setValue("Family2", "description", "JUNIT TEST");
    execute("NewCreation.saveNew");
    assertError("Impossible to create: an object with that key already exists");
    setValue("Family2", "number", "66");
    execute("NewCreation.saveNew");
    assertNoErrors();
    // Test just added
    String [][] familyValuesUpdated = {
      { "", "" },
      { "1", "SOFTWARE" },
      { "2", "HARDWARE" },
      { "3", "SERVICIOS" },
      { "66", "JUNIT TEST" }
    };
    assertValidValues("family.number", familyValuesUpdated);
    assertValue("family.number", "66"); // The just created family is automatically selected
    // Delete it
    Family2 f = XPersistence.getManager().find(Family2.class, 66);
    XPersistence.getManager().remove(f);
  }
  /** Required-reference validation must fire for empty descriptions-list values. */
  public void testDescriptionsListReferenceValidation() throws Exception {
    execute("CRUD.new");
    setValue("family.number", ""); // because has a default value
    execute("CRUD.save");
    assertError("Value for Family in Product is required");
    assertError("Value for Subfamily in Product is required");
  }
  // Persists a fixture product in the given warehouse zone, committing immediately.
  private void createProduct(int number, String description, int zone) throws Exception {
    Product2 p = new Product2();
    p.setNumber(number);
    p.setDescription(description);
    Family2 f = new Family2();
    f.setNumber(1);
    p.setFamily(f);
    Subfamily2 sf = new Subfamily2();
    sf.setNumber(1);
    p.setSubfamily(sf);
    Warehouse w = new Warehouse();
    w.setNumber(1);
    w.setZoneNumber(zone);
    p.setWarehouse(w);
    p.setUnitPrice(new BigDecimal("1.00"));
    XPersistence.getManager().persist(p);
    XPersistence.commit();
  }
  // Removes a fixture product by key, committing immediately.
  private void deleteProduct(long number) throws Exception {
    Product2 k = XPersistence.getManager().find(Product2.class, number);
    XPersistence.getManager().remove(k);
    XPersistence.commit();
  }
}
| |
/*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* For more information, please refer to <http://unlicense.org/>
*/
package net.adamcin.httpsig.ssh.jce;
import net.adamcin.httpsig.api.Base64;
import net.adamcin.httpsig.api.DefaultKeychain;
import net.adamcin.httpsig.api.Key;
import net.adamcin.httpsig.api.Keychain;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.math.BigInteger;
import java.nio.charset.Charset;
import java.security.KeyFactory;
import java.security.PublicKey;
import java.security.spec.DSAPublicKeySpec;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.KeySpec;
import java.security.spec.RSAPublicKeySpec;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Factory methods for creating {@link net.adamcin.httpsig.api.DefaultVerifier} instances around SSH authorized_keys files
*/
public final class AuthorizedKeys {
private static final Logger LOGGER = LoggerFactory.getLogger(AuthorizedKeys.class);
private static final Pattern AUTHORIZED_KEY_PATTERN = Pattern.compile("^([^\\s]+)\\s+([^\\s]+)(\\s+([^\\s].*)|$)");
private static final String DEFAULT_PATH = ".ssh/authorized_keys";
private static final int GROUP_FORMAT = 1;
private static final int GROUP_KEY = 2;
private static final int GROUP_COMMENT = 4;
private static final Charset ASCII = Charset.forName("US-ASCII");
/**
* @return a {@link Keychain} attached to the authorized_keys identities read from
* @throws IOException if the default authorized_Keys file does not exist.
*/
public static Keychain defaultKeychain() throws IOException {
return newKeychain(new File(new File(System.getProperty("user.home", "/")), DEFAULT_PATH));
}
/**
* Convenience method which should simplify the interaction for 99% of implementations.
*
* @param authorizedKeysFile File in the RFC4253 authorized_keys format that every Linux admin knows and loves,
* which contains a list of public keys which are allowed for authentication
* @return a {@link Keychain} attached to the authorized_keys identities
*/
public static Keychain newKeychain(File authorizedKeysFile) throws IOException {
DefaultKeychain identities = new DefaultKeychain();
for (AuthorizedKey authorizedKey : parseAuthorizedKeys(authorizedKeysFile)) {
identities.add(getAuthorizedKeyIdentity(authorizedKey));
}
return identities;
}
static List<AuthorizedKey> parseAuthorizedKeys(File authorizedKeys) throws IOException {
Reader reader = null;
try {
reader = new InputStreamReader(new FileInputStream(authorizedKeys), ASCII);
return parseAuthorizedKeys(reader);
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
// shouldn't happen
e.printStackTrace(System.err);
}
}
}
}
static List<AuthorizedKey> parseAuthorizedKeys(Reader reader) throws IOException {
List<AuthorizedKey> authorizedKeys = new ArrayList<AuthorizedKey>();
BufferedReader bufferedReader = new BufferedReader(reader);
String line;
while ((line = bufferedReader.readLine()) != null) {
AuthorizedKey authorizedKey = parseAuthorizedKey(line);
if (authorizedKey != null) {
authorizedKeys.add(authorizedKey);
}
}
return Collections.unmodifiableList(authorizedKeys);
}
static AuthorizedKey parseAuthorizedKey(String authorizedKeyString) {
if (authorizedKeyString != null) {
Matcher matcher = AUTHORIZED_KEY_PATTERN.matcher(authorizedKeyString);
if (matcher.find()) {
String format = matcher.group(GROUP_FORMAT);
String encodedKey = matcher.group(GROUP_KEY);
String comment = matcher.group(GROUP_COMMENT);
return new AuthorizedKey(format, encodedKey, comment);
}
}
return null;
}
static Key getAuthorizedKeyIdentity(AuthorizedKey authorizedKey) {
if (authorizedKey != null) {
byte[] keyBlob = Base64.fromBase64String(authorizedKey.getEncodedKey());
return readPublicIdentity(keyBlob);
}
return null;
}
static final class PublicPair {
private final KeyFormat format;
private final KeySpec spec;
PublicPair(KeyFormat format, KeySpec spec) {
this.format = format;
this.spec = spec;
}
public KeyFormat getFormat() {
return format;
}
public KeySpec getSpec() {
return spec;
}
}
static PublicPair readPublicPair(byte[] keyBlob) {
ByteArrayInputStream is = new ByteArrayInputStream(keyBlob);
DataInputStream dis = new DataInputStream(is);
String type;
try {
byte[] b_type = new byte[dis.readInt()];
int r_type = dis.read(b_type);
type = new String(b_type, ASCII);
} catch (IOException e) {
throw new IllegalStateException("how did an IOException get thrown from a byte array input stream?", e);
}
final KeyFormat keyFormat = KeyFormat.forIdentifier(type);
KeySpec spec = null;
try {
switch (keyFormat) {
case SSH_RSA:
byte[] b_e = new byte[dis.readInt()];
int r_e = dis.read(b_e);
BigInteger e = new BigInteger(b_e);
byte[] b_m = new byte[dis.readInt()];
int r_m = dis.read(b_m);
BigInteger m = new BigInteger(b_m);
spec = new RSAPublicKeySpec(m, e);
break;
case SSH_DSS:
byte[] b_p = new byte[dis.readInt()];
int r_p = dis.read(b_p);
BigInteger p = new BigInteger(b_p);
byte[] b_q = new byte[dis.readInt()];
int r_q = dis.read(b_q);
BigInteger q = new BigInteger(b_q);
byte[] b_g = new byte[dis.readInt()];
int r_g = dis.read(b_g);
BigInteger g = new BigInteger(b_g);
byte[] b_y = new byte[dis.readInt()];
int r_y = dis.read(b_y);
BigInteger y = new BigInteger(b_y);
spec = new DSAPublicKeySpec(y, p, q, g);
break;
case UNKOWN:
throw new IllegalArgumentException("unknown key format: " + type);
}
} catch (IOException e) {
throw new IllegalStateException("how did an IOException get thrown from a byte array input stream?", e);
}
return new PublicPair(keyFormat, spec);
}
    /**
     * Builds a public {@link Key} identity from a decoded format/spec pair.
     *
     * @param pair the format and key spec decoded from a public key blob
     * @return a new {@code SSHKey} wrapping the generated public key, or null
     *         if the key factory rejects the spec (logged, not rethrown)
     */
    static Key createPublicIdentity(PublicPair pair) {
        KeyFactory keyFactory = pair.getFormat().getKeyFactory();
        assert keyFactory != null;
        try {
            PublicKey publicKey = keyFactory.generatePublic(pair.getSpec());
            // No private key is available from an authorized_keys blob, hence null.
            return new SSHKey(pair.getFormat(), publicKey, null);
        } catch (InvalidKeySpecException e) {
            // Spec was produced by our own decoder, so this is unexpected;
            // log and fall through to the null return.
            LOGGER.error("this exception should not have been thrown.", e);
        }
        return null;
    }
    /**
     * Decodes an RFC 4253 public key blob into a public key identity.
     *
     * @param keyBlob raw (base64-decoded) public key blob
     * @return a shiny new {@link net.adamcin.httpsig.api.Key}, or null if the
     *         decoded key spec cannot be materialized
     * @see <a href="http://tools.ietf.org/html/rfc4253#section-6.6">[RFC4253] Section 6.6: Public Key Algorithms</a>
     */
    static Key readPublicIdentity(byte[] keyBlob) {
        PublicPair pair = readPublicPair(keyBlob);
        return createPublicIdentity(pair);
    }
    /**
     * Immutable value object representing one parsed authorized_keys line:
     * the key format identifier, the base64-encoded key material, and the
     * trailing comment.
     */
    static final class AuthorizedKey {
        // Key format identifier, e.g. "ssh-rsa" or "ssh-dss".
        private final String format;
        // Base64-encoded public key blob.
        private final String encodedKey;
        // Free-form trailing comment from the authorized_keys line.
        private final String comment;
        AuthorizedKey(String format, String encodedKey, String comment) {
            this.format = format;
            this.encodedKey = encodedKey;
            this.comment = comment;
        }
        public String getFormat() {
            return format;
        }
        public String getEncodedKey() {
            return encodedKey;
        }
        public String getComment() {
            return comment;
        }
        @Override
        public String toString() {
            // Reconstructs the original authorized_keys line layout.
            return String.format("%s %s %s", format, encodedKey, comment);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.jdbc.thin;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.testframework.GridTestUtils;
/**
* Test that checks indexes handling with JDBC.
*/
public abstract class JdbcThinDynamicIndexAbstractSelfTest extends JdbcThinAbstractDmlStatementSelfTest {
    /** SQL that creates the test index IDX on Person (id, descending). */
    private static final String CREATE_INDEX = "create index idx on Person (id desc)";
    /** SQL that drops the test index IDX. */
    private static final String DROP_INDEX = "drop index idx";
    /** Index creation that is a no-op when IDX already exists. */
    private static final String CREATE_INDEX_IF_NOT_EXISTS = "create index if not exists idx on Person (id desc)";
    /** Index drop that is a no-op when IDX does not exist. */
    private static final String DROP_INDEX_IF_EXISTS = "drop index idx if exists";
    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();
        // Seed the three Person rows (keys "j", "m", "s") every test below starts from.
        try (PreparedStatement ps =
            conn.prepareStatement("INSERT INTO Person (_key, id, age, firstName, lastName) values (?, ?, ?, ?, ?)")) {
            ps.setString(1, "j");
            ps.setInt(2, 1);
            ps.setInt(3, 10);
            ps.setString(4, "John");
            ps.setString(5, "Smith");
            ps.executeUpdate();
            ps.setString(1, "m");
            ps.setInt(2, 2);
            ps.setInt(3, 20);
            ps.setString(4, "Mark");
            ps.setString(5, "Stone");
            ps.executeUpdate();
            ps.setString(1, "s");
            ps.setInt(2, 3);
            ps.setInt(3, 30);
            ps.setString(4, "Sarah");
            ps.setString(5, "Pazzi");
            ps.executeUpdate();
        }
    }
    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override CacheConfiguration cacheConfig() {
        CacheConfiguration ccfg = super.cacheConfig();
        // FULL_SYNC so updates are visible on every node before assertions run.
        ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        ccfg.setCacheMode(cacheMode());
        ccfg.setAtomicityMode(atomicityMode());
        if (nearCache())
            ccfg.setNearConfiguration(new NearCacheConfiguration());
        return ccfg;
    }
    /**
     * @return Cache mode to use.
     */
    protected abstract CacheMode cacheMode();
    /**
     * @return Cache atomicity mode to use.
     */
    protected abstract CacheAtomicityMode atomicityMode();
    /**
     * @return Whether to use near cache.
     */
    protected abstract boolean nearCache();
    /**
     * Execute given SQL statement via the thin JDBC connection.
     * @param sql Statement.
     * @throws SQLException if failed.
     */
    private void jdbcRun(String sql) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            stmt.execute(sql);
        }
    }
    /**
     * @param rs Result set.
     * @return The value of the first column at the first row from result set.
     * @throws SQLException If failed.
     */
    private Object getSingleValue(ResultSet rs) throws SQLException {
        // Also asserts the result set has exactly one column and one row.
        assertEquals(1, rs.getMetaData().getColumnCount());
        assertTrue(rs.next());
        Object res = rs.getObject(1);
        assertTrue(rs.isLast());
        return res;
    }
    /**
     * Test that after index creation index is used by queries.
     * @throws SQLException If failed.
     */
    public void testCreateIndex() throws SQLException {
        assertSize(3);
        assertColumnValues(30, 20, 10);
        jdbcRun(CREATE_INDEX);
        // Test that local queries on all server nodes use new index.
        // The EXPLAIN output must name IDX rather than a table scan.
        for (int i = 0 ; i < 3; i++) {
            List<List<?>> locRes = ignite(i).cache(DEFAULT_CACHE_NAME).query(new SqlFieldsQuery("explain select id from " +
                "Person where id = 5").setLocal(true)).getAll();
            assertEquals(F.asList(
                Collections.singletonList("SELECT\n" +
                    "    ID\n" +
                    "FROM \"" + DEFAULT_CACHE_NAME + "\".PERSON\n" +
                    "    /* \"" + DEFAULT_CACHE_NAME + "\".IDX: ID = 5 */\n" +
                    "WHERE ID = 5")
            ), locRes);
        }
        // Data must be unaffected by index creation.
        assertSize(3);
        assertColumnValues(30, 20, 10);
    }
    /**
     * Test that creating an index with duplicate name yields an error.
     * @throws SQLException If failed.
     */
    public void testCreateIndexWithDuplicateName() throws SQLException {
        jdbcRun(CREATE_INDEX);
        GridTestUtils.assertThrowsAnyCause(log, new Callable<Void>() {
            @Override public Void call() throws Exception {
                jdbcRun(CREATE_INDEX);
                return null;
            }
        }, IgniteCheckedException.class, "Index already exists: IDX");
    }
    /**
     * Test that creating an index with duplicate name does not yield an error with {@code IF NOT EXISTS}.
     * @throws SQLException If failed.
     */
    public void testCreateIndexIfNotExists() throws SQLException {
        jdbcRun(CREATE_INDEX);
        // Despite duplicate name, this does not yield an error.
        jdbcRun(CREATE_INDEX_IF_NOT_EXISTS);
    }
    /**
     * Test that after index drop there are no attempts to use it, and data state remains intact.
     * @throws SQLException If failed.
     */
    public void testDropIndex() throws SQLException {
        assertSize(3);
        jdbcRun(CREATE_INDEX);
        assertSize(3);
        jdbcRun(DROP_INDEX);
        // Test that no local queries on server nodes use new index.
        // The EXPLAIN output must show a full table scan (__SCAN_).
        for (int i = 0 ; i < 3; i++) {
            List<List<?>> locRes = ignite(i).cache(DEFAULT_CACHE_NAME).query(new SqlFieldsQuery("explain select id from " +
                "Person where id = 5").setLocal(true)).getAll();
            assertEquals(F.asList(
                Collections.singletonList("SELECT\n" +
                    "    ID\n" +
                    "FROM \"" + DEFAULT_CACHE_NAME + "\".PERSON\n" +
                    "    /* \"" + DEFAULT_CACHE_NAME + "\".PERSON.__SCAN_ */\n" +
                    "WHERE ID = 5")
            ), locRes);
        }
        assertSize(3);
    }
    /**
     * Test that dropping a non-existent index yields an error.
     */
    public void testDropMissingIndex() {
        GridTestUtils.assertThrowsAnyCause(log, new Callable<Void>() {
            @Override public Void call() throws Exception {
                jdbcRun(DROP_INDEX);
                return null;
            }
        }, IgniteCheckedException.class, "Index doesn't exist: IDX");
    }
    /**
     * Test that dropping a non-existent index does not yield an error with {@code IF EXISTS}.
     * @throws SQLException If failed.
     */
    public void testDropMissingIndexIfExists() throws SQLException {
        // Despite index missing, this does not yield an error.
        jdbcRun(DROP_INDEX_IF_EXISTS);
    }
    /**
     * Test that changes in cache affect index, and vice versa.
     * @throws SQLException If failed.
     */
    public void testIndexState() throws SQLException {
        IgniteCache<String, Person> cache = cache();
        assertSize(3);
        assertColumnValues(30, 20, 10);
        jdbcRun(CREATE_INDEX);
        assertSize(3);
        assertColumnValues(30, 20, 10);
        // Removing via cache API must be reflected in indexed SQL results.
        cache.remove("m");
        assertColumnValues(30, 10);
        // Inserting via cache API must be reflected as well.
        cache.put("a", new Person(4, "someVal", "a", 5));
        assertColumnValues(5, 30, 10);
        jdbcRun(DROP_INDEX);
        // Dropping the index must not change the data itself.
        assertColumnValues(5, 30, 10);
    }
    /**
     * Check that values of {@code field1} match what we expect.
     * @param vals Expected values.
     * @throws SQLException If failed.
     */
    private void assertColumnValues(int... vals) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            try (ResultSet rs = stmt.executeQuery("SELECT age FROM Person ORDER BY id desc")) {
                assertEquals(1, rs.getMetaData().getColumnCount());
                for (int i = 0; i < vals.length; i++) {
                    assertTrue("Result set must have " + vals.length + " rows, got " + i, rs.next());
                    assertEquals(vals[i], rs.getInt(1));
                }
                assertFalse("Result set must have exactly " + vals.length + " rows", rs.next());
            }
        }
    }
    /**
     * Do a {@code SELECT COUNT(*)} query to check index state correctness.
     * @param expSize Expected number of items in table.
     * @throws SQLException If failed.
     */
    private void assertSize(long expSize) throws SQLException {
        // Check both the cache-API size and the SQL-visible row count.
        assertEquals(expSize, cache().size());
        try (Statement stmt = conn.createStatement()) {
            conn.setSchema(DEFAULT_CACHE_NAME);
            try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) from Person")) {
                assertEquals(expSize, getSingleValue(rs));
            }
        }
    }
    /**
     * @return Cache.
     */
    private IgniteCache<String, Person> cache() {
        return grid(0).cache(DEFAULT_CACHE_NAME);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.newplan.logical.relational;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.util.MultiMap;
import org.apache.pig.newplan.Operator;
import org.apache.pig.newplan.PlanVisitor;
import org.apache.pig.newplan.logical.expression.LogicalExpressionPlan;
public class LOJoin extends LogicalRelationalOperator implements Serializable {
    private static final long serialVersionUID = 2L;
    /**
     * Enum for the type of join
     */
    public static enum JOINTYPE {
        HASH, // Hash Join
        REPLICATED, // Fragment Replicated join
        SKEWED, // Skewed Join
        MERGE, // Sort Merge Join
        MERGESPARSE // Sort Merge Index Join
    };
    /**
     * LOJoin contains a list of logical operators corresponding to the
     * relational operators and a list of generates for each relational
     * operator. Each generate operator in turn contains a list of expressions
     * for the columns that are projected
     */
    //private static Log log = LogFactory.getLog(LOJoin.class);
    // expression plans for each input, keyed by input index.
    private MultiMap<Integer, LogicalExpressionPlan> mJoinPlans;
    // indicator for each input whether it is inner
    private boolean[] mInnerFlags;
    private JOINTYPE mJoinType; // Retains the type of the join
    /**
     * static constant to refer to the option of selecting a join type
     */
    public final static Integer OPTION_JOIN = 1;
    // Bare constructor; join plans/type/flags must be set via setters before use.
    public LOJoin(LogicalPlan plan) {
        super("LOJoin", plan);
    }
    /**
     * @param plan enclosing logical plan
     * @param joinPlans per-input expression plans (key = input index)
     * @param jt join strategy
     * @param isInner per-input inner-join flags, parallel to the inputs
     */
    public LOJoin(LogicalPlan plan,
                MultiMap<Integer, LogicalExpressionPlan> joinPlans,
                JOINTYPE jt,
                boolean[] isInner) {
        super("LOJoin", plan);
        mJoinPlans = joinPlans;
        mJoinType = jt;
        mInnerFlags = isInner;
    }
    public void setJoinPlans(MultiMap<Integer, LogicalExpressionPlan> joinPlans) {
        this.mJoinPlans = joinPlans;
    }
    public void setInnerFlags(boolean[] isInner) {
        this.mInnerFlags = isInner;
    }
    public void setJoinType(JOINTYPE jt) {
        this.mJoinType = jt;
    }
    public boolean isInner(int inputIndex) {
        return mInnerFlags[inputIndex];
    }
    public boolean[] getInnerFlags() {
        return mInnerFlags;
    }
    public JOINTYPE getJoinType() {
        return mJoinType;
    }
    // Resets the strategy back to the default hash join.
    public void resetJoinType() {
        mJoinType = JOINTYPE.HASH;
    }
    // Expression plans for a single input.
    public Collection<LogicalExpressionPlan> getJoinPlan(int inputIndex) {
        return mJoinPlans.get(inputIndex);
    }
    /**
     * Get all of the expressions plans that are in this join.
     * @return collection of all expression plans.
     */
    public MultiMap<Integer,LogicalExpressionPlan> getExpressionPlans() {
        return mJoinPlans;
    }
    public Collection<LogicalExpressionPlan> getExpressionPlanValues() {
        return mJoinPlans.values();
    }
    /**
     * Computes the output schema by concatenating the schemas of all inputs,
     * prefixing each aliased field with "inputAlias::" to disambiguate.
     * Returns null (and caches null) if any input schema is unknown.
     */
    @Override
    public LogicalSchema getSchema() throws FrontendException {
        // if schema is calculated before, just return
        if (schema != null) {
            return schema;
        }
        List<Operator> inputs = null;
        inputs = plan.getPredecessors(this);
        if (inputs == null) {
            return null;
        }
        List<LogicalSchema.LogicalFieldSchema> fss = new ArrayList<LogicalSchema.LogicalFieldSchema>();
        for (Operator op : inputs) {
            LogicalSchema inputSchema = ((LogicalRelationalOperator)op).getSchema();
            // the schema of one input is unknown, so the join schema is unknown, just return
            if (inputSchema == null) {
                schema = null;
                return schema;
            }
            for (int i=0; i<inputSchema.size(); i++) {
                LogicalSchema.LogicalFieldSchema fs = inputSchema.getField(i);
                LogicalSchema.LogicalFieldSchema newFS = null;
                if(fs.alias != null) {
                    // Qualify the alias with the producing input's alias.
                    newFS = new LogicalSchema.LogicalFieldSchema(((LogicalRelationalOperator)op).getAlias()+"::"+fs.alias ,fs.schema, fs.type, fs.uid);
                } else {
                    newFS = new LogicalSchema.LogicalFieldSchema(fs.alias, fs.schema, fs.type, fs.uid);
                }
                fss.add(newFS);
            }
        }
        schema = new LogicalSchema();
        for(LogicalSchema.LogicalFieldSchema fieldSchema: fss) {
            schema.addField(fieldSchema);
        }
        return schema;
    }
    @Override
    public void accept(PlanVisitor v) throws FrontendException {
        if (!(v instanceof LogicalRelationalNodesVisitor)) {
            throw new FrontendException("Expected LogicalPlanVisitor", 2223);
        }
        ((LogicalRelationalNodesVisitor)v).visit(this);
    }
    /**
     * Deep structural equality: join type, inner flags, base operator state,
     * and the per-input expression plans must all match.
     */
    @Override
    public boolean isEqual(Operator other) throws FrontendException {
        if (other != null && other instanceof LOJoin) {
            LOJoin oj = (LOJoin)other;
            if (mJoinType != oj.mJoinType) return false;
            if (mInnerFlags.length != oj.mInnerFlags.length) return false;
            for (int i = 0; i < mInnerFlags.length; i++) {
                if (mInnerFlags[i] != oj.mInnerFlags[i]) return false;
            }
            if (!checkEquality(oj)) return false;
            if (mJoinPlans.size() != oj.mJoinPlans.size()) return false;
            // Now, we need to make sure that for each input we are projecting
            // the same columns. This is slightly complicated since MultiMap
            // doesn't return any particular order, so we have to find the
            // matching input in each case.
            for (Integer p : mJoinPlans.keySet()) {
                Iterator<Integer> iter = oj.mJoinPlans.keySet().iterator();
                int op = -1;
                while (iter.hasNext()) {
                    op = iter.next();
                    if (p.equals(op)) break;
                }
                if (op != -1) {
                    Collection<LogicalExpressionPlan> c = mJoinPlans.get(p);
                    Collection<LogicalExpressionPlan> oc = oj.mJoinPlans.get(op);
                    if (c.size() != oc.size()) return false;
                    if (!(c instanceof List) || !(oc instanceof List)) {
                        throw new FrontendException(
                            "Expected list of expression plans", 2238);
                    }
                    List<LogicalExpressionPlan> elist = (List<LogicalExpressionPlan>)c;
                    List<LogicalExpressionPlan> oelist = (List<LogicalExpressionPlan>)oc;
                    // Plans for the same input must match pairwise, in order.
                    for (int i = 0; i < elist.size(); i++) {
                        if (!elist.get(i).isEqual(oelist.get(i))) return false;
                    }
                } else {
                    return false;
                }
            }
            return true;
        } else {
            return false;
        }
    }
    // NOTE(review): throws NPE if mJoinType was never set (bare constructor) — confirm callers always set it.
    @Override
    public String getName() {
        return name + "(" + mJoinType.toString() + ")";
    }
    public List<Operator> getInputs(LogicalPlan plan) {
        return plan.getPredecessors(this);
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc.reader;
import com.facebook.presto.memory.context.LocalMemoryContext;
import com.facebook.presto.orc.StreamDescriptor;
import com.facebook.presto.orc.TupleDomainFilter;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.Int128ArrayBlock;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.UnscaledDecimal128Arithmetic;
import io.airlift.slice.Slice;
import io.airlift.slice.UnsafeSlice;
import java.io.IOException;
import java.util.Arrays;
import java.util.Optional;
import static com.facebook.presto.spi.type.UnscaledDecimal128Arithmetic.rescale;
public class LongDecimalSelectiveStreamReader
        extends AbstractDecimalSelectiveStreamReader
{
    public LongDecimalSelectiveStreamReader(
            StreamDescriptor streamDescriptor,
            Optional<TupleDomainFilter> filter,
            Optional<Type> outputType,
            LocalMemoryContext systemMemoryContext)
    {
        // valuesPerPosition = 2: each long decimal occupies two longs (128 bits).
        super(streamDescriptor, filter, outputType, systemMemoryContext, 2);
    }
    /**
     * Reads the requested positions without applying a filter: every position
     * is emitted, rescaled to this reader's target scale, as a (low, high)
     * pair of longs per position.
     *
     * @return the stream position after the last read value
     */
    @Override
    protected int readNoFilter(int[] positions, int positionCount)
            throws IOException
    {
        // Scratch slices reused across positions to avoid per-row allocation.
        Slice decimal = UnscaledDecimal128Arithmetic.unscaledDecimal();
        Slice rescaledDecimal = UnscaledDecimal128Arithmetic.unscaledDecimal();
        int streamPosition = 0;
        for (int i = 0; i < positionCount; i++) {
            int position = positions[i];
            // Skip over stream rows that are not requested.
            if (position > streamPosition) {
                skip(position - streamPosition);
                streamPosition = position;
            }
            if (presentStream != null && !presentStream.nextBit()) {
                nulls[i] = true;
            }
            else {
                // Rescale the stored value from its per-row scale to the reader's scale.
                int scale = (int) scaleStream.next();
                dataStream.nextLongDecimal(decimal);
                rescale(decimal, this.scale - scale, rescaledDecimal);
                values[2 * i] = UnsafeSlice.getLongUnchecked(rescaledDecimal, 0);
                values[2 * i + 1] = UnsafeSlice.getLongUnchecked(rescaledDecimal, Long.BYTES);
                if (presentStream != null) {
                    nulls[i] = false;
                }
            }
            streamPosition++;
        }
        outputPositionCount = positionCount;
        return streamPosition;
    }
    /**
     * Reads the requested positions, keeping only those that pass the filter
     * (or, for nulls, only when nulls are allowed / the filter accepts null).
     *
     * NOTE(review): this path calls filter.testDecimal unconditionally, so it
     * assumes filter != null; the trailing "if (filter != null)" guard looks
     * defensive rather than load-bearing — confirm against the base class.
     *
     * @return the stream position after the last consumed value
     */
    @Override
    protected int readWithFilter(int[] positions, int positionCount)
            throws IOException
    {
        int streamPosition = 0;
        outputPositionCount = 0;
        Slice decimal = UnscaledDecimal128Arithmetic.unscaledDecimal();
        Slice rescaledDecimal = UnscaledDecimal128Arithmetic.unscaledDecimal();
        for (int i = 0; i < positionCount; i++) {
            int position = positions[i];
            if (position > streamPosition) {
                skip(position - streamPosition);
                streamPosition = position;
            }
            if (presentStream != null && !presentStream.nextBit()) {
                // Null row: keep it only if nulls pass the (possibly non-deterministic) filter.
                if ((nonDeterministicFilter && filter.testNull()) || nullsAllowed) {
                    if (outputRequired) {
                        nulls[outputPositionCount] = true;
                    }
                    outputPositions[outputPositionCount] = position;
                    outputPositionCount++;
                }
            }
            else {
                int scale = (int) scaleStream.next();
                dataStream.nextLongDecimal(decimal);
                rescale(decimal, this.scale - scale, rescaledDecimal);
                long low = UnsafeSlice.getLongUnchecked(rescaledDecimal, 0);
                long high = UnsafeSlice.getLongUnchecked(rescaledDecimal, Long.BYTES);
                if (filter.testDecimal(low, high)) {
                    if (outputRequired) {
                        values[2 * outputPositionCount] = low;
                        values[2 * outputPositionCount + 1] = high;
                        if (nullsAllowed && presentStream != null) {
                            nulls[outputPositionCount] = false;
                        }
                    }
                    outputPositions[outputPositionCount] = position;
                    outputPositionCount++;
                }
            }
            streamPosition++;
            if (filter != null) {
                // Let the filter retract already-accepted positions and skip
                // ahead past positions it knows will fail.
                outputPositionCount -= filter.getPrecedingPositionsToFail();
                int succeedingPositionsToFail = filter.getSucceedingPositionsToFail();
                if (succeedingPositionsToFail > 0) {
                    int positionsToSkip = 0;
                    for (int j = 0; j < succeedingPositionsToFail; j++) {
                        i++;
                        int nextPosition = positions[i];
                        positionsToSkip += 1 + nextPosition - streamPosition;
                        streamPosition = nextPosition + 1;
                    }
                    skip(positionsToSkip);
                }
            }
        }
        return streamPosition;
    }
    /**
     * Copies the values (and nulls) for the given subset of output positions
     * into the caller-provided arrays. Both position lists are assumed sorted;
     * every requested position must be present in outputPositions.
     */
    @Override
    protected void copyValues(int[] positions, int positionsCount, long[] valuesCopy, boolean[] nullsCopy)
    {
        int positionIndex = 0;
        int nextPosition = positions[positionIndex];
        for (int i = 0; i < outputPositionCount; i++) {
            if (outputPositions[i] < nextPosition) {
                continue;
            }
            assert outputPositions[i] == nextPosition;
            // Two longs per decimal value.
            valuesCopy[2 * positionIndex] = this.values[2 * i];
            valuesCopy[2 * positionIndex + 1] = this.values[2 * i + 1];
            if (nullsCopy != null) {
                nullsCopy[positionIndex] = this.nulls[i];
            }
            positionIndex++;
            if (positionIndex >= positionsCount) {
                break;
            }
            nextPosition = positions[positionIndex];
        }
    }
    /**
     * Compacts the internal values/nulls/outputPositions arrays in place so
     * they contain only the given subset of positions.
     */
    @Override
    protected void compactValues(int[] positions, int positionCount, boolean compactNulls)
    {
        if (outputPositionsReadOnly) {
            // Shared array: take a private copy before mutating it.
            outputPositions = Arrays.copyOf(outputPositions, outputPositionCount);
            outputPositionsReadOnly = false;
        }
        int positionIndex = 0;
        int nextPosition = positions[positionIndex];
        for (int i = 0; i < outputPositionCount; i++) {
            if (outputPositions[i] < nextPosition) {
                continue;
            }
            assert outputPositions[i] == nextPosition;
            values[2 * positionIndex] = values[2 * i];
            values[2 * positionIndex + 1] = values[2 * i + 1];
            if (compactNulls) {
                nulls[positionIndex] = nulls[i];
            }
            outputPositions[positionIndex] = nextPosition;
            positionIndex++;
            if (positionIndex >= positionCount) {
                break;
            }
            nextPosition = positions[positionIndex];
        }
        outputPositionCount = positionCount;
    }
    @Override
    protected Block makeBlock(int positionCount, boolean includeNulls, boolean[] nulls, long[] values)
    {
        // Int128ArrayBlock stores each value as two consecutive longs.
        return new Int128ArrayBlock(positionCount, Optional.ofNullable(includeNulls ? nulls : null), values);
    }
}
| |
package org.gluu.oxauth.ciba;
import static org.gluu.oxauth.model.register.RegisterRequestParam.BACKCHANNEL_AUTHENTICATION_REQUEST_SIGNING_ALG;
import static org.gluu.oxauth.model.register.RegisterRequestParam.BACKCHANNEL_TOKEN_DELIVERY_MODE;
import static org.gluu.oxauth.model.register.RegisterRequestParam.BACKCHANNEL_USER_CODE_PARAMETER;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.apache.commons.lang.time.DateUtils;
import org.gluu.oxauth.BaseTest;
import org.gluu.oxauth.client.AuthorizationRequest;
import org.gluu.oxauth.client.AuthorizationResponse;
import org.gluu.oxauth.client.AuthorizeClient;
import org.gluu.oxauth.client.BackchannelAuthenticationClient;
import org.gluu.oxauth.client.BackchannelAuthenticationRequest;
import org.gluu.oxauth.client.BackchannelAuthenticationResponse;
import org.gluu.oxauth.client.JwkClient;
import org.gluu.oxauth.client.RegisterClient;
import org.gluu.oxauth.client.RegisterRequest;
import org.gluu.oxauth.client.RegisterResponse;
import org.gluu.oxauth.client.model.authorize.JwtAuthorizationRequest;
import org.gluu.oxauth.model.ciba.BackchannelAuthenticationErrorResponseType;
import org.gluu.oxauth.model.common.BackchannelTokenDeliveryMode;
import org.gluu.oxauth.model.common.GrantType;
import org.gluu.oxauth.model.common.ResponseType;
import org.gluu.oxauth.model.crypto.OxAuthCryptoProvider;
import org.gluu.oxauth.model.crypto.signature.AsymmetricSignatureAlgorithm;
import org.gluu.oxauth.model.crypto.signature.RSAPublicKey;
import org.gluu.oxauth.model.crypto.signature.SignatureAlgorithm;
import org.gluu.oxauth.model.jws.RSASigner;
import org.gluu.oxauth.model.jwt.Jwt;
import org.gluu.oxauth.model.jwt.JwtClaimName;
import org.gluu.oxauth.model.jwt.JwtHeaderName;
import org.gluu.oxauth.model.register.ApplicationType;
import org.gluu.oxauth.model.util.StringUtils;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
/**
* Responsible to validate many cases using JWT Requests for Ciba Poll flows.
*/
public class CibaPollModeJwtAuthRequestTests extends BaseTest {
private RegisterResponse registerResponse;
private String idTokenHintRS384;
@Parameters({"PS256_keyId", "userId", "dnName", "keyStoreFile", "keyStoreSecret", "clientJwksUri"})
@Test
public void pollFlowPS256HappyFlow(final String keyId, final String userId, final String dnName,
final String keyStoreFile, final String keyStoreSecret,
final String clientJwksUri) throws Exception {
showTitle("pollFlowPS256HappyFlow");
registerPollClient(clientJwksUri, BackchannelTokenDeliveryMode.POLL, AsymmetricSignatureAlgorithm.PS256);
JwtAuthorizationRequest jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName,
userId, keyId, SignatureAlgorithm.PS256);
processCibaAuthorizationEndpointSuccessfulCall(jwtAuthorizationRequest.getEncodedJwt(),
registerResponse.getClientId(), registerResponse.getClientSecret());
}
@Parameters({"PS384_keyId", "userId", "dnName", "keyStoreFile", "keyStoreSecret", "clientJwksUri"})
@Test
public void pollFlowPS384HappyFlow(final String keyId, final String userId, final String dnName,
final String keyStoreFile, final String keyStoreSecret,
final String clientJwksUri) throws Exception {
showTitle("pollFlowPS384HappyFlow");
registerPollClient(clientJwksUri, BackchannelTokenDeliveryMode.POLL, AsymmetricSignatureAlgorithm.PS384);
JwtAuthorizationRequest jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName,
userId, keyId, SignatureAlgorithm.PS384);
processCibaAuthorizationEndpointSuccessfulCall(jwtAuthorizationRequest.getEncodedJwt(),
registerResponse.getClientId(), registerResponse.getClientSecret());
}
@Parameters({"PS512_keyId", "userId", "dnName", "keyStoreFile", "keyStoreSecret", "clientJwksUri"})
@Test
public void pollFlowPS512HappyFlow(final String keyId, final String userId, final String dnName,
final String keyStoreFile, final String keyStoreSecret,
final String clientJwksUri) throws Exception {
showTitle("pollFlowPS512HappyFlow");
registerPollClient(clientJwksUri, BackchannelTokenDeliveryMode.POLL, AsymmetricSignatureAlgorithm.PS512);
JwtAuthorizationRequest jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName,
userId, keyId, SignatureAlgorithm.PS512);
processCibaAuthorizationEndpointSuccessfulCall(jwtAuthorizationRequest.getEncodedJwt(),
registerResponse.getClientId(), registerResponse.getClientSecret());
}
@Parameters({"ES256_keyId", "userId", "dnName", "keyStoreFile", "keyStoreSecret", "clientJwksUri"})
@Test
public void pollFlowES256HappyFlow(final String keyId, final String userId, final String dnName,
final String keyStoreFile, final String keyStoreSecret,
final String clientJwksUri) throws Exception {
showTitle("pollFlowES256HappyFlow");
registerPollClient(clientJwksUri, BackchannelTokenDeliveryMode.POLL, AsymmetricSignatureAlgorithm.ES256);
JwtAuthorizationRequest jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName,
userId, keyId, SignatureAlgorithm.ES256);
processCibaAuthorizationEndpointSuccessfulCall(jwtAuthorizationRequest.getEncodedJwt(),
registerResponse.getClientId(), registerResponse.getClientSecret());
}
@Parameters({"ES384_keyId", "userId", "dnName", "keyStoreFile", "keyStoreSecret", "clientJwksUri"})
@Test
public void pollFlowES384HappyFlow(final String keyId, final String userId, final String dnName,
final String keyStoreFile, final String keyStoreSecret,
final String clientJwksUri) throws Exception {
showTitle("pollFlowES384HappyFlow");
registerPollClient(clientJwksUri, BackchannelTokenDeliveryMode.POLL, AsymmetricSignatureAlgorithm.ES384);
JwtAuthorizationRequest jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName,
userId, keyId, SignatureAlgorithm.ES384);
processCibaAuthorizationEndpointSuccessfulCall(jwtAuthorizationRequest.getEncodedJwt(),
registerResponse.getClientId(), registerResponse.getClientSecret());
}
@Parameters({"ES512_keyId", "userId", "dnName", "keyStoreFile", "keyStoreSecret", "clientJwksUri"})
@Test
public void pollFlowES512HappyFlow(final String keyId, final String userId, final String dnName,
final String keyStoreFile, final String keyStoreSecret,
final String clientJwksUri) throws Exception {
showTitle("pollFlowES512HappyFlow");
registerPollClient(clientJwksUri, BackchannelTokenDeliveryMode.POLL, AsymmetricSignatureAlgorithm.ES512);
JwtAuthorizationRequest jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName,
userId, keyId, SignatureAlgorithm.ES512);
processCibaAuthorizationEndpointSuccessfulCall(jwtAuthorizationRequest.getEncodedJwt(),
registerResponse.getClientId(), registerResponse.getClientSecret());
}
    /**
     * Verifies that the backchannel authentication endpoint rejects signed
     * JWT requests that are structurally valid but semantically wrong:
     * missing aud, missing user hint, invalid binding message, wrong client.
     */
    @Parameters({"PS256_keyId", "userId", "dnName", "keyStoreFile", "keyStoreSecret", "clientJwksUri"})
    @Test
    public void cibaPollJWTRequestDataValidations(final String keyId, final String userId, final String dnName,
                                                  final String keyStoreFile, final String keyStoreSecret,
                                                  final String clientJwksUri) throws Exception {
        showTitle("cibaPollJWTRequestDataValidations");
        registerPollClient(clientJwksUri, BackchannelTokenDeliveryMode.POLL, AsymmetricSignatureAlgorithm.PS256);
        String clientId = registerResponse.getClientId();
        // 1. Request doesn't include Aud -> 400 invalid_request
        JwtAuthorizationRequest jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName, userId, keyId, SignatureAlgorithm.PS256);
        jwtAuthorizationRequest.setAud(null);
        processCibaAuthorizationEndpointFailCall(jwtAuthorizationRequest.getEncodedJwt(), clientId,
                registerResponse.getClientSecret(), 400, BackchannelAuthenticationErrorResponseType.INVALID_REQUEST.getParameter());
        // 2. Request doesn't include any hint -> 400 unknown_user_id
        jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName, userId, keyId, SignatureAlgorithm.PS256);
        jwtAuthorizationRequest.setLoginHint(null);
        processCibaAuthorizationEndpointFailCall(jwtAuthorizationRequest.getEncodedJwt(), clientId,
                registerResponse.getClientSecret(), 400, BackchannelAuthenticationErrorResponseType.UNKNOWN_USER_ID.getParameter());
        // 3. Request has a wrong Binding Message -> 400 invalid_binding_message
        jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName, userId, keyId, SignatureAlgorithm.PS256);
        jwtAuthorizationRequest.setBindingMessage("(/)=&/(%&/(%$/&($%/&)");
        processCibaAuthorizationEndpointFailCall(jwtAuthorizationRequest.getEncodedJwt(), clientId,
                registerResponse.getClientSecret(), 400, BackchannelAuthenticationErrorResponseType.INVALID_BINDING_MESSAGE.getParameter());
        // 4. Request has wrong Client Id -> 401 invalid_client
        jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName, userId, keyId, SignatureAlgorithm.PS256);
        jwtAuthorizationRequest.setClientId("abcabcabcabcabcabcabcabcabcabc");
        processCibaAuthorizationEndpointFailCall(jwtAuthorizationRequest.getEncodedJwt(), "abcabcabcabcabcabcabcabcabcabc",
                registerResponse.getClientSecret(), 401, BackchannelAuthenticationErrorResponseType.INVALID_CLIENT.getParameter());
    }
    /**
     * Verifies that id_token_hint is accepted as the user identity hint when
     * no login_hint is present.
     *
     * NOTE(review): dependsOnMethods targets "idTokenHintRS384", which is not
     * visible in this file as a test method (only a field of that name is) —
     * confirm the producing test exists elsewhere in the class.
     */
    @Parameters({"PS256_keyId", "userId", "dnName", "keyStoreFile", "keyStoreSecret", "clientJwksUri"})
    @Test(dependsOnMethods = "idTokenHintRS384")
    public void cibaPollJWTRequestIdTokenHint(final String keyId, final String userId, final String dnName,
                                              final String keyStoreFile, final String keyStoreSecret,
                                              final String clientJwksUri) throws Exception {
        showTitle("cibaPollJWTRequestIdTokenHint");
        registerPollClient(clientJwksUri, BackchannelTokenDeliveryMode.POLL, AsymmetricSignatureAlgorithm.PS256);
        // Replace the login_hint with an id_token_hint as the only user identity hint.
        JwtAuthorizationRequest jwtAuthorizationRequest = createJwtRequest(keyStoreFile, keyStoreSecret, dnName, userId, keyId, SignatureAlgorithm.PS256);
        jwtAuthorizationRequest.setLoginHint(null);
        jwtAuthorizationRequest.setIdTokenHint(idTokenHintRS384);
        processCibaAuthorizationEndpointSuccessfulCall(jwtAuthorizationRequest.getEncodedJwt(),
                registerResponse.getClientId(), registerResponse.getClientSecret());
    }
@Parameters({"PS256_keyId", "userId", "dnName", "keyStoreFile", "keyStoreSecret", "clientJwksUri"})
@Test
public void cibaPollJWTRequestWrongSigning(final String keyId, final String userId, final String dnName,
                                           final String keyStoreFile, final String keyStoreSecret,
                                           final String clientJwksUri) throws Exception {
    showTitle("cibaPollJWTRequestWrongSigning");
    registerPollClient(clientJwksUri, BackchannelTokenDeliveryMode.POLL, AsymmetricSignatureAlgorithm.PS256);

    // Take a correctly signed request and replace its signature segment with
    // garbage; the OP must reject the tampered JWT with invalid_request.
    JwtAuthorizationRequest request = createJwtRequest(keyStoreFile, keyStoreSecret, dnName, userId, keyId, SignatureAlgorithm.PS256);
    String[] parts = request.getEncodedJwt().split("\\.");
    String tamperedJwt = parts[0] + "." + parts[1] + ".WRONG-SIGNING";

    processCibaAuthorizationEndpointFailCall(tamperedJwt, registerResponse.getClientId(),
            registerResponse.getClientSecret(), 400, BackchannelAuthenticationErrorResponseType.INVALID_REQUEST.getParameter());
}
/**
 * Registers a client for the CIBA flow with the given token delivery mode and
 * request-signing algorithm, stores the result in {@code registerResponse},
 * and asserts that registration succeeded with the expected CIBA metadata.
 *
 * @param clientJwksUri JWKS URI exposing the client's public signing keys
 * @param mode          backchannel token delivery mode to register (e.g. POLL)
 * @param algorithm     signing algorithm for backchannel authentication requests
 */
private void registerPollClient(final String clientJwksUri, BackchannelTokenDeliveryMode mode, AsymmetricSignatureAlgorithm algorithm) {
    RegisterRequest request = new RegisterRequest(ApplicationType.WEB, "oxAuth test app", null);
    request.setJwksUri(clientJwksUri);
    request.setGrantTypes(Collections.singletonList(GrantType.CIBA));
    request.setBackchannelTokenDeliveryMode(mode);
    request.setBackchannelAuthenticationRequestSigningAlg(algorithm);
    request.setBackchannelUserCodeParameter(false);

    RegisterClient client = new RegisterClient(registrationEndpoint);
    client.setRequest(request);
    registerResponse = client.exec();
    showClient(client);

    // Basic registration success checks.
    assertEquals(registerResponse.getStatus(), 200, "Unexpected response code: " + registerResponse.getEntity());
    assertNotNull(registerResponse.getClientId());
    assertNotNull(registerResponse.getClientSecret());
    assertNotNull(registerResponse.getRegistrationAccessToken());
    assertNotNull(registerResponse.getClientSecretExpiresAt());

    // The CIBA metadata must be echoed back exactly as requested.
    assertTrue(registerResponse.getClaims().containsKey(BACKCHANNEL_TOKEN_DELIVERY_MODE.toString()));
    assertTrue(registerResponse.getClaims().containsKey(BACKCHANNEL_AUTHENTICATION_REQUEST_SIGNING_ALG.toString()));
    assertTrue(registerResponse.getClaims().containsKey(BACKCHANNEL_USER_CODE_PARAMETER.toString()));
    assertEquals(registerResponse.getClaims().get(BACKCHANNEL_TOKEN_DELIVERY_MODE.toString()), mode.getValue());
    assertEquals(registerResponse.getClaims().get(BACKCHANNEL_AUTHENTICATION_REQUEST_SIGNING_ALG.toString()), algorithm.getValue());
    assertEquals(registerResponse.getClaims().get(BACKCHANNEL_USER_CODE_PARAMETER.toString()), "false");
}
/**
 * Posts a signed CIBA authentication request that is expected to SUCCEED and
 * verifies the success members of the OP's response.
 *
 * @param jwtRequest   signed request object (compact JWT serialization)
 * @param clientId     client identifier, also used as the basic-auth username
 * @param clientSecret client secret used as the basic-auth password
 */
private void processCibaAuthorizationEndpointSuccessfulCall(String jwtRequest, String clientId, String clientSecret) {
    BackchannelAuthenticationRequest request = new BackchannelAuthenticationRequest();
    request.setRequest(jwtRequest);
    request.setClientId(clientId);
    request.setAuthUsername(clientId);
    request.setAuthPassword(clientSecret);

    BackchannelAuthenticationClient client = new BackchannelAuthenticationClient(backchannelAuthenticationEndpoint);
    client.setRequest(request);
    BackchannelAuthenticationResponse response = client.exec();
    showClient(client);

    assertEquals(response.getStatus(), 200, "Unexpected response code: " + response.getEntity());
    assertNotNull(response.getAuthReqId());
    assertNotNull(response.getExpiresIn());
    // This parameter will only be present if the Client is registered to use the Poll or Ping modes.
    assertNotNull(response.getInterval());
}
/**
 * Posts a signed CIBA authentication request that is expected to FAIL and
 * verifies the HTTP status and error code returned by the OP, as well as the
 * absence of all success members.
 *
 * @param jwtRequest   signed request object (compact JWT serialization)
 * @param clientId     client identifier, also used as the basic-auth username
 * @param clientSecret client secret used as the basic-auth password
 * @param httpStatus   expected HTTP status code of the error response
 * @param error        expected value of the "error" member of the response body
 */
private void processCibaAuthorizationEndpointFailCall(String jwtRequest, String clientId, String clientSecret, int httpStatus, String error) {
    BackchannelAuthenticationRequest backchannelAuthenticationRequest = new BackchannelAuthenticationRequest();
    backchannelAuthenticationRequest.setRequest(jwtRequest);
    backchannelAuthenticationRequest.setClientId(clientId);
    backchannelAuthenticationRequest.setAuthUsername(clientId);
    backchannelAuthenticationRequest.setAuthPassword(clientSecret);

    BackchannelAuthenticationClient backchannelAuthenticationClient = new BackchannelAuthenticationClient(backchannelAuthenticationEndpoint);
    backchannelAuthenticationClient.setRequest(backchannelAuthenticationRequest);
    BackchannelAuthenticationResponse backchannelAuthenticationResponse = backchannelAuthenticationClient.exec();
    showClient(backchannelAuthenticationClient);

    assertEquals(backchannelAuthenticationResponse.getStatus(), httpStatus, "Unexpected response code: " + backchannelAuthenticationResponse.getEntity());
    assertNotNull(backchannelAuthenticationResponse.getErrorType());
    assertNotNull(backchannelAuthenticationResponse.getErrorDescription());
    // FIX: TestNG's assertEquals takes (actual, expected); the original passed the
    // expected error first, which inverts the generated failure message. Every other
    // assertion in this class puts the actual value first.
    assertEquals(backchannelAuthenticationResponse.getErrorType().getParameter(), error);

    // On error, the success members must be absent.
    assertNull(backchannelAuthenticationResponse.getAuthReqId());
    assertNull(backchannelAuthenticationResponse.getExpiresIn());
    assertNull(backchannelAuthenticationResponse.getInterval());
}
/**
 * Builds a signed CIBA request object populated with the default test claims:
 * issuer = registered client id, audience = OP issuer, scope "openid",
 * binding message "1234", five-minute lifetime, and a fresh jti.
 */
private JwtAuthorizationRequest createJwtRequest(String keyStoreFile, String keyStoreSecret, String dnName,
                                                 String userId, String keyId, SignatureAlgorithm signatureAlgorithm) throws Exception {
    OxAuthCryptoProvider cryptoProvider = new OxAuthCryptoProvider(keyStoreFile, keyStoreSecret, dnName);

    int issuedAt = (int) (System.currentTimeMillis() / 1000);
    int expiresAt = (int) (DateUtils.addMinutes(new Date(), 5).getTime() / 1000);

    JwtAuthorizationRequest request = new JwtAuthorizationRequest(null, signatureAlgorithm, cryptoProvider);
    request.setKeyId(keyId);
    request.setIss(registerResponse.getClientId());
    request.setAud(issuer);
    request.setIat(issuedAt);
    request.setNbf(issuedAt);
    request.setExp(expiresAt);
    request.setJti(UUID.randomUUID().toString());
    request.setScopes(Collections.singletonList("openid"));
    request.setLoginHint(userId);
    request.setBindingMessage("1234");
    return request;
}
/**
 * Registers a client whose id_tokens are signed with RS384, performs an
 * implicit-flow authorization, validates the RS384 signature of the returned
 * id_token, and stores it in {@code idTokenHintRS384} for the CIBA
 * id_token_hint tests that depend on this method.
 */
@Parameters({"userId", "userSecret", "redirectUri", "redirectUris", "sectorIdentifierUri"})
@Test
public void idTokenHintRS384(
        final String userId, final String userSecret, final String redirectUri, final String redirectUris,
        final String sectorIdentifierUri) throws Exception {
    showTitle("idTokenHintRS384");

    List<ResponseType> responseTypes = Arrays.asList(ResponseType.TOKEN, ResponseType.ID_TOKEN);

    // 1. Register client
    RegisterRequest registerRequest = new RegisterRequest(ApplicationType.WEB, "oxAuth test app",
            StringUtils.spaceSeparatedToList(redirectUris));
    registerRequest.setResponseTypes(responseTypes);
    registerRequest.setSectorIdentifierUri(sectorIdentifierUri);
    registerRequest.setIdTokenSignedResponseAlg(SignatureAlgorithm.RS384);

    RegisterClient registerClient = new RegisterClient(registrationEndpoint);
    registerClient.setRequest(registerRequest);
    RegisterResponse registerResponse = registerClient.exec();
    showClient(registerClient);

    assertEquals(registerResponse.getStatus(), 200, "Unexpected response code: " + registerResponse.getEntity());
    assertNotNull(registerResponse.getClientId());
    assertNotNull(registerResponse.getClientSecret());
    assertNotNull(registerResponse.getRegistrationAccessToken());
    assertNotNull(registerResponse.getClientIdIssuedAt());
    assertNotNull(registerResponse.getClientSecretExpiresAt());

    String clientId = registerResponse.getClientId();

    // 2. Request authorization
    List<String> scopes = Arrays.asList("openid");
    String nonce = UUID.randomUUID().toString();
    String state = UUID.randomUUID().toString();

    AuthorizationRequest authorizationRequest = new AuthorizationRequest(responseTypes, clientId, scopes, redirectUri, nonce);
    authorizationRequest.setState(state);

    // FIX: removed an AuthorizeClient that was instantiated and configured but
    // never used; the call below goes through authenticateResourceOwnerAndGrantAccess.
    AuthorizationResponse authorizationResponse = authenticateResourceOwnerAndGrantAccess(
            authorizationEndpoint, authorizationRequest, userId, userSecret);

    assertNotNull(authorizationResponse.getLocation(), "The location is null");
    assertNotNull(authorizationResponse.getAccessToken(), "The accessToken is null");
    assertNotNull(authorizationResponse.getTokenType(), "The tokenType is null");
    assertNotNull(authorizationResponse.getIdToken(), "The idToken is null");
    assertNotNull(authorizationResponse.getState(), "The state is null");

    String idToken = authorizationResponse.getIdToken();

    // 3. Validate id_token
    Jwt jwt = Jwt.parse(idToken);
    assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.TYPE));
    assertNotNull(jwt.getHeader().getClaimAsString(JwtHeaderName.ALGORITHM));
    assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUER));
    assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUDIENCE));
    assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.EXPIRATION_TIME));
    assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ISSUED_AT));
    assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.SUBJECT_IDENTIFIER));
    assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.ACCESS_TOKEN_HASH));
    assertNotNull(jwt.getClaims().getClaimAsString(JwtClaimName.AUTHENTICATION_TIME));

    // Verify the RS384 signature against the OP's published JWKS key.
    RSAPublicKey publicKey = JwkClient.getRSAPublicKey(
            jwksUri,
            jwt.getHeader().getClaimAsString(JwtHeaderName.KEY_ID));
    RSASigner rsaSigner = new RSASigner(SignatureAlgorithm.RS384, publicKey);
    assertTrue(rsaSigner.validate(jwt));

    idTokenHintRS384 = idToken;
}
}
| |
package com.company.project.cache;
import com.aerospike.client.*;
import com.aerospike.client.policy.*;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Thin convenience wrapper around the Aerospike Java client.
 *
 * <p>Terminology mapping: {@code dbName} is the Aerospike namespace and
 * {@code table} is the set. Single-value records are stored under a bin named
 * "value"; JSON records get one bin per top-level key.</p>
 *
 * <p>Read/exists/delete failures are logged and reported to the caller as
 * {@code null} / {@code false} rather than thrown.</p>
 */
public class AeroSpikeDBManager {

    private static final Logger logger = LoggerFactory.getLogger(AeroSpikeDBManager.class);

    // Default Aerospike service port, used when a host URL carries no explicit port.
    // NOTE: removed the unused private constant ALL ("!>all").
    private static final int DEFAULT_PORT = 3000;

    private AerospikeClient client;

    /** Connects to a single Aerospike node. */
    public AeroSpikeDBManager(String host, int port) {
        ClientPolicy policy = new ClientPolicy();
        Host hosts = new Host(host, port);
        client = new AerospikeClient(policy, hosts);
    }

    /**
     * Connects to a cluster described by "host" or "host:port" strings.
     * An unparsable port is logged and replaced by the default port.
     */
    public AeroSpikeDBManager(List<String> hostUrls) {
        ClientPolicy policy = new ClientPolicy();
        List<Host> hostList = new ArrayList<Host>();
        for (String url : hostUrls) {
            String host = url;
            int port = DEFAULT_PORT;
            int sep = url.indexOf(':');
            if (sep > 0) {
                host = url.substring(0, sep);
                String portString = url.substring(sep + 1).trim();
                try {
                    port = Integer.parseInt(portString);
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                }
            }
            hostList.add(new Host(host.trim(), port));
        }
        client = new AerospikeClient(policy, hostList.toArray(new Host[0]));
    }

    /** Exposes the underlying client for operations not wrapped here. */
    public AerospikeClient getClient() {
        return client;
    }

    /** Upserts a JSON record with no TTL change semantics beyond ttl -1. */
    public void insertOrUpdate(String dbName, String table, String key, JSONObject value) {
        insertOrUpdate(dbName, table, key, value, -1);
    }

    /** Upserts a JSON record: existing bins are merged/overwritten (UPDATE). */
    public void insertOrUpdate(String dbName, String table, String key, JSONObject value, int ttl) {
        WritePolicy policy = new WritePolicy();
        policy.recordExistsAction = RecordExistsAction.UPDATE;
        policy.expiration = ttl;
        insert(policy, dbName, table, key, value);
    }

    /** Replaces a JSON record with ttl -1. */
    public void insertOrReplace(String dbName, String table, String key, JSONObject value) {
        insertOrReplace(dbName, table, key, value, -1);
    }

    /** Replaces the whole record: bins not present in {@code value} are dropped. */
    public void insertOrReplace(String dbName, String table, String key, JSONObject value, int ttl) {
        WritePolicy policy = new WritePolicy();
        policy.recordExistsAction = RecordExistsAction.REPLACE;
        policy.expiration = ttl;
        insert(policy, dbName, table, key, value);
    }

    /** Creates a JSON record with ttl -1; the write fails if the key exists. */
    public void insert(String dbName, String table, String key, JSONObject value) {
        insert(dbName, table, key, value, -1);
    }

    /** Creates a JSON record (CREATE_ONLY); the write fails if the key exists. */
    public void insert(String dbName, String table, String key, JSONObject value, int ttl) {
        WritePolicy policy = new WritePolicy();
        policy.recordExistsAction = RecordExistsAction.CREATE_ONLY;
        policy.expiration = ttl;
        insert(policy, dbName, table, key, value);
    }

    /**
     * Writes one bin per top-level String key of {@code value} under the given
     * write policy. Non-String keys are skipped; a null {@code value} writes a
     * record with no bins.
     */
    public void insert(WritePolicy policy, String dbName, String table, String key, JSONObject value) {
        ArrayList<Bin> bins = new ArrayList<Bin>();
        if (value != null) {
            for (Object en : value.entrySet()) {
                if (en instanceof Entry) {
                    Entry<?, ?> entry = (Entry<?, ?>) en;
                    Object jsonKey = entry.getKey();
                    if (jsonKey instanceof String) {
                        bins.add(new Bin((String) jsonKey, entry.getValue()));
                    }
                }
            }
        }
        client.put(policy, new Key(dbName, table, key), bins.toArray(new Bin[0]));
    }

    /** Stores a plain string under the "value" bin with ttl -1. */
    public void set(String dbName, String table, String key, String value) {
        set(dbName, table, key, value, -1);
    }

    /** Stores a single value under the "value" bin, upserting (UPDATE). */
    public void set(String dbName, String table, String key, Object value, int ttl) {
        WritePolicy policy = new WritePolicy();
        policy.expiration = ttl;
        policy.recordExistsAction = RecordExistsAction.UPDATE;
        client.put(policy, new Key(dbName, table, key), new Bin("value", value));
    }

    /** @return true if the record exists; false if absent or on error. */
    public boolean exists(String dbName, String table, String key) {
        try {
            return client.exists(new Policy(), new Key(dbName, table, key));
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
        return false;
    }

    /**
     * Batch-checks the given keys.
     *
     * @return the subset of {@code keys} that exist, or null when {@code keys}
     *         is null or the batch call fails.
     */
    public List<String> exists(String dbName, String table, List<String> keys) {
        try {
            if (keys == null) {
                return null;
            }
            BatchPolicy policy = new BatchPolicy();
            Key[] keyArr = new Key[keys.size()];
            for (int i = 0; i < keys.size(); i++) {
                keyArr[i] = new Key(dbName, table, keys.get(i));
            }
            boolean[] existArr = client.exists(policy, keyArr);
            List<String> list = new ArrayList<String>();
            for (int i = 0; i < keys.size(); i++) {
                if (existArr[i]) {
                    list.add(keys.get(i));
                }
            }
            return list;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Reads the "value" bin of a record.
     *
     * @return the stored string, or null when the record is absent or on error.
     */
    public String get(String dbName, String table, String key) {
        try {
            Record record = client.get(new Policy(), new Key(dbName, table, key));
            if (record != null) {
                return (String) record.getValue("value");
            }
        } catch (Exception e) {
            // FIX: dropped the redundant e.printStackTrace(); the logger already
            // records the full stack trace.
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Batch-reads the "value" bin of the given keys. Missing records are
     * silently skipped, so the result may be shorter than {@code keys}.
     *
     * @return the found values, or null when {@code keys} is null/empty or the
     *         batch call fails.
     */
    public List<String> getAll(String dbName, String table, List<String> keys) {
        if (keys == null || keys.size() == 0) {
            return null;
        }
        try {
            BatchPolicy policy = new BatchPolicy();
            Key[] keyArr = new Key[keys.size()];
            for (int i = 0; i < keys.size(); i++) {
                keyArr[i] = new Key(dbName, table, keys.get(i));
            }
            Record[] records = client.get(policy, keyArr);
            List<String> dbrs = new ArrayList<String>();
            if (records != null) {
                for (Record record : records) {
                    if (record == null) {
                        continue;
                    }
                    dbrs.add((String) record.getValue("value"));
                }
            }
            return dbrs;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }

    /** @return true if the record existed and was deleted; false otherwise or on error. */
    public boolean delete(String dbName, String table, String key) {
        try {
            return client.delete(new WritePolicy(), new Key(dbName, table, key));
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
        return false;
    }

    /**
     * Deletes every record in the set: first collects all keys via a full scan,
     * then deletes them one by one.
     */
    public void deleteAll(String dbName, String table) {
        final List<Key> keys = new ArrayList<Key>();
        client.scanAll(new ScanPolicy(), dbName, table, new ScanCallback() {
            @Override
            public void scanCallback(Key scannedKey, Record record) throws AerospikeException {
                keys.add(scannedKey);
            }
        });
        for (Key k : keys) {
            client.delete(new WritePolicy(), k);
        }
    }

    /**
     * Atomically adds the given deltas (as longs) to the named integer bins,
     * leaving the record's TTL at the policy default.
     */
    public void incrementValue(String dbName, String table, String key, Map<String, Number> binValueMap) {
        // Delegates with expiration 0, which matches the WritePolicy default
        // (use the namespace default TTL).
        incrementValue(dbName, table, key, binValueMap, 0);
    }

    /**
     * Atomically adds the given deltas (as longs) to the named integer bins and
     * sets the record expiration to {@code ttl}.
     */
    public void incrementValue(String dbName, String table, String key, Map<String, Number> binValueMap, int ttl) {
        try {
            if (binValueMap == null || binValueMap.size() == 0) {
                return;
            }
            Bin[] bins = new Bin[binValueMap.size()];
            int pos = 0;
            for (Entry<String, Number> en : binValueMap.entrySet()) {
                bins[pos] = new Bin(en.getKey(), en.getValue().longValue());
                pos++;
            }
            WritePolicy policy = new WritePolicy();
            policy.expiration = ttl;
            client.add(policy, new Key(dbName, table, key), bins);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Scans every record in the set, invoking {@code scanCallback} once per
     * record (bin data included), scanning cluster nodes concurrently.
     */
    public void scanAll(String databaseName, String tableName, ScanCallback scanCallback) {
        ScanPolicy policy = new ScanPolicy();
        policy.concurrentNodes = true;
        policy.priority = Priority.DEFAULT;
        policy.includeBinData = true;
        // FIX: the scan call had been commented out and the shared client was
        // closed in a finally block, which made this method a destructive no-op
        // (every later call on this manager would fail). Perform the scan and
        // leave the client open; closing it is the owner's responsibility.
        client.scanAll(policy, databaseName, tableName, scanCallback);
    }
}
| |
/*
* Copyright 2011-2015, Institute of Cybernetics at Tallinn University of Technology
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ee.ioc.phon.android.speak;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.PendingIntent;
import android.app.SearchManager;
import android.app.PendingIntent.CanceledException;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.speech.RecognizerIntent;
import android.util.DisplayMetrics;
import android.util.SparseArray;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.Chronometer;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;
import ee.ioc.phon.android.speak.RecognizerIntentService.RecognizerBinder;
import ee.ioc.phon.android.speak.RecognizerIntentService.State;
import ee.ioc.phon.android.speak.provider.FileContentProvider;
import ee.ioc.phon.android.speak.utils.PreferenceUtils;
import ee.ioc.phon.netspeechapi.recsession.RecSessionResult;
/**
* <p>This activity responds to the following intent types:</p>
* <ul>
* <li>android.speech.action.RECOGNIZE_SPEECH</li>
* <li>android.speech.action.WEB_SEARCH</li>
* </ul>
* <p>We have tried to implement the complete interface of RecognizerIntent as of API level 7 (v2.1).</p>
*
* <p>It records audio, transcribes it using a speech-to-text server
* and returns the result as a non-empty list of Strings.
* In case of <code>android.intent.action.MAIN</code>,
* it submits the recorded/transcribed audio to a web search.
* It never returns an error code,
* all the errors are processed within this activity.</p>
*
* <p>This activity rewrites the error codes which originally come from the
* speech recognizer webservice (and which are then rewritten by the net-speech-api)
* to the RecognizerIntent result error codes. The RecognizerIntent error codes are the
* following (with my interpretation after the colon):</p>
*
* <ul>
* <li>RESULT_AUDIO_ERROR: recording of the audio fails</li>
* <li>RESULT_NO_MATCH: everything worked great just no transcription was produced</li>
* <li>RESULT_NETWORK_ERROR: cannot reach the recognizer server
* <ul>
* <li>Network is switched off on the device</li>
* <li>The recognizer webservice URL does not exist in the internet</li>
* </ul>
* </li>
* <li>RESULT_SERVER_ERROR: server was reached but it denied service for some reason,
* or produced results in a wrong format (i.e. maybe it provides a different service)</li>
* <li>RESULT_CLIENT_ERROR: generic client error
* <ul>
* <li>The URLs of the recognizer webservice and/or the grammar were malformed</li>
* </ul>
* </li>
* </ul>
*
* @author Kaarel Kaljurand
*/
public class RecognizerIntentActivity extends Activity {
// Tag for all log output from this activity.
private static final String LOG_TAG = RecognizerIntentActivity.class.getName();

// Repaint the chunk-counter every 1.5 s, starting almost immediately.
private static final int TASK_CHUNKS_INTERVAL = 1500;
private static final int TASK_CHUNKS_DELAY = 100;
// Update the byte count every second
private static final int TASK_BYTES_INTERVAL = 1000;
// Start the task almost immediately
private static final int TASK_BYTES_DELAY = 100;
// Check for pause / max time limit twice a second
private static final int TASK_STOP_INTERVAL = 500;
private static final int TASK_STOP_DELAY = 1000;
// Check the volume 10 times a second
private static final int TASK_VOLUME_INTERVAL = 100;
private static final int TASK_VOLUME_DELAY = 500;

// Milliseconds; presumably the gap between the start beep and recording — not used in this chunk, confirm at call site.
private static final int DELAY_AFTER_START_BEEP = 200;

// Message-bundle key and message ids for the internal handler (see SimpleMessageHandler).
private static final String MSG = "MSG";
private static final int MSG_TOAST = 1;
private static final int MSG_RESULT_ERROR = 2;

// Template string for the chunk-counter bar (consumed by makeBar()).
private static final String DOTS = "............";

// Error-code -> user-visible message map, built in onCreate().
private SparseArray<String> mErrorMessages;
private SharedPreferences mPrefs;

// View references, bound in onCreate().
private TextView mTvPrompt;
private Button mBStartStop;
private LinearLayout mLlTranscribing;
private LinearLayout mLlProgress;
private LinearLayout mLlError;
private TextView mTvBytes;
private Chronometer mChronometer;
private ImageView mIvVolume;
private ImageView mIvWaveform;
private TextView mTvChunks;
private TextView mTvErrorMessage;

// Volume-meter drawables, quietest first (indexed by the volume task).
private List<Drawable> mVolumeLevels;
private SimpleMessageHandler mMessageHandler;

// One handler + runnable pair per periodic UI task; built in onStart(),
// scheduled by startAllTasks(), cancelled by stopAllTasks().
private Handler mHandlerBytes = new Handler();
private Handler mHandlerStop = new Handler();
private Handler mHandlerVolume = new Handler();
private Handler mHandlerChunks = new Handler();
private Runnable mRunnableBytes;
private Runnable mRunnableStop;
private Runnable mRunnableVolume;
private Runnable mRunnableChunks;

private ChunkedWebRecSessionBuilder mRecSessionBuilder;
private Resources mRes;
private MediaPlayer mMediaPlayer;

// Optional PendingIntent supplied by the caller for forwarding results.
private PendingIntent mExtraResultsPendingIntent;
private Bundle mExtras;

// Recognizer service connection state.
private RecognizerIntentService mService;
private boolean mIsBound = false;
// Set when recording should start as soon as the service connects.
private boolean mStartRecording = false;
// Last volume level rendered, to avoid redundant drawable swaps.
private int mLevel = 0;
// Note: only used with pre-Honeycomb
private boolean mIsStartActivity = false;
// Connection to RecognizerIntentService. On connect we install the result and
// error listeners and either start recording right away (if the user asked
// before the bind completed) or just refresh the UI.
private ServiceConnection mConnection = new ServiceConnection() {

    public void onServiceConnected(ComponentName className, IBinder service) {
        Log.i(LOG_TAG, "Service connected");
        mService = ((RecognizerBinder) service).getService();

        mService.setOnResultListener(new RecognizerIntentService.OnResultListener() {
            public boolean onResult(RecSessionResult result) {
                // We trust that getLinearizations() returns a non-null non-empty list.
                ArrayList<String> matches = new ArrayList<>();
                matches.addAll(result.getLinearizations());
                returnOrForwardMatches(mMessageHandler, matches);
                return true;
            }
        });

        mService.setOnErrorListener(new RecognizerIntentService.OnErrorListener() {
            public boolean onError(int errorCode, Exception e) {
                handleResultError(mMessageHandler, errorCode, "onError", e);
                return true;
            }
        });

        // A start request made before the bind completed sets mStartRecording;
        // honor it now unless the service is already busy.
        if (mStartRecording && ! mService.isWorking()) {
            startRecording();
            mStartRecording = false;
        } else {
            setGui();
        }
    }

    public void onServiceDisconnected(ComponentName className) {
        // This is called when the connection with the service has been
        // unexpectedly disconnected -- that is, its process crashed.
        // Because it is running in our same process, we should never
        // see this happen.
        mService = null;
        Log.i(LOG_TAG, "Service disconnected");
    }
};
/**
 * Creates the recognizer UI: inflates the layout, binds the view references,
 * loads the volume-level drawables, reads the caller's extras and the user
 * preferences, and prepares the recognizer-session builder.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    requestWindowFeature(Window.FEATURE_NO_TITLE);
    setContentView(R.layout.recognizer);

    mMessageHandler = new SimpleMessageHandler(this);
    mErrorMessages = createErrorMessages();

    // Don't shut down the screen
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    // Bind the view references.
    mTvPrompt = (TextView) findViewById(R.id.tvPrompt);
    mBStartStop = (Button) findViewById(R.id.bStartStop);
    mLlTranscribing = (LinearLayout) findViewById(R.id.llTranscribing);
    mLlProgress = (LinearLayout) findViewById(R.id.llProgress);
    mLlError = (LinearLayout) findViewById(R.id.llError);
    mTvBytes = (TextView) findViewById(R.id.tvBytes);
    mChronometer = (Chronometer) findViewById(R.id.chronometer);
    mIvVolume = (ImageView) findViewById(R.id.ivVolume);
    mIvWaveform = (ImageView) findViewById(R.id.ivWaveform);
    mTvChunks = (TextView) findViewById(R.id.tvChunks);
    mTvErrorMessage = (TextView) findViewById(R.id.tvErrorMessage);

    mRes = getResources();
    // Volume-meter drawables, quietest (level0) to loudest (level6).
    mVolumeLevels = new ArrayList<>();
    mVolumeLevels.add(mRes.getDrawable(R.drawable.speak_now_level0));
    mVolumeLevels.add(mRes.getDrawable(R.drawable.speak_now_level1));
    mVolumeLevels.add(mRes.getDrawable(R.drawable.speak_now_level2));
    mVolumeLevels.add(mRes.getDrawable(R.drawable.speak_now_level3));
    mVolumeLevels.add(mRes.getDrawable(R.drawable.speak_now_level4));
    mVolumeLevels.add(mRes.getDrawable(R.drawable.speak_now_level5));
    mVolumeLevels.add(mRes.getDrawable(R.drawable.speak_now_level6));

    mExtras = getIntent().getExtras();
    if (mExtras == null) {
        // For some reason getExtras() can return null, we map it
        // to an empty Bundle if this occurs.
        mExtras = new Bundle();
    } else {
        mExtraResultsPendingIntent = Utils.getPendingIntent(mExtras);
    }

    mPrefs = PreferenceManager.getDefaultSharedPreferences(getBaseContext());
    // For the change in the autostart-setting to take effect,
    // the user must restart the app. This seems more natural.
    mStartRecording = mPrefs.getBoolean("keyAutoStart", false);

    try {
        mRecSessionBuilder = new ChunkedWebRecSessionBuilder(this, mExtras, getCallingActivity());
    } catch (MalformedURLException e) {
        // The user has managed to store a malformed URL in the configuration.
        handleResultError(mMessageHandler, RecognizerIntent.RESULT_CLIENT_ERROR, "", e);
    }
}
/**
 * Builds the periodic UI tasks (byte counter, chunk counter, auto-stop check,
 * volume meter), wires up the start/stop and settings buttons, and binds to
 * the recognizer service. The tasks are only scheduled once recording starts
 * (see startAllTasks()).
 */
@Override
public void onStart() {
    super.onStart();
    mIsStartActivity = false;

    // Show the length of the current recording in bytes
    mRunnableBytes = new Runnable() {
        public void run() {
            if (mService != null) {
                mTvBytes.setText(Utils.getSizeAsString(mService.getLength()));
            }
            mHandlerBytes.postDelayed(this, TASK_BYTES_INTERVAL);
        }
    };

    // Show the number of audio chunks that have been sent to the server
    mRunnableChunks = new Runnable() {
        public void run() {
            if (mService != null) {
                mTvChunks.setText(makeBar(DOTS, mService.getChunkCount()));
            }
            mHandlerChunks.postDelayed(this, TASK_CHUNKS_INTERVAL);
        }
    };

    // Decide if we should stop recording
    // 1. Max recording time (in milliseconds) has passed
    // 2. Speaker stopped speaking
    final int maxRecordingTime = 1000 * Integer.parseInt(
            mPrefs.getString(
                    getString(R.string.keyAutoStopAfterTime),
                    getString(R.string.defaultAutoStopAfterTime)));
    mRunnableStop = new Runnable() {
        public void run() {
            if (mService != null) {
                if (maxRecordingTime < (SystemClock.elapsedRealtime() - mService.getStartTime())) {
                    Log.i(LOG_TAG, "Max recording time exceeded");
                    stopRecording();
                } else if (PreferenceUtils.getPrefBoolean(mPrefs, mRes, R.string.keyAutoStopAfterPause, R.bool.defaultAutoStopAfterPause) && mService.isPausing()) {
                    Log.i(LOG_TAG, "Speaker finished speaking");
                    stopRecording();
                } else {
                    // Neither condition met yet: check again shortly.
                    mHandlerStop.postDelayed(this, TASK_STOP_INTERVAL);
                }
            }
        }
    };

    // Map the current RMS-dB level onto one of the volume drawables and only
    // swap the image when the level actually changes.
    mRunnableVolume = new Runnable() {
        public void run() {
            if (mService != null) {
                float db = mService.getRmsdb();
                final int maxLevel = mVolumeLevels.size() - 1;
                int index = (int) ((db - Constants.DB_MIN) / (Constants.DB_MAX - Constants.DB_MIN) * maxLevel);
                final int level = Math.min(Math.max(0, index), maxLevel);
                if (level != mLevel) {
                    mIvVolume.setImageDrawable(mVolumeLevels.get(level));
                    mLevel = level;
                }
                mHandlerVolume.postDelayed(this, TASK_VOLUME_INTERVAL);
            }
        }
    };

    // Start/stop toggles recording; if the service is not bound yet, remember
    // the request and let onServiceConnected() act on it.
    mBStartStop.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            if (mIsBound) {
                if (mService.getState() == State.RECORDING) {
                    stopRecording();
                } else {
                    startRecording();
                }
            } else {
                mStartRecording = true;
                doBindService();
            }
        }
    });

    // Settings button
    // Short click opens the settings
    ImageButton bSettings = (ImageButton) findViewById(R.id.bSettings);
    bSettings.setOnClickListener(new View.OnClickListener() {
        public void onClick(View v) {
            mIsStartActivity = true;
            startActivity(new Intent(getApplicationContext(), Preferences.class));
        }
    });

    // Long click shows some technical details (for developers)
    bSettings.setOnLongClickListener(new View.OnLongClickListener() {
        @Override
        public boolean onLongClick(View v) {
            Intent details = new Intent(getApplicationContext(), DetailsActivity.class);
            details.putExtra(DetailsActivity.EXTRA_STRING_ARRAY, getDetails());
            startActivity(details);
            return false;
        }
    });

    doBindService();
}
/** Re-synchronizes the UI with the service state whenever the activity returns to the foreground. */
@Override
public void onResume() {
    super.onResume();
    setGui();
}
/**
 * Lifecycle teardown: detaches the service listeners, cancels the periodic UI
 * tasks, unbinds, and — unless a configuration change is in progress — stops
 * the recognizer service and releases the beep player.
 */
@SuppressLint("NewApi")
@Override
public void onStop() {
    super.onStop();
    // CONSISTENCY FIX: every other log call in this class passes LOG_TAG first;
    // the original called Log.i("onStop"), putting the message in the tag position.
    Log.i(LOG_TAG, "onStop");
    if (mService != null) {
        mService.setOnResultListener(null);
        mService.setOnErrorListener(null);
    }
    stopAllTasks();
    doUnbindService();
    // We stop the service unless a configuration change causes onStop(),
    // i.e. the service is not stopped because of rotation, but is
    // stopped if BACK or HOME is pressed, or the Settings-activity is launched.
    // Note: on pre-honeycomb HOME does not stop the service, as there does not seem
    // to be a nice way to detect configuration change in onStop().
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        if (! isChangingConfigurations()) {
            stopService(new Intent(this, RecognizerIntentService.class));
        }
    } else if (mIsStartActivity || isFinishing()) {
        stopService(new Intent(this, RecognizerIntentService.class));
    }
    if (mMediaPlayer != null) {
        mMediaPlayer.release();
        mMediaPlayer = null;
    }
}
/**
 * Starts the recognizer service (a no-op if it is already running) and binds
 * this activity to it; the bind result arrives via mConnection.
 */
void doBindService() {
    Intent serviceIntent = new Intent(this, RecognizerIntentService.class);
    // startService keeps the service alive beyond the bind lifetime.
    startService(serviceIntent);
    bindService(serviceIntent, mConnection, Context.BIND_AUTO_CREATE);
    mIsBound = true;
    Log.i(LOG_TAG, "Service is bound");
}
/**
 * Unbinds from the recognizer service and clears the local reference.
 * No-op when not bound.
 */
void doUnbindService() {
    if (mIsBound) {
        unbindService(mConnection);
        mIsBound = false;
        mService = null;
        Log.i(LOG_TAG, "Service is UNBOUND");
    }
}
/**
 * Refreshes the whole UI to match the current state of the bound
 * recognizer service. Does nothing while the service is not yet bound
 * (which can happen when called from onResume()).
 */
private void setGui() {
    if (mService == null) {
        // in onResume() the service might not be ready yet
        return;
    }
    State state = mService.getState();
    if (state == State.IDLE || state == State.INITIALIZED) {
        setGuiInit();
    } else if (state == State.RECORDING) {
        setGuiRecording();
    } else if (state == State.PROCESSING) {
        setGuiTranscribing(mService.getCompleteRecording());
    } else if (state == State.ERROR) {
        setGuiError(mService.getErrorCode());
    }
}
// Applies the given text color to the byte counter and the chronometer
// (red while recording, grey while transcribing).
private void setRecorderStyle(int color) {
    mTvBytes.setTextColor(color);
    mChronometer.setTextColor(color);
}
// Stops the recorder, plays the audio cue, and refreshes the UI.
private void stopRecording() {
    mService.stop();
    playStopSound();
    setGui();
}
// Schedules the periodic UI tasks (byte counter, auto-stop check,
// volume indicator, chunk-sending indicator) on their handlers.
private void startAllTasks() {
    mHandlerBytes.postDelayed(mRunnableBytes, TASK_BYTES_DELAY);
    mHandlerStop.postDelayed(mRunnableStop, TASK_STOP_DELAY);
    mHandlerVolume.postDelayed(mRunnableVolume, TASK_VOLUME_DELAY);
    mHandlerChunks.postDelayed(mRunnableChunks, TASK_CHUNKS_DELAY);
}
// Cancels all periodic UI tasks and stops the chronometer.
// Counterpart of startAllTasks().
private void stopAllTasks() {
    mHandlerBytes.removeCallbacks(mRunnableBytes);
    mHandlerStop.removeCallbacks(mRunnableStop);
    mHandlerVolume.removeCallbacks(mRunnableVolume);
    mHandlerChunks.removeCallbacks(mRunnableChunks);
    stopChronometer();
}
/**
 * Sets up the UI for the IDLE/INITIALIZED state: hides progress and
 * error views, shows the prompt, and shows either the volume indicator
 * (when recording is about to auto-start) or the "Speak" button.
 */
private void setGuiInit() {
    mLlTranscribing.setVisibility(View.GONE);
    mIvWaveform.setVisibility(View.GONE);
    // includes: bytes, chronometer, chunks
    mLlProgress.setVisibility(View.INVISIBLE);
    mTvChunks.setText("");
    setTvPrompt();
    if (mStartRecording) {
        // Recording will start automatically: no button, just the volume view.
        mBStartStop.setVisibility(View.GONE);
        mIvVolume.setVisibility(View.VISIBLE);
    } else {
        mIvVolume.setVisibility(View.GONE);
        mBStartStop.setText(getString(R.string.buttonSpeak));
        mBStartStop.setVisibility(View.VISIBLE);
    }
    mLlError.setVisibility(View.GONE);
}
/**
 * Shows the error UI using the service's error code, falling back to a
 * generic client error when the service is not bound.
 */
private void setGuiError() {
    int errorCode = (mService == null)
            ? RecognizerIntent.RESULT_CLIENT_ERROR
            : mService.getErrorCode();
    setGuiError(errorCode);
}
/**
 * Sets up the UI for the ERROR state: hides all progress views, shows
 * the "Speak" button again, and displays the message mapped to the
 * given error code (see createErrorMessages()).
 */
private void setGuiError(int errorCode) {
    mLlTranscribing.setVisibility(View.GONE);
    mIvVolume.setVisibility(View.GONE);
    mIvWaveform.setVisibility(View.GONE);
    // includes: bytes, chronometer, chunks
    mLlProgress.setVisibility(View.GONE);
    setTvPrompt();
    mBStartStop.setText(getString(R.string.buttonSpeak));
    mBStartStop.setVisibility(View.VISIBLE);
    mLlError.setVisibility(View.VISIBLE);
    mTvErrorMessage.setText(mErrorMessages.get(errorCode));
}
/**
 * Sets up the UI for the RECORDING state: red chronometer/byte counter,
 * running periodic tasks, and either a "Stop" button or (when auto-stop
 * after pause is enabled) only the volume indicator.
 */
private void setGuiRecording() {
    // Base the chronometer on the service's recording start time so the
    // elapsed time survives activity re-creation.
    mChronometer.setBase(mService.getStartTime());
    startChronometer();
    startAllTasks();
    setTvPrompt();
    mLlProgress.setVisibility(View.VISIBLE);
    mLlError.setVisibility(View.GONE);
    setRecorderStyle(mRes.getColor(R.color.red));
    if (PreferenceUtils.getPrefBoolean(mPrefs, mRes, R.string.keyAutoStopAfterPause, R.bool.defaultAutoStopAfterPause)) {
        mBStartStop.setVisibility(View.GONE);
        mIvVolume.setVisibility(View.VISIBLE);
    } else {
        mIvVolume.setVisibility(View.GONE);
        mBStartStop.setText(getString(R.string.buttonStop));
        mBStartStop.setVisibility(View.VISIBLE);
    }
}
/**
 * Sets up the UI for the PROCESSING state: freezes the chronometer and
 * byte counter (grey), and renders a waveform of the full recording.
 *
 * @param bytes the complete raw recording
 */
private void setGuiTranscribing(byte[] bytes) {
    mChronometer.setBase(mService.getStartTime());
    stopChronometer();
    mHandlerBytes.removeCallbacks(mRunnableBytes);
    mHandlerStop.removeCallbacks(mRunnableStop);
    mHandlerVolume.removeCallbacks(mRunnableVolume);
    // Chunk checking keeps running
    mTvBytes.setText(Utils.getSizeAsString(bytes.length));
    setRecorderStyle(mRes.getColor(R.color.grey2));
    mBStartStop.setVisibility(View.GONE);
    mTvPrompt.setVisibility(View.GONE);
    mIvVolume.setVisibility(View.GONE);
    mLlProgress.setVisibility(View.VISIBLE);
    mLlTranscribing.setVisibility(View.VISIBLE);
    // http://stackoverflow.com/questions/5012840/android-specifying-pixel-units-like-sp-px-dp-without-using-xml
    DisplayMetrics metrics = mRes.getDisplayMetrics();
    // This must match the layout_width of the top layout in recognizer.xml
    float dp = 250f;
    // Convert dp to px, rounding to the nearest pixel (+0.5f).
    int waveformWidth = (int) (metrics.density * dp + 0.5f);
    int waveformHeight = (int) (waveformWidth / 2.5);
    mIvWaveform.setVisibility(View.VISIBLE);
    mIvWaveform.setImageBitmap(Utils.drawWaveform(bytes, waveformWidth, waveformHeight, 0, bytes.length));
}
/**
 * Shows the caller-supplied prompt text, or hides the prompt view
 * (INVISIBLE, so its layout space is kept) when there is none.
 */
private void setTvPrompt() {
    String prompt = getPrompt();
    boolean hasPrompt = prompt != null && !prompt.isEmpty();
    if (hasPrompt) {
        mTvPrompt.setText(prompt);
        mTvPrompt.setVisibility(View.VISIBLE);
    } else {
        mTvPrompt.setVisibility(View.INVISIBLE);
    }
}
/**
 * Returns the prompt supplied via RecognizerIntent.EXTRA_PROMPT.
 * When no prompt was given AND there is no caller to return results to
 * (no calling activity, no pending intent), falls back to the default
 * "search" prompt, since the results will be handled by a web search.
 */
private String getPrompt() {
    String prompt = mExtras.getString(RecognizerIntent.EXTRA_PROMPT);
    if (prompt == null && mExtraResultsPendingIntent == null && getCallingActivity() == null) {
        return getString(R.string.promptSearch);
    }
    return prompt;
}
// Freezes the elapsed-time display.
private void stopChronometer() {
    mChronometer.stop();
}
// Starts the elapsed-time display (base must be set by the caller).
private void startChronometer() {
    mChronometer.start();
}
/**
 * Initializes the recognizer session with the sample rate selected in
 * the preferences and starts recording (with a start beep) on success.
 */
private void startRecording() {
    int sampleRate = Integer.parseInt(
            mPrefs.getString(
                    getString(R.string.keyRecordingRate),
                    getString(R.string.defaultRecordingRate)));
    // NOTE(review): passing the sample rate to setContentType() looks odd
    // for the method name — confirm against RecSessionBuilder's API.
    mRecSessionBuilder.setContentType(sampleRate);
    if (mService.init(mRecSessionBuilder.build())) {
        playStartSound();
        mService.start(sampleRate);
        setGui();
    }
}
/**
 * Sets the RESULT_OK intent. Adds the recorded audio data if the caller has requested it
 * and the requested format is supported or unset.
 *
 * @param handler message handler used to toast a warning about an
 *                unsupported audio format (debug builds only)
 * @param matches transcription results to return to the caller
 */
private void setResultIntent(final Handler handler, ArrayList<String> matches) {
    Intent intent = new Intent();
    if (mExtras.getBoolean(Extras.GET_AUDIO)) {
        String audioFormat = mExtras.getString(Extras.GET_AUDIO_FORMAT);
        if (audioFormat == null) {
            audioFormat = Constants.DEFAULT_AUDIO_FORMAT;
        }
        if (Constants.SUPPORTED_AUDIO_FORMATS.contains(audioFormat)) {
            try {
                // Write the WAV into app-private storage and expose it to the
                // caller via the app's content provider.
                FileOutputStream fos = openFileOutput(Constants.AUDIO_FILENAME, Context.MODE_PRIVATE);
                fos.write(mService.getCompleteRecordingAsWav());
                fos.close();
                Uri uri = Uri.parse("content://" + FileContentProvider.AUTHORITY + "/" + Constants.AUDIO_FILENAME);
                // TODO: not sure about the type (or if it's needed)
                intent.setDataAndType(uri, audioFormat);
            } catch (FileNotFoundException e) {
                Log.e(LOG_TAG, "FileNotFoundException: " + e.getMessage());
            } catch (IOException e) {
                Log.e(LOG_TAG, "IOException: " + e.getMessage());
            }
        } else {
            if (Log.DEBUG) {
                handler.sendMessage(createMessage(MSG_TOAST,
                        String.format(getString(R.string.toastRequestedAudioFormatNotSupported), audioFormat)));
            }
        }
    }
    intent.putStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS, matches);
    setResult(Activity.RESULT_OK, intent);
}
// Shows a long toast; must be called on the UI thread (see SimpleMessageHandler).
private void toast(String message) {
    Toast.makeText(getApplicationContext(), message, Toast.LENGTH_LONG).show();
}
// TODO: Use AudioCue
// Plays the start-of-recording beep, then blocks briefly so the beep
// does not end up in the recording itself.
private void playStartSound() {
    boolean soundPlayed = playSound(R.raw.explore_begin);
    if (soundPlayed) {
        SystemClock.sleep(DELAY_AFTER_START_BEEP);
    }
}
// Plays the end-of-recording beep (if audio cues are enabled).
private void playStopSound() {
    playSound(R.raw.explore_end);
}
// Plays the error beep (if audio cues are enabled).
private void playErrorSound() {
    playSound(R.raw.error);
}
/**
 * Plays the given raw sound resource if audio cues are enabled in the
 * preferences.
 *
 * @param sound raw resource id of the sound to play
 * @return true iff the sound was actually started
 */
private boolean playSound(int sound) {
    if (mPrefs.getBoolean(mRes.getString(R.string.keyAudioCues),
            mRes.getBoolean(R.bool.defaultAudioCues))) {
        mMediaPlayer = MediaPlayer.create(this, sound);
        // create can return null, e.g. on Android Wear
        if (mMediaPlayer == null) {
            return false;
        }
        mMediaPlayer.start();
        return true;
    }
    return false;
}
/**
 * <p>Only for developers, i.e. we are not going to localize these strings.</p>
 *
 * Collects technical details (caller identity, session configuration,
 * intent extras) for the long-press "details" screen.
 */
private String[] getDetails() {
    String callingActivityClassName = null;
    String callingActivityPackageName = null;
    String pendingIntentTargetPackage = null;
    ComponentName callingActivity = getCallingActivity();
    if (callingActivity != null) {
        callingActivityClassName = callingActivity.getClassName();
        callingActivityPackageName = callingActivity.getPackageName();
    }
    if (mExtraResultsPendingIntent != null) {
        pendingIntentTargetPackage = mExtraResultsPendingIntent.getTargetPackage();
    }
    List<String> info = new ArrayList<>();
    info.add("ID: " + PreferenceUtils.getUniqueId(PreferenceManager.getDefaultSharedPreferences(this)));
    info.add("User-Agent comment: " + mRecSessionBuilder.getUserAgentComment());
    info.add("Calling activity class name: " + callingActivityClassName);
    info.add("Calling activity package name: " + callingActivityPackageName);
    info.add("Pending intent target package: " + pendingIntentTargetPackage);
    info.add("Selected grammar: " + mRecSessionBuilder.getGrammarUrl());
    info.add("Selected target lang: " + mRecSessionBuilder.getGrammarTargetLang());
    info.add("Selected server: " + mRecSessionBuilder.getServerUrl());
    info.add("Intent action: " + getIntent().getAction());
    info.addAll(Utils.ppBundle(mExtras));
    return info.toArray(new String[info.size()]);
}
/**
 * Builds a handler Message of the given type whose data bundle carries
 * the given string under the MSG key.
 */
private static Message createMessage(int type, String str) {
    Message message = Message.obtain();
    message.what = type;
    Bundle data = new Bundle();
    data.putString(MSG, str);
    message.setData(data);
    return message;
}
/**
 * Static handler holding only a weak reference to the activity, so
 * posted messages cannot leak it. Dispatches toast and error messages
 * created via createMessage().
 */
private static class SimpleMessageHandler extends Handler {
    private final WeakReference<RecognizerIntentActivity> mRef;
    public SimpleMessageHandler(RecognizerIntentActivity c) {
        mRef = new WeakReference<>(c);
    }
    public void handleMessage(Message msg) {
        RecognizerIntentActivity outerClass = mRef.get();
        // Silently drop messages that arrive after the activity is gone.
        if (outerClass != null) {
            Bundle b = msg.getData();
            String msgAsString = b.getString(MSG);
            switch (msg.what) {
                case MSG_TOAST:
                    outerClass.toast(msgAsString);
                    break;
                case MSG_RESULT_ERROR:
                    outerClass.playErrorSound();
                    outerClass.stopAllTasks();
                    outerClass.setGuiError();
                    break;
            }
        }
    }
}
/**
 * <p>Returns the transcription results (matches) to the caller,
 * or sends them to the pending intent, or performs a web search.</p>
 *
 * <p>If a pending intent was specified then use it. This is the case with
 * applications that use the standard search bar (e.g. Google Maps and YouTube).</p>
 *
 * <p>Otherwise. If there was no caller (i.e. we cannot return the results), or
 * the caller asked us explicitly to perform "web search", then do that, possibly
 * disambiguating the results or redoing the recognition.
 * This is the case when K6nele was launched from its launcher icon (i.e. no caller),
 * or from a browser app.
 * (Note that trying to return the results to Google Chrome does not seem to work.)</p>
 *
 * <p>Otherwise. Just return the results to the caller.</p>
 *
 * <p>Note that we assume that the given list of matches contains at least one
 * element.</p>
 *
 * @param handler message handler
 * @param matches transcription results (one or more hypotheses)
 */
private void returnOrForwardMatches(final Handler handler, ArrayList<String> matches) {
    // Throw away matches that the user is not interested in
    int maxResults = mExtras.getInt(RecognizerIntent.EXTRA_MAX_RESULTS);
    if (maxResults > 0 && matches.size() > maxResults) {
        matches.subList(maxResults, matches.size()).clear();
    }
    if (mExtraResultsPendingIntent == null) {
        if (getCallingActivity() == null
                || RecognizerIntent.ACTION_WEB_SEARCH.equals(getIntent().getAction())
                || mExtras.getBoolean(RecognizerIntent.EXTRA_WEB_SEARCH_ONLY)) {
            // Web-search path finishes (or forwards) on its own; do not
            // fall through to finish() below.
            handleResultsByWebSearch(matches);
            return;
        } else {
            setResultIntent(handler, matches);
        }
    } else {
        Bundle bundle = mExtras.getBundle(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE);
        if (bundle == null) {
            bundle = new Bundle();
        }
        String match = matches.get(0);
        //mExtraResultsPendingIntentBundle.putString(SearchManager.QUERY, match);
        Intent intent = new Intent();
        intent.putExtras(bundle);
        // This is for Google Maps, YouTube, ...
        intent.putExtra(SearchManager.QUERY, match);
        // This is for SwiftKey X, ...
        intent.putStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS, matches);
        String message;
        if (matches.size() == 1) {
            message = match;
        } else {
            message = matches.toString();
        }
        // Display a toast with the transcription.
        handler.sendMessage(createMessage(MSG_TOAST, String.format(getString(R.string.toastForwardedMatches), message)));
        try {
            mExtraResultsPendingIntent.send(this, Activity.RESULT_OK, intent);
        } catch (CanceledException e) {
            handler.sendMessage(createMessage(MSG_TOAST, e.getMessage()));
        }
    }
    finish();
}
// In case of multiple hypotheses, ask the user to select from a list dialog.
// TODO: fetch also confidence scores and treat a very confident hypothesis
// as a single hypothesis.
private void handleResultsByWebSearch(final ArrayList<String> results) {
    // Some tweaking to cleanup the UI that would show under the
    // dialog window that we are about to open.
    runOnUiThread(new Runnable() {
        public void run() {
            mLlTranscribing.setVisibility(View.GONE);
        }
    });
    Intent searchIntent;
    if (results.size() == 1) {
        // We construct a list of search intents.
        // The first one that can be handled by the device is launched.
        CharSequence query = results.get(0);
        Intent intent1 = new Intent(Intent.ACTION_WEB_SEARCH);
        intent1.putExtra(SearchManager.QUERY, query);
        Intent intent2 = new Intent(Intent.ACTION_SEARCH);
        intent2.putExtra(SearchManager.QUERY, query);
        Utils.startActivityIfAvailable(this, intent1, intent2);
    } else {
        // TODO: it would be a bit cleaner to pass ACTION_WEB_SEARCH
        // via a pending intent
        searchIntent = new Intent(this, DetailsActivity.class);
        searchIntent.putExtra(DetailsActivity.EXTRA_TITLE, getString(R.string.dialogTitleHypotheses));
        searchIntent.putExtra(DetailsActivity.EXTRA_STRING_ARRAY, results.toArray(new String[results.size()]));
        startActivity(searchIntent);
    }
}
/**
 * Logs the (optional) exception and posts a MSG_RESULT_ERROR message
 * carrying the localized message for the given result code.
 *
 * @param handler    message handler to post to
 * @param resultCode key into mErrorMessages
 * @param type       short label for the log entry
 * @param e          exception that triggered the error, may be null
 */
private void handleResultError(Handler handler, int resultCode, String type, Exception e) {
    if (e != null) {
        Log.e(LOG_TAG, "Exception: " + type + ": " + e.getMessage());
    }
    handler.sendMessage(createMessage(MSG_RESULT_ERROR, mErrorMessages.get(resultCode)));
}
/**
 * Renders a horizontal "bar" of the given length by taking a prefix of
 * the template string. Lengths that do not fit into the template are
 * shown numerically instead; non-positive lengths yield an empty string.
 */
private static String makeBar(String bar, int len) {
    if (len <= 0) {
        return "";
    }
    return (len < bar.length()) ? bar.substring(0, len) : Integer.toString(len);
}
/**
 * Maps RecognizerIntent RESULT_* error codes to localized user-facing
 * messages, used by setGuiError(int).
 */
private SparseArray<String> createErrorMessages() {
    SparseArray<String> errorMessages = new SparseArray<>();
    errorMessages.put(RecognizerIntent.RESULT_AUDIO_ERROR, getString(R.string.errorResultAudioError));
    errorMessages.put(RecognizerIntent.RESULT_CLIENT_ERROR, getString(R.string.errorResultClientError));
    errorMessages.put(RecognizerIntent.RESULT_NETWORK_ERROR, getString(R.string.errorResultNetworkError));
    errorMessages.put(RecognizerIntent.RESULT_SERVER_ERROR, getString(R.string.errorResultServerError));
    errorMessages.put(RecognizerIntent.RESULT_NO_MATCH, getString(R.string.errorResultNoMatch));
    return errorMessages;
}
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.siddhi.core.query.output.callback;
import com.lmax.disruptor.EventHandler;
import com.lmax.disruptor.RingBuffer;
import com.lmax.disruptor.SleepingWaitStrategy;
import com.lmax.disruptor.dsl.Disruptor;
import com.lmax.disruptor.dsl.ProducerType;
import org.apache.log4j.Logger;
import org.wso2.siddhi.core.config.ExecutionPlanContext;
import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.Event;
import org.wso2.siddhi.core.event.stream.StreamEvent;
import org.wso2.siddhi.query.api.execution.query.Query;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Base class for receiving the output events of a Siddhi query.
 * Events arriving via {@link #receiveStreamEvent} are split into
 * "current" and "expired" arrays and delivered to the abstract
 * {@link #receive} method, either synchronously or — when async mode is
 * enabled in {@link #startProcessing} — through an LMAX Disruptor ring
 * buffer on a separate thread.
 *
 * NOTE(review): the event buffers below are instance fields mutated in
 * receiveStreamEvent(); the class appears to assume single-threaded
 * delivery per callback — confirm against the caller.
 */
public abstract class QueryCallback {
    private static final Logger log = Logger.getLogger(QueryCallback.class);
    private ExecutionPlanContext executionPlanContext;
    private Query query;
    // Reusable scratch buffers for splitting one chunk into current/expired events.
    private List<Event> currentEventBuffer = new ArrayList<Event>();
    private List<Event> expiredEventBuffer = new ArrayList<Event>();
    // Non-null only when async delivery is enabled (see startProcessing()).
    private Disruptor<EventHolder> disruptor;
    private RingBuffer<EventHolder> ringBuffer;
    private AsyncEventHandler asyncEventHandler;
    public void setQuery(Query query) {
        this.query = query;
    }
    public void setContext(ExecutionPlanContext executionPlanContext) {
        this.executionPlanContext = executionPlanContext;
    }
    /**
     * Converts a chunk of complex events into current/expired Event arrays
     * and forwards them (sync or async). The timestamp passed on is that
     * of the LAST event in the chunk.
     */
    public void receiveStreamEvent(ComplexEventChunk complexEventChunk) {
        Event[] currentEvents = null;
        Event[] expiredEvents = null;
        long timeStamp = -1;
        while (complexEventChunk.hasNext()) {
            ComplexEvent streamEvent = complexEventChunk.next();
            if (streamEvent.getType() == StreamEvent.Type.EXPIRED) {
                bufferEvent(streamEvent, expiredEventBuffer);
            } else {
                bufferEvent(streamEvent, currentEventBuffer);
            }
            timeStamp = streamEvent.getTimestamp();
        }
        if (!currentEventBuffer.isEmpty()) {
            currentEvents = currentEventBuffer.toArray(new Event[currentEventBuffer.size()]);
            currentEventBuffer.clear();
        }
        if (!expiredEventBuffer.isEmpty()) {
            expiredEvents = expiredEventBuffer.toArray(new Event[expiredEventBuffer.size()]);
            expiredEventBuffer.clear();
        }
        if (disruptor == null) {
            send(timeStamp, currentEvents, expiredEvents);
        } else {
            sendAsync(timeStamp, currentEvents, expiredEvents);
        }
    }
    /**
     * Publishes the event arrays onto the disruptor ring buffer.
     * publish() is in a finally block so a claimed slot is always
     * released, even if populating the holder fails.
     */
    private void sendAsync(long timeStamp, Event[] currentEvents, Event[] expiredEvents) {
        long sequenceNo = ringBuffer.next();
        try {
            EventHolder holder = ringBuffer.get(sequenceNo);
            holder.timeStamp = timeStamp;
            holder.currentEvents = currentEvents;
            holder.expiredEvents = expiredEvents;
        } finally {
            ringBuffer.publish(sequenceNo);
        }
    }
//    private void send(long timeStamp, Event[] currentEvents, Event[] expiredEvents, boolean endOfBatch) {
//
//        if (endOfBatch) {
//            send(timeStamp, currentEvents, currentEvents);
//        } else {
//            StreamEvent processedEvent = currentStreamEvent;
//            bufferEvent(processedEvent, currentEventBuffer);
//
//            processedEvent = expiredStreamEvent;
//            bufferEvent(processedEvent, expiredEventBuffer);
//        }
//    }
    // Delivers the events to the user callback; a RuntimeException thrown
    // by user code is logged (with the offending events) rather than
    // propagated, so it cannot kill the delivery thread.
    private void send(long timeStamp, Event[] currentEvents, Event[] expiredEvents) {
        try {
            receive(timeStamp, currentEvents, expiredEvents);
        } catch (RuntimeException e) {
            log.error("Error on sending events" + Arrays.deepToString(currentEvents) + ", " + Arrays.deepToString(expiredEvents), e);
        }
    }
    // Copies the complex event's output data into a plain Event and buffers it.
    private void bufferEvent(ComplexEvent complexEvent, List<Event> eventBuffer) {
        eventBuffer.add(new Event(complexEvent.getOutputData().length).copyFrom(complexEvent));
//        StreamEvent  processedEvent = streamEventList;
//        while (processedEvent != null) {
//            eventBuffer.add(new Event(processedEvent.getOutputDataAttributes().length).copyFrom(processedEvent));
//            processedEvent = processedEvent.getNext();
//        }
    }
    /**
     * Sets up the async delivery pipeline. With the annotation lookup
     * commented out, asyncEnabled stays null, so the condition below is
     * currently always true and async mode is always enabled.
     * Uses reflection to detect which Disruptor constructor the classpath
     * provides (the 5-arg variant of newer releases vs. the older 3-arg one).
     */
    public synchronized void startProcessing() {
        Boolean asyncEnabled = null;
//        try {
//            Element element = AnnotationHelper.getAnnotationElement(SiddhiConstants.ANNOTATION_CONFIG,
//                    SiddhiConstants.ANNOTATION_ELEMENT_CALLBACK_ASYNC,
//                    query.getAnnotations());
//
//            if (element != null) {
//                asyncEnabled = SiddhiConstants.TRUE.equalsIgnoreCase(element.getValue());
//            }
//
//        } catch (DuplicateAnnotationException e) {
//            throw new QueryCreationException(e.getMessage() + " for the same Query " +
//                    query.toString());
//        }
        if (asyncEnabled != null && asyncEnabled || asyncEnabled == null) {
            for (Constructor constructor : Disruptor.class.getConstructors()) {
                if (constructor.getParameterTypes().length == 5) {      //if new disruptor implementation available
                    disruptor = new Disruptor<EventHolder>(new EventHolderFactory(),
                            executionPlanContext.getSiddhiContext().getEventBufferSize(),
                            executionPlanContext.getExecutorService(), ProducerType.SINGLE, new SleepingWaitStrategy());
                    break;
                }
            }
            if (disruptor == null) {
                disruptor = new Disruptor<EventHolder>(new EventHolderFactory(),
                        executionPlanContext.getSiddhiContext().getEventBufferSize(),
                        executionPlanContext.getExecutorService());
            }
            asyncEventHandler = new AsyncEventHandler(this);
            disruptor.handleEventsWith(asyncEventHandler);
            ringBuffer = disruptor.start();
        }
    }
    /**
     * Stops async delivery. The handler's back-reference is cleared first
     * so in-flight events drained during shutdown() are dropped instead of
     * being delivered to a stopping callback.
     */
    public synchronized void stopProcessing() {
        if (disruptor != null) {
            asyncEventHandler.queryCallback = null;
            disruptor.shutdown();
        }
    }
    /**
     * User hook: receives the query output.
     *
     * @param timeStamp    timestamp of the last event in the delivered chunk
     * @param inEvents     newly arrived events, or null if none
     * @param removeEvents expired events, or null if none
     */
    public abstract void receive(long timeStamp, Event[] inEvents, Event[] removeEvents);
    // Disruptor consumer: forwards each published holder to send().
    public class AsyncEventHandler implements EventHandler<EventHolder> {
        private QueryCallback queryCallback;
        public AsyncEventHandler(QueryCallback queryCallback) {
            this.queryCallback = queryCallback;
        }
        /**
         * Called when a publisher has published an event to the {@link com.lmax.disruptor.RingBuffer}
         *
         * @param eventHolder published to the {@link com.lmax.disruptor.RingBuffer}
         * @param sequence    of the event being processed
         * @param endOfBatch  flag to indicate if this is the last event in a batch from the {@link com.lmax.disruptor.RingBuffer}
         * @throws Exception if the EventHandler would like the exception handled further up the chain.
         */
        @Override
        public void onEvent(EventHolder eventHolder, long sequence, boolean endOfBatch) throws Exception {
            // queryCallback is nulled by stopProcessing(); drop late events.
            if (queryCallback != null) {
                queryCallback.send(eventHolder.timeStamp, eventHolder.currentEvents, eventHolder.expiredEvents);
            }
        }
    }
    // Mutable slot type pre-allocated in the ring buffer.
    public class EventHolder {
        private long timeStamp;
        private Event[] currentEvents;
        private Event[] expiredEvents;
    }
    // Factory used by the disruptor to pre-fill the ring buffer with holders.
    public class EventHolderFactory implements com.lmax.disruptor.EventFactory<EventHolder> {
        public EventHolder newInstance() {
            return new EventHolder();
        }
    }
}
| |
package org.sfm.reflect.asm;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.signature.SignatureReader;
import org.objectweb.asm.signature.SignatureVisitor;
import org.sfm.reflect.TypeHelper;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.*;
import java.util.*;
import static org.objectweb.asm.Opcodes.*;
public class AsmUtils {
public static final String ASM_DUMP_TARGET_DIR = "asm.dump.target.dir";
public static final Type[] EMPTY_TYPE_ARRAY = new Type[0];
public static String toType(final Type target) {
return toType(TypeHelper.toClass(target));
}
public static String toType(final Class<?> target) {
if (target.isPrimitive()) {
return primitivesType.get(target);
}
return toType(getPublicOrInterfaceClass(target).getName());
}
public static String toType(final String name) {
return name.replace('.', '/');
}
static final Map<Class<?>, Class<?>> wrappers = new HashMap<Class<?>, Class<?>>();
static {
wrappers.put(boolean.class, Boolean.class);
wrappers.put(byte.class, Byte.class);
wrappers.put(char.class, Character.class);
wrappers.put(double.class, Double.class);
wrappers.put(float.class, Float.class);
wrappers.put(int.class, Integer.class);
wrappers.put(long.class, Long.class);
wrappers.put(short.class, Short.class);
wrappers.put(void.class, Void.class);
}
static final Map<Class<?>, String> primitivesType = new HashMap<Class<?>, String>();
static {
primitivesType.put(boolean.class, "Z");
primitivesType.put(byte.class, "B");
primitivesType.put(char.class, "C");
primitivesType.put(double.class, "D");
primitivesType.put(float.class, "F");
primitivesType.put(int.class, "I");
primitivesType.put(long.class, "J");
primitivesType.put(short.class, "S");
primitivesType.put(void.class, "V");
}
static final Map<String, String> stringToPrimitivesType = new HashMap<String, String>();
static {
stringToPrimitivesType.put("Boolean", "Z");
stringToPrimitivesType.put("Byte", "B");
stringToPrimitivesType.put("Character", "C");
stringToPrimitivesType.put("Double", "D");
stringToPrimitivesType.put("Float", "F");
stringToPrimitivesType.put("Int", "I");
stringToPrimitivesType.put("Long", "J");
stringToPrimitivesType.put("Short", "S");
}
static final Map<Class<?>, Integer> loadOps = new HashMap<Class<?>, Integer>();
static {
loadOps.put(boolean.class, ILOAD);
loadOps.put(byte.class, ILOAD);
loadOps.put(char.class, ILOAD);
loadOps.put(double.class, DLOAD);
loadOps.put(float.class, FLOAD);
loadOps.put(int.class, ILOAD);
loadOps.put(long.class, LLOAD);
loadOps.put(short.class, ILOAD);
}
static final Map<Class<?>, Integer> returnOps = new HashMap<Class<?>, Integer>();
static {
returnOps.put(boolean.class, IRETURN);
returnOps.put(byte.class, IRETURN);
returnOps.put(char.class, IRETURN);
returnOps.put(double.class, DRETURN);
returnOps.put(float.class, FRETURN);
returnOps.put(int.class, IRETURN);
returnOps.put(long.class, LRETURN);
returnOps.put(short.class, IRETURN);
}
static final Map<Class<?>, Integer> defaultValue = new HashMap<Class<?>, Integer>();
static {
defaultValue.put(boolean.class, ICONST_0);
defaultValue.put(byte.class, ICONST_0);
defaultValue.put(char.class, ICONST_0);
defaultValue.put(double.class, DCONST_0);
defaultValue.put(float.class, FCONST_0);
defaultValue.put(int.class, ICONST_0);
defaultValue.put(long.class, LCONST_0);
defaultValue.put(short.class, ICONST_0);
}
static final Set<Class<?>> primitivesClassAndWrapper = new HashSet<Class<?>>();
static {
primitivesClassAndWrapper.addAll(wrappers.keySet());
primitivesClassAndWrapper.addAll(wrappers.values());
}
static File targetDir = null;
static {
String targetDirStr = System.getProperty(ASM_DUMP_TARGET_DIR);
if (targetDirStr != null) {
targetDir = new File(targetDirStr);
targetDir.mkdirs();
}
}
public static boolean isStillGeneric(Class<? > clazz) {
clazz = getPublicOrInterfaceClass(clazz);
final TypeVariable<?>[] typeParameters = clazz.getTypeParameters();
return typeParameters != null && typeParameters.length > 0;
}
public static byte[] writeClassToFile (final String className, final byte[] bytes) throws IOException {
return writeClassToFileInDir(className, bytes, AsmUtils.targetDir);
}
public static byte[] writeClassToFileInDir(String className, byte[] bytes, File targetDir) throws IOException {
if (targetDir != null) {
final int lastIndex = className.lastIndexOf('.');
final String filename = className.substring(lastIndex + 1) + ".class";
final String directory = className.substring(0, lastIndex).replace('.', '/');
final File packageDir = new File(targetDir, directory);
packageDir.mkdirs();
final FileOutputStream fos = new FileOutputStream(new File(packageDir, filename ));
try {
fos.write(bytes);
} finally {
fos.close();
}
}
return bytes;
}
public static String toTypeWithParam(Class<?> class1) {
StringBuilder sb = new StringBuilder();
class1 = getPublicOrInterfaceClass(class1);
sb.append(toType(class1));
TypeVariable<?>[] typeParameters = class1.getTypeParameters();
if (typeParameters != null && typeParameters.length > 0) {
sb.append("<");
for(TypeVariable<?> t : typeParameters) {
String typeName = t.getName();
sb.append(toTypeParam(typeName));
}
sb.append(">");
}
return sb.toString();
}
public static String toTypeParam(String typeName) {
if (typeName.startsWith("[")) {
return typeName;
} else {
return "L" + toType(typeName) + ";";
}
}
public static Type toGenericType(String sig, List<String> genericTypeNames, Type target) throws ClassNotFoundException {
if (sig.length() == 1) {
switch (sig.charAt(0)) {
case 'Z': return boolean.class;
case 'B': return byte.class;
case 'C': return char.class;
case 'D': return double.class;
case 'F': return float.class;
case 'I': return int.class;
case 'J': return long.class;
case 'S': return short.class;
}
}
if (sig.startsWith("L")) {
sig = sig.substring(1);
if (sig.endsWith(";")) {
sig = sig.substring(0, sig.length() - 1);
}
} else if (sig.startsWith("T")) {
String templateType = sig.substring(1, sig.length() - (sig.endsWith(";") ? 1 : 0));
int indexOfParam = genericTypeNames.indexOf(templateType);
if (target instanceof ParameterizedType) {
return ((ParameterizedType) target).getActualTypeArguments()[indexOfParam];
} else {
throw new IllegalArgumentException("Cannot resolve generic type " + sig + " from non ParameterizedType " + target);
}
}
int indexOf = sig.indexOf('<');
if (indexOf == -1) {
return Class.forName(sig.replace('/','.'));
} else {
final Class<?> rawType = Class.forName(sig.substring(0, indexOf).replace('/','.'));
final Type[] types = parseTypes(sig.substring(indexOf+ 1, sig.length() - 1), genericTypeNames, target);
return new ParameterizedTypeImpl(rawType, types);
}
}
public static Class<?> getPublicOrInterfaceClass(Class<?> clazz) {
if (! Modifier.isPublic(clazz.getModifiers()) && ! Modifier.isStatic(clazz.getModifiers())) {
Class<?>[] interfaces = clazz.getInterfaces();
if (interfaces != null && interfaces.length > 0) {
return interfaces[0];
} else {
return getPublicOrInterfaceClass(clazz.getSuperclass());
}
}
return clazz;
}
public static void invoke(MethodVisitor mv, Class<?> target,
String method, String sig) {
Class<?> publicClass = getPublicOrInterfaceClass(target);
boolean isInterface = publicClass.isInterface();
mv.visitMethodInsn(isInterface ? Opcodes.INVOKEINTERFACE : Opcodes.INVOKEVIRTUAL, toType(publicClass), method, sig, isInterface);
}
public static String toDeclaredLType(String sourceType) {
if (sourceType.startsWith("[L") || sourceType.startsWith("L")) {
return sourceType;
}
return "L" + sourceType + ";";
}
public static Class<?> toWrapperClass(Type type) {
final Class<?> clazz = TypeHelper.toClass(type);
if (clazz.isPrimitive()) {
return wrappers.get(clazz);
} else return clazz;
}
public static String toWrapperType(Type type) {
return toType(toWrapperClass(type));
}
public static List<String> extractGenericTypeNames(String sig) {
List<String> types = new ArrayList<String>();
boolean nameDetected = false;
int currentStart = -1;
for(int i = 0; i < sig.length(); i++) {
char c = sig.charAt(i);
switch(c) {
case '<' :
case ';' :
if (!nameDetected) {
nameDetected = true;
currentStart = i + 1;
}
break;
case ':' :
types.add(sig.substring(currentStart, i));
nameDetected = false;
break;
}
}
return types;
}
private static Type[] parseTypes(String sig, List<String> genericTypeNames, Type target) throws ClassNotFoundException {
List<Type> types = new ArrayList<Type>();
int genericLevel = 0;
int currentStart = 0;
for(int i = 0; i < sig.length(); i++) {
char c = sig.charAt(i);
switch(c) {
case '<': genericLevel ++; break;
case '>': genericLevel --; break;
case ';' :
if (genericLevel == 0) {
types.add(toGenericType(sig.substring(currentStart, i), genericTypeNames, target));
currentStart = i + 1;
}
break;
}
}
return types.toArray(EMPTY_TYPE_ARRAY);
}
/**
 * Extracts the textual type names referenced by a JVM generic signature string,
 * e.g. {@code Ljava/util/List<Ljava/lang/String;>;} for a {@code List<String>} parameter.
 * Visits parameter, return and exception types of the signature; formal type
 * parameters and class bounds are currently ignored.
 *
 * @param sig generic signature as emitted by the compiler (method or type signature).
 * @return mutable list of extracted type names, in order of appearance.
 */
public static List<String> extractTypeNames(String sig) {
    final List<String> types = new ArrayList<String>();
    SignatureReader reader = new SignatureReader(sig);
    reader.accept(new SignatureVisitor(Opcodes.ASM5) {
        //TypeSignature =
        // visitBaseType | visitTypeVariable | visitArrayType | ( visitClassType visitTypeArgument* ( visitInnerClassType visitTypeArgument* )* visitEnd ) )
        /**
         * Accumulates the characters of a single type name; completed names are
         * flushed into the enclosing {@code types} list.
         */
        class AppendType extends SignatureVisitor {
            // Buffer for the type name currently being built.
            StringBuilder sb = new StringBuilder();
            // Depth of currently open generic type-argument lists ("<...>").
            int l = 0;
            public AppendType() {
                super(Opcodes.ASM5);
            }
            @Override
            public void visitBaseType(char descriptor) {
                // 'V' (void) is deliberately skipped: void is not a usable type name.
                if (descriptor != 'V') {
                    sb.append(descriptor);
                    visitEnd();
                }
            }
            @Override
            public void visitTypeVariable(String name) {
                // Type variables are recorded in descriptor form, e.g. "TT;".
                sb.append("T");
                sb.append(name);
                visitEnd();
            }
            @Override
            public SignatureVisitor visitArrayType() {
                // Array dimension prefix; the element type follows via this visitor.
                sb.append("[");
                return this;
            }
            @Override
            public void visitClassType(String name) {
                sb.append("L");
                sb.append(name);
                visitEnd();
            }
            @Override
            public void visitInnerClassType(String name) {
                visitClassType(name);
            }
            @Override
            public void visitTypeArgument() {
                // NOTE(review): debug leftover — an unbounded wildcard argument is only
                // printed, never appended to the buffer; confirm whether "*" should be recorded.
                System.out.println("visitTypeArgument");
            }
            @Override
            public SignatureVisitor visitTypeArgument(char wildcard) {
                l++;
                // First type argument seen: reopen the owner type that visitEnd()
                // already flushed into `types`, stripping its trailing ';'.
                if (sb.length() == 0) {
                    String t = types.remove(types.size() - 1);
                    if (t.endsWith(";")) {
                        t = t.substring(0, t.length() -1);
                    }
                    sb.append(t);
                    sb.append("<");
                }
                // '=' means "exactly this type"; '+' (extends) / '-' (super) are kept verbatim.
                if (wildcard != '=') {
                    sb.append(wildcard);
                }
                return this;
            }
            @Override
            public void visitEnd() {
                if (l == 0) {
                    flush();
                } else {
                    // Close the innermost open type-argument list.
                    sb.append(";>");
                    l--;
                }
            }
            // Moves the completed name from the buffer into `types` and resets the buffer.
            private void flush() {
                if (sb.length() >0) {
                    // Class types (L...) and type variables (T...) need a ';' terminator.
                    if (sb.charAt(0) == 'L' || sb.charAt(0) == 'T') {
                        sb.append(";");
                    }
                    types.add(sb.toString());
                    sb = new StringBuilder();
                }
            }
        }
        @Override
        public void visitFormalTypeParameter(String name) {
            // NOTE(review): debug leftover — formal type parameters are printed but otherwise ignored.
            System.out.println(name);
        }
        @Override
        public SignatureVisitor visitClassBound() {
            // NOTE(review): delegates to visitInterfaceBound() — looks like a copy-paste.
            // Harmless with ASM5's default visitor (both return this), but confirm intent.
            return super.visitInterfaceBound();
        }
        @Override
        public SignatureVisitor visitInterfaceBound() {
            return super.visitInterfaceBound();
        }
        @Override
        public SignatureVisitor visitParameterType() {
            return new AppendType();
        }
        @Override
        public SignatureVisitor visitReturnType() {
            return new AppendType();
        }
        @Override
        public SignatureVisitor visitExceptionType() {
            return new AppendType();
        }
    });
    return types;
}
/**
 * Returns the JVM descriptor ("L-type") for the given class: primitives are
 * resolved through the {@code primitivesType} lookup table, everything else is
 * converted via {@code toType} and re-dispatched.
 *
 * @param clazz class to convert.
 * @return JVM descriptor string for {@code clazz}.
 */
public static String toDeclaredLType(Class<?> clazz) {
    return clazz.isPrimitive()
            ? primitivesType.get(clazz)
            : toDeclaredLType(toType(clazz));
}
/**
 * Builds the JVM method descriptor for the given reflective method,
 * e.g. {@code (ILjava/lang/String;)V}.
 *
 * @param exec method to describe.
 * @return descriptor combining parameter types and return type.
 */
public static String toSignature(Method exec) {
    StringBuilder descriptor = new StringBuilder("(");
    for (Class<?> paramType : exec.getParameterTypes()) {
        descriptor.append(AsmUtils.toDeclaredLType(paramType));
    }
    descriptor.append(")").append(AsmUtils.toDeclaredLType(exec.getReturnType()));
    return descriptor.toString();
}
/**
 * Minimal {@link ParameterizedType} implementation used to synthesize generic types.
 *
 * <p>Per the {@link ParameterizedType} contract, implementations must provide an
 * {@code equals()} that equates any two instances sharing the same raw type and
 * actual type arguments; the previous version fell back to identity equality,
 * which breaks any cache or lookup keyed on types. The internal argument array
 * is also no longer exposed directly (defensive copy), matching JDK behavior.
 */
private static class ParameterizedTypeImpl implements ParameterizedType {
    /** Raw (erased) type, e.g. {@code List.class}. */
    private final Class<?> rawType;
    /** Actual type arguments, e.g. {@code {String.class}}. */
    private final Type[] types;
    public ParameterizedTypeImpl(Class<?> rawType, Type[] types) {
        this.rawType = rawType;
        this.types = types;
    }
    @Override
    public Type getRawType() {
        return rawType;
    }
    /** Always {@code null}: owner (enclosing) types are not supported here. */
    @Override
    public Type getOwnerType() {
        return null;
    }
    @Override
    public Type[] getActualTypeArguments() {
        // Defensive copy: callers must not be able to mutate internal state.
        return types.clone();
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        // Compare against any ParameterizedType implementation (incl. the JDK's).
        if (!(o instanceof ParameterizedType)) {
            return false;
        }
        ParameterizedType that = (ParameterizedType) o;
        return that.getOwnerType() == null
                && rawType.equals(that.getRawType())
                && Arrays.equals(types, that.getActualTypeArguments());
    }
    @Override
    public int hashCode() {
        // Mirrors the JDK's ParameterizedTypeImpl hashing scheme.
        return rawType.hashCode() ^ Arrays.hashCode(types);
    }
    @Override
    public String toString() {
        return "ParameterizedTypeImpl{" +
                "rawType=" + rawType +
                ", types=" + Arrays.toString(types) +
                '}';
    }
}
/**
 * Emits the most compact JVM instruction that pushes the int constant {@code i}
 * onto the operand stack: {@code ICONST_0..5} for 0-5, {@code BIPUSH} for the
 * signed-byte range, {@code SIPUSH} for the signed-short range, and an
 * {@code LDC} constant-pool load otherwise.
 *
 * @param mv method visitor to emit into.
 * @param i constant to push (any int, including negatives).
 */
public static void addIndex(MethodVisitor mv, int i) {
    switch(i) {
        case 0:
            mv.visitInsn(ICONST_0);
            return;
        case 1:
            mv.visitInsn(ICONST_1);
            return;
        case 2:
            mv.visitInsn(ICONST_2);
            return;
        case 3:
            mv.visitInsn(ICONST_3);
            return;
        case 4:
            mv.visitInsn(ICONST_4);
            return;
        case 5:
            mv.visitInsn(ICONST_5);
            return;
        default:
            // Fix: the LOWER bound must be checked too. Previously any negative
            // value satisfied "i <= Byte.MAX_VALUE" and was emitted as BIPUSH even
            // when it did not fit a signed byte (e.g. -1000), producing invalid
            // bytecode operands.
            if (i >= Byte.MIN_VALUE && i <= Byte.MAX_VALUE) {
                mv.visitIntInsn(BIPUSH, i);
            } else if (i >= Short.MIN_VALUE && i <= Short.MAX_VALUE) {
                mv.visitIntInsn(SIPUSH, i);
            } else {
                mv.visitLdcInsn(i);
            }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.hadoop;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteFileSystem;
import org.apache.ignite.IgniteSpringBean;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.FileSystemConfiguration;
import org.apache.ignite.hadoop.mapreduce.IgniteHadoopMapReducePlanner;
import org.apache.ignite.igfs.IgfsBlockLocation;
import org.apache.ignite.igfs.IgfsFile;
import org.apache.ignite.igfs.IgfsMetrics;
import org.apache.ignite.igfs.IgfsOutputStream;
import org.apache.ignite.igfs.IgfsPath;
import org.apache.ignite.igfs.IgfsPathSummary;
import org.apache.ignite.igfs.mapreduce.IgfsRecordResolver;
import org.apache.ignite.igfs.mapreduce.IgfsTask;
import org.apache.ignite.igfs.secondary.IgfsSecondaryFileSystem;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.cluster.IgniteClusterEx;
import org.apache.ignite.internal.processors.cache.GridCacheUtilityKey;
import org.apache.ignite.internal.processors.cache.IgniteInternalCache;
import org.apache.ignite.internal.processors.igfs.IgfsBlockLocationImpl;
import org.apache.ignite.internal.processors.igfs.IgfsContext;
import org.apache.ignite.internal.processors.igfs.IgfsEx;
import org.apache.ignite.internal.processors.igfs.IgfsInputStreamAdapter;
import org.apache.ignite.internal.processors.igfs.IgfsLocalMetrics;
import org.apache.ignite.internal.processors.igfs.IgfsPaths;
import org.apache.ignite.internal.processors.igfs.IgfsStatus;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.testframework.GridTestNode;
import org.apache.ignite.testframework.GridTestUtils;
import org.jetbrains.annotations.Nullable;
/**
*
*/
public class HadoopDefaultMapReducePlannerSelfTest extends HadoopAbstractSelfTest {
/** ID of test node 1. */
private static final UUID ID_1 = new UUID(0, 1);
/** ID of test node 2. */
private static final UUID ID_2 = new UUID(0, 2);
/** ID of test node 3. */
private static final UUID ID_3 = new UUID(0, 3);
/** Host name of test node 1. */
private static final String HOST_1 = "host1";
/** Host name of test node 2. */
private static final String HOST_2 = "host2";
/** Host name of test node 3. */
private static final String HOST_3 = "host3";
/** Host name not mapped to any topology node. */
private static final String INVALID_HOST_1 = "invalid_host1";
/** Host name not mapped to any topology node. */
private static final String INVALID_HOST_2 = "invalid_host2";
/** Host name not mapped to any topology node. */
private static final String INVALID_HOST_3 = "invalid_host3";
/** Mocked Grid. */
private static final MockIgnite GRID = new MockIgnite();
/** Mocked IGFS. */
private static final IgniteFileSystem IGFS = new MockIgfs();
/** Planner under test (shared across all test methods). */
private static final HadoopMapReducePlanner PLANNER = new IgniteHadoopMapReducePlanner();
/** Block locations registered via mapIgfsBlock(); consulted by MockIgfs.affinity(). */
private static final Map<Block, Collection<IgfsBlockLocation>> BLOCK_MAP = new HashMap<>();
/** Proxy flags consulted by MockIgfs.isProxy(). */
private static final Map<URI, Boolean> PROXY_MAP = new HashMap<>();
/** Last created plan, kept per thread so ensure*() helpers can inspect it. */
private static final ThreadLocal<HadoopMapReducePlan> PLAN = new ThreadLocal<>();
/**
 * Injects the mocked Ignite instance into the planner once for all tests.
 */
static {
    GridTestUtils.setFieldValue(PLANNER, "ignite", GRID);
}
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
    // Re-inject the logger for each test and reset shared mutable state so that
    // block locations / proxy flags from a previous test cannot leak in.
    GridTestUtils.setFieldValue(PLANNER, "log", log());
    BLOCK_MAP.clear();
    PROXY_MAP.clear();
}
/**
 * One IGFS block per file, each block hosted on exactly one distinct node:
 * mappers must be data-local and reducers spread across the mapper nodes.
 *
 * @throws IgniteCheckedException If failed.
 */
public void testIgfsOneBlockPerNode() throws IgniteCheckedException {
    HadoopFileBlock split1 = split(true, "/file1", 0, 100, HOST_1);
    HadoopFileBlock split2 = split(true, "/file2", 0, 100, HOST_2);
    HadoopFileBlock split3 = split(true, "/file3", 0, 100, HOST_3);
    // Register IGFS affinity: each file's single block lives on one node.
    mapIgfsBlock(split1.file(), 0, 100, location(0, 100, ID_1));
    mapIgfsBlock(split2.file(), 0, 100, location(0, 100, ID_2));
    mapIgfsBlock(split3.file(), 0, 100, location(0, 100, ID_3));
    // 1 split, 1 reducer: whole plan lands on node 1.
    plan(1, split1);
    assert ensureMappers(ID_1, split1);
    assert ensureReducers(ID_1, 1);
    assert ensureEmpty(ID_2);
    assert ensureEmpty(ID_3);
    // 1 split, 2 reducers: both reducers stay with the only mapper node.
    plan(2, split1);
    assert ensureMappers(ID_1, split1);
    assert ensureReducers(ID_1, 2);
    assert ensureEmpty(ID_2);
    assert ensureEmpty(ID_3);
    // 2 splits, 1 reducer: the single reducer may land on either mapper node.
    plan(1, split1, split2);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) || ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 2 splits, 2 reducers: one reducer per mapper node.
    plan(2, split1, split2);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 2 splits, 3 reducers: the extra reducer may go to either node.
    plan(3, split1, split2);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) || ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 3 splits, 3 reducers: perfectly balanced, one reducer per node.
    plan(3, split1, split2, split3);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureMappers(ID_3, split3);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureReducers(ID_3, 1);
    // 3 splits, 5 reducers: a 2/2/1 distribution; any node may get the single one.
    plan(5, split1, split2, split3);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureMappers(ID_3, split3);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
}
/**
 * Same scenarios as {@link #testIgfsOneBlockPerNode()} but with non-IGFS (HDFS)
 * splits: locality must come from split host names instead of IGFS block affinity.
 *
 * @throws IgniteCheckedException If failed.
 */
public void testNonIgfsOneBlockPerNode() throws IgniteCheckedException {
    HadoopFileBlock split1 = split(false, "/file1", 0, 100, HOST_1);
    HadoopFileBlock split2 = split(false, "/file2", 0, 100, HOST_2);
    HadoopFileBlock split3 = split(false, "/file3", 0, 100, HOST_3);
    // 1 split, 1 reducer: whole plan on the host's node.
    plan(1, split1);
    assert ensureMappers(ID_1, split1);
    assert ensureReducers(ID_1, 1);
    assert ensureEmpty(ID_2);
    assert ensureEmpty(ID_3);
    // 1 split, 2 reducers: both reducers stay with the only mapper node.
    plan(2, split1);
    assert ensureMappers(ID_1, split1);
    assert ensureReducers(ID_1, 2);
    assert ensureEmpty(ID_2);
    assert ensureEmpty(ID_3);
    // 2 splits, 1 reducer: reducer may land on either mapper node.
    plan(1, split1, split2);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) || ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 2 splits, 2 reducers: one reducer per mapper node.
    plan(2, split1, split2);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 2 splits, 3 reducers: extra reducer may go to either node.
    plan(3, split1, split2);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) || ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 3 splits, 3 reducers: one reducer per node.
    plan(3, split1, split2, split3);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureMappers(ID_3, split3);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureReducers(ID_3, 1);
    // 3 splits, 5 reducers: 2/2/1 distribution.
    plan(5, split1, split2, split3);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureMappers(ID_3, split3);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
}
/**
 * IGFS blocks replicated on several nodes: the planner may choose any replica
 * holder as the mapper, so expectations enumerate all allowed alternatives.
 *
 * @throws IgniteCheckedException If failed.
 */
public void testIgfsSeveralBlocksPerNode() throws IgniteCheckedException {
    HadoopFileBlock split1 = split(true, "/file1", 0, 100, HOST_1, HOST_2);
    HadoopFileBlock split2 = split(true, "/file2", 0, 100, HOST_1, HOST_2);
    HadoopFileBlock split3 = split(true, "/file3", 0, 100, HOST_1, HOST_3);
    // Each block is replicated on two nodes.
    mapIgfsBlock(split1.file(), 0, 100, location(0, 100, ID_1, ID_2));
    mapIgfsBlock(split2.file(), 0, 100, location(0, 100, ID_1, ID_2));
    mapIgfsBlock(split3.file(), 0, 100, location(0, 100, ID_1, ID_3));
    // 1 split, 1 reducer: either replica holder may take the whole plan.
    plan(1, split1);
    assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 1) && ensureEmpty(ID_2) ||
        ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 1 split, 2 reducers: both reducers follow the chosen mapper node.
    plan(2, split1);
    assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 2) && ensureEmpty(ID_2) ||
        ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 2);
    assert ensureEmpty(ID_3);
    // 2 splits, 1 reducer: splits spread over both replica holders, either order.
    plan(1, split1, split2);
    assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) || ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 2 splits, 2 reducers: one reducer per mapper node.
    plan(2, split1, split2);
    assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 3 splits, 3 reducers: mapper placement is not pinned; only reducer counts checked.
    plan(3, split1, split2, split3);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureReducers(ID_3, 1);
    // 3 splits, 5 reducers: 2/2/1 distribution.
    plan(5, split1, split2, split3);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
}
/**
 * Same scenarios as {@link #testIgfsSeveralBlocksPerNode()} but with non-IGFS
 * splits: multiple candidate hosts per split, locality from host names only.
 *
 * @throws IgniteCheckedException If failed.
 */
public void testNonIgfsSeveralBlocksPerNode() throws IgniteCheckedException {
    HadoopFileBlock split1 = split(false, "/file1", 0, 100, HOST_1, HOST_2);
    HadoopFileBlock split2 = split(false, "/file2", 0, 100, HOST_1, HOST_2);
    HadoopFileBlock split3 = split(false, "/file3", 0, 100, HOST_1, HOST_3);
    // 1 split, 1 reducer: either candidate host may take the whole plan.
    plan(1, split1);
    assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 1) && ensureEmpty(ID_2) ||
        ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 1 split, 2 reducers: both reducers follow the chosen mapper node.
    plan(2, split1);
    assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 2) && ensureEmpty(ID_2) ||
        ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 2);
    assert ensureEmpty(ID_3);
    // 2 splits, 1 reducer: splits spread over both candidate nodes, either order.
    plan(1, split1, split2);
    assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) || ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 2 splits, 2 reducers: one reducer per mapper node.
    plan(2, split1, split2);
    assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
    // 3 splits, 3 reducers: mapper placement not pinned; only reducer counts checked.
    plan(3, split1, split2, split3);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureReducers(ID_3, 1);
    // 3 splits, 5 reducers: 2/2/1 distribution.
    plan(5, split1, split2, split3);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
}
/**
 * IGFS splits whose two halves live on different node sets: the expected mapper
 * is the node present in both halves' locations (ID_1 for file1, ID_2 for file2),
 * as pinned by the assertions below.
 *
 * @throws IgniteCheckedException If failed.
 */
public void testIgfsSeveralComplexBlocksPerNode() throws IgniteCheckedException {
    HadoopFileBlock split1 = split(true, "/file1", 0, 100, HOST_1, HOST_2, HOST_3);
    HadoopFileBlock split2 = split(true, "/file2", 0, 100, HOST_1, HOST_2, HOST_3);
    // file1: halves on {1,2} and {1,3} -> node 1 covers the whole split.
    // file2: halves on {1,2} and {2,3} -> node 2 covers the whole split.
    mapIgfsBlock(split1.file(), 0, 100, location(0, 50, ID_1, ID_2), location(51, 100, ID_1, ID_3));
    mapIgfsBlock(split2.file(), 0, 100, location(0, 50, ID_1, ID_2), location(51, 100, ID_2, ID_3));
    // Single split1: must map on node 1.
    plan(1, split1);
    assert ensureMappers(ID_1, split1);
    assert ensureReducers(ID_1, 1);
    assert ensureEmpty(ID_2);
    assert ensureEmpty(ID_3);
    // Single split2: must map on node 2.
    plan(1, split2);
    assert ensureMappers(ID_2, split2);
    assert ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_1);
    assert ensureEmpty(ID_3);
    // Both splits, 1 reducer: mappers pinned; reducer may land on either.
    plan(1, split1, split2);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1) || ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0);
    assert ensureEmpty(ID_3);
    // Both splits, 2 reducers: one reducer per mapper node.
    plan(2, split1, split2);
    assert ensureMappers(ID_1, split1);
    assert ensureMappers(ID_2, split2);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureEmpty(ID_3);
}
/**
 * Splits whose hosts resolve to no topology node ("orphans"): the planner may
 * assign mappers to arbitrary nodes, so expectations enumerate all permutations.
 *
 * @throws IgniteCheckedException If failed.
 */
public void testNonIgfsOrphans() throws IgniteCheckedException {
    HadoopFileBlock split1 = split(false, "/file1", 0, 100, INVALID_HOST_1, INVALID_HOST_2);
    HadoopFileBlock split2 = split(false, "/file2", 0, 100, INVALID_HOST_1, INVALID_HOST_3);
    HadoopFileBlock split3 = split(false, "/file3", 0, 100, INVALID_HOST_2, INVALID_HOST_3);
    // 1 orphan split, 1 reducer: whole plan on any single node.
    plan(1, split1);
    assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 1) && ensureEmpty(ID_2) && ensureEmpty(ID_3) ||
        ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 1) && ensureEmpty(ID_3) ||
        ensureEmpty(ID_1) && ensureEmpty(ID_2) && ensureMappers(ID_3, split1) && ensureReducers(ID_3, 1);
    // 1 orphan split, 2 reducers: both reducers stay with the chosen mapper node.
    plan(2, split1);
    assert ensureMappers(ID_1, split1) && ensureReducers(ID_1, 2) && ensureEmpty(ID_2) && ensureEmpty(ID_3) ||
        ensureEmpty(ID_1) && ensureMappers(ID_2, split1) && ensureReducers(ID_2, 2) && ensureEmpty(ID_3) ||
        ensureEmpty(ID_1) && ensureEmpty(ID_2) && ensureMappers(ID_3, split1) && ensureReducers(ID_3, 2);
    // 3 orphan splits, 1 reducer: one split per node in any permutation; single reducer anywhere.
    plan(1, split1, split2, split3);
    assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split3) ||
        ensureMappers(ID_1, split1) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split2) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split3) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split1) ||
        ensureMappers(ID_1, split3) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split2) ||
        ensureMappers(ID_1, split3) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split1);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 0) && ensureReducers(ID_3, 0) ||
        ensureReducers(ID_1, 0) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 0) ||
        ensureReducers(ID_1, 0) && ensureReducers(ID_2, 0) && ensureReducers(ID_3, 1);
    // 3 orphan splits, 3 reducers: any mapper permutation, one reducer per node.
    plan(3, split1, split2, split3);
    assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split3) ||
        ensureMappers(ID_1, split1) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split2) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split3) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split1) ||
        ensureMappers(ID_1, split3) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split2) ||
        ensureMappers(ID_1, split3) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split1);
    assert ensureReducers(ID_1, 1);
    assert ensureReducers(ID_2, 1);
    assert ensureReducers(ID_3, 1);
    // 3 orphan splits, 5 reducers: any mapper permutation, 2/2/1 reducer distribution.
    plan(5, split1, split2, split3);
    assert ensureMappers(ID_1, split1) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split3) ||
        ensureMappers(ID_1, split1) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split2) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split3) ||
        ensureMappers(ID_1, split2) && ensureMappers(ID_2, split3) && ensureMappers(ID_3, split1) ||
        ensureMappers(ID_1, split3) && ensureMappers(ID_2, split1) && ensureMappers(ID_3, split2) ||
        ensureMappers(ID_1, split3) && ensureMappers(ID_2, split2) && ensureMappers(ID_3, split1);
    assert ensureReducers(ID_1, 1) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 1) && ensureReducers(ID_3, 2) ||
        ensureReducers(ID_1, 2) && ensureReducers(ID_2, 2) && ensureReducers(ID_3, 1);
}
/**
 * Runs the planner over the fixed three-node topology and remembers the result
 * in the thread-local {@code PLAN} for the ensure*() helpers.
 *
 * @param reducers Reducers count (must be positive).
 * @param splits Input splits (at least one).
 * @return Created plan.
 * @throws IgniteCheckedException If planning failed.
 */
private static HadoopMapReducePlan plan(int reducers, HadoopInputSplit... splits) throws IgniteCheckedException {
    assert reducers > 0;
    assert splits != null && splits.length > 0;
    Collection<HadoopInputSplit> splitList = new ArrayList<>(splits.length);
    Collections.addAll(splitList, splits);
    // Fixed topology: nodes 1..3 mapped to hosts 1..3, in order.
    UUID[] ids = {ID_1, ID_2, ID_3};
    String[] hosts = {HOST_1, HOST_2, HOST_3};
    Collection<ClusterNode> top = new ArrayList<>();
    for (int i = 0; i < ids.length; i++) {
        GridTestNode node = new GridTestNode(ids[i]);
        node.setHostName(hosts[i]);
        top.add(node);
    }
    HadoopMapReducePlan res = PLANNER.preparePlan(new MockJob(reducers, splitList), top, null);
    PLAN.set(res);
    return res;
}
/**
 * Checks that the last plan assigned exactly the given splits to the node.
 *
 * @param nodeId Node ID.
 * @param expSplits Expected splits.
 * @return {@code True} if the node's mappers match the expectation.
 */
private static boolean ensureMappers(UUID nodeId, HadoopInputSplit... expSplits) {
    Collection<HadoopInputSplit> exp = new ArrayList<>();
    Collections.addAll(exp, expSplits);
    return F.eq(exp, PLAN.get().mappers(nodeId));
}
/**
 * Checks that the last plan assigned exactly the given number of reducers to the node.
 *
 * @param nodeId Node ID.
 * @param reducers Expected reducer count ({@code 0} accepts both null and empty).
 * @return {@code True} if the node's reducer count matches.
 */
private static boolean ensureReducers(UUID nodeId, int reducers) {
    int[] arr = PLAN.get().reducers(nodeId);
    if (reducers == 0)
        return F.isEmpty(arr);
    return arr != null && arr.length == reducers;
}
/**
 * Checks that the last plan assigned neither mappers nor reducers to the node.
 *
 * @param nodeId Node ID.
 * @return {@code True} if the node received no work.
 */
private static boolean ensureEmpty(UUID nodeId) {
    HadoopMapReducePlan plan = PLAN.get();
    return F.isEmpty(plan.mappers(nodeId)) && F.isEmpty(plan.reducers(nodeId));
}
/**
 * Creates a file block split, either IGFS-backed ({@code igfs://igfs@...}) or
 * HDFS-backed ({@code hdfs://...}).
 *
 * @param igfs IGFS flag.
 * @param file File path.
 * @param start Start offset.
 * @param len Length.
 * @param hosts Candidate hosts.
 * @return Split.
 */
private static HadoopFileBlock split(boolean igfs, String file, long start, long len, String... hosts) {
    String scheme = igfs ? "igfs://igfs@" : "hdfs://";
    return new HadoopFileBlock(hosts, URI.create(scheme + file), start, len);
}
/**
 * Creates a block location hosted on the given nodes.
 *
 * @param start Start offset.
 * @param len Length.
 * @param nodeIds Hosting node IDs (at least one).
 * @return Block location.
 */
private static IgfsBlockLocation location(long start, long len, UUID... nodeIds) {
    assert nodeIds != null && nodeIds.length > 0;
    Collection<ClusterNode> nodes = new ArrayList<>(nodeIds.length);
    for (int i = 0; i < nodeIds.length; i++)
        nodes.add(new GridTestNode(nodeIds[i]));
    return new IgfsBlockLocationImpl(start, len, nodes);
}
/**
 * Registers IGFS block locations for a file region so that
 * {@code MockIgfs.affinity()} can serve them to the planner.
 *
 * @param file File URI.
 * @param start Start offset.
 * @param len Length.
 * @param locations Locations (at least one).
 */
private static void mapIgfsBlock(URI file, long start, long len, IgfsBlockLocation... locations) {
    assert locations != null && locations.length > 0;
    Collection<IgfsBlockLocation> locs = new ArrayList<>();
    Collections.addAll(locs, locations);
    BLOCK_MAP.put(new Block(new IgfsPath(file), start, len), locs);
}
/**
 * Value object identifying a file block by path, start offset and length;
 * used as the {@code BLOCK_MAP} lookup key, hence the value-based equality.
 */
private static class Block {
    /** File path. */
    private final IgfsPath path;
    /** Block start offset. */
    private final long start;
    /** Block length. */
    private final long len;
    /**
     * Constructor.
     *
     * @param path Path.
     * @param start Start offset.
     * @param len Length.
     */
    private Block(IgfsPath path, long start, long len) {
        this.path = path;
        this.start = start;
        this.len = len;
    }
    /** {@inheritDoc} */
    @Override public boolean equals(Object o) {
        if (this == o)
            return true;
        if (!(o instanceof Block))
            return false;
        Block other = (Block) o;
        // Cheapest comparisons first; path last.
        return len == other.len && start == other.start && path.equals(other.path);
    }
    /** {@inheritDoc} */
    @Override public int hashCode() {
        // Long.hashCode(x) is bit-identical to the manual (int)(x ^ (x >>> 32)) fold.
        int res = path.hashCode();
        res = 31 * res + Long.hashCode(start);
        res = 31 * res + Long.hashCode(len);
        return res;
    }
}
/**
 * Mocked job: carries only the reducer count and the input splits the planner
 * needs; all lifecycle methods are no-ops and all other accessors return null.
 */
private static class MockJob implements HadoopJob {
    /** Reducers count. */
    private final int reducers;
    /** Input splits handed to the planner. */
    private Collection<HadoopInputSplit> splitList;
    /**
     * Constructor.
     *
     * @param reducers Reducers count.
     * @param splitList Splits.
     */
    private MockJob(int reducers, Collection<HadoopInputSplit> splitList) {
        this.reducers = reducers;
        this.splitList = splitList;
    }
    /** {@inheritDoc} */
    @Override public HadoopJobId id() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public HadoopJobInfo info() {
        // Only reducers() is meaningful to the planner.
        return new HadoopDefaultJobInfo() {
            @Override public int reducers() {
                return reducers;
            }
        };
    }
    /** {@inheritDoc} */
    @Override public Collection<HadoopInputSplit> input() throws IgniteCheckedException {
        return splitList;
    }
    /** {@inheritDoc} */
    @Override public HadoopTaskContext getTaskContext(HadoopTaskInfo info) throws IgniteCheckedException {
        return null;
    }
    /** {@inheritDoc} */
    @Override public void initialize(boolean external, UUID nodeId) throws IgniteCheckedException {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public void dispose(boolean external) throws IgniteCheckedException {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public void prepareTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public void cleanupTaskEnvironment(HadoopTaskInfo info) throws IgniteCheckedException {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public void cleanupStagingDirectory() {
        // No-op.
    }
}
/**
 * Mocked IGFS: only {@link #isProxy}, the two-argument {@link #affinity} and
 * {@link #exists} carry behavior (backed by the static {@code PROXY_MAP} and
 * {@code BLOCK_MAP}); every other method is an inert stub returning
 * {@code null}, {@code 0} or {@code false}.
 */
private static class MockIgfs implements IgfsEx {
    /** {@inheritDoc} */
    @Override public boolean isProxy(URI path) {
        // A path is proxied only when explicitly flagged in PROXY_MAP.
        return PROXY_MAP.containsKey(path) && PROXY_MAP.get(path);
    }
    /** {@inheritDoc} */
    @Override public Collection<IgfsBlockLocation> affinity(IgfsPath path, long start, long len) {
        // Serves locations registered by mapIgfsBlock(); null when unmapped.
        return BLOCK_MAP.get(new Block(path, start, len));
    }
    /** {@inheritDoc} */
    @Override public Collection<IgfsBlockLocation> affinity(IgfsPath path, long start, long len,
        long maxLen) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public IgfsContext context() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsPaths proxyPaths() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsInputStreamAdapter open(IgfsPath path, int bufSize, int seqReadsBeforePrefetch) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsInputStreamAdapter open(IgfsPath path) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsInputStreamAdapter open(IgfsPath path, int bufSize) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsStatus globalSpace() throws IgniteCheckedException {
        return null;
    }
    /** {@inheritDoc} */
    @Override public void globalSampling(@Nullable Boolean val) throws IgniteCheckedException {
        // No-op.
    }
    /** {@inheritDoc} */
    @Nullable @Override public Boolean globalSampling() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsLocalMetrics localMetrics() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public long groupBlockSize() {
        return 0;
    }
    /** {@inheritDoc} */
    @Nullable @Override public String clientLogDirectory() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public void clientLogDirectory(String logDir) {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public boolean evictExclude(IgfsPath path, boolean primary) {
        return false;
    }
    /** {@inheritDoc} */
    @Nullable @Override public String name() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public FileSystemConfiguration configuration() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public boolean exists(IgfsPath path) {
        // Every path is reported as existing so planner lookups never short-circuit.
        return true;
    }
    /** {@inheritDoc} */
    @Nullable @Override public IgfsFile info(IgfsPath path) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsPathSummary summary(IgfsPath path) {
        return null;
    }
    /** {@inheritDoc} */
    @Nullable @Override public IgfsFile update(IgfsPath path, Map<String, String> props) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public void rename(IgfsPath src, IgfsPath dest) {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public boolean delete(IgfsPath path, boolean recursive) {
        return false;
    }
    /** {@inheritDoc} */
    @Override public void mkdirs(IgfsPath path) {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public void mkdirs(IgfsPath path, @Nullable Map<String, String> props) {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public Collection<IgfsPath> listPaths(IgfsPath path) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public Collection<IgfsFile> listFiles(IgfsPath path) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public long usedSpaceSize() {
        return 0;
    }
    /** {@inheritDoc} */
    @Override public IgfsOutputStream create(IgfsPath path, boolean overwrite) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsOutputStream create(IgfsPath path, int bufSize, boolean overwrite, int replication,
        long blockSize, @Nullable Map<String, String> props) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsOutputStream create(IgfsPath path, int bufSize, boolean overwrite,
        @Nullable IgniteUuid affKey, int replication, long blockSize, @Nullable Map<String, String> props) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsOutputStream append(IgfsPath path, boolean create) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsOutputStream append(IgfsPath path, int bufSize, boolean create,
        @Nullable Map<String, String> props) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public void setTimes(IgfsPath path, long accessTime, long modificationTime) {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public IgfsMetrics metrics() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public void resetMetrics() {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public long size(IgfsPath path) {
        return 0;
    }
    /** {@inheritDoc} */
    @Override public void format() {
        // No-op.
    }
    /** {@inheritDoc} */
    @Override public <T, R> R execute(IgfsTask<T, R> task, @Nullable IgfsRecordResolver rslvr,
        Collection<IgfsPath> paths, @Nullable T arg) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public <T, R> R execute(IgfsTask<T, R> task, @Nullable IgfsRecordResolver rslvr,
        Collection<IgfsPath> paths, boolean skipNonExistentFiles, long maxRangeLen, @Nullable T arg) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public <T, R> R execute(Class<? extends IgfsTask<T, R>> taskCls,
        @Nullable IgfsRecordResolver rslvr, Collection<IgfsPath> paths, @Nullable T arg) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public <T, R> R execute(Class<? extends IgfsTask<T, R>> taskCls,
        @Nullable IgfsRecordResolver rslvr, Collection<IgfsPath> paths, boolean skipNonExistentFiles,
        long maxRangeLen, @Nullable T arg) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgniteUuid nextAffinityKey() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgniteFileSystem withAsync() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public boolean isAsync() {
        return false;
    }
    /** {@inheritDoc} */
    @Override public <R> IgniteFuture<R> future() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public IgfsSecondaryFileSystem asSecondary() {
        return null;
    }
}
/**
 * Mocked Grid: only {@link #igfsx(String)} carries behavior — it hands out the
 * shared {@code IGFS} mock (and asserts the expected file system name). All
 * other overrides are inert stubs.
 */
@SuppressWarnings("ExternalizableWithoutPublicNoArgConstructor")
private static class MockIgnite extends IgniteSpringBean implements IgniteEx {
    /** {@inheritDoc} */
    @Override public IgniteClusterEx cluster() {
        return (IgniteClusterEx)super.cluster();
    }
    /** {@inheritDoc} */
    @Override public IgniteFileSystem igfsx(String name) {
        // The planner is expected to look up exactly the "igfs" file system.
        assert F.eq("igfs", name);
        return IGFS;
    }
    /** {@inheritDoc} */
    @Override public Hadoop hadoop() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public String name() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public <K extends GridCacheUtilityKey, V> IgniteInternalCache<K, V> utilityCache() {
        return null;
    }
    /** {@inheritDoc} */
    @Nullable @Override public <K, V> IgniteInternalCache<K, V> cachex(@Nullable String name) {
        return null;
    }
    /** {@inheritDoc} */
    @Nullable @Override public <K, V> IgniteInternalCache<K, V> cachex() {
        return null;
    }
    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public Collection<IgniteInternalCache<?, ?>> cachesx(@Nullable IgnitePredicate<? super IgniteInternalCache<?, ?>>... p) {
        return null;
    }
    /** {@inheritDoc} */
    @Override public boolean eventUserRecordable(int type) {
        return false;
    }
    /** {@inheritDoc} */
    @Override public boolean allEventsUserRecordable(int[] types) {
        return false;
    }
    /** {@inheritDoc} */
    @Override public boolean isJmxRemoteEnabled() {
        return false;
    }
    /** {@inheritDoc} */
    @Override public boolean isRestartEnabled() {
        return false;
    }
    /** {@inheritDoc} */
    @Override public ClusterNode localNode() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public String latestVersion() {
        return null;
    }
    /** {@inheritDoc} */
    @Override public GridKernalContext context() {
        return null;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rya.accumulo.instance;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.Date;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;
import org.apache.rya.accumulo.AccumuloITBase;
import org.apache.rya.accumulo.MiniAccumuloClusterInstance;
import org.apache.rya.api.instance.RyaDetails;
import org.apache.rya.api.instance.RyaDetails.EntityCentricIndexDetails;
import org.apache.rya.api.instance.RyaDetails.FreeTextIndexDetails;
import org.apache.rya.api.instance.RyaDetails.JoinSelectivityDetails;
import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails;
import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails.FluoDetails;
import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails.PCJDetails;
import org.apache.rya.api.instance.RyaDetails.PCJIndexDetails.PCJDetails.PCJUpdateStrategy;
import org.apache.rya.api.instance.RyaDetails.ProspectorDetails;
import org.apache.rya.api.instance.RyaDetails.TemporalIndexDetails;
import org.apache.rya.api.instance.RyaDetailsRepository;
import org.apache.rya.api.instance.RyaDetailsRepository.AlreadyInitializedException;
import org.apache.rya.api.instance.RyaDetailsRepository.ConcurrentUpdateException;
import org.apache.rya.api.instance.RyaDetailsRepository.NotInitializedException;
import org.apache.rya.api.instance.RyaDetailsRepository.RyaDetailsRepositoryException;
import org.junit.Test;
import com.google.common.base.Optional;
/**
 * Tests the methods of {@link AccumuloRyaDetailsRepository} by using a {@link MiniAccumuloCluster}.
 */
public class AccumuloRyaDetailsRepositoryIT extends AccumuloITBase {

    /**
     * Builds the {@link RyaDetails} object each test initializes its repository with.
     * <p>
     * Extracted into a helper because the identical builder chain was previously
     * duplicated verbatim in five of the test methods.
     *
     * @param instanceName - The name of the Rya instance the details describe.
     * @return The details used to initialize the repository under test.
     */
    private static RyaDetails makeDetails(final String instanceName) {
        return RyaDetails.builder()
            .setRyaInstanceName(instanceName)
            .setRyaVersion("1.2.3.4")
            .setEntityCentricIndexDetails( new EntityCentricIndexDetails(true) )
            //RYA-215            .setGeoIndexDetails( new GeoIndexDetails(true) )
            .setTemporalIndexDetails( new TemporalIndexDetails(true) )
            .setFreeTextDetails( new FreeTextIndexDetails(true) )
            .setPCJIndexDetails(
                    PCJIndexDetails.builder()
                        .setEnabled(true)
                        .setFluoDetails( new FluoDetails("test_instance_rya_pcj_updater") )
                        .addPCJDetails(
                                PCJDetails.builder()
                                    .setId("pcj 1")
                                    .setUpdateStrategy(PCJUpdateStrategy.BATCH)
                                    .setLastUpdateTime( new Date() ))
                        .addPCJDetails(
                                PCJDetails.builder()
                                    .setId("pcj 2")))
            .setProspectorDetails( new ProspectorDetails(Optional.of(new Date())) )
            .setJoinSelectivityDetails( new JoinSelectivityDetails(Optional.of(new Date())) )
            .build();
    }

    @Test
    public void initializeAndGet() throws AccumuloException, AccumuloSecurityException, AlreadyInitializedException, RyaDetailsRepositoryException {
        final String instanceName = getRyaInstanceName();

        // Create the metadata object the repository will be initialized with.
        final RyaDetails details = makeDetails(instanceName);

        // Setup the repository that will be tested using a mini instance of Accumulo.
        final Connector connector = getClusterInstance().getConnector();
        final RyaDetailsRepository repo = new AccumuloRyaInstanceDetailsRepository(connector, instanceName);

        // Initialize the repository.
        repo.initialize(details);

        // Fetch the stored details.
        final RyaDetails stored = repo.getRyaInstanceDetails();

        // Ensure the fetched object is equivalent to what was stored.
        assertEquals(details, stored);
    }

    @Test(expected = AlreadyInitializedException.class)
    public void initialize_alreadyInitialized() throws AlreadyInitializedException, RyaDetailsRepositoryException, AccumuloException, AccumuloSecurityException {
        final String instanceName = getRyaInstanceName();

        // Create the metadata object the repository will be initialized with.
        final RyaDetails details = makeDetails(instanceName);

        // Setup the repository that will be tested using a mini instance of Accumulo.
        final Connector connector = getClusterInstance().getConnector();
        final RyaDetailsRepository repo = new AccumuloRyaInstanceDetailsRepository(connector, instanceName);

        // Initialize the repository.
        repo.initialize(details);

        // Initializing it a second time must throw AlreadyInitializedException.
        repo.initialize(details);
    }

    @Test(expected = NotInitializedException.class)
    public void getRyaInstance_notInitialized() throws AccumuloException, AccumuloSecurityException, NotInitializedException, RyaDetailsRepositoryException {
        // Setup the repository that will be tested using a mini instance of Accumulo.
        final Connector connector = getClusterInstance().getConnector();
        final RyaDetailsRepository repo = new AccumuloRyaInstanceDetailsRepository(connector, getRyaInstanceName());

        // Fetching details from an uninitialized repository must throw NotInitializedException.
        repo.getRyaInstanceDetails();
    }

    @Test
    public void isInitialized_true() throws AccumuloException, AccumuloSecurityException, AlreadyInitializedException, RyaDetailsRepositoryException {
        final String instanceName = getRyaInstanceName();

        // Create the metadata object the repository will be initialized with.
        final RyaDetails details = makeDetails(instanceName);

        // Setup the repository that will be tested using a mini instance of Accumulo.
        final MiniAccumuloClusterInstance clusterInstance = getClusterInstance();
        final Connector connector = clusterInstance.getConnector();
        final RyaDetailsRepository repo = new AccumuloRyaInstanceDetailsRepository(connector, instanceName);

        // Initialize the repository.
        repo.initialize(details);

        // Ensure the repository reports that it has been initialized.
        assertTrue( repo.isInitialized() );
    }

    @Test
    public void isInitialized_false() throws AccumuloException, AccumuloSecurityException, RyaDetailsRepositoryException {
        // Setup the repository that will be tested using a mini instance of Accumulo.
        final Connector connector = getClusterInstance().getConnector();
        final RyaDetailsRepository repo = new AccumuloRyaInstanceDetailsRepository(connector, getRyaInstanceName());

        // Ensure the repository reports that is has not been initialized.
        assertFalse( repo.isInitialized() );
    }

    @Test
    public void update() throws AlreadyInitializedException, RyaDetailsRepositoryException, AccumuloException, AccumuloSecurityException {
        final String instanceName = getRyaInstanceName();

        // Create the metadata object the repository will be initialized with.
        final RyaDetails details = makeDetails(instanceName);

        // Setup the repository that will be tested using a mini instance of Accumulo.
        final Connector connector = getClusterInstance().getConnector();
        final RyaDetailsRepository repo = new AccumuloRyaInstanceDetailsRepository(connector, instanceName);

        // Initialize the repository.
        repo.initialize(details);

        // Create a new state for the details.
        final RyaDetails updated = new RyaDetails.Builder( details )
                .setEntityCentricIndexDetails(new EntityCentricIndexDetails(false) )
                .build();

        // Execute the update.
        repo.update(details, updated);

        // Show the new state that is stored matches the updated state.
        final RyaDetails fetched = repo.getRyaInstanceDetails();
        assertEquals(updated, fetched);
    }

    @Test(expected = ConcurrentUpdateException.class)
    public void update_outOfDate() throws AccumuloException, AccumuloSecurityException, AlreadyInitializedException, RyaDetailsRepositoryException {
        final String instanceName = getRyaInstanceName();

        // Create the metadata object the repository will be initialized with.
        final RyaDetails details = makeDetails(instanceName);

        // Setup the repository that will be tested using a mini instance of Accumulo.
        final Connector connector = getClusterInstance().getConnector();
        final RyaDetailsRepository repo = new AccumuloRyaInstanceDetailsRepository(connector, instanceName);

        // Initialize the repository.
        repo.initialize(details);

        // Create a new state for the details.
        final RyaDetails updated = new RyaDetails.Builder( details )
                .setEntityCentricIndexDetails(new EntityCentricIndexDetails(false) )
                .build();

        // Try to execute the update where the old state is not the currently stored state;
        // the repository must detect the stale "old" value and throw ConcurrentUpdateException.
        repo.update(updated, updated);
    }
}
| |
package de.danoeh.antennapod.core.service.playback;
import android.annotation.SuppressLint;
import android.app.Notification;
import android.app.PendingIntent;
import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.AudioManager;
import android.media.MediaMetadataRetriever;
import android.media.MediaPlayer;
import android.media.RemoteControlClient;
import android.media.RemoteControlClient.MetadataEditor;
import android.os.AsyncTask;
import android.os.Binder;
import android.os.Build;
import android.os.IBinder;
import android.preference.PreferenceManager;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import android.util.Pair;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.widget.Toast;
import com.squareup.picasso.Picasso;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException;
import java.util.List;
import de.danoeh.antennapod.core.BuildConfig;
import de.danoeh.antennapod.core.ClientConfig;
import de.danoeh.antennapod.core.R;
import de.danoeh.antennapod.core.feed.Chapter;
import de.danoeh.antennapod.core.feed.FeedItem;
import de.danoeh.antennapod.core.feed.FeedMedia;
import de.danoeh.antennapod.core.feed.MediaType;
import de.danoeh.antennapod.core.preferences.PlaybackPreferences;
import de.danoeh.antennapod.core.preferences.UserPreferences;
import de.danoeh.antennapod.core.receiver.MediaButtonReceiver;
import de.danoeh.antennapod.core.storage.DBTasks;
import de.danoeh.antennapod.core.storage.DBWriter;
import de.danoeh.antennapod.core.util.QueueAccess;
import de.danoeh.antennapod.core.util.flattr.FlattrUtils;
import de.danoeh.antennapod.core.util.playback.Playable;
/**
* Controls the MediaPlayer that plays a FeedMedia-file
*/
public class PlaybackService extends Service {
public static final String FORCE_WIDGET_UPDATE = "de.danoeh.antennapod.FORCE_WIDGET_UPDATE";
public static final String STOP_WIDGET_UPDATE = "de.danoeh.antennapod.STOP_WIDGET_UPDATE";
/**
* Logging tag
*/
private static final String TAG = "PlaybackService";
/**
* Parcelable of type Playable.
*/
public static final String EXTRA_PLAYABLE = "PlaybackService.PlayableExtra";
/**
* True if media should be streamed.
*/
public static final String EXTRA_SHOULD_STREAM = "extra.de.danoeh.antennapod.core.service.shouldStream";
/**
* True if playback should be started immediately after media has been
* prepared.
*/
public static final String EXTRA_START_WHEN_PREPARED = "extra.de.danoeh.antennapod.core.service.startWhenPrepared";
public static final String EXTRA_PREPARE_IMMEDIATELY = "extra.de.danoeh.antennapod.core.service.prepareImmediately";
public static final String ACTION_PLAYER_STATUS_CHANGED = "action.de.danoeh.antennapod.core.service.playerStatusChanged";
public static final String EXTRA_NEW_PLAYER_STATUS = "extra.de.danoeh.antennapod.service.playerStatusChanged.newStatus";
// AVRCP broadcast actions understood by bluetooth devices / the stock music app.
private static final String AVRCP_ACTION_PLAYER_STATUS_CHANGED = "com.android.music.playstatechanged";
private static final String AVRCP_ACTION_META_CHANGED = "com.android.music.metachanged";
public static final String ACTION_PLAYER_NOTIFICATION = "action.de.danoeh.antennapod.core.service.playerNotification";
public static final String EXTRA_NOTIFICATION_CODE = "extra.de.danoeh.antennapod.core.service.notificationCode";
public static final String EXTRA_NOTIFICATION_TYPE = "extra.de.danoeh.antennapod.core.service.notificationType";
/**
* If the PlaybackService receives this action, it will stop playback and
* try to shutdown.
*/
public static final String ACTION_SHUTDOWN_PLAYBACK_SERVICE = "action.de.danoeh.antennapod.core.service.actionShutdownPlaybackService";
/**
* If the PlaybackService receives this action, it will end playback of the
* current episode and load the next episode if there is one available.
*/
public static final String ACTION_SKIP_CURRENT_EPISODE = "action.de.danoeh.antennapod.core.service.skipCurrentEpisode";
/**
* If the PlaybackService receives this action, it will pause playback.
*/
public static final String ACTION_PAUSE_PLAY_CURRENT_EPISODE = "action.de.danoeh.antennapod.core.service.pausePlayCurrentEpisode";
/**
* If the PlaybackService receives this action, it will resume playback.
*/
public static final String ACTION_RESUME_PLAY_CURRENT_EPISODE = "action.de.danoeh.antennapod.core.service.resumePlayCurrentEpisode";
/**
* Used in NOTIFICATION_TYPE_RELOAD.
*/
public static final int EXTRA_CODE_AUDIO = 1;
public static final int EXTRA_CODE_VIDEO = 2;
public static final int NOTIFICATION_TYPE_ERROR = 0;
public static final int NOTIFICATION_TYPE_INFO = 1;
public static final int NOTIFICATION_TYPE_BUFFER_UPDATE = 2;
/**
* Receivers of this intent should update their information about the currently playing media
*/
public static final int NOTIFICATION_TYPE_RELOAD = 3;
/**
* The state of the sleeptimer changed.
*/
public static final int NOTIFICATION_TYPE_SLEEPTIMER_UPDATE = 4;
public static final int NOTIFICATION_TYPE_BUFFER_START = 5;
public static final int NOTIFICATION_TYPE_BUFFER_END = 6;
/**
* No more episodes are going to be played.
*/
public static final int NOTIFICATION_TYPE_PLAYBACK_END = 7;
/**
* Playback speed has changed
*/
public static final int NOTIFICATION_TYPE_PLAYBACK_SPEED_CHANGE = 8;
/**
* Returned by getPositionSafe() or getDurationSafe() if the playbackService
* is in an invalid state.
*/
public static final int INVALID_TIME = -1;
/**
* Is true if service is running.
*/
public static boolean isRunning = false;
/**
* Is true if service has received a valid start command.
*/
public static boolean started = false;
/**
* Is true if the service was running, but paused due to headphone disconnect
*/
public static boolean transientPause = false;
// Id used for the service's foreground/playback notification.
private static final int NOTIFICATION_ID = 1;
// Lock-screen / bluetooth remote control integration (pre-MediaSession API).
private RemoteControlClient remoteControlClient;
// Wraps the actual MediaPlayer and its state machine; created in onCreate().
private PlaybackServiceMediaPlayer mediaPlayer;
// Schedules position saving, widget updates, sleep timer and chapter loading.
private PlaybackServiceTaskManager taskManager;
// Media type of the currently loaded episode; volatile because it is read
// from static getPlayerActivityIntent() while written from callbacks.
private static volatile MediaType currentMediaType = MediaType.UNKNOWN;
// Binder handed out to bound clients (activities controlling playback).
private final IBinder mBinder = new LocalBinder();
/**
* Binder returned from onBind(); gives bound clients direct access to this
* service instance (in-process binding only).
*/
public class LocalBinder extends Binder {
public PlaybackService getService() {
return PlaybackService.this;
}
}
/**
* Logs the unbind event for debugging; behavior is otherwise inherited.
*/
@Override
public boolean onUnbind(Intent intent) {
    if (BuildConfig.DEBUG) {
        Log.d(TAG, "Received onUnbind event");
    }
    return super.onUnbind(intent);
}
/**
* Returns an intent which starts an audio- or videoplayer, depending on the
* type of media that is being played. If the playbackservice is not
* running, the type of the last played media will be looked up.
*/
public static Intent getPlayerActivityIntent(Context context) {
    // While the service runs, trust the live media type; otherwise fall back
    // to the type of the last played episode stored in the preferences.
    final MediaType mediaType;
    if (isRunning) {
        mediaType = currentMediaType;
    } else if (PlaybackPreferences.getCurrentEpisodeIsVideo()) {
        mediaType = MediaType.VIDEO;
    } else {
        mediaType = MediaType.AUDIO;
    }
    return ClientConfig.playbackServiceCallbacks.getPlayerActivityIntent(context, mediaType);
}
/**
* Same as getPlayerActivityIntent(context), but here the type of activity
* depends on the FeedMedia that is provided as an argument.
*/
public static Intent getPlayerActivityIntent(Context context, Playable media) {
    return ClientConfig.playbackServiceCallbacks.getPlayerActivityIntent(context, media.getMediaType());
}
// Service setup: marks the service as running, registers all broadcast
// receivers the service listens to, and creates the remote-control client,
// task manager and media player wrapper.
// NOTE(review): onDestroy() must unregister every receiver registered here.
@SuppressLint("NewApi")
@Override
public void onCreate() {
super.onCreate();
if (BuildConfig.DEBUG)
Log.d(TAG, "Service created.");
isRunning = true;
// Headphone plug/unplug events (used to pause on disconnect).
registerReceiver(headsetDisconnected, new IntentFilter(
Intent.ACTION_HEADSET_PLUG));
// Request to stop playback and shut the service down.
registerReceiver(shutdownReceiver, new IntentFilter(
ACTION_SHUTDOWN_PLAYBACK_SERVICE));
// Bluetooth SCO audio state changes.
registerReceiver(bluetoothStateUpdated, new IntentFilter(
AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED));
// System signal that audio is about to be routed to the speaker.
registerReceiver(audioBecomingNoisy, new IntentFilter(
AudioManager.ACTION_AUDIO_BECOMING_NOISY));
registerReceiver(skipCurrentEpisodeReceiver, new IntentFilter(
ACTION_SKIP_CURRENT_EPISODE));
registerReceiver(pausePlayCurrentEpisodeReceiver, new IntentFilter(
ACTION_PAUSE_PLAY_CURRENT_EPISODE));
registerReceiver(pauseResumeCurrentEpisodeReceiver, new IntentFilter(
ACTION_RESUME_PLAY_CURRENT_EPISODE));
remoteControlClient = setupRemoteControlClient();
taskManager = new PlaybackServiceTaskManager(this, taskManagerCallback);
mediaPlayer = new PlaybackServiceMediaPlayer(this, mediaPlayerCallback);
}
/**
 * Tears the service down: resets the static state flags, unregisters every
 * broadcast receiver registered in onCreate(), and shuts down the media
 * player and task manager.
 */
@SuppressLint("NewApi")
@Override
public void onDestroy() {
    super.onDestroy();
    if (BuildConfig.DEBUG)
        Log.d(TAG, "Service is about to be destroyed");
    isRunning = false;
    started = false;
    currentMediaType = MediaType.UNKNOWN;

    unregisterReceiver(headsetDisconnected);
    unregisterReceiver(shutdownReceiver);
    unregisterReceiver(bluetoothStateUpdated);
    unregisterReceiver(audioBecomingNoisy);
    unregisterReceiver(skipCurrentEpisodeReceiver);
    // BUG FIX: these two receivers are registered in onCreate() but were never
    // unregistered, leaking them every time the service was destroyed.
    unregisterReceiver(pausePlayCurrentEpisodeReceiver);
    unregisterReceiver(pauseResumeCurrentEpisodeReceiver);

    mediaPlayer.shutdown();
    taskManager.shutdown();
}
/**
* Hands out the local binder so in-process clients can call into the service.
*/
@Override
public IBinder onBind(Intent intent) {
    if (BuildConfig.DEBUG) {
        Log.d(TAG, "Received onBind event");
    }
    return mBinder;
}
/**
 * Handles start requests. A start intent carries either a media-button
 * keycode (MediaButtonReceiver.EXTRA_KEYCODE) or a Playable to load
 * (EXTRA_PLAYABLE); an intent with neither stops the service.
 */
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
    super.onStartCommand(intent, flags, startId);
    if (BuildConfig.DEBUG)
        Log.d(TAG, "OnStartCommand called");
    final int keycode = intent.getIntExtra(MediaButtonReceiver.EXTRA_KEYCODE, -1);
    final Playable playable = intent.getParcelableExtra(EXTRA_PLAYABLE);
    if (keycode == -1 && playable == null) {
        Log.e(TAG, "PlaybackService was started with no arguments");
        stopSelf();
        // BUG FIX: previously execution fell through after stopSelf() and
        // ended up calling playMediaObject(null, ...). Bail out instead.
        return Service.START_NOT_STICKY;
    }

    if ((flags & Service.START_FLAG_REDELIVERY) != 0) {
        // Redelivered intent: this request was already handled before the
        // process died. Don't restart playback; just drop the foreground state.
        if (BuildConfig.DEBUG)
            Log.d(TAG, "onStartCommand is a redelivered intent, calling stopForeground now.");
        stopForeground(true);
    } else {
        if (keycode != -1) {
            if (BuildConfig.DEBUG)
                Log.d(TAG, "Received media button event");
            handleKeycode(keycode);
        } else {
            started = true;
            boolean stream = intent.getBooleanExtra(EXTRA_SHOULD_STREAM,
                    true);
            boolean startWhenPrepared = intent.getBooleanExtra(EXTRA_START_WHEN_PREPARED, false);
            boolean prepareImmediately = intent.getBooleanExtra(EXTRA_PREPARE_IMMEDIATELY, false);
            sendNotificationBroadcast(NOTIFICATION_TYPE_RELOAD, 0);
            mediaPlayer.playMediaObject(playable, stream, startWhenPrepared, prepareImmediately);
        }
    }

    return Service.START_REDELIVER_INTENT;
}
/**
 * Handles media button events.
 *
 * Translates a media-key keycode (headset hook, play/pause, next, rewind,
 * stop, ...) into the matching player action based on the current status.
 */
private void handleKeycode(int keycode) {
    if (BuildConfig.DEBUG)
        Log.d(TAG, "Handling keycode: " + keycode);

    final PlaybackServiceMediaPlayer.PSMPInfo info = mediaPlayer.getPSMPInfo();
    final PlayerStatus status = info.playerStatus;
    switch (keycode) {
        case KeyEvent.KEYCODE_HEADSETHOOK:
        case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE:
            if (status == PlayerStatus.PLAYING) {
                // Keep the notification while paused if the user enabled
                // persistent notifications.
                if (UserPreferences.isPersistNotify()) {
                    mediaPlayer.pause(false, true);
                } else {
                    mediaPlayer.pause(true, true);
                }
            } else if (status == PlayerStatus.PAUSED || status == PlayerStatus.PREPARED) {
                mediaPlayer.resume();
            } else if (status == PlayerStatus.PREPARING) {
                mediaPlayer.setStartWhenPrepared(!mediaPlayer.isStartWhenPrepared());
            } else if (status == PlayerStatus.INITIALIZED) {
                mediaPlayer.setStartWhenPrepared(true);
                mediaPlayer.prepare();
            }
            break;
        case KeyEvent.KEYCODE_MEDIA_PLAY:
            if (status == PlayerStatus.PAUSED || status == PlayerStatus.PREPARED) {
                mediaPlayer.resume();
            } else if (status == PlayerStatus.INITIALIZED) {
                mediaPlayer.setStartWhenPrepared(true);
                mediaPlayer.prepare();
            }
            break;
        case KeyEvent.KEYCODE_MEDIA_PAUSE:
            // BUG FIX: previously pause() was issued twice when PLAYING (once
            // inside the status check and once unconditionally after it), and
            // was also issued when nothing was playing at all. Pause is now
            // issued exactly once, and only while playing.
            if (status == PlayerStatus.PLAYING) {
                if (UserPreferences.isPersistNotify()) {
                    mediaPlayer.pause(false, true);
                } else {
                    mediaPlayer.pause(true, true);
                }
            }
            break;
        case KeyEvent.KEYCODE_MEDIA_NEXT:
        case KeyEvent.KEYCODE_MEDIA_FAST_FORWARD:
            mediaPlayer.seekDelta(UserPreferences.getSeekDeltaMs());
            break;
        case KeyEvent.KEYCODE_MEDIA_PREVIOUS:
        case KeyEvent.KEYCODE_MEDIA_REWIND:
            mediaPlayer.seekDelta(-UserPreferences.getSeekDeltaMs());
            break;
        case KeyEvent.KEYCODE_MEDIA_STOP:
            if (status == PlayerStatus.PLAYING) {
                mediaPlayer.pause(true, true);
                started = false;
            }
            stopForeground(true); // gets rid of persistent notification
            break;
        default:
            // Only notify the user about an unknown key event if it is actually doing something.
            if (info.playable != null && info.playerStatus == PlayerStatus.PLAYING) {
                String message = String.format(getResources().getString(R.string.unknown_media_key), keycode);
                Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
            }
            break;
    }
}
/**
* Called by a mediaplayer Activity as soon as it has prepared its
* mediaplayer.
*/
public void setVideoSurface(SurfaceHolder sh) {
    if (BuildConfig.DEBUG) {
        Log.d(TAG, "Setting display");
    }
    mediaPlayer.setVideoSurface(sh);
}
/**
* Called when the surface holder of the mediaplayer has to be changed.
* Stops position saving first so the saver does not touch the player while
* its video surface is being detached.
*/
private void resetVideoSurface() {
taskManager.cancelPositionSaver();
mediaPlayer.resetVideoSurface();
}
// Called when the video surface is destroyed (e.g. the video activity goes
// away): drops the foreground notification and detaches the surface.
public void notifyVideoSurfaceAbandoned() {
stopForeground(true);
mediaPlayer.resetVideoSurface();
}
// Callbacks invoked by the task manager's periodic tasks (position saver,
// sleep timer, widget updater, chapter loader).
private final PlaybackServiceTaskManager.PSTMCallback taskManagerCallback = new PlaybackServiceTaskManager.PSTMCallback() {
@Override
public void positionSaverTick() {
// Persist the current playback position on every saver tick.
saveCurrentPosition(true, PlaybackServiceTaskManager.POSITION_SAVER_WAITING_INTERVAL);
}
@Override
public void onSleepTimerExpired() {
// Sleep timer fired: pause playback and tell listeners the timer state changed.
mediaPlayer.pause(true, true);
sendNotificationBroadcast(NOTIFICATION_TYPE_SLEEPTIMER_UPDATE, 0);
}
@Override
public void onWidgetUpdaterTick() {
updateWidget();
}
@Override
public void onChapterLoaded(Playable media) {
// Chapter metadata became available; listeners should reload their view of the media.
sendNotificationBroadcast(NOTIFICATION_TYPE_RELOAD, 0);
}
};
// Callbacks invoked by the media player wrapper. statusChanged() is the main
// state machine hook: it persists preferences, manages the notification and
// periodic tasks, and broadcasts the new status to UI components and AVRCP
// (bluetooth) listeners.
private final PlaybackServiceMediaPlayer.PSMPCallback mediaPlayerCallback = new PlaybackServiceMediaPlayer.PSMPCallback() {
@Override
public void statusChanged(PlaybackServiceMediaPlayer.PSMPInfo newInfo) {
currentMediaType = mediaPlayer.getCurrentMediaType();
switch (newInfo.playerStatus) {
case INITIALIZED:
// Media was loaded: remember what is playing across restarts.
writePlaybackPreferences();
break;
case PREPARED:
taskManager.startChapterLoader(newInfo.playable);
break;
case PAUSED:
// Stop periodic work and persist the position once.
taskManager.cancelPositionSaver();
saveCurrentPosition(false, 0);
taskManager.cancelWidgetUpdater();
if (UserPreferences.isPersistNotify() && android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
// do not remove notification on pause based on user pref and whether android version supports expanded notifications
// Change [Play] button to [Pause]
setupNotification(newInfo);
} else if (!UserPreferences.isPersistNotify()) {
// remove notifcation on pause
stopForeground(true);
}
writePlayerStatusPlaybackPreferences();
break;
case STOPPED:
//setCurrentlyPlayingMedia(PlaybackPreferences.NO_MEDIA_PLAYING);
//stopSelf();
break;
case PLAYING:
if (BuildConfig.DEBUG)
Log.d(TAG, "Audiofocus successfully requested");
if (BuildConfig.DEBUG)
Log.d(TAG, "Resuming/Starting playback");
taskManager.startPositionSaver();
taskManager.startWidgetUpdater();
writePlayerStatusPlaybackPreferences();
setupNotification(newInfo);
started = true;
break;
case ERROR:
writePlaybackPreferencesNoMediaPlaying();
break;
}
Intent statusUpdate = new Intent(ACTION_PLAYER_STATUS_CHANGED);
statusUpdate.putExtra(EXTRA_NEW_PLAYER_STATUS, newInfo.playerStatus.ordinal());
sendBroadcast(statusUpdate);
// NOTE(review): a second, extra-less broadcast of the same action is sent
// below — presumably for legacy listeners that don't read the extra; confirm
// before removing.
sendBroadcast(new Intent(ACTION_PLAYER_STATUS_CHANGED));
updateWidget();
refreshRemoteControlClientState(newInfo);
bluetoothNotifyChange(newInfo, AVRCP_ACTION_PLAYER_STATUS_CHANGED);
bluetoothNotifyChange(newInfo, AVRCP_ACTION_META_CHANGED);
}
@Override
public void shouldStop() {
stopSelf();
}
@Override
public void playbackSpeedChanged(float s) {
sendNotificationBroadcast(
NOTIFICATION_TYPE_PLAYBACK_SPEED_CHANGE, 0);
}
@Override
public void onBufferingUpdate(int percent) {
sendNotificationBroadcast(NOTIFICATION_TYPE_BUFFER_UPDATE, percent);
}
@Override
public boolean onMediaPlayerInfo(int code) {
// Translate buffering start/end info codes into service notifications;
// returning false lets the player handle any other code.
switch (code) {
case MediaPlayer.MEDIA_INFO_BUFFERING_START:
sendNotificationBroadcast(NOTIFICATION_TYPE_BUFFER_START, 0);
return true;
case MediaPlayer.MEDIA_INFO_BUFFERING_END:
sendNotificationBroadcast(NOTIFICATION_TYPE_BUFFER_END, 0);
return true;
default:
return false;
}
}
@Override
public boolean onMediaPlayerError(Object inObj, int what, int extra) {
// Pause if needed, notify listeners of the error, clear the persisted
// "currently playing" state and stop the service.
final String TAG = "PlaybackService.onErrorListener";
Log.w(TAG, "An error has occured: " + what + " " + extra);
if (mediaPlayer.getPSMPInfo().playerStatus == PlayerStatus.PLAYING) {
mediaPlayer.pause(true, false);
}
sendNotificationBroadcast(NOTIFICATION_TYPE_ERROR, what);
writePlaybackPreferencesNoMediaPlaying();
stopSelf();
return true;
}
@Override
public boolean endPlayback(boolean playNextEpisode) {
// NOTE(review): the playNextEpisode argument is ignored and 'true' is
// passed instead — confirm whether this is intentional.
PlaybackService.this.endPlayback(true);
return true;
}
@Override
public RemoteControlClient getRemoteControlClient() {
return remoteControlClient;
}
};
/**
* Finalizes playback of the current episode: marks it read, removes it from
* the queue, records it in the playback history, optionally auto-flattrs and
* auto-deletes it, and then either loads (and possibly starts) the next queue
* item or announces the end of playback.
*
* @param playNextEpisode whether the next episode should start playing
*                        immediately (only honored when the queue is in use,
*                        the finished item was queued, a successor exists and
*                        the user enabled "follow queue").
*/
private void endPlayback(boolean playNextEpisode) {
if (BuildConfig.DEBUG)
Log.d(TAG, "Playback ended");
final Playable media = mediaPlayer.getPSMPInfo().playable;
if (media == null) {
Log.e(TAG, "Cannot end playback: media was null");
return;
}
taskManager.cancelPositionSaver();
boolean isInQueue = false;
FeedItem nextItem = null;
// Queue/history/flattr/delete bookkeeping only applies to feed media.
if (media instanceof FeedMedia) {
FeedItem item = ((FeedMedia) media).getItem();
DBWriter.markItemRead(PlaybackService.this, item, true, true);
try {
final List<FeedItem> queue = taskManager.getQueue();
isInQueue = QueueAccess.ItemListAccess(queue).contains(((FeedMedia) media).getItem().getId());
nextItem = DBTasks.getQueueSuccessorOfItem(this, item.getId(), queue);
} catch (InterruptedException e) {
e.printStackTrace();
// isInQueue remains false
}
if (isInQueue) {
DBWriter.removeQueueItem(PlaybackService.this, item.getId(), true);
}
DBWriter.addItemToPlaybackHistory(PlaybackService.this, (FeedMedia) media);
// auto-flattr if enabled
if (isAutoFlattrable(media) && UserPreferences.getAutoFlattrPlayedDurationThreshold() == 1.0f) {
DBTasks.flattrItemIfLoggedIn(PlaybackService.this, item);
}
//Delete episode if enabled
if(UserPreferences.isAutoDelete()) {
DBWriter.deleteFeedMediaOfItem(PlaybackService.this, item.getMedia().getId());
if(BuildConfig.DEBUG)
Log.d(TAG, "Episode Deleted");
}
}
// Load next episode if previous episode was in the queue and if there
// is an episode in the queue left.
// Start playback immediately if continuous playback is enabled
Playable nextMedia = null;
boolean loadNextItem = ClientConfig.playbackServiceCallbacks.useQueue() &&
isInQueue &&
nextItem != null;
playNextEpisode = playNextEpisode &&
loadNextItem &&
UserPreferences.isFollowQueue();
if (loadNextItem) {
if (BuildConfig.DEBUG)
Log.d(TAG, "Loading next item in queue");
nextMedia = nextItem.getMedia();
}
final boolean prepareImmediately;
final boolean startWhenPrepared;
final boolean stream;
if (playNextEpisode) {
if (BuildConfig.DEBUG)
Log.d(TAG, "Playback of next episode will start immediately.");
prepareImmediately = startWhenPrepared = true;
} else {
if (BuildConfig.DEBUG)
Log.d(TAG, "No more episodes available to play");
prepareImmediately = startWhenPrepared = false;
stopForeground(true);
stopWidgetUpdater();
}
writePlaybackPreferencesNoMediaPlaying();
if (nextMedia != null) {
// NOTE(review): the streaming decision is based on the *finished* episode's
// local availability ('media'), not on 'nextMedia' — confirm this is intended.
stream = !media.localFileAvailable();
mediaPlayer.playMediaObject(nextMedia, stream, startWhenPrepared, prepareImmediately);
sendNotificationBroadcast(NOTIFICATION_TYPE_RELOAD,
(nextMedia.getMediaType() == MediaType.VIDEO) ? EXTRA_CODE_VIDEO : EXTRA_CODE_AUDIO);
} else {
sendNotificationBroadcast(NOTIFICATION_TYPE_PLAYBACK_END, 0);
mediaPlayer.stop();
//stopSelf();
}
}
/**
* Arms the sleep timer for the given duration and notifies listeners that
* the timer state changed.
*
* @param waitingTime timer duration in milliseconds
*/
public void setSleepTimer(long waitingTime) {
    if (BuildConfig.DEBUG) {
        Log.d(TAG, "Setting sleep timer to " + Long.toString(waitingTime) + " milliseconds");
    }
    taskManager.setSleepTimer(waitingTime);
    sendNotificationBroadcast(NOTIFICATION_TYPE_SLEEPTIMER_UPDATE, 0);
}
/**
* Cancels the sleep timer and notifies listeners that the timer state changed.
*/
public void disableSleepTimer() {
    taskManager.disableSleepTimer();
    sendNotificationBroadcast(NOTIFICATION_TYPE_SLEEPTIMER_UPDATE, 0);
}
/**
* Clears the persisted "currently playing" state: media type, feed id and
* feed-media id are reset to NO_MEDIA_PLAYING and the player status to OTHER.
*/
private void writePlaybackPreferencesNoMediaPlaying() {
    SharedPreferences.Editor editor = PreferenceManager
            .getDefaultSharedPreferences(getApplicationContext()).edit();
    editor.putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_MEDIA,
                    PlaybackPreferences.NO_MEDIA_PLAYING)
            .putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEED_ID,
                    PlaybackPreferences.NO_MEDIA_PLAYING)
            .putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEEDMEDIA_ID,
                    PlaybackPreferences.NO_MEDIA_PLAYING)
            .putInt(PlaybackPreferences.PREF_CURRENT_PLAYER_STATUS,
                    PlaybackPreferences.PLAYER_STATUS_OTHER)
            .commit();
}
/**
* Maps a PlayerStatus to the integer constant persisted under
* PlaybackPreferences.PREF_CURRENT_PLAYER_STATUS. Anything other than
* PLAYING or PAUSED maps to PLAYER_STATUS_OTHER.
*/
private int getCurrentPlayerStatusAsInt(PlayerStatus playerStatus) {
    switch (playerStatus) {
        case PLAYING:
            return PlaybackPreferences.PLAYER_STATUS_PLAYING;
        case PAUSED:
            return PlaybackPreferences.PLAYER_STATUS_PAUSED;
        default:
            return PlaybackPreferences.PLAYER_STATUS_OTHER;
    }
}
/**
 * Persists the complete playback state (currently playing media identifiers,
 * stream/video flags and player status) to the default shared preferences so
 * playback can be restored after the service is recreated.
 */
private void writePlaybackPreferences() {
    if (BuildConfig.DEBUG)
        Log.d(TAG, "Writing playback preferences");
    SharedPreferences.Editor editor = PreferenceManager
            .getDefaultSharedPreferences(getApplicationContext()).edit();
    PlaybackServiceMediaPlayer.PSMPInfo info = mediaPlayer.getPSMPInfo();
    MediaType mediaType = mediaPlayer.getCurrentMediaType();
    boolean stream = mediaPlayer.isStreaming();
    int playerStatus = getCurrentPlayerStatusAsInt(info.playerStatus);
    if (info.playable != null) {
        // A playable is active: store its type and how it is being played.
        editor.putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_MEDIA,
                info.playable.getPlayableType());
        editor.putBoolean(
                PlaybackPreferences.PREF_CURRENT_EPISODE_IS_STREAM,
                stream);
        editor.putBoolean(
                PlaybackPreferences.PREF_CURRENT_EPISODE_IS_VIDEO,
                mediaType == MediaType.VIDEO);
        if (info.playable instanceof FeedMedia) {
            // Feed media: also remember which feed and item are playing.
            FeedMedia fMedia = (FeedMedia) info.playable;
            editor.putLong(
                    PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEED_ID,
                    fMedia.getItem().getFeed().getId());
            editor.putLong(
                    PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEEDMEDIA_ID,
                    fMedia.getId());
        } else {
            // Non-feed playable: clear the feed-specific identifiers.
            editor.putLong(
                    PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEED_ID,
                    PlaybackPreferences.NO_MEDIA_PLAYING);
            editor.putLong(
                    PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEEDMEDIA_ID,
                    PlaybackPreferences.NO_MEDIA_PLAYING);
        }
        // Let the playable persist any additional state of its own.
        info.playable.writeToPreferences(editor);
    } else {
        // Nothing playing: reset all identifiers.
        editor.putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_MEDIA,
                PlaybackPreferences.NO_MEDIA_PLAYING);
        editor.putLong(PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEED_ID,
                PlaybackPreferences.NO_MEDIA_PLAYING);
        editor.putLong(
                PlaybackPreferences.PREF_CURRENTLY_PLAYING_FEEDMEDIA_ID,
                PlaybackPreferences.NO_MEDIA_PLAYING);
    }
    editor.putInt(
            PlaybackPreferences.PREF_CURRENT_PLAYER_STATUS, playerStatus);
    editor.commit();
}
/**
 * Persists only the current player status (playing/paused/other) to the
 * default shared preferences, leaving the media identifiers untouched.
 */
private void writePlayerStatusPlaybackPreferences() {
    if (BuildConfig.DEBUG) {
        Log.d(TAG, "Writing player status playback preferences");
    }
    final int playerStatus =
            getCurrentPlayerStatusAsInt(mediaPlayer.getPSMPInfo().playerStatus);
    SharedPreferences.Editor editor = PreferenceManager
            .getDefaultSharedPreferences(getApplicationContext()).edit();
    editor.putInt(PlaybackPreferences.PREF_CURRENT_PLAYER_STATUS, playerStatus);
    editor.commit();
}
/**
 * Send ACTION_PLAYER_STATUS_CHANGED without changing the status attribute.
 * Receivers are expected to re-query the service for the actual state.
 */
private void postStatusUpdateIntent() {
    sendBroadcast(new Intent(ACTION_PLAYER_STATUS_CHANGED));
}
/**
 * Broadcasts an ACTION_PLAYER_NOTIFICATION intent carrying the given
 * notification type and code as extras.
 */
private void sendNotificationBroadcast(int type, int code) {
    final Intent intent = new Intent(ACTION_PLAYER_NOTIFICATION)
            .putExtra(EXTRA_NOTIFICATION_TYPE, type)
            .putExtra(EXTRA_NOTIFICATION_CODE, code);
    sendBroadcast(intent);
}
/**
 * Used by setupNotification to load notification data in another thread.
 * Only one task runs at a time; a newer call cancels the previous one.
 */
private AsyncTask<Void, Void, Void> notificationSetupTask;
/**
 * Prepares the playback notification (episode artwork is loaded off the UI
 * thread) and starts the service in the foreground.
 */
@SuppressLint("NewApi")
private void setupNotification(final PlaybackServiceMediaPlayer.PSMPInfo info) {
    // Tapping the notification opens the player activity.
    final PendingIntent pIntent = PendingIntent.getActivity(this, 0,
            PlaybackService.getPlayerActivityIntent(this),
            PendingIntent.FLAG_UPDATE_CURRENT);
    // Only one setup task at a time; cancel a still-running predecessor.
    if (notificationSetupTask != null) {
        notificationSetupTask.cancel(true);
    }
    notificationSetupTask = new AsyncTask<Void, Void, Void>() {
        Bitmap icon = null;  // large icon loaded in the background

        @Override
        protected Void doInBackground(Void... params) {
            if (BuildConfig.DEBUG)
                Log.d(TAG, "Starting background work");
            // Large notification icons are only supported on API >= 11.
            if (android.os.Build.VERSION.SDK_INT >= 11) {
                if (info.playable != null) {
                    try {
                        int iconSize = getResources().getDimensionPixelSize(
                                android.R.dimen.notification_large_icon_width);
                        icon = Picasso.with(PlaybackService.this)
                                .load(info.playable.getImageUri())
                                .resize(iconSize, iconSize)
                                .get();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            // Fall back to the app's notification icon if no artwork loaded.
            if (icon == null) {
                icon = BitmapFactory.decodeResource(getApplicationContext().getResources(),
                        ClientConfig.playbackServiceCallbacks.getNotificationIconResource(getApplicationContext()));
            }
            return null;
        }

        @Override
        protected void onPostExecute(Void result) {
            super.onPostExecute(result);
            // The service may have been torn down while the icon was loading.
            if (mediaPlayer == null) {
                return;
            }
            PlaybackServiceMediaPlayer.PSMPInfo newInfo = mediaPlayer.getPSMPInfo();
            final int smallIcon = ClientConfig.playbackServiceCallbacks.getNotificationIconResource(getApplicationContext());
            if (!isCancelled() &&
                    started &&
                    info.playable != null) {
                String contentText = info.playable.getFeedTitle();
                String contentTitle = info.playable.getEpisodeTitle();
                Notification notification = null;
                // API >= 16: expanded notification with media action buttons.
                if (android.os.Build.VERSION.SDK_INT >= 16) {
                    Intent pauseButtonIntent = new Intent( // pause button intent
                            PlaybackService.this, PlaybackService.class);
                    pauseButtonIntent.putExtra(
                            MediaButtonReceiver.EXTRA_KEYCODE,
                            KeyEvent.KEYCODE_MEDIA_PAUSE);
                    PendingIntent pauseButtonPendingIntent = PendingIntent
                            .getService(PlaybackService.this, 0,
                                    pauseButtonIntent,
                                    PendingIntent.FLAG_UPDATE_CURRENT);
                    Intent playButtonIntent = new Intent( // play button intent
                            PlaybackService.this, PlaybackService.class);
                    playButtonIntent.putExtra(
                            MediaButtonReceiver.EXTRA_KEYCODE,
                            KeyEvent.KEYCODE_MEDIA_PLAY);
                    PendingIntent playButtonPendingIntent = PendingIntent
                            .getService(PlaybackService.this, 1,
                                    playButtonIntent,
                                    PendingIntent.FLAG_UPDATE_CURRENT);
                    Intent stopButtonIntent = new Intent( // stop button intent
                            PlaybackService.this, PlaybackService.class);
                    stopButtonIntent.putExtra(
                            MediaButtonReceiver.EXTRA_KEYCODE,
                            KeyEvent.KEYCODE_MEDIA_STOP);
                    PendingIntent stopButtonPendingIntent = PendingIntent
                            .getService(PlaybackService.this, 2,
                                    stopButtonIntent,
                                    PendingIntent.FLAG_UPDATE_CURRENT);
                    Notification.Builder notificationBuilder = new Notification.Builder(
                            PlaybackService.this)
                            .setContentTitle(contentTitle)
                            .setContentText(contentText)
                            .setOngoing(true)
                            .setContentIntent(pIntent)
                            .setLargeIcon(icon)
                            .setSmallIcon(smallIcon)
                            .setPriority(UserPreferences.getNotifyPriority()); // set notification priority
                    // Show a pause or play action depending on the *current* player state.
                    if (newInfo.playerStatus == PlayerStatus.PLAYING) {
                        notificationBuilder.addAction(android.R.drawable.ic_media_pause, //pause action
                                getString(R.string.pause_label),
                                pauseButtonPendingIntent);
                    } else {
                        notificationBuilder.addAction(android.R.drawable.ic_media_play, //play action
                                getString(R.string.play_label),
                                playButtonPendingIntent);
                    }
                    if (UserPreferences.isPersistNotify()) {
                        notificationBuilder.addAction(android.R.drawable.ic_menu_close_clear_cancel, // stop action
                                getString(R.string.stop_label),
                                stopButtonPendingIntent);
                    }
                    // API >= 21: lock-screen media style tied to the media session.
                    if (Build.VERSION.SDK_INT >= 21) {
                        notificationBuilder.setStyle(new Notification.MediaStyle()
                                .setMediaSession((android.media.session.MediaSession.Token) mediaPlayer.getSessionToken().getToken())
                                .setShowActionsInCompactView(0))
                                .setVisibility(Notification.VISIBILITY_PUBLIC)
                                .setColor(Notification.COLOR_DEFAULT);
                    }
                    notification = notificationBuilder.build();
                } else {
                    // Pre-Jelly-Bean fallback: plain notification, no action buttons.
                    NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(
                            PlaybackService.this)
                            .setContentTitle(contentTitle)
                            .setContentText(contentText).setOngoing(true)
                            .setContentIntent(pIntent).setLargeIcon(icon)
                            .setSmallIcon(smallIcon);
                    notification = notificationBuilder.build();
                }
                startForeground(NOTIFICATION_ID, notification);
                if (BuildConfig.DEBUG)
                    Log.d(TAG, "Notification set up");
            }
        }
    };
    // AsyncTask.executeOnExecutor only exists on API > 10.
    if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.GINGERBREAD_MR1) {
        notificationSetupTask
                .executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    } else {
        notificationSetupTask.execute();
    }
}
/**
 * Saves the current position of the media file to the DB.
 *
 * @param updatePlayedDuration true if played_duration should be updated. This applies only to FeedMedia objects
 * @param deltaPlayedDuration value by which played_duration should be increased.
 */
private synchronized void saveCurrentPosition(boolean updatePlayedDuration, int deltaPlayedDuration) {
    int position = getCurrentPosition();
    int duration = getDuration();
    float playbackSpeed = getCurrentPlaybackSpeed();
    final Playable playable = mediaPlayer.getPSMPInfo().playable;
    // Only save when the player can report a valid position and duration.
    if (position != INVALID_TIME && duration != INVALID_TIME && playable != null) {
        if (BuildConfig.DEBUG)
            Log.d(TAG, "Saving current position to " + position);
        if (updatePlayedDuration && playable instanceof FeedMedia) {
            FeedMedia m = (FeedMedia) playable;
            FeedItem item = m.getItem();
            // Scale the wall-clock delta by playback speed to get media time.
            m.setPlayedDuration(m.getPlayedDuration() + ((int) (deltaPlayedDuration * playbackSpeed)));
            // Auto flattr once the played fraction crosses the user's threshold.
            if (isAutoFlattrable(m) &&
                    (m.getPlayedDuration() > UserPreferences.getAutoFlattrPlayedDurationThreshold() * duration)) {
                if (BuildConfig.DEBUG)
                    Log.d(TAG, "saveCurrentPosition: performing auto flattr since played duration " + Integer.toString(m.getPlayedDuration())
                            + " is " + UserPreferences.getAutoFlattrPlayedDurationThreshold() * 100 + "% of file duration " + Integer.toString(duration));
                DBTasks.flattrItemIfLoggedIn(this, item);
            }
        }
        playable.saveCurrentPosition(PreferenceManager
                        .getDefaultSharedPreferences(getApplicationContext()),
                position
        );
    }
}
// Cancels the periodic widget updater task and tells the widget to stop updating.
private void stopWidgetUpdater() {
    taskManager.cancelWidgetUpdater();
    sendBroadcast(new Intent(STOP_WIDGET_UPDATE));
}
/** Asks the app widget to refresh itself immediately. */
private void updateWidget() {
    sendBroadcast(new Intent(FORCE_WIDGET_UPDATE));
}
// True if a sleep timer is currently counting down.
public boolean sleepTimerActive() {
    return taskManager.isSleepTimerActive();
}

// Remaining sleep-timer time in milliseconds, as reported by the task manager.
public long getSleepTimerTimeLeft() {
    return taskManager.getSleepTimerTimeLeft();
}
// Creates and configures a RemoteControlClient for lock-screen/remote media
// controls. Returns null on API < 14, where RemoteControlClient is unavailable.
@SuppressLint("NewApi")
private RemoteControlClient setupRemoteControlClient() {
    if (Build.VERSION.SDK_INT < 14) {
        return null;
    }
    // Media button presses are routed to our MediaButtonReceiver.
    Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON);
    mediaButtonIntent.setComponent(new ComponentName(getPackageName(),
            MediaButtonReceiver.class.getName()));
    PendingIntent mediaPendingIntent = PendingIntent.getBroadcast(
            getApplicationContext(), 0, mediaButtonIntent, 0);
    remoteControlClient = new RemoteControlClient(mediaPendingIntent);
    int controlFlags;
    // NOTE(review): this branch looks inverted — API < 16 advertises
    // PLAY_PAUSE|NEXT while API >= 16 advertises only PLAY_PAUSE.
    // Confirm this is intentional before changing it.
    if (android.os.Build.VERSION.SDK_INT < 16) {
        controlFlags = RemoteControlClient.FLAG_KEY_MEDIA_PLAY_PAUSE
                | RemoteControlClient.FLAG_KEY_MEDIA_NEXT;
    } else {
        controlFlags = RemoteControlClient.FLAG_KEY_MEDIA_PLAY_PAUSE;
    }
    remoteControlClient.setTransportControlFlags(controlFlags);
    return remoteControlClient;
}
/**
 * Refresh player status and metadata.
 * Pushes the current playback state and episode title/feed title to the
 * RemoteControlClient (API >= 14 only; no-op otherwise).
 */
@SuppressLint("NewApi")
private void refreshRemoteControlClientState(PlaybackServiceMediaPlayer.PSMPInfo info) {
    if (android.os.Build.VERSION.SDK_INT >= 14) {
        if (remoteControlClient != null) {
            // Map our PlayerStatus onto the closest RemoteControlClient play state.
            switch (info.playerStatus) {
                case PLAYING:
                    remoteControlClient
                            .setPlaybackState(RemoteControlClient.PLAYSTATE_PLAYING);
                    break;
                case PAUSED:
                case INITIALIZED:
                    remoteControlClient
                            .setPlaybackState(RemoteControlClient.PLAYSTATE_PAUSED);
                    break;
                case STOPPED:
                    remoteControlClient
                            .setPlaybackState(RemoteControlClient.PLAYSTATE_STOPPED);
                    break;
                case ERROR:
                    remoteControlClient
                            .setPlaybackState(RemoteControlClient.PLAYSTATE_ERROR);
                    break;
                default:
                    // Any other status (preparing, seeking, ...) is shown as buffering.
                    remoteControlClient
                            .setPlaybackState(RemoteControlClient.PLAYSTATE_BUFFERING);
            }
            if (info.playable != null) {
                // Publish episode title and feed title as track metadata.
                MetadataEditor editor = remoteControlClient
                        .editMetadata(false);
                editor.putString(MediaMetadataRetriever.METADATA_KEY_TITLE,
                        info.playable.getEpisodeTitle());
                editor.putString(MediaMetadataRetriever.METADATA_KEY_ALBUM,
                        info.playable.getFeedTitle());
                editor.apply();
            }
            if (BuildConfig.DEBUG)
                Log.d(TAG, "RemoteControlClient state was refreshed");
        }
    }
}
/**
 * Broadcasts track metadata (album, title, duration, position, ...) in the
 * extras format expected by bluetooth AVRCP receivers, using the given
 * intent action. Does nothing when no playable is active.
 */
private void bluetoothNotifyChange(PlaybackServiceMediaPlayer.PSMPInfo info, String whatChanged) {
    if (info.playable == null) {
        return;
    }
    final boolean isPlaying = info.playerStatus == PlayerStatus.PLAYING;
    final Intent i = new Intent(whatChanged);
    i.putExtra("id", 1);
    i.putExtra("artist", "");
    i.putExtra("album", info.playable.getFeedTitle());
    i.putExtra("track", info.playable.getEpisodeTitle());
    i.putExtra("playing", isPlaying);
    final List<FeedItem> queue = taskManager.getQueueIfLoaded();
    if (queue != null) {
        i.putExtra("ListSize", queue.size());
    }
    i.putExtra("duration", info.playable.getDuration());
    i.putExtra("position", info.playable.getPosition());
    sendBroadcast(i);
}
/**
 * Reacts to ACTION_HEADSET_PLUG events: pauses playback when the headset is
 * unplugged and resumes it when plugged back in, both subject to the user's
 * preferences.
 */
private BroadcastReceiver headsetDisconnected = new BroadcastReceiver() {
    private static final String TAG = "headsetDisconnected";
    private static final int UNPLUGGED = 0;
    private static final int PLUGGED = 1;

    @Override
    public void onReceive(Context context, Intent intent) {
        if (!StringUtils.equals(intent.getAction(), Intent.ACTION_HEADSET_PLUG)) {
            return;
        }
        final int state = intent.getIntExtra("state", -1);
        if (state == -1) {
            Log.e(TAG, "Received invalid ACTION_HEADSET_PLUG intent");
            return;
        }
        if (BuildConfig.DEBUG)
            Log.d(TAG, "Headset plug event. State is " + state);
        if (state == UNPLUGGED) {
            if (BuildConfig.DEBUG)
                Log.d(TAG, "Headset was unplugged during playback.");
            pauseIfPauseOnDisconnect();
        } else if (state == PLUGGED) {
            if (BuildConfig.DEBUG)
                Log.d(TAG, "Headset was plugged in during playback.");
            unpauseIfPauseOnDisconnect();
        }
    }
};
/**
 * Resumes playback when a bluetooth SCO audio connection is (re-)established,
 * subject to the unpause-on-reconnect preference checked inside
 * {@link #unpauseIfPauseOnDisconnect()}.
 */
private BroadcastReceiver bluetoothStateUpdated = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (StringUtils.equals(intent.getAction(), AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED)) {
            // The previous-state extra was read but never used; it has been removed.
            int state = intent.getIntExtra(AudioManager.EXTRA_SCO_AUDIO_STATE, -1);
            if (state == AudioManager.SCO_AUDIO_STATE_CONNECTED) {
                if (BuildConfig.DEBUG)
                    Log.d(TAG, "Received bluetooth connection intent");
                unpauseIfPauseOnDisconnect();
            }
        }
    }
};
/**
 * Handles android.media.AUDIO_BECOMING_NOISY: the audio route is about to
 * change (e.g. bluetooth -> speaker), so pause instead of playing out loud.
 */
private BroadcastReceiver audioBecomingNoisy = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (BuildConfig.DEBUG) {
            Log.d(TAG, "Pausing playback because audio is becoming noisy");
        }
        pauseIfPauseOnDisconnect();
    }
};
/**
 * Pauses playback if PREF_PAUSE_ON_HEADSET_DISCONNECT is enabled. Remembers
 * in {@code transientPause} whether playback was active so it can be resumed
 * when the headset reconnects.
 */
private void pauseIfPauseOnDisconnect() {
    if (!UserPreferences.isPauseOnHeadsetDisconnect()) {
        return;
    }
    if (mediaPlayer.getPlayerStatus() == PlayerStatus.PLAYING) {
        transientPause = true;
    }
    // With persistent notifications enabled we keep audio focus
    // (abandonAudioFocus == false); otherwise we give it up.
    final boolean abandonAudioFocus = !UserPreferences.isPersistNotify();
    mediaPlayer.pause(abandonAudioFocus, true);
}
/**
 * Resumes playback after a transient pause caused by a headset/bluetooth
 * disconnect, if both relevant user preferences are enabled.
 */
private void unpauseIfPauseOnDisconnect() {
    if (!transientPause) {
        return;
    }
    transientPause = false;
    if (UserPreferences.isPauseOnHeadsetDisconnect()
            && UserPreferences.isUnpauseOnHeadsetReconnect()) {
        mediaPlayer.resume();
    }
}
// Stops this service when an ACTION_SHUTDOWN_PLAYBACK_SERVICE broadcast arrives.
private BroadcastReceiver shutdownReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (StringUtils.equals(intent.getAction(), ACTION_SHUTDOWN_PLAYBACK_SERVICE)) {
            stopSelf();
        }
    }
};
// Ends playback of the current episode when ACTION_SKIP_CURRENT_EPISODE arrives.
private BroadcastReceiver skipCurrentEpisodeReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (StringUtils.equals(intent.getAction(), ACTION_SKIP_CURRENT_EPISODE)) {
            if (BuildConfig.DEBUG)
                Log.d(TAG, "Received SKIP_CURRENT_EPISODE intent");
            mediaPlayer.endPlayback();
        }
    }
};
// Resumes playback of the current episode when ACTION_RESUME_PLAY_CURRENT_EPISODE arrives.
private BroadcastReceiver pauseResumeCurrentEpisodeReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (StringUtils.equals(intent.getAction(), ACTION_RESUME_PLAY_CURRENT_EPISODE)) {
            if (BuildConfig.DEBUG)
                Log.d(TAG, "Received RESUME_PLAY_CURRENT_EPISODE intent");
            mediaPlayer.resume();
        }
    }
};
// Pauses playback (keeping audio focus, no reinit) when ACTION_PAUSE_PLAY_CURRENT_EPISODE arrives.
private BroadcastReceiver pausePlayCurrentEpisodeReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (StringUtils.equals(intent.getAction(), ACTION_PAUSE_PLAY_CURRENT_EPISODE)) {
            if (BuildConfig.DEBUG)
                Log.d(TAG, "Received PAUSE_PLAY_CURRENT_EPISODE intent");
            mediaPlayer.pause(false, false);
        }
    }
};
// Media type of the media object that was played most recently (service-wide static state).
public static MediaType getCurrentMediaType() {
    return currentMediaType;
}

// --- Thin delegates to the underlying PlaybackServiceMediaPlayer ---

// Resumes playback of the current media object.
public void resume() {
    mediaPlayer.resume();
}

// Prepares the current media object for playback.
public void prepare() {
    mediaPlayer.prepare();
}

// Pauses playback; see PlaybackServiceMediaPlayer#pause for flag semantics.
public void pause(boolean abandonAudioFocus, boolean reinit) {
    mediaPlayer.pause(abandonAudioFocus, reinit);
}

// Re-initializes the media player with the current media object.
public void reinit() {
    mediaPlayer.reinit();
}

// Snapshot of the player's current state and media object.
public PlaybackServiceMediaPlayer.PSMPInfo getPSMPInfo() {
    return mediaPlayer.getPSMPInfo();
}

// Current player status (playing, paused, ...).
public PlayerStatus getStatus() {
    return mediaPlayer.getPSMPInfo().playerStatus;
}

// Media object currently loaded into the player, if any.
public Playable getPlayable() {
    return mediaPlayer.getPSMPInfo().playable;
}

// Sets the playback speed multiplier.
public void setSpeed(float speed) {
    mediaPlayer.setSpeed(speed);
}

// True if the current player backend supports variable playback speed.
public boolean canSetSpeed() {
    return mediaPlayer.canSetSpeed();
}

// Current playback speed multiplier.
public float getCurrentPlaybackSpeed() {
    return mediaPlayer.getPlaybackSpeed();
}

// True if playback will start automatically once the player is prepared.
public boolean isStartWhenPrepared() {
    return mediaPlayer.isStartWhenPrepared();
}

// Controls whether playback starts automatically once the player is prepared.
public void setStartWhenPrepared(boolean s) {
    mediaPlayer.setStartWhenPrepared(s);
}

// Seeks to an absolute position (milliseconds).
public void seekTo(final int t) {
    mediaPlayer.seekTo(t);
}

// Seeks relative to the current position (milliseconds, may be negative).
public void seekDelta(final int d) {
    mediaPlayer.seekDelta(d);
}

/**
 * @see de.danoeh.antennapod.core.service.playback.PlaybackServiceMediaPlayer#seekToChapter(de.danoeh.antennapod.core.feed.Chapter)
 */
public void seekToChapter(Chapter c) {
    mediaPlayer.seekToChapter(c);
}

/**
 * call getDuration() on mediaplayer or return INVALID_TIME if player is in
 * an invalid state.
 */
public int getDuration() {
    return mediaPlayer.getDuration();
}

/**
 * call getCurrentPosition() on mediaplayer or return INVALID_TIME if player
 * is in an invalid state.
 */
public int getCurrentPosition() {
    return mediaPlayer.getPosition();
}

// True if the current media is being streamed rather than played from a local file.
public boolean isStreaming() {
    return mediaPlayer.isStreaming();
}

// Width/height of the current video, as reported by the player.
public Pair<Integer, Integer> getVideoSize() {
    return mediaPlayer.getVideoSize();
}
/**
 * Returns true if the given playable should be flattred automatically: it
 * must be a {@link FeedMedia} whose item exists, the user must have a flattr
 * token and auto-flattr enabled, and the item must have a payment link and
 * not have been flattred yet.
 *
 * @param p the playable to check; may be null (returns false)
 */
private boolean isAutoFlattrable(Playable p) {
    // instanceof already implies p != null; one cast is enough (the original
    // also kept an unused 'media' local from a second cast).
    if (!(p instanceof FeedMedia)) {
        return false;
    }
    FeedItem item = ((FeedMedia) p).getItem();
    return item != null
            && FlattrUtils.hasToken()
            && UserPreferences.isAutoFlattr()
            && item.getPaymentLink() != null
            && item.getFlattrStatus().getUnflattred();
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds;
import javax.annotation.Generated;
import com.amazonaws.services.rds.model.*;
/**
* Abstract implementation of {@code AmazonRDSAsync}. Convenient method forms pass through to the corresponding overload
* that takes a request object and an {@code AsyncHandler}, which throws an {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAmazonRDSAsync extends AbstractAmazonRDS implements AmazonRDSAsync {
// Protected: this abstract convenience class is only meant to be subclassed.
protected AbstractAmazonRDSAsync() {
}
// Generated pass-through pattern (add*/apply*/authorize* operations): each
// single-argument overload delegates to the (request, AsyncHandler) overload
// with a null handler, and that overload always throws
// UnsupportedOperationException. Concrete subclasses override only the
// operations they actually support.
@Override
public java.util.concurrent.Future<AddRoleToDBClusterResult> addRoleToDBClusterAsync(AddRoleToDBClusterRequest request) {
    return addRoleToDBClusterAsync(request, null);
}

@Override
public java.util.concurrent.Future<AddRoleToDBClusterResult> addRoleToDBClusterAsync(AddRoleToDBClusterRequest request,
        com.amazonaws.handlers.AsyncHandler<AddRoleToDBClusterRequest, AddRoleToDBClusterResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<EventSubscription> addSourceIdentifierToSubscriptionAsync(AddSourceIdentifierToSubscriptionRequest request) {
    return addSourceIdentifierToSubscriptionAsync(request, null);
}

@Override
public java.util.concurrent.Future<EventSubscription> addSourceIdentifierToSubscriptionAsync(AddSourceIdentifierToSubscriptionRequest request,
        com.amazonaws.handlers.AsyncHandler<AddSourceIdentifierToSubscriptionRequest, EventSubscription> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<AddTagsToResourceResult> addTagsToResourceAsync(AddTagsToResourceRequest request) {
    return addTagsToResourceAsync(request, null);
}

@Override
public java.util.concurrent.Future<AddTagsToResourceResult> addTagsToResourceAsync(AddTagsToResourceRequest request,
        com.amazonaws.handlers.AsyncHandler<AddTagsToResourceRequest, AddTagsToResourceResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<ResourcePendingMaintenanceActions> applyPendingMaintenanceActionAsync(ApplyPendingMaintenanceActionRequest request) {
    return applyPendingMaintenanceActionAsync(request, null);
}

@Override
public java.util.concurrent.Future<ResourcePendingMaintenanceActions> applyPendingMaintenanceActionAsync(ApplyPendingMaintenanceActionRequest request,
        com.amazonaws.handlers.AsyncHandler<ApplyPendingMaintenanceActionRequest, ResourcePendingMaintenanceActions> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBSecurityGroup> authorizeDBSecurityGroupIngressAsync(AuthorizeDBSecurityGroupIngressRequest request) {
    return authorizeDBSecurityGroupIngressAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBSecurityGroup> authorizeDBSecurityGroupIngressAsync(AuthorizeDBSecurityGroupIngressRequest request,
        com.amazonaws.handlers.AsyncHandler<AuthorizeDBSecurityGroupIngressRequest, DBSecurityGroup> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
// Generated pass-through pattern (copy* operations): the single-argument
// overload delegates to the (request, AsyncHandler) overload with a null
// handler; the latter always throws UnsupportedOperationException so that
// subclasses only override the operations they support.
@Override
public java.util.concurrent.Future<DBClusterParameterGroup> copyDBClusterParameterGroupAsync(CopyDBClusterParameterGroupRequest request) {
    return copyDBClusterParameterGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBClusterParameterGroup> copyDBClusterParameterGroupAsync(CopyDBClusterParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<CopyDBClusterParameterGroupRequest, DBClusterParameterGroup> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBClusterSnapshot> copyDBClusterSnapshotAsync(CopyDBClusterSnapshotRequest request) {
    return copyDBClusterSnapshotAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBClusterSnapshot> copyDBClusterSnapshotAsync(CopyDBClusterSnapshotRequest request,
        com.amazonaws.handlers.AsyncHandler<CopyDBClusterSnapshotRequest, DBClusterSnapshot> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBParameterGroup> copyDBParameterGroupAsync(CopyDBParameterGroupRequest request) {
    return copyDBParameterGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBParameterGroup> copyDBParameterGroupAsync(CopyDBParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<CopyDBParameterGroupRequest, DBParameterGroup> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBSnapshot> copyDBSnapshotAsync(CopyDBSnapshotRequest request) {
    return copyDBSnapshotAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBSnapshot> copyDBSnapshotAsync(CopyDBSnapshotRequest request,
        com.amazonaws.handlers.AsyncHandler<CopyDBSnapshotRequest, DBSnapshot> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<OptionGroup> copyOptionGroupAsync(CopyOptionGroupRequest request) {
    return copyOptionGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<OptionGroup> copyOptionGroupAsync(CopyOptionGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<CopyOptionGroupRequest, OptionGroup> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
// Generated pass-through pattern (create* operations): the single-argument
// overload delegates to the (request, AsyncHandler) overload with a null
// handler; the latter always throws UnsupportedOperationException so that
// subclasses only override the operations they support.
@Override
public java.util.concurrent.Future<DBCluster> createDBClusterAsync(CreateDBClusterRequest request) {
    return createDBClusterAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBCluster> createDBClusterAsync(CreateDBClusterRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDBClusterRequest, DBCluster> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBClusterParameterGroup> createDBClusterParameterGroupAsync(CreateDBClusterParameterGroupRequest request) {
    return createDBClusterParameterGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBClusterParameterGroup> createDBClusterParameterGroupAsync(CreateDBClusterParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDBClusterParameterGroupRequest, DBClusterParameterGroup> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBClusterSnapshot> createDBClusterSnapshotAsync(CreateDBClusterSnapshotRequest request) {
    return createDBClusterSnapshotAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBClusterSnapshot> createDBClusterSnapshotAsync(CreateDBClusterSnapshotRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDBClusterSnapshotRequest, DBClusterSnapshot> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBInstance> createDBInstanceAsync(CreateDBInstanceRequest request) {
    return createDBInstanceAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBInstance> createDBInstanceAsync(CreateDBInstanceRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDBInstanceRequest, DBInstance> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBInstance> createDBInstanceReadReplicaAsync(CreateDBInstanceReadReplicaRequest request) {
    return createDBInstanceReadReplicaAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBInstance> createDBInstanceReadReplicaAsync(CreateDBInstanceReadReplicaRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDBInstanceReadReplicaRequest, DBInstance> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBParameterGroup> createDBParameterGroupAsync(CreateDBParameterGroupRequest request) {
    return createDBParameterGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBParameterGroup> createDBParameterGroupAsync(CreateDBParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDBParameterGroupRequest, DBParameterGroup> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBSecurityGroup> createDBSecurityGroupAsync(CreateDBSecurityGroupRequest request) {
    return createDBSecurityGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBSecurityGroup> createDBSecurityGroupAsync(CreateDBSecurityGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDBSecurityGroupRequest, DBSecurityGroup> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBSnapshot> createDBSnapshotAsync(CreateDBSnapshotRequest request) {
    return createDBSnapshotAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBSnapshot> createDBSnapshotAsync(CreateDBSnapshotRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDBSnapshotRequest, DBSnapshot> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBSubnetGroup> createDBSubnetGroupAsync(CreateDBSubnetGroupRequest request) {
    return createDBSubnetGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBSubnetGroup> createDBSubnetGroupAsync(CreateDBSubnetGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateDBSubnetGroupRequest, DBSubnetGroup> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<EventSubscription> createEventSubscriptionAsync(CreateEventSubscriptionRequest request) {
    return createEventSubscriptionAsync(request, null);
}

@Override
public java.util.concurrent.Future<EventSubscription> createEventSubscriptionAsync(CreateEventSubscriptionRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateEventSubscriptionRequest, EventSubscription> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<OptionGroup> createOptionGroupAsync(CreateOptionGroupRequest request) {
    return createOptionGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<OptionGroup> createOptionGroupAsync(CreateOptionGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<CreateOptionGroupRequest, OptionGroup> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
// Generated pass-through pattern (delete* operations): the single-argument
// overload delegates to the (request, AsyncHandler) overload with a null
// handler; the latter always throws UnsupportedOperationException so that
// subclasses only override the operations they support.
@Override
public java.util.concurrent.Future<DBCluster> deleteDBClusterAsync(DeleteDBClusterRequest request) {
    return deleteDBClusterAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBCluster> deleteDBClusterAsync(DeleteDBClusterRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDBClusterRequest, DBCluster> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DeleteDBClusterParameterGroupResult> deleteDBClusterParameterGroupAsync(DeleteDBClusterParameterGroupRequest request) {
    return deleteDBClusterParameterGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DeleteDBClusterParameterGroupResult> deleteDBClusterParameterGroupAsync(DeleteDBClusterParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDBClusterParameterGroupRequest, DeleteDBClusterParameterGroupResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBClusterSnapshot> deleteDBClusterSnapshotAsync(DeleteDBClusterSnapshotRequest request) {
    return deleteDBClusterSnapshotAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBClusterSnapshot> deleteDBClusterSnapshotAsync(DeleteDBClusterSnapshotRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDBClusterSnapshotRequest, DBClusterSnapshot> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBInstance> deleteDBInstanceAsync(DeleteDBInstanceRequest request) {
    return deleteDBInstanceAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBInstance> deleteDBInstanceAsync(DeleteDBInstanceRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDBInstanceRequest, DBInstance> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DeleteDBParameterGroupResult> deleteDBParameterGroupAsync(DeleteDBParameterGroupRequest request) {
    return deleteDBParameterGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DeleteDBParameterGroupResult> deleteDBParameterGroupAsync(DeleteDBParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDBParameterGroupRequest, DeleteDBParameterGroupResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DeleteDBSecurityGroupResult> deleteDBSecurityGroupAsync(DeleteDBSecurityGroupRequest request) {
    return deleteDBSecurityGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DeleteDBSecurityGroupResult> deleteDBSecurityGroupAsync(DeleteDBSecurityGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDBSecurityGroupRequest, DeleteDBSecurityGroupResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DBSnapshot> deleteDBSnapshotAsync(DeleteDBSnapshotRequest request) {
    return deleteDBSnapshotAsync(request, null);
}

@Override
public java.util.concurrent.Future<DBSnapshot> deleteDBSnapshotAsync(DeleteDBSnapshotRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDBSnapshotRequest, DBSnapshot> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DeleteDBSubnetGroupResult> deleteDBSubnetGroupAsync(DeleteDBSubnetGroupRequest request) {
    return deleteDBSubnetGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DeleteDBSubnetGroupResult> deleteDBSubnetGroupAsync(DeleteDBSubnetGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteDBSubnetGroupRequest, DeleteDBSubnetGroupResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<EventSubscription> deleteEventSubscriptionAsync(DeleteEventSubscriptionRequest request) {
    return deleteEventSubscriptionAsync(request, null);
}

@Override
public java.util.concurrent.Future<EventSubscription> deleteEventSubscriptionAsync(DeleteEventSubscriptionRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteEventSubscriptionRequest, EventSubscription> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}

@Override
public java.util.concurrent.Future<DeleteOptionGroupResult> deleteOptionGroupAsync(DeleteOptionGroupRequest request) {
    return deleteOptionGroupAsync(request, null);
}

@Override
public java.util.concurrent.Future<DeleteOptionGroupResult> deleteOptionGroupAsync(DeleteOptionGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<DeleteOptionGroupRequest, DeleteOptionGroupResult> asyncHandler) {
    throw new java.lang.UnsupportedOperationException();
}
    // ------------------------------------------------------------------
    // Generated stubs for DescribeAccountAttributes / DescribeCertificates.
    // Each operation has four forms: (request) delegates to
    // (request, handler) with a null handler; (request, handler) always
    // throws UnsupportedOperationException (override in a subclass);
    // the no-argument and handler-only simplified forms delegate using a
    // freshly constructed default request.
    // ------------------------------------------------------------------
    @Override
    public java.util.concurrent.Future<DescribeAccountAttributesResult> describeAccountAttributesAsync(DescribeAccountAttributesRequest request) {
        return describeAccountAttributesAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeAccountAttributesResult> describeAccountAttributesAsync(DescribeAccountAttributesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeAccountAttributesRequest, DescribeAccountAttributesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeAccountAttributes operation.
     *
     * @see #describeAccountAttributesAsync(DescribeAccountAttributesRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeAccountAttributesResult> describeAccountAttributesAsync() {
        return describeAccountAttributesAsync(new DescribeAccountAttributesRequest());
    }
    /**
     * Simplified method form for invoking the DescribeAccountAttributes operation with an AsyncHandler.
     *
     * @see #describeAccountAttributesAsync(DescribeAccountAttributesRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeAccountAttributesResult> describeAccountAttributesAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeAccountAttributesRequest, DescribeAccountAttributesResult> asyncHandler) {
        return describeAccountAttributesAsync(new DescribeAccountAttributesRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeCertificatesResult> describeCertificatesAsync(DescribeCertificatesRequest request) {
        return describeCertificatesAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeCertificatesResult> describeCertificatesAsync(DescribeCertificatesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeCertificatesRequest, DescribeCertificatesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeCertificates operation.
     *
     * @see #describeCertificatesAsync(DescribeCertificatesRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeCertificatesResult> describeCertificatesAsync() {
        return describeCertificatesAsync(new DescribeCertificatesRequest());
    }
    /**
     * Simplified method form for invoking the DescribeCertificates operation with an AsyncHandler.
     *
     * @see #describeCertificatesAsync(DescribeCertificatesRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeCertificatesResult> describeCertificatesAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeCertificatesRequest, DescribeCertificatesResult> asyncHandler) {
        return describeCertificatesAsync(new DescribeCertificatesRequest(), asyncHandler);
    }
    // ------------------------------------------------------------------
    // Generated stubs for the DescribeDBCluster* operations. Pattern:
    // (request) delegates to (request, handler) with a null handler;
    // (request, handler) always throws UnsupportedOperationException and
    // must be overridden; simplified forms (where present) delegate with
    // a default-constructed request.
    // ------------------------------------------------------------------
    @Override
    public java.util.concurrent.Future<DescribeDBClusterParameterGroupsResult> describeDBClusterParameterGroupsAsync(
            DescribeDBClusterParameterGroupsRequest request) {
        return describeDBClusterParameterGroupsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBClusterParameterGroupsResult> describeDBClusterParameterGroupsAsync(
            DescribeDBClusterParameterGroupsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBClusterParameterGroupsRequest, DescribeDBClusterParameterGroupsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBClusterParameterGroups operation.
     *
     * @see #describeDBClusterParameterGroupsAsync(DescribeDBClusterParameterGroupsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBClusterParameterGroupsResult> describeDBClusterParameterGroupsAsync() {
        return describeDBClusterParameterGroupsAsync(new DescribeDBClusterParameterGroupsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBClusterParameterGroups operation with an AsyncHandler.
     *
     * @see #describeDBClusterParameterGroupsAsync(DescribeDBClusterParameterGroupsRequest,
     *      com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBClusterParameterGroupsResult> describeDBClusterParameterGroupsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBClusterParameterGroupsRequest, DescribeDBClusterParameterGroupsResult> asyncHandler) {
        return describeDBClusterParameterGroupsAsync(new DescribeDBClusterParameterGroupsRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBClusterParametersResult> describeDBClusterParametersAsync(DescribeDBClusterParametersRequest request) {
        return describeDBClusterParametersAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBClusterParametersResult> describeDBClusterParametersAsync(DescribeDBClusterParametersRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBClusterParametersRequest, DescribeDBClusterParametersResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public java.util.concurrent.Future<DBClusterSnapshotAttributesResult> describeDBClusterSnapshotAttributesAsync(
            DescribeDBClusterSnapshotAttributesRequest request) {
        return describeDBClusterSnapshotAttributesAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DBClusterSnapshotAttributesResult> describeDBClusterSnapshotAttributesAsync(
            DescribeDBClusterSnapshotAttributesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBClusterSnapshotAttributesRequest, DBClusterSnapshotAttributesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public java.util.concurrent.Future<DescribeDBClusterSnapshotsResult> describeDBClusterSnapshotsAsync(DescribeDBClusterSnapshotsRequest request) {
        return describeDBClusterSnapshotsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBClusterSnapshotsResult> describeDBClusterSnapshotsAsync(DescribeDBClusterSnapshotsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBClusterSnapshotsRequest, DescribeDBClusterSnapshotsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBClusterSnapshots operation.
     *
     * @see #describeDBClusterSnapshotsAsync(DescribeDBClusterSnapshotsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBClusterSnapshotsResult> describeDBClusterSnapshotsAsync() {
        return describeDBClusterSnapshotsAsync(new DescribeDBClusterSnapshotsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBClusterSnapshots operation with an AsyncHandler.
     *
     * @see #describeDBClusterSnapshotsAsync(DescribeDBClusterSnapshotsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBClusterSnapshotsResult> describeDBClusterSnapshotsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBClusterSnapshotsRequest, DescribeDBClusterSnapshotsResult> asyncHandler) {
        return describeDBClusterSnapshotsAsync(new DescribeDBClusterSnapshotsRequest(), asyncHandler);
    }
    // ------------------------------------------------------------------
    // Generated stubs for DescribeDBClusters / DescribeDBEngineVersions /
    // DescribeDBInstances. Pattern: (request) delegates to
    // (request, handler) with a null handler; (request, handler) always
    // throws UnsupportedOperationException and must be overridden;
    // simplified forms delegate with a default-constructed request.
    // ------------------------------------------------------------------
    @Override
    public java.util.concurrent.Future<DescribeDBClustersResult> describeDBClustersAsync(DescribeDBClustersRequest request) {
        return describeDBClustersAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBClustersResult> describeDBClustersAsync(DescribeDBClustersRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBClustersRequest, DescribeDBClustersResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBClusters operation.
     *
     * @see #describeDBClustersAsync(DescribeDBClustersRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBClustersResult> describeDBClustersAsync() {
        return describeDBClustersAsync(new DescribeDBClustersRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBClusters operation with an AsyncHandler.
     *
     * @see #describeDBClustersAsync(DescribeDBClustersRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBClustersResult> describeDBClustersAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBClustersRequest, DescribeDBClustersResult> asyncHandler) {
        return describeDBClustersAsync(new DescribeDBClustersRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBEngineVersionsResult> describeDBEngineVersionsAsync(DescribeDBEngineVersionsRequest request) {
        return describeDBEngineVersionsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBEngineVersionsResult> describeDBEngineVersionsAsync(DescribeDBEngineVersionsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBEngineVersionsRequest, DescribeDBEngineVersionsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBEngineVersions operation.
     *
     * @see #describeDBEngineVersionsAsync(DescribeDBEngineVersionsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBEngineVersionsResult> describeDBEngineVersionsAsync() {
        return describeDBEngineVersionsAsync(new DescribeDBEngineVersionsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBEngineVersions operation with an AsyncHandler.
     *
     * @see #describeDBEngineVersionsAsync(DescribeDBEngineVersionsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBEngineVersionsResult> describeDBEngineVersionsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBEngineVersionsRequest, DescribeDBEngineVersionsResult> asyncHandler) {
        return describeDBEngineVersionsAsync(new DescribeDBEngineVersionsRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBInstancesResult> describeDBInstancesAsync(DescribeDBInstancesRequest request) {
        return describeDBInstancesAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBInstancesResult> describeDBInstancesAsync(DescribeDBInstancesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBInstancesRequest, DescribeDBInstancesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBInstances operation.
     *
     * @see #describeDBInstancesAsync(DescribeDBInstancesRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBInstancesResult> describeDBInstancesAsync() {
        return describeDBInstancesAsync(new DescribeDBInstancesRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBInstances operation with an AsyncHandler.
     *
     * @see #describeDBInstancesAsync(DescribeDBInstancesRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBInstancesResult> describeDBInstancesAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBInstancesRequest, DescribeDBInstancesResult> asyncHandler) {
        return describeDBInstancesAsync(new DescribeDBInstancesRequest(), asyncHandler);
    }
    // ------------------------------------------------------------------
    // Generated stubs for DescribeDBLogFiles / DescribeDBParameterGroups /
    // DescribeDBParameters / DescribeDBSecurityGroups. Pattern: (request)
    // delegates to (request, handler) with a null handler;
    // (request, handler) always throws UnsupportedOperationException and
    // must be overridden; simplified forms (where present) delegate with
    // a default-constructed request.
    // ------------------------------------------------------------------
    @Override
    public java.util.concurrent.Future<DescribeDBLogFilesResult> describeDBLogFilesAsync(DescribeDBLogFilesRequest request) {
        return describeDBLogFilesAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBLogFilesResult> describeDBLogFilesAsync(DescribeDBLogFilesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBLogFilesRequest, DescribeDBLogFilesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public java.util.concurrent.Future<DescribeDBParameterGroupsResult> describeDBParameterGroupsAsync(DescribeDBParameterGroupsRequest request) {
        return describeDBParameterGroupsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBParameterGroupsResult> describeDBParameterGroupsAsync(DescribeDBParameterGroupsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBParameterGroupsRequest, DescribeDBParameterGroupsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBParameterGroups operation.
     *
     * @see #describeDBParameterGroupsAsync(DescribeDBParameterGroupsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBParameterGroupsResult> describeDBParameterGroupsAsync() {
        return describeDBParameterGroupsAsync(new DescribeDBParameterGroupsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBParameterGroups operation with an AsyncHandler.
     *
     * @see #describeDBParameterGroupsAsync(DescribeDBParameterGroupsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBParameterGroupsResult> describeDBParameterGroupsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBParameterGroupsRequest, DescribeDBParameterGroupsResult> asyncHandler) {
        return describeDBParameterGroupsAsync(new DescribeDBParameterGroupsRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBParametersResult> describeDBParametersAsync(DescribeDBParametersRequest request) {
        return describeDBParametersAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBParametersResult> describeDBParametersAsync(DescribeDBParametersRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBParametersRequest, DescribeDBParametersResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public java.util.concurrent.Future<DescribeDBSecurityGroupsResult> describeDBSecurityGroupsAsync(DescribeDBSecurityGroupsRequest request) {
        return describeDBSecurityGroupsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBSecurityGroupsResult> describeDBSecurityGroupsAsync(DescribeDBSecurityGroupsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBSecurityGroupsRequest, DescribeDBSecurityGroupsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBSecurityGroups operation.
     *
     * @see #describeDBSecurityGroupsAsync(DescribeDBSecurityGroupsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBSecurityGroupsResult> describeDBSecurityGroupsAsync() {
        return describeDBSecurityGroupsAsync(new DescribeDBSecurityGroupsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBSecurityGroups operation with an AsyncHandler.
     *
     * @see #describeDBSecurityGroupsAsync(DescribeDBSecurityGroupsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBSecurityGroupsResult> describeDBSecurityGroupsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBSecurityGroupsRequest, DescribeDBSecurityGroupsResult> asyncHandler) {
        return describeDBSecurityGroupsAsync(new DescribeDBSecurityGroupsRequest(), asyncHandler);
    }
    // ------------------------------------------------------------------
    // Generated stubs for DescribeDBSnapshotAttributes /
    // DescribeDBSnapshots / DescribeDBSubnetGroups. Pattern: (request)
    // delegates to (request, handler) with a null handler;
    // (request, handler) always throws UnsupportedOperationException and
    // must be overridden; simplified forms delegate with a
    // default-constructed request.
    // ------------------------------------------------------------------
    @Override
    public java.util.concurrent.Future<DBSnapshotAttributesResult> describeDBSnapshotAttributesAsync(DescribeDBSnapshotAttributesRequest request) {
        return describeDBSnapshotAttributesAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DBSnapshotAttributesResult> describeDBSnapshotAttributesAsync(DescribeDBSnapshotAttributesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBSnapshotAttributesRequest, DBSnapshotAttributesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBSnapshotAttributes operation.
     *
     * @see #describeDBSnapshotAttributesAsync(DescribeDBSnapshotAttributesRequest)
     */
    @Override
    public java.util.concurrent.Future<DBSnapshotAttributesResult> describeDBSnapshotAttributesAsync() {
        return describeDBSnapshotAttributesAsync(new DescribeDBSnapshotAttributesRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBSnapshotAttributes operation with an AsyncHandler.
     *
     * @see #describeDBSnapshotAttributesAsync(DescribeDBSnapshotAttributesRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DBSnapshotAttributesResult> describeDBSnapshotAttributesAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBSnapshotAttributesRequest, DBSnapshotAttributesResult> asyncHandler) {
        return describeDBSnapshotAttributesAsync(new DescribeDBSnapshotAttributesRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBSnapshotsResult> describeDBSnapshotsAsync(DescribeDBSnapshotsRequest request) {
        return describeDBSnapshotsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBSnapshotsResult> describeDBSnapshotsAsync(DescribeDBSnapshotsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBSnapshotsRequest, DescribeDBSnapshotsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBSnapshots operation.
     *
     * @see #describeDBSnapshotsAsync(DescribeDBSnapshotsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBSnapshotsResult> describeDBSnapshotsAsync() {
        return describeDBSnapshotsAsync(new DescribeDBSnapshotsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBSnapshots operation with an AsyncHandler.
     *
     * @see #describeDBSnapshotsAsync(DescribeDBSnapshotsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBSnapshotsResult> describeDBSnapshotsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBSnapshotsRequest, DescribeDBSnapshotsResult> asyncHandler) {
        return describeDBSnapshotsAsync(new DescribeDBSnapshotsRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBSubnetGroupsResult> describeDBSubnetGroupsAsync(DescribeDBSubnetGroupsRequest request) {
        return describeDBSubnetGroupsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeDBSubnetGroupsResult> describeDBSubnetGroupsAsync(DescribeDBSubnetGroupsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeDBSubnetGroupsRequest, DescribeDBSubnetGroupsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeDBSubnetGroups operation.
     *
     * @see #describeDBSubnetGroupsAsync(DescribeDBSubnetGroupsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBSubnetGroupsResult> describeDBSubnetGroupsAsync() {
        return describeDBSubnetGroupsAsync(new DescribeDBSubnetGroupsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeDBSubnetGroups operation with an AsyncHandler.
     *
     * @see #describeDBSubnetGroupsAsync(DescribeDBSubnetGroupsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeDBSubnetGroupsResult> describeDBSubnetGroupsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeDBSubnetGroupsRequest, DescribeDBSubnetGroupsResult> asyncHandler) {
        return describeDBSubnetGroupsAsync(new DescribeDBSubnetGroupsRequest(), asyncHandler);
    }
    // ------------------------------------------------------------------
    // Generated stubs for DescribeEngineDefault(Cluster)Parameters /
    // DescribeEventCategories / DescribeEventSubscriptions. Pattern:
    // (request) delegates to (request, handler) with a null handler;
    // (request, handler) always throws UnsupportedOperationException and
    // must be overridden; simplified forms (where present) delegate with
    // a default-constructed request.
    // ------------------------------------------------------------------
    @Override
    public java.util.concurrent.Future<EngineDefaults> describeEngineDefaultClusterParametersAsync(DescribeEngineDefaultClusterParametersRequest request) {
        return describeEngineDefaultClusterParametersAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<EngineDefaults> describeEngineDefaultClusterParametersAsync(DescribeEngineDefaultClusterParametersRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeEngineDefaultClusterParametersRequest, EngineDefaults> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public java.util.concurrent.Future<EngineDefaults> describeEngineDefaultParametersAsync(DescribeEngineDefaultParametersRequest request) {
        return describeEngineDefaultParametersAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<EngineDefaults> describeEngineDefaultParametersAsync(DescribeEngineDefaultParametersRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeEngineDefaultParametersRequest, EngineDefaults> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public java.util.concurrent.Future<DescribeEventCategoriesResult> describeEventCategoriesAsync(DescribeEventCategoriesRequest request) {
        return describeEventCategoriesAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeEventCategoriesResult> describeEventCategoriesAsync(DescribeEventCategoriesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeEventCategoriesRequest, DescribeEventCategoriesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeEventCategories operation.
     *
     * @see #describeEventCategoriesAsync(DescribeEventCategoriesRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeEventCategoriesResult> describeEventCategoriesAsync() {
        return describeEventCategoriesAsync(new DescribeEventCategoriesRequest());
    }
    /**
     * Simplified method form for invoking the DescribeEventCategories operation with an AsyncHandler.
     *
     * @see #describeEventCategoriesAsync(DescribeEventCategoriesRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeEventCategoriesResult> describeEventCategoriesAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeEventCategoriesRequest, DescribeEventCategoriesResult> asyncHandler) {
        return describeEventCategoriesAsync(new DescribeEventCategoriesRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeEventSubscriptionsResult> describeEventSubscriptionsAsync(DescribeEventSubscriptionsRequest request) {
        return describeEventSubscriptionsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeEventSubscriptionsResult> describeEventSubscriptionsAsync(DescribeEventSubscriptionsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeEventSubscriptionsRequest, DescribeEventSubscriptionsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeEventSubscriptions operation.
     *
     * @see #describeEventSubscriptionsAsync(DescribeEventSubscriptionsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeEventSubscriptionsResult> describeEventSubscriptionsAsync() {
        return describeEventSubscriptionsAsync(new DescribeEventSubscriptionsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeEventSubscriptions operation with an AsyncHandler.
     *
     * @see #describeEventSubscriptionsAsync(DescribeEventSubscriptionsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeEventSubscriptionsResult> describeEventSubscriptionsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeEventSubscriptionsRequest, DescribeEventSubscriptionsResult> asyncHandler) {
        return describeEventSubscriptionsAsync(new DescribeEventSubscriptionsRequest(), asyncHandler);
    }
    // ------------------------------------------------------------------
    // Generated stubs for DescribeEvents / DescribeOptionGroupOptions /
    // DescribeOptionGroups / DescribeOrderableDBInstanceOptions /
    // DescribePendingMaintenanceActions. Pattern: (request) delegates to
    // (request, handler) with a null handler; (request, handler) always
    // throws UnsupportedOperationException and must be overridden;
    // simplified forms (where present) delegate with a
    // default-constructed request.
    // ------------------------------------------------------------------
    @Override
    public java.util.concurrent.Future<DescribeEventsResult> describeEventsAsync(DescribeEventsRequest request) {
        return describeEventsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeEventsResult> describeEventsAsync(DescribeEventsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeEventsRequest, DescribeEventsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeEvents operation.
     *
     * @see #describeEventsAsync(DescribeEventsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeEventsResult> describeEventsAsync() {
        return describeEventsAsync(new DescribeEventsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeEvents operation with an AsyncHandler.
     *
     * @see #describeEventsAsync(DescribeEventsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeEventsResult> describeEventsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeEventsRequest, DescribeEventsResult> asyncHandler) {
        return describeEventsAsync(new DescribeEventsRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeOptionGroupOptionsResult> describeOptionGroupOptionsAsync(DescribeOptionGroupOptionsRequest request) {
        return describeOptionGroupOptionsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeOptionGroupOptionsResult> describeOptionGroupOptionsAsync(DescribeOptionGroupOptionsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeOptionGroupOptionsRequest, DescribeOptionGroupOptionsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public java.util.concurrent.Future<DescribeOptionGroupsResult> describeOptionGroupsAsync(DescribeOptionGroupsRequest request) {
        return describeOptionGroupsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeOptionGroupsResult> describeOptionGroupsAsync(DescribeOptionGroupsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeOptionGroupsRequest, DescribeOptionGroupsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeOptionGroups operation.
     *
     * @see #describeOptionGroupsAsync(DescribeOptionGroupsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeOptionGroupsResult> describeOptionGroupsAsync() {
        return describeOptionGroupsAsync(new DescribeOptionGroupsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeOptionGroups operation with an AsyncHandler.
     *
     * @see #describeOptionGroupsAsync(DescribeOptionGroupsRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeOptionGroupsResult> describeOptionGroupsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeOptionGroupsRequest, DescribeOptionGroupsResult> asyncHandler) {
        return describeOptionGroupsAsync(new DescribeOptionGroupsRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeOrderableDBInstanceOptionsResult> describeOrderableDBInstanceOptionsAsync(
            DescribeOrderableDBInstanceOptionsRequest request) {
        return describeOrderableDBInstanceOptionsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeOrderableDBInstanceOptionsResult> describeOrderableDBInstanceOptionsAsync(
            DescribeOrderableDBInstanceOptionsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeOrderableDBInstanceOptionsRequest, DescribeOrderableDBInstanceOptionsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    @Override
    public java.util.concurrent.Future<DescribePendingMaintenanceActionsResult> describePendingMaintenanceActionsAsync(
            DescribePendingMaintenanceActionsRequest request) {
        return describePendingMaintenanceActionsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribePendingMaintenanceActionsResult> describePendingMaintenanceActionsAsync(
            DescribePendingMaintenanceActionsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribePendingMaintenanceActionsRequest, DescribePendingMaintenanceActionsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    // ------------------------------------------------------------------
    // Generated stubs: simplified DescribePendingMaintenanceActions
    // forms, DescribeReservedDBInstances(/Offerings), and
    // DescribeSourceRegions. Pattern: (request) delegates to
    // (request, handler) with a null handler; (request, handler) always
    // throws UnsupportedOperationException and must be overridden;
    // simplified forms delegate with a default-constructed request.
    // ------------------------------------------------------------------
    /**
     * Simplified method form for invoking the DescribePendingMaintenanceActions operation.
     *
     * @see #describePendingMaintenanceActionsAsync(DescribePendingMaintenanceActionsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribePendingMaintenanceActionsResult> describePendingMaintenanceActionsAsync() {
        return describePendingMaintenanceActionsAsync(new DescribePendingMaintenanceActionsRequest());
    }
    /**
     * Simplified method form for invoking the DescribePendingMaintenanceActions operation with an AsyncHandler.
     *
     * @see #describePendingMaintenanceActionsAsync(DescribePendingMaintenanceActionsRequest,
     *      com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribePendingMaintenanceActionsResult> describePendingMaintenanceActionsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribePendingMaintenanceActionsRequest, DescribePendingMaintenanceActionsResult> asyncHandler) {
        return describePendingMaintenanceActionsAsync(new DescribePendingMaintenanceActionsRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeReservedDBInstancesResult> describeReservedDBInstancesAsync(DescribeReservedDBInstancesRequest request) {
        return describeReservedDBInstancesAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeReservedDBInstancesResult> describeReservedDBInstancesAsync(DescribeReservedDBInstancesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeReservedDBInstancesRequest, DescribeReservedDBInstancesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeReservedDBInstances operation.
     *
     * @see #describeReservedDBInstancesAsync(DescribeReservedDBInstancesRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeReservedDBInstancesResult> describeReservedDBInstancesAsync() {
        return describeReservedDBInstancesAsync(new DescribeReservedDBInstancesRequest());
    }
    /**
     * Simplified method form for invoking the DescribeReservedDBInstances operation with an AsyncHandler.
     *
     * @see #describeReservedDBInstancesAsync(DescribeReservedDBInstancesRequest, com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeReservedDBInstancesResult> describeReservedDBInstancesAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeReservedDBInstancesRequest, DescribeReservedDBInstancesResult> asyncHandler) {
        return describeReservedDBInstancesAsync(new DescribeReservedDBInstancesRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeReservedDBInstancesOfferingsResult> describeReservedDBInstancesOfferingsAsync(
            DescribeReservedDBInstancesOfferingsRequest request) {
        return describeReservedDBInstancesOfferingsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeReservedDBInstancesOfferingsResult> describeReservedDBInstancesOfferingsAsync(
            DescribeReservedDBInstancesOfferingsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeReservedDBInstancesOfferingsRequest, DescribeReservedDBInstancesOfferingsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
    /**
     * Simplified method form for invoking the DescribeReservedDBInstancesOfferings operation.
     *
     * @see #describeReservedDBInstancesOfferingsAsync(DescribeReservedDBInstancesOfferingsRequest)
     */
    @Override
    public java.util.concurrent.Future<DescribeReservedDBInstancesOfferingsResult> describeReservedDBInstancesOfferingsAsync() {
        return describeReservedDBInstancesOfferingsAsync(new DescribeReservedDBInstancesOfferingsRequest());
    }
    /**
     * Simplified method form for invoking the DescribeReservedDBInstancesOfferings operation with an AsyncHandler.
     *
     * @see #describeReservedDBInstancesOfferingsAsync(DescribeReservedDBInstancesOfferingsRequest,
     *      com.amazonaws.handlers.AsyncHandler)
     */
    @Override
    public java.util.concurrent.Future<DescribeReservedDBInstancesOfferingsResult> describeReservedDBInstancesOfferingsAsync(
            com.amazonaws.handlers.AsyncHandler<DescribeReservedDBInstancesOfferingsRequest, DescribeReservedDBInstancesOfferingsResult> asyncHandler) {
        return describeReservedDBInstancesOfferingsAsync(new DescribeReservedDBInstancesOfferingsRequest(), asyncHandler);
    }
    @Override
    public java.util.concurrent.Future<DescribeSourceRegionsResult> describeSourceRegionsAsync(DescribeSourceRegionsRequest request) {
        return describeSourceRegionsAsync(request, null);
    }
    @Override
    public java.util.concurrent.Future<DescribeSourceRegionsResult> describeSourceRegionsAsync(DescribeSourceRegionsRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeSourceRegionsRequest, DescribeSourceRegionsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }
@Override
public java.util.concurrent.Future<DownloadDBLogFilePortionResult> downloadDBLogFilePortionAsync(DownloadDBLogFilePortionRequest request) {
return downloadDBLogFilePortionAsync(request, null);
}
@Override
public java.util.concurrent.Future<DownloadDBLogFilePortionResult> downloadDBLogFilePortionAsync(DownloadDBLogFilePortionRequest request,
com.amazonaws.handlers.AsyncHandler<DownloadDBLogFilePortionRequest, DownloadDBLogFilePortionResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DBCluster> failoverDBClusterAsync(FailoverDBClusterRequest request) {
return failoverDBClusterAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBCluster> failoverDBClusterAsync(FailoverDBClusterRequest request,
com.amazonaws.handlers.AsyncHandler<FailoverDBClusterRequest, DBCluster> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
/**
* Simplified method form for invoking the FailoverDBCluster operation.
*
* @see #failoverDBClusterAsync(FailoverDBClusterRequest)
*/
@Override
public java.util.concurrent.Future<DBCluster> failoverDBClusterAsync() {
return failoverDBClusterAsync(new FailoverDBClusterRequest());
}
/**
* Simplified method form for invoking the FailoverDBCluster operation with an AsyncHandler.
*
* @see #failoverDBClusterAsync(FailoverDBClusterRequest, com.amazonaws.handlers.AsyncHandler)
*/
@Override
public java.util.concurrent.Future<DBCluster> failoverDBClusterAsync(com.amazonaws.handlers.AsyncHandler<FailoverDBClusterRequest, DBCluster> asyncHandler) {
return failoverDBClusterAsync(new FailoverDBClusterRequest(), asyncHandler);
}
// Each operation below follows the generated stub pattern: the single-request
// form delegates to the (request, handler) overload with a null handler, and the
// (request, handler) form throws UnsupportedOperationException until a concrete
// client overrides it.
@Override
public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request) {
return listTagsForResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request,
        com.amazonaws.handlers.AsyncHandler<ListTagsForResourceRequest, ListTagsForResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyDBCluster
@Override
public java.util.concurrent.Future<DBCluster> modifyDBClusterAsync(ModifyDBClusterRequest request) {
return modifyDBClusterAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBCluster> modifyDBClusterAsync(ModifyDBClusterRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyDBClusterRequest, DBCluster> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyDBClusterParameterGroup
@Override
public java.util.concurrent.Future<ModifyDBClusterParameterGroupResult> modifyDBClusterParameterGroupAsync(ModifyDBClusterParameterGroupRequest request) {
return modifyDBClusterParameterGroupAsync(request, null);
}
@Override
public java.util.concurrent.Future<ModifyDBClusterParameterGroupResult> modifyDBClusterParameterGroupAsync(ModifyDBClusterParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyDBClusterParameterGroupRequest, ModifyDBClusterParameterGroupResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyDBClusterSnapshotAttribute
@Override
public java.util.concurrent.Future<DBClusterSnapshotAttributesResult> modifyDBClusterSnapshotAttributeAsync(ModifyDBClusterSnapshotAttributeRequest request) {
return modifyDBClusterSnapshotAttributeAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBClusterSnapshotAttributesResult> modifyDBClusterSnapshotAttributeAsync(
        ModifyDBClusterSnapshotAttributeRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyDBClusterSnapshotAttributeRequest, DBClusterSnapshotAttributesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyDBInstance
@Override
public java.util.concurrent.Future<DBInstance> modifyDBInstanceAsync(ModifyDBInstanceRequest request) {
return modifyDBInstanceAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBInstance> modifyDBInstanceAsync(ModifyDBInstanceRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyDBInstanceRequest, DBInstance> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyDBParameterGroup
@Override
public java.util.concurrent.Future<ModifyDBParameterGroupResult> modifyDBParameterGroupAsync(ModifyDBParameterGroupRequest request) {
return modifyDBParameterGroupAsync(request, null);
}
@Override
public java.util.concurrent.Future<ModifyDBParameterGroupResult> modifyDBParameterGroupAsync(ModifyDBParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyDBParameterGroupRequest, ModifyDBParameterGroupResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyDBSnapshot
@Override
public java.util.concurrent.Future<DBSnapshot> modifyDBSnapshotAsync(ModifyDBSnapshotRequest request) {
return modifyDBSnapshotAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBSnapshot> modifyDBSnapshotAsync(ModifyDBSnapshotRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyDBSnapshotRequest, DBSnapshot> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyDBSnapshotAttribute
@Override
public java.util.concurrent.Future<DBSnapshotAttributesResult> modifyDBSnapshotAttributeAsync(ModifyDBSnapshotAttributeRequest request) {
return modifyDBSnapshotAttributeAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBSnapshotAttributesResult> modifyDBSnapshotAttributeAsync(ModifyDBSnapshotAttributeRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyDBSnapshotAttributeRequest, DBSnapshotAttributesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyDBSubnetGroup
@Override
public java.util.concurrent.Future<DBSubnetGroup> modifyDBSubnetGroupAsync(ModifyDBSubnetGroupRequest request) {
return modifyDBSubnetGroupAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBSubnetGroup> modifyDBSubnetGroupAsync(ModifyDBSubnetGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyDBSubnetGroupRequest, DBSubnetGroup> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyEventSubscription
@Override
public java.util.concurrent.Future<EventSubscription> modifyEventSubscriptionAsync(ModifyEventSubscriptionRequest request) {
return modifyEventSubscriptionAsync(request, null);
}
@Override
public java.util.concurrent.Future<EventSubscription> modifyEventSubscriptionAsync(ModifyEventSubscriptionRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyEventSubscriptionRequest, EventSubscription> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ModifyOptionGroup
@Override
public java.util.concurrent.Future<OptionGroup> modifyOptionGroupAsync(ModifyOptionGroupRequest request) {
return modifyOptionGroupAsync(request, null);
}
@Override
public java.util.concurrent.Future<OptionGroup> modifyOptionGroupAsync(ModifyOptionGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<ModifyOptionGroupRequest, OptionGroup> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// PromoteReadReplica
@Override
public java.util.concurrent.Future<DBInstance> promoteReadReplicaAsync(PromoteReadReplicaRequest request) {
return promoteReadReplicaAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBInstance> promoteReadReplicaAsync(PromoteReadReplicaRequest request,
        com.amazonaws.handlers.AsyncHandler<PromoteReadReplicaRequest, DBInstance> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// PromoteReadReplicaDBCluster
@Override
public java.util.concurrent.Future<DBCluster> promoteReadReplicaDBClusterAsync(PromoteReadReplicaDBClusterRequest request) {
return promoteReadReplicaDBClusterAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBCluster> promoteReadReplicaDBClusterAsync(PromoteReadReplicaDBClusterRequest request,
        com.amazonaws.handlers.AsyncHandler<PromoteReadReplicaDBClusterRequest, DBCluster> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// PurchaseReservedDBInstancesOffering
@Override
public java.util.concurrent.Future<ReservedDBInstance> purchaseReservedDBInstancesOfferingAsync(PurchaseReservedDBInstancesOfferingRequest request) {
return purchaseReservedDBInstancesOfferingAsync(request, null);
}
@Override
public java.util.concurrent.Future<ReservedDBInstance> purchaseReservedDBInstancesOfferingAsync(PurchaseReservedDBInstancesOfferingRequest request,
        com.amazonaws.handlers.AsyncHandler<PurchaseReservedDBInstancesOfferingRequest, ReservedDBInstance> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// RebootDBInstance
@Override
public java.util.concurrent.Future<DBInstance> rebootDBInstanceAsync(RebootDBInstanceRequest request) {
return rebootDBInstanceAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBInstance> rebootDBInstanceAsync(RebootDBInstanceRequest request,
        com.amazonaws.handlers.AsyncHandler<RebootDBInstanceRequest, DBInstance> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// Remaining generated operation stubs: single-request form delegates with a null
// handler; (request, handler) form is unsupported here and must be overridden.
@Override
public java.util.concurrent.Future<RemoveRoleFromDBClusterResult> removeRoleFromDBClusterAsync(RemoveRoleFromDBClusterRequest request) {
return removeRoleFromDBClusterAsync(request, null);
}
@Override
public java.util.concurrent.Future<RemoveRoleFromDBClusterResult> removeRoleFromDBClusterAsync(RemoveRoleFromDBClusterRequest request,
        com.amazonaws.handlers.AsyncHandler<RemoveRoleFromDBClusterRequest, RemoveRoleFromDBClusterResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// RemoveSourceIdentifierFromSubscription
@Override
public java.util.concurrent.Future<EventSubscription> removeSourceIdentifierFromSubscriptionAsync(RemoveSourceIdentifierFromSubscriptionRequest request) {
return removeSourceIdentifierFromSubscriptionAsync(request, null);
}
@Override
public java.util.concurrent.Future<EventSubscription> removeSourceIdentifierFromSubscriptionAsync(RemoveSourceIdentifierFromSubscriptionRequest request,
        com.amazonaws.handlers.AsyncHandler<RemoveSourceIdentifierFromSubscriptionRequest, EventSubscription> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// RemoveTagsFromResource
@Override
public java.util.concurrent.Future<RemoveTagsFromResourceResult> removeTagsFromResourceAsync(RemoveTagsFromResourceRequest request) {
return removeTagsFromResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<RemoveTagsFromResourceResult> removeTagsFromResourceAsync(RemoveTagsFromResourceRequest request,
        com.amazonaws.handlers.AsyncHandler<RemoveTagsFromResourceRequest, RemoveTagsFromResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ResetDBClusterParameterGroup
@Override
public java.util.concurrent.Future<ResetDBClusterParameterGroupResult> resetDBClusterParameterGroupAsync(ResetDBClusterParameterGroupRequest request) {
return resetDBClusterParameterGroupAsync(request, null);
}
@Override
public java.util.concurrent.Future<ResetDBClusterParameterGroupResult> resetDBClusterParameterGroupAsync(ResetDBClusterParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<ResetDBClusterParameterGroupRequest, ResetDBClusterParameterGroupResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// ResetDBParameterGroup
@Override
public java.util.concurrent.Future<ResetDBParameterGroupResult> resetDBParameterGroupAsync(ResetDBParameterGroupRequest request) {
return resetDBParameterGroupAsync(request, null);
}
@Override
public java.util.concurrent.Future<ResetDBParameterGroupResult> resetDBParameterGroupAsync(ResetDBParameterGroupRequest request,
        com.amazonaws.handlers.AsyncHandler<ResetDBParameterGroupRequest, ResetDBParameterGroupResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// RestoreDBClusterFromS3
@Override
public java.util.concurrent.Future<DBCluster> restoreDBClusterFromS3Async(RestoreDBClusterFromS3Request request) {
return restoreDBClusterFromS3Async(request, null);
}
@Override
public java.util.concurrent.Future<DBCluster> restoreDBClusterFromS3Async(RestoreDBClusterFromS3Request request,
        com.amazonaws.handlers.AsyncHandler<RestoreDBClusterFromS3Request, DBCluster> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// RestoreDBClusterFromSnapshot
@Override
public java.util.concurrent.Future<DBCluster> restoreDBClusterFromSnapshotAsync(RestoreDBClusterFromSnapshotRequest request) {
return restoreDBClusterFromSnapshotAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBCluster> restoreDBClusterFromSnapshotAsync(RestoreDBClusterFromSnapshotRequest request,
        com.amazonaws.handlers.AsyncHandler<RestoreDBClusterFromSnapshotRequest, DBCluster> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// RestoreDBClusterToPointInTime
@Override
public java.util.concurrent.Future<DBCluster> restoreDBClusterToPointInTimeAsync(RestoreDBClusterToPointInTimeRequest request) {
return restoreDBClusterToPointInTimeAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBCluster> restoreDBClusterToPointInTimeAsync(RestoreDBClusterToPointInTimeRequest request,
        com.amazonaws.handlers.AsyncHandler<RestoreDBClusterToPointInTimeRequest, DBCluster> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// RestoreDBInstanceFromDBSnapshot
@Override
public java.util.concurrent.Future<DBInstance> restoreDBInstanceFromDBSnapshotAsync(RestoreDBInstanceFromDBSnapshotRequest request) {
return restoreDBInstanceFromDBSnapshotAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBInstance> restoreDBInstanceFromDBSnapshotAsync(RestoreDBInstanceFromDBSnapshotRequest request,
        com.amazonaws.handlers.AsyncHandler<RestoreDBInstanceFromDBSnapshotRequest, DBInstance> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// RestoreDBInstanceToPointInTime
@Override
public java.util.concurrent.Future<DBInstance> restoreDBInstanceToPointInTimeAsync(RestoreDBInstanceToPointInTimeRequest request) {
return restoreDBInstanceToPointInTimeAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBInstance> restoreDBInstanceToPointInTimeAsync(RestoreDBInstanceToPointInTimeRequest request,
        com.amazonaws.handlers.AsyncHandler<RestoreDBInstanceToPointInTimeRequest, DBInstance> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
// RevokeDBSecurityGroupIngress
@Override
public java.util.concurrent.Future<DBSecurityGroup> revokeDBSecurityGroupIngressAsync(RevokeDBSecurityGroupIngressRequest request) {
return revokeDBSecurityGroupIngressAsync(request, null);
}
@Override
public java.util.concurrent.Future<DBSecurityGroup> revokeDBSecurityGroupIngressAsync(RevokeDBSecurityGroupIngressRequest request,
        com.amazonaws.handlers.AsyncHandler<RevokeDBSecurityGroupIngressRequest, DBSecurityGroup> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/resources/user_location_view.proto
package com.google.ads.googleads.v9.resources;
/**
* <pre>
* A user location view.
* User Location View includes all metrics aggregated at the country level,
* one row per country. It reports metrics at the actual physical location of
* the user by targeted or not targeted location. If other segment fields are
* used, you may get more than one row per country.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.resources.UserLocationView}
*/
public final class UserLocationView extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v9.resources.UserLocationView)
UserLocationViewOrBuilder {
private static final long serialVersionUID = 0L;
// Use UserLocationView.newBuilder() to construct.
private UserLocationView(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: initializes the only reference field.
private UserLocationView() {
resourceName_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
return new UserLocationView();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. Reads tags until EOF (tag 0); unrecognized
// fields are preserved in unknownFields rather than dropped.
private UserLocationView(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
  throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
    com.google.protobuf.UnknownFieldSet.newBuilder();
try {
  boolean done = false;
  while (!done) {
    int tag = input.readTag();
    switch (tag) {
      case 0:
        done = true;
        break;
      case 10: {
        // tag 10 = field 1 (resource_name), wire type 2 (length-delimited).
        java.lang.String s = input.readStringRequireUtf8();
        resourceName_ = s;
        break;
      }
      case 32: {
        // tag 32 = field 4 (country_criterion_id), wire type 0 (varint);
        // mark presence via bitField0_ since the field is proto3 optional.
        bitField0_ |= 0x00000001;
        countryCriterionId_ = input.readInt64();
        break;
      }
      case 40: {
        // tag 40 = field 5 (targeting_location), wire type 0 (varint bool).
        bitField0_ |= 0x00000002;
        targetingLocation_ = input.readBool();
        break;
      }
      default: {
        if (!parseUnknownField(
            input, unknownFields, extensionRegistry, tag)) {
          done = true;
        }
        break;
      }
    }
  }
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
  throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
  // Wrap low-level IO failures, preserving the partially parsed message.
  throw new com.google.protobuf.InvalidProtocolBufferException(
      e).setUnfinishedMessage(this);
} finally {
  this.unknownFields = unknownFields.build();
  makeExtensionsImmutable();
}
}
// Message descriptor, shared via the generated *Proto holder class.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
return com.google.ads.googleads.v9.resources.UserLocationViewProto.internal_static_google_ads_googleads_v9_resources_UserLocationView_descriptor;
}
// Maps field descriptors to the generated accessors for reflection support.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.resources.UserLocationViewProto.internal_static_google_ads_googleads_v9_resources_UserLocationView_fieldAccessorTable
    .ensureFieldAccessorsInitialized(
        com.google.ads.googleads.v9.resources.UserLocationView.class, com.google.ads.googleads.v9.resources.UserLocationView.Builder.class);
}
// Presence bits for the two optional scalar fields (bit 0 and bit 1).
private int bitField0_;
public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily converted on access.
private volatile java.lang.Object resourceName_;
/**
* <pre>
* Output only. The resource name of the user location view.
* UserLocation view resource names have the form:
* `customers/{customer_id}/userLocationViews/{country_criterion_id}~{targeting_location}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
@java.lang.Override
public java.lang.String getResourceName() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
  return (java.lang.String) ref;
} else {
  // Field was stored as ByteString; decode once and cache the String.
  com.google.protobuf.ByteString bs =
      (com.google.protobuf.ByteString) ref;
  java.lang.String s = bs.toStringUtf8();
  resourceName_ = s;
  return s;
}
}
/**
 * <pre>
 * Output only. The resource name of the user location view.
 * UserLocation view resource names have the form:
 * `customers/{customer_id}/userLocationViews/{country_criterion_id}~{targeting_location}`
 * </pre>
 *
 * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return The bytes for resourceName.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getResourceNameBytes() {
java.lang.Object ref = resourceName_;
if (ref instanceof java.lang.String) {
  // Field was stored as String; encode once and cache the ByteString.
  com.google.protobuf.ByteString b =
      com.google.protobuf.ByteString.copyFromUtf8(
          (java.lang.String) ref);
  resourceName_ = b;
  return b;
} else {
  return (com.google.protobuf.ByteString) ref;
}
}
public static final int COUNTRY_CRITERION_ID_FIELD_NUMBER = 4;
private long countryCriterionId_;
/**
 * <pre>
 * Output only. Criterion Id for the country.
 * </pre>
 *
 * <code>optional int64 country_criterion_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return Whether the countryCriterionId field is set.
 */
@java.lang.Override
public boolean hasCountryCriterionId() {
// Presence tracked by bit 0 of bitField0_.
return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <pre>
 * Output only. Criterion Id for the country.
 * </pre>
 *
 * <code>optional int64 country_criterion_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The countryCriterionId.
 */
@java.lang.Override
public long getCountryCriterionId() {
return countryCriterionId_;
}
public static final int TARGETING_LOCATION_FIELD_NUMBER = 5;
private boolean targetingLocation_;
/**
 * <pre>
 * Output only. Indicates whether location was targeted or not.
 * </pre>
 *
 * <code>optional bool targeting_location = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return Whether the targetingLocation field is set.
 */
@java.lang.Override
public boolean hasTargetingLocation() {
// Presence tracked by bit 1 of bitField0_.
return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <pre>
 * Output only. Indicates whether location was targeted or not.
 * </pre>
 *
 * <code>optional bool targeting_location = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The targetingLocation.
 */
@java.lang.Override
public boolean getTargetingLocation() {
return targetingLocation_;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
  com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
}
if (((bitField0_ & 0x00000001) != 0)) {
  output.writeInt64(4, countryCriterionId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
  output.writeBool(5, targetingLocation_);
}
unknownFields.writeTo(output);
}
// Computes and memoizes the serialized byte size; mirrors writeTo().
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
  size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
}
if (((bitField0_ & 0x00000001) != 0)) {
  size += com.google.protobuf.CodedOutputStream
    .computeInt64Size(4, countryCriterionId_);
}
if (((bitField0_ & 0x00000002) != 0)) {
  size += com.google.protobuf.CodedOutputStream
    .computeBoolSize(5, targetingLocation_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: compares all fields (including presence of optional ones)
// plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
 return true;
}
if (!(obj instanceof com.google.ads.googleads.v9.resources.UserLocationView)) {
  return super.equals(obj);
}
com.google.ads.googleads.v9.resources.UserLocationView other = (com.google.ads.googleads.v9.resources.UserLocationView) obj;
if (!getResourceName()
    .equals(other.getResourceName())) return false;
if (hasCountryCriterionId() != other.hasCountryCriterionId()) return false;
if (hasCountryCriterionId()) {
  if (getCountryCriterionId()
      != other.getCountryCriterionId()) return false;
}
if (hasTargetingLocation() != other.hasTargetingLocation()) return false;
if (hasTargetingLocation()) {
  if (getTargetingLocation()
      != other.getTargetingLocation()) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash consistent with equals(); folds in only fields that are set.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
  return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getResourceName().hashCode();
if (hasCountryCriterionId()) {
  hash = (37 * hash) + COUNTRY_CRITERION_ID_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
      getCountryCriterionId());
}
if (hasTargetingLocation()) {
  hash = (37 * hash) + TARGETING_LOCATION_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
      getTargetingLocation());
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads: all delegate to PARSER (for in-memory
// data) or to GeneratedMessageV3 stream helpers (for InputStream/CodedInputStream).
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(java.io.InputStream input)
    throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
    .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
    .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static com.google.ads.googleads.v9.resources.UserLocationView parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
    .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
    .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
    .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.resources.UserLocationView parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
    .parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v9.resources.UserLocationView prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Avoids an unnecessary mergeFrom when this is already the default instance.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
    ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A user location view.
* User Location View includes all metrics aggregated at the country level,
* one row per country. It reports metrics at the actual physical location of
* the user by targeted or not targeted location. If other segment fields are
* used, you may get more than one row per country.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.resources.UserLocationView}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.resources.UserLocationView)
com.google.ads.googleads.v9.resources.UserLocationViewOrBuilder {
// Builder-side descriptor accessor; same descriptor as the message class.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
return com.google.ads.googleads.v9.resources.UserLocationViewProto.internal_static_google_ads_googleads_v9_resources_UserLocationView_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.resources.UserLocationViewProto.internal_static_google_ads_googleads_v9_resources_UserLocationView_fieldAccessorTable
    .ensureFieldAccessorsInitialized(
        com.google.ads.googleads.v9.resources.UserLocationView.class, com.google.ads.googleads.v9.resources.UserLocationView.Builder.class);
}
// Construct using com.google.ads.googleads.v9.resources.UserLocationView.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No sub-message builders exist for this message, so this is a no-op even
// when alwaysUseFieldBuilders is enabled.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
        .alwaysUseFieldBuilders) {
}
}
// Resets all fields to their defaults and clears both presence bits.
@java.lang.Override
public Builder clear() {
super.clear();
resourceName_ = "";
countryCriterionId_ = 0L;
bitField0_ = (bitField0_ & ~0x00000001);
targetingLocation_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
return com.google.ads.googleads.v9.resources.UserLocationViewProto.internal_static_google_ads_googleads_v9_resources_UserLocationView_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v9.resources.UserLocationView getDefaultInstanceForType() {
return com.google.ads.googleads.v9.resources.UserLocationView.getDefaultInstance();
}
// build() enforces isInitialized(); always true here (no required fields).
@java.lang.Override
public com.google.ads.googleads.v9.resources.UserLocationView build() {
com.google.ads.googleads.v9.resources.UserLocationView result = buildPartial();
if (!result.isInitialized()) {
  throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message, translating builder presence bits
// into the message's bitField0_.
@java.lang.Override
public com.google.ads.googleads.v9.resources.UserLocationView buildPartial() {
com.google.ads.googleads.v9.resources.UserLocationView result = new com.google.ads.googleads.v9.resources.UserLocationView(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
result.resourceName_ = resourceName_;
if (((from_bitField0_ & 0x00000001) != 0)) {
  result.countryCriterionId_ = countryCriterionId_;
  to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
  result.targetingLocation_ = targetingLocation_;
  to_bitField0_ |= 0x00000002;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Generated reflection-API overrides: each simply delegates to the
// GeneratedMessageV3.Builder superclass implementation.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
    com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
    com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Type-dispatching merge: uses the specialized overload for UserLocationView,
// otherwise falls back to the reflective superclass merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.ads.googleads.v9.resources.UserLocationView) {
    return mergeFrom((com.google.ads.googleads.v9.resources.UserLocationView)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Field-by-field merge from another UserLocationView. Non-empty/set fields in
// `other` overwrite this builder's values; the default instance is a no-op.
public Builder mergeFrom(com.google.ads.googleads.v9.resources.UserLocationView other) {
  if (other == com.google.ads.googleads.v9.resources.UserLocationView.getDefaultInstance()) return this;
  if (!other.getResourceName().isEmpty()) {
    resourceName_ = other.resourceName_;
    onChanged();
  }
  if (other.hasCountryCriterionId()) {
    setCountryCriterionId(other.getCountryCriterionId());
  }
  if (other.hasTargetingLocation()) {
    setTargetingLocation(other.getTargetingLocation());
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}
// No required fields in this message, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Parses a serialized UserLocationView from the stream and merges it into
// this builder. On a parse failure the partially-parsed message (if any) is
// still merged in the finally block before the IOException is rethrown.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.ads.googleads.v9.resources.UserLocationView parsedMessage = null;
  try {
    parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    parsedMessage = (com.google.ads.googleads.v9.resources.UserLocationView) e.getUnfinishedMessage();
    throw e.unwrapIOException();
  } finally {
    if (parsedMessage != null) {
      mergeFrom(parsedMessage);
    }
  }
  return this;
}
// Presence bits for the optional fields of this builder:
// 0x1 = country_criterion_id, 0x2 = targeting_location.
private int bitField0_;
// Stored as either String or ByteString; the accessors below convert lazily
// and cache the converted form (standard protobuf generated-code pattern).
private java.lang.Object resourceName_ = "";
/**
 * <pre>
 * Output only. The resource name of the user location view.
 * UserLocation view resource names have the form:
 * `customers/{customer_id}/userLocationViews/{country_criterion_id}~{targeting_location}`
 * </pre>
 *
 * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return The resourceName.
 */
public java.lang.String getResourceName() {
  java.lang.Object ref = resourceName_;
  if (!(ref instanceof java.lang.String)) {
    // First String access after parsing: decode the ByteString once and
    // cache the String back into the field.
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    resourceName_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <pre>
 * Output only. The resource name of the user location view.
 * UserLocation view resource names have the form:
 * `customers/{customer_id}/userLocationViews/{country_criterion_id}~{targeting_location}`
 * </pre>
 *
 * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return The bytes for resourceName.
 */
public com.google.protobuf.ByteString
    getResourceNameBytes() {
  java.lang.Object ref = resourceName_;
  if (ref instanceof String) {
    // Mirror of getResourceName(): encode once and cache the ByteString.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    resourceName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * <pre>
 * Output only. The resource name of the user location view.
 * UserLocation view resource names have the form:
 * `customers/{customer_id}/userLocationViews/{country_criterion_id}~{targeting_location}`
 * </pre>
 *
 * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @param value The resourceName to set.
 * @return This builder for chaining.
 */
public Builder setResourceName(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  resourceName_ = value;
  onChanged();
  return this;
}
/**
 * <pre>
 * Output only. The resource name of the user location view.
 * UserLocation view resource names have the form:
 * `customers/{customer_id}/userLocationViews/{country_criterion_id}~{targeting_location}`
 * </pre>
 *
 * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @return This builder for chaining.
 */
public Builder clearResourceName() {
  resourceName_ = getDefaultInstance().getResourceName();
  onChanged();
  return this;
}
/**
 * <pre>
 * Output only. The resource name of the user location view.
 * UserLocation view resource names have the form:
 * `customers/{customer_id}/userLocationViews/{country_criterion_id}~{targeting_location}`
 * </pre>
 *
 * <code>string resource_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }</code>
 * @param value The bytes for resourceName to set.
 * @return This builder for chaining.
 */
public Builder setResourceNameBytes(
    com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  resourceName_ = value;
  onChanged();
  return this;
}
// Backing field for optional int64 country_criterion_id; presence is tracked
// by bit 0x1 of bitField0_, not by the value itself.
private long countryCriterionId_ ;
/**
 * <pre>
 * Output only. Criterion Id for the country.
 * </pre>
 *
 * <code>optional int64 country_criterion_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return Whether the countryCriterionId field is set.
 */
@java.lang.Override
public boolean hasCountryCriterionId() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <pre>
 * Output only. Criterion Id for the country.
 * </pre>
 *
 * <code>optional int64 country_criterion_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The countryCriterionId.
 */
@java.lang.Override
public long getCountryCriterionId() {
  return countryCriterionId_;
}
/**
 * <pre>
 * Output only. Criterion Id for the country.
 * </pre>
 *
 * <code>optional int64 country_criterion_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @param value The countryCriterionId to set.
 * @return This builder for chaining.
 */
public Builder setCountryCriterionId(long value) {
  bitField0_ |= 0x00000001;
  countryCriterionId_ = value;
  onChanged();
  return this;
}
/**
 * <pre>
 * Output only. Criterion Id for the country.
 * </pre>
 *
 * <code>optional int64 country_criterion_id = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return This builder for chaining.
 */
public Builder clearCountryCriterionId() {
  bitField0_ = (bitField0_ & ~0x00000001);
  countryCriterionId_ = 0L;
  onChanged();
  return this;
}
// Backing field for optional bool targeting_location; presence is tracked by
// bit 0x2 of bitField0_.
private boolean targetingLocation_ ;
/**
 * <pre>
 * Output only. Indicates whether location was targeted or not.
 * </pre>
 *
 * <code>optional bool targeting_location = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return Whether the targetingLocation field is set.
 */
@java.lang.Override
public boolean hasTargetingLocation() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <pre>
 * Output only. Indicates whether location was targeted or not.
 * </pre>
 *
 * <code>optional bool targeting_location = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return The targetingLocation.
 */
@java.lang.Override
public boolean getTargetingLocation() {
  return targetingLocation_;
}
/**
 * <pre>
 * Output only. Indicates whether location was targeted or not.
 * </pre>
 *
 * <code>optional bool targeting_location = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @param value The targetingLocation to set.
 * @return This builder for chaining.
 */
public Builder setTargetingLocation(boolean value) {
  bitField0_ |= 0x00000002;
  targetingLocation_ = value;
  onChanged();
  return this;
}
/**
 * <pre>
 * Output only. Indicates whether location was targeted or not.
 * </pre>
 *
 * <code>optional bool targeting_location = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 * @return This builder for chaining.
 */
public Builder clearTargetingLocation() {
  bitField0_ = (bitField0_ & ~0x00000002);
  targetingLocation_ = false;
  onChanged();
  return this;
}
// Generated overrides for unknown-field handling; both delegate to the
// superclass (unknown fields are preserved across parse/serialize cycles).
@java.lang.Override
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.resources.UserLocationView)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v9.resources.UserLocationView)
// Singleton default instance shared by all callers; created eagerly at
// class-initialization time.
private static final com.google.ads.googleads.v9.resources.UserLocationView DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v9.resources.UserLocationView();
}
public static com.google.ads.googleads.v9.resources.UserLocationView getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser singleton; parsePartialFrom delegates to the message's
// CodedInputStream constructor.
private static final com.google.protobuf.Parser<UserLocationView>
    PARSER = new com.google.protobuf.AbstractParser<UserLocationView>() {
  @java.lang.Override
  public UserLocationView parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new UserLocationView(input, extensionRegistry);
  }
};
public static com.google.protobuf.Parser<UserLocationView> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UserLocationView> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v9.resources.UserLocationView getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright 2019 West Coast Informatics, LLC
*/
package org.ihtsdo.otf.mapping.jpa.algo;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.apache.log4j.Logger;
import org.ihtsdo.otf.mapping.algo.Algorithm;
import org.ihtsdo.otf.mapping.helpers.ConceptList;
import org.ihtsdo.otf.mapping.helpers.DescriptionList;
import org.ihtsdo.otf.mapping.helpers.LanguageRefSetMemberList;
import org.ihtsdo.otf.mapping.helpers.RelationshipList;
import org.ihtsdo.otf.mapping.jpa.helpers.LoggerUtility;
import org.ihtsdo.otf.mapping.jpa.services.ContentServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.MappingServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.MetadataServiceJpa;
import org.ihtsdo.otf.mapping.jpa.services.RootServiceJpa;
import org.ihtsdo.otf.mapping.rf2.Concept;
import org.ihtsdo.otf.mapping.rf2.Description;
import org.ihtsdo.otf.mapping.rf2.LanguageRefSetMember;
import org.ihtsdo.otf.mapping.rf2.Relationship;
import org.ihtsdo.otf.mapping.rf2.jpa.ConceptJpa;
import org.ihtsdo.otf.mapping.rf2.jpa.DescriptionJpa;
import org.ihtsdo.otf.mapping.rf2.jpa.LanguageRefSetMemberJpa;
import org.ihtsdo.otf.mapping.rf2.jpa.RelationshipJpa;
import org.ihtsdo.otf.mapping.services.MappingService;
import org.ihtsdo.otf.mapping.services.MetadataService;
import org.ihtsdo.otf.mapping.services.helpers.ConfigUtility;
import org.ihtsdo.otf.mapping.services.helpers.ProgressListener;
public class Rf2DeltaLoaderAlgorithm extends RootServiceJpa
implements Algorithm, AutoCloseable {
/** Listeners. */
private List<ProgressListener> listeners = new ArrayList<>();
/** The request cancel flag. */
private boolean requestCancel = false;
/** Name of terminology to be loaded. */
private String terminology;
/** Terminology version. */
private String version;
/**
 * Last publication version passed in. This is used by the "remove retired
 * concepts" routine.
 */
private String lastPublicationDate;
/** The input directory. */
private String inputDir;
/** The delta dir. */
private File deltaDir;
/** The defaultPreferredNames type id (default: Fully Specified Name). */
private Long dpnTypeId = 900000000000003001L;
/** The dpn ref set id (default: US English language refset). */
private Long dpnRefsetId = 900000000000509007L;
/** The dpn acceptability id (default: Preferred). */
private Long dpnAcceptabilityId = 900000000000548007L;
/** The concept reader. */
private BufferedReader conceptReader;
/** The description reader. */
private BufferedReader descriptionReader;
/** The text definition reader (optional; may remain null). */
private BufferedReader textDefinitionReader;
/** The relationship reader. */
private BufferedReader relationshipReader;
/** The language reader. */
private BufferedReader languageReader;
/** Progress tracking variable (count of objects evaluated per section). */
private int objectCt; //
/** Timestamp format for progress logging. NOTE: SimpleDateFormat is not
 * thread-safe; this loader is assumed single-threaded — confirm before
 * sharing across threads. */
SimpleDateFormat ft = new SimpleDateFormat("hh:mm:ss a");
/** Date format matching RF2 effectiveTime fields (yyyyMMdd). */
SimpleDateFormat dt = new SimpleDateFormat("yyyyMMdd");
/** The start time (nanoTime) of the current load section. */
long startTime;
/** The time at which drip feed was started; stamped onto every object this
 * run adds or updates so modified objects can be identified later. */
private Date deltaLoaderStartDate = new Date();
/** Content and Mapping Services. */
private ContentServiceJpa contentService = null;
/** The mapping service. */
private MappingService mappingService = null;
/** The concept cache. */
private Map<String, Concept> conceptCache = new HashMap<>();
/** The description cache. */
private Map<String, Description> descriptionCache = new HashMap<>();
/** The relationship cache. */
private Map<String, Relationship> relationshipCache = new HashMap<>();
/** The language ref set member cache. */
private Map<String, LanguageRefSetMember> languageRefSetMemberCache =
    new HashMap<>();

// These track data that existed prior to the delta loader run

/** The delta concept ids. */
private Set<String> deltaConceptIds = new HashSet<>();
/** The delta relationship ids. */
private Set<String> deltaRelationshipIds = new HashSet<>();
/** The delta description ids. */
private Set<String> deltaDescriptionIds = new HashSet<>();
/** The delta language refset member ids. */
private Set<String> deltaLanguageRefSetMemberIds = new HashSet<>();
/** The "recompute preferred name" concept ids. */
private Set<String> recomputePnConceptIds = new HashSet<>();
/** The existing concept cache. */
private Map<String, Concept> existingConceptCache = new HashMap<>();
/** The existing description ids. */
private Set<String> existingDescriptionIds = new HashSet<>();
/** The existing relationship ids. */
private Set<String> existingRelationshipIds = new HashSet<>();
/** The existing language ref set member ids. */
private Set<String> existingLanguageRefSetMemberIds = new HashSet<>();
/** The log (static, shared by all instances; configured per-terminology in
 * the constructor). */
private static Logger log;
/** The log file. */
private File logFile;
/**
 * Instantiates the delta loader and configures a per-terminology file logger
 * under {@code <map.principle.source.document.dir>/logs}.
 *
 * @param terminology name of the terminology whose delta will be loaded
 * @param inputDir directory containing the RF2 delta files
 * @param lastPublicationDate last publication date (yyyyMMdd); may be null,
 *          in which case {@code setup()} derives it from the database
 * @throws Exception if configuration cannot be read, the required log
 *           directory property is missing, or logger setup fails
 */
public Rf2DeltaLoaderAlgorithm(String terminology, String inputDir,
    String lastPublicationDate) throws Exception {
  super();
  this.terminology = terminology;
  this.inputDir = inputDir;
  this.lastPublicationDate = lastPublicationDate;

  // Initialize logger. Fail fast with a clear message instead of the NPE the
  // missing property used to cause.
  String rootPath = ConfigUtility.getConfigProperties()
      .getProperty("map.principle.source.document.dir");
  if (rootPath == null) {
    throw new Exception(
        "Required config property map.principle.source.document.dir is not set");
  }
  if (!rootPath.endsWith("/") && !rootPath.endsWith("\\")) {
    rootPath += "/";
  }
  rootPath += "logs";
  File logDirectory = new File(rootPath);
  if (!logDirectory.exists()) {
    logDirectory.mkdir();
  }
  logFile = new File(logDirectory, "load_" + terminology + ".log");
  LoggerUtility.setConfiguration("load", logFile.getAbsolutePath());
  // log is a static field; do not qualify the assignment with "this."
  log = LoggerUtility.getLogger("load");
}
/**
 * Runs the delta load end-to-end: caches existing content ids, loads the RF2
 * delta files, counts and QA-checks the objects modified by this run, commits,
 * and finally recomputes default preferred names for affected concepts.
 *
 * @throws Exception if any stage of the load fails (the cause is logged and
 *           wrapped before rethrow)
 */
@Override
public void compute() throws Exception {

  // Clear log before starting process. try-with-resources guarantees the
  // writer is closed even if truncation throws.
  try (PrintWriter writer = new PrintWriter(logFile)) {
    writer.print("");
  }

  try {

    setup();

    ConceptList conceptList =
        contentService.getAllConcepts(terminology, version);
    for (Concept c : conceptList.getConcepts()) {
      existingConceptCache.put(c.getTerminologyId(), c);
    }
    log.info(" count = " + conceptList.getCount());

    // Precache the description, language refset, and relationship ids
    log.info(" Load all description, language, and relationship ids");
    existingDescriptionIds =
        contentService.getAllDescriptionTerminologyIds(terminology, version);
    log.info(" descriptionCt = " + existingDescriptionIds.size());
    existingLanguageRefSetMemberIds = contentService
        .getAllLanguageRefSetMemberTerminologyIds(terminology, version);
    log.info(" languageCt = " + existingLanguageRefSetMemberIds.size());
    existingRelationshipIds =
        contentService.getAllRelationshipTerminologyIds(terminology, version);
    log.info(" relationshipCt = " + existingRelationshipIds.size());

    // Load delta data
    loadDelta();

    // Compute the number of modified objects of each type; objects touched by
    // this run carry deltaLoaderStartDate as their effective time (unchanged
    // objects had their original effective time restored by the loaders).
    log.info(" Computing number of modified objects");
    int nConceptsUpdated = 0;
    int nDescriptionsUpdated = 0;
    int nLanguagesUpdated = 0;
    int nRelationshipsUpdated = 0;
    for (Concept c : conceptCache.values()) {
      if (c.getEffectiveTime().equals(deltaLoaderStartDate)) {
        nConceptsUpdated++;
      }
    }
    for (Relationship r : relationshipCache.values()) {
      if (r.getEffectiveTime().equals(deltaLoaderStartDate)) {
        nRelationshipsUpdated++;
      }
    }
    for (Description d : descriptionCache.values()) {
      if (d.getEffectiveTime().equals(deltaLoaderStartDate)) {
        nDescriptionsUpdated++;
      }
    }
    for (LanguageRefSetMember l : languageRefSetMemberCache.values()) {
      if (l.getEffectiveTime().equals(deltaLoaderStartDate)) {
        nLanguagesUpdated++;
      }
    }

    // Report counts
    log.info(" Cached objects modified by this delta");
    log.info(" " + nConceptsUpdated + " concepts");
    log.info(" " + nDescriptionsUpdated + " descriptions");
    log.info(" " + nRelationshipsUpdated + " relationships");
    log.info(" " + nLanguagesUpdated + " language ref set members");

    // Commit the content changes
    log.info(" Committing.");
    contentService.commit();

    // QA: compare the in-memory counts against what the database reports as
    // modified since the run started.
    log.info(
        " QA - Check database contents against previously modified objects.");
    ConceptList modifiedConcepts =
        contentService.getConceptsModifiedSinceDate(terminology,
            deltaLoaderStartDate, null);
    RelationshipList modifiedRelationships = contentService
        .getRelationshipsModifiedSinceDate(terminology, deltaLoaderStartDate);
    DescriptionList modifiedDescriptions = contentService
        .getDescriptionsModifiedSinceDate(terminology, deltaLoaderStartDate);
    LanguageRefSetMemberList modifiedLanguageRefSetMembers =
        contentService.getLanguageRefSetMembersModifiedSinceDate(terminology,
            deltaLoaderStartDate);

    // Report mismatches (informational only; does not fail the load)
    log.info((modifiedConcepts.getCount() != nConceptsUpdated)
        ? " " + nConceptsUpdated + " concepts expected, found "
            + modifiedConcepts.getCount()
        : " Concept count matches");
    log.info((modifiedRelationships.getCount() != nRelationshipsUpdated)
        ? " " + nRelationshipsUpdated + " relationships expected, found"
            + modifiedRelationships.getCount()
        : " Relationship count matches");
    log.info((modifiedDescriptions.getCount() != nDescriptionsUpdated)
        ? " " + nDescriptionsUpdated + " descriptions expected, found"
            + modifiedDescriptions.getCount()
        : " Description count matches");
    log.info((modifiedLanguageRefSetMembers.getCount() != nLanguagesUpdated)
        ? " " + nLanguagesUpdated
            + " languageRefSetMembers expected, found"
            + modifiedLanguageRefSetMembers.getCount()
        : " LanguageRefSetMember count matches");

    // Clean up resources
    contentService.close();

    // Compute default preferred names in a fresh service/transaction
    log.info(" Compute preferred names for delta concepts.");
    contentService = new ContentServiceJpa();
    contentService.setTransactionPerOperation(false);
    contentService.beginTransaction();
    computeDefaultPreferredNames();
    contentService.commit();

    log.info("Done");

  } catch (Exception e) {
    e.printStackTrace();
    log.error(e.getMessage(), e);
    // Preserve the cause when wrapping
    throw new Exception("Unexpected exception:", e);
  }
}
/**
 * Instantiate global vars: services and transactions, last publication date
 * (from the DB if not supplied), the delta directory, the terminology
 * version, the default-preferred-name parameters, and the file readers.
 *
 * @throws Exception if the input directory is missing/empty or the
 *           terminology version cannot be determined
 */
private void setup() throws Exception {

  Properties config = ConfigUtility.getConfigProperties();

  // instantiate the services
  contentService = new ContentServiceJpa();
  mappingService = new MappingServiceJpa();

  // set the transaction per operation on the service managers
  contentService.setTransactionPerOperation(false);
  mappingService.setTransactionPerOperation(false);

  // initialize the transactions
  contentService.beginTransaction();
  mappingService.beginTransaction();

  if (lastPublicationDate == null) {
    // NOTE: this is very MySQL-centric (native query).
    // We want the max effective time where the "time"
    // part of it is 00:00:00
    final javax.persistence.Query query =
        contentService.getEntityManager().createNativeQuery(
            "select date_format(max(effectiveTime),'%Y%m%d') from concepts "
                + "where terminology = :terminology "
                + " and effectiveTime = date(effectiveTime)");
    query.setParameter("terminology", terminology);
    lastPublicationDate = query.getSingleResult().toString();
  }

  // set the delta file directory
  deltaDir = new File(inputDir);
  if (!deltaDir.exists()) {
    // Include the path so the failure is actionable
    throw new Exception(
        "Specified input directory does not exist: " + inputDir);
  }

  // get the files for determining whether there is anything to load.
  // listFiles() returns null if the path is not a directory or an I/O error
  // occurs — guard against the NPE that would otherwise follow.
  final File[] files = deltaDir.listFiles();
  if (files == null || files.length == 0)
    throw new Exception(
        "Could not determine terminology version, no files exist");

  // Previous computation of terminology version is based on file name
  // but for delta/daily build files, this is not the current version
  // look up the current version instead
  if (version == null) {
    try (final MetadataService metadataService = new MetadataServiceJpa();) {
      version = metadataService.getLatestVersion(terminology);
      if (version == null) {
        throw new Exception("Unable to determine terminology version.");
      }
    }
  }

  // set the parameters for determining defaultPreferredNames, falling back
  // to the SNOMED defaults declared on the fields
  String prop = config.getProperty("loader.defaultPreferredNames.typeId");
  if (prop != null) {
    dpnTypeId = Long.valueOf(prop);
  }
  prop = config.getProperty("loader.defaultPreferredNames.refsetId");
  if (prop != null) {
    dpnRefsetId = Long.valueOf(prop);
  }
  prop = config.getProperty("loader.defaultPreferredNames.acceptabilityId");
  if (prop != null) {
    dpnAcceptabilityId = Long.valueOf(prop);
  }

  // output relevant properties/settings to console
  log.info(" typeId: " + dpnTypeId);
  log.info(" refsetId: " + dpnRefsetId);
  log.info(" acceptabilityId: " + dpnAcceptabilityId);

  // Open files
  instantiateFileReaders();
}
/**
 * Instantiate file readers for the RF2 delta files found in the delta
 * directory. RF2 release files are UTF-8 encoded, so readers are opened with
 * an explicit UTF-8 charset rather than the platform default (the previous
 * FileReader usage silently depended on the platform encoding).
 *
 * <p>The text definition reader is optional; all others are required.
 *
 * @throws Exception if a required reader could not be opened
 */
private void instantiateFileReaders() throws Exception {
  log.info(" Open readers for terminology files");

  final File[] files = deltaDir.listFiles();
  if (files == null) {
    throw new Exception("Unable to list files in " + deltaDir);
  }
  for (File f : files) {
    if (f.getName().contains("_Concept_Delta_")) {
      log.info(" Concepts: " + f.getName());
      conceptReader = openUtf8Reader(f);
    } else if (f.getName().contains("_Relationship_Delta_")) {
      log.info(" Relationships: " + f.getName());
      relationshipReader = openUtf8Reader(f);
      // NOTE: _StatedRelationship_ files are intentionally skipped
      // ("Removed due to invalid relationship loading").
    } else if (f.getName().contains("_Description_")) {
      log.info(" Descriptions: " + f.getName());
      descriptionReader = openUtf8Reader(f);
    } else if (f.getName().contains("_TextDefinition_")) {
      log.info(" Text Definitions: " + f.getName());
      textDefinitionReader = openUtf8Reader(f);
    } else if (f.getName().contains("_LanguageDelta-en")) {
      log.info(" Languages: " + f.getName());
      languageReader = openUtf8Reader(f);
    }
  }

  // check file readers were opened successfully
  if (conceptReader == null)
    throw new Exception("Could not open concept file reader");
  if (relationshipReader == null)
    throw new Exception("Could not open relationship file reader");
  if (descriptionReader == null)
    throw new Exception("Could not open description file reader");
  if (languageReader == null)
    throw new Exception("Could not open language ref set member file reader");
}

/** Opens a buffered reader over the given file decoding as UTF-8. */
private BufferedReader openUtf8Reader(File f) throws Exception {
  return new BufferedReader(
      new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8));
}
/**
 * Load delta: processes concepts, relationships, descriptions, text
 * definitions, and language refset members in order, committing after each
 * section, then retires content removed since prior deltas.
 *
 * @throws Exception the exception
 */
private void loadDelta() throws Exception {
  log.info(" Load delta data");

  // Load concepts
  if (conceptReader != null) {
    log.info(" Loading Concepts ...");
    startTime = System.nanoTime();
    loadConcepts(conceptReader);
    contentService.commit();
    contentService.beginTransaction();
    log.info(" evaluated = " + Integer.toString(objectCt) + " (Ended at "
        + ft.format(new Date()) + ")");
  }

  // Load relationships
  if (relationshipReader != null) {
    log.info(" Loading Relationships ...");
    startTime = System.nanoTime();
    loadRelationships(relationshipReader);
    contentService.commit();
    contentService.beginTransaction();
    log.info(" evaluated = " + Integer.toString(objectCt) + " (Ended at "
        + ft.format(new Date()) + ")");
  }

  // Load descriptions
  if (descriptionReader != null) {
    log.info(" Loading Descriptions ...");
    startTime = System.nanoTime();
    loadDescriptions(descriptionReader);
    contentService.commit();
    contentService.beginTransaction();
    log.info(" evaluated = " + Integer.toString(objectCt) + " (Ended at "
        + ft.format(new Date()) + ")");
  }

  // Load text definitions (processed through the description loader).
  // BUG FIX: this guard previously checked descriptionReader, which caused a
  // NullPointerException whenever the optional _TextDefinition_ file was
  // absent but a description file was present.
  if (textDefinitionReader != null) {
    log.info(" Loading Text Definitions...");
    startTime = System.nanoTime();
    loadDescriptions(textDefinitionReader);
    contentService.commit();
    contentService.beginTransaction();
    log.info(" evaluated = " + Integer.toString(objectCt) + " (Ended at "
        + ft.format(new Date()) + ")");
  }

  // Load language refset members
  if (languageReader != null) {
    log.info(" Loading Language Ref Sets...");
    startTime = System.nanoTime();
    loadLanguageRefSetMembers(languageReader);
    contentService.commit();
    contentService.beginTransaction();
    log.info(" evaluated = " + Integer.toString(objectCt) + " (Ended at "
        + ft.format(new Date()) + ")");
  }

  // Skip other delta data structures

  // Remove concepts in the DB that were created by prior
  // deltas that no longer exist in the delta
  log.info(" Retire non-existent content");
  retireRemovedContent();
}
/**
 * Loads the concepts from the delta files. Existing concepts are updated in
 * place when the delta row differs; new concepts are added; unchanged rows
 * keep their original effective time so the modified-object count stays
 * accurate. All delta concept ids are tracked for later retirement logic.
 *
 * @param reader the reader over a tab-separated RF2 concept delta file
 * @throws Exception the exception
 */
private void loadConcepts(BufferedReader reader) throws Exception {

  // Setup vars
  String line;
  objectCt = 0;
  int objectsAdded = 0;
  int objectsUpdated = 0;

  // Iterate through concept reader
  while ((line = reader.readLine()) != null) {

    // Split line (RF2 columns: id, effectiveTime, active, moduleId,
    // definitionStatusId)
    String fields[] = line.split("\t");

    // if not header
    if (!fields[0].equals("id")) {

      // Check if concept exists from before
      Concept concept = existingConceptCache.get(fields[0]);

      // Track all delta concept ids so we can properly remove
      // concepts later.
      deltaConceptIds.add(fields[0]);

      // Setup delta concept (either new or based on existing one)
      Concept newConcept = null;
      if (concept == null) {
        newConcept = new ConceptJpa();
      } else {
        newConcept = new ConceptJpa(concept, true);
      }

      // Set fields
      newConcept.setTerminologyId(fields[0]);
      newConcept.setEffectiveTime(deltaLoaderStartDate);
      newConcept.setActive("1".equals(fields[2]));
      newConcept.setModuleId(Long.valueOf(fields[3]));
      newConcept.setDefinitionStatusId(Long.valueOf(fields[4]));
      newConcept.setTerminology(terminology);
      newConcept.setTerminologyVersion(version);
      // placeholder; real preferred name computed at the end of the run
      newConcept.setDefaultPreferredName("TBD");

      // If concept is new, add it
      if (concept == null) {
        log.info(" add concept " + newConcept.getTerminologyId());
        recomputePnConceptIds.add(fields[0]);
        contentService.addConcept(newConcept);
        objectsAdded++;
      }

      // If concept has changed, update it
      else if (!newConcept.equals(concept)) {
        log.info(" update concept " + newConcept.getTerminologyId());
        recomputePnConceptIds.add(fields[0]);
        contentService.updateConcept(newConcept);
        objectsUpdated++;
      }

      // Otherwise, reset effective time (for modified check later)
      else {
        newConcept.setEffectiveTime(concept.getEffectiveTime());
      }

      // Cache the concept element
      cacheConcept(newConcept);
    }
  }
  log.info(" new = " + objectsAdded);
  log.info(" updated = " + objectsUpdated);
}
/**
 * Load descriptions (also used for text definitions, which share the RF2
 * description format). Resolves the owning concept from cache or the
 * database, then adds/updates/leaves each description the same way
 * {@code loadConcepts} handles concepts. Rows whose concept cannot be found
 * are skipped with a log entry.
 *
 * @param reader the reader over a tab-separated RF2 description delta file
 * @throws Exception if an existing description id cannot be resolved
 */
private void loadDescriptions(BufferedReader reader) throws Exception {

  // Setup vars
  String line = "";
  objectCt = 0;
  int objectsAdded = 0;
  int objectsUpdated = 0;

  // Iterate through description reader
  while ((line = reader.readLine()) != null) {

    // split line (RF2 columns: id, effectiveTime, active, moduleId,
    // conceptId, languageCode, typeId, term, caseSignificanceId)
    String fields[] = line.split("\t");

    // if not header
    if (!fields[0].equals("id")) {

      deltaDescriptionIds.add(fields[0]);

      // Get concept from cache or from db
      Concept concept = null;
      if (conceptCache.containsKey(fields[4])) {
        concept = conceptCache.get(fields[4]);
      } else if (existingConceptCache.containsKey(fields[4])) {
        concept = contentService
            .getConcept(existingConceptCache.get(fields[4]).getId());
      } else {
        // retrieve concept
        concept = contentService.getConcept(fields[4], terminology, version);
      }

      // if the concept is not null
      if (concept != null) {

        // Add concept to the cache
        cacheConcept(concept);

        // Load description from cache or db
        Description description = null;
        if (descriptionCache.containsKey(fields[0])) {
          description = descriptionCache.get(fields[0]);
        } else if (existingDescriptionIds.contains(fields[0])) {
          description =
              contentService.getDescription(fields[0], terminology, version);
        }

        // verify description is found
        if (description == null
            && existingDescriptionIds.contains(fields[0])) {
          throw new Exception("** Description " + fields[0]
              + " is in existing id cache, but was not precached via concept "
              + concept.getTerminologyId());
        }

        // Setup delta description (either new or based on existing
        // one)
        Description newDescription = null;
        if (description == null) {
          newDescription = new DescriptionJpa();
        } else {
          newDescription = new DescriptionJpa(description, true);
        }
        newDescription.setConcept(concept);

        // Set fields
        newDescription.setTerminologyId(fields[0]);
        newDescription.setEffectiveTime(deltaLoaderStartDate);
        newDescription.setActive("1".equals(fields[2]));
        newDescription.setModuleId(Long.valueOf(fields[3]));
        newDescription.setLanguageCode(fields[5]);
        newDescription.setTypeId(Long.valueOf(fields[6]));
        newDescription.setTerm(fields[7]);
        newDescription.setCaseSignificanceId(Long.valueOf(fields[8]));
        newDescription.setTerminology(terminology);
        newDescription.setTerminologyVersion(version);

        // If description is new, add it
        if (description == null) {
          log.info(
              " add description " + newDescription.getTerminologyId());
          recomputePnConceptIds.add(fields[4]);
          contentService.addDescription(newDescription);
          cacheDescription(newDescription);
          objectsAdded++;
        }

        // If description has changed, update it
        else if (!newDescription.equals(description)) {
          log.info(" update description "
              + newDescription.getTerminologyId());
          recomputePnConceptIds.add(fields[4]);
          contentService.updateDescription(newDescription);
          cacheDescription(newDescription);
          objectsUpdated++;
        }

        // Otherwise, reset effective time (for modified check
        // later)
        else {
          newDescription.setEffectiveTime(description.getEffectiveTime());
        }
      }

      // Major error if there is a delta description with a
      // non-existent concept
      else {
        // skip
        log.info("SKIP DESC with concept " + fields[4]);
        continue;
        // throw new Exception("Could not find concept " + fields[4]
        // + " for Description " + fields[0]);
      }
    }
  }
  log.info(" new = " + objectsAdded);
  log.info(" updated = " + objectsUpdated);
}
/**
 * Load language ref set members from the RF2 delta file.
 *
 * RF2 language refset columns: id(0), effectiveTime(1), active(2),
 * moduleId(3), refsetId(4), referencedComponentId(5) = description id,
 * acceptabilityId(6). New members are added, changed members are updated,
 * and unchanged members have their effective time reset so the later
 * "modified" check does not flag them.
 *
 * @param reader the reader positioned at the language refset delta file
 * @throws Exception if a member is in the existing id cache but was not
 *           precached, or a description has no concept
 */
private void loadLanguageRefSetMembers(BufferedReader reader)
  throws Exception {

  // Setup variables
  String line = "";
  objectCt = 0;
  int objectsAdded = 0;
  int objectsUpdated = 0;

  // Iterate through language refset reader
  while ((line = reader.readLine()) != null) {

    // split line
    String fields[] = line.split("\t");

    // if not header
    if (!fields[0].equals("id")) {
      deltaLanguageRefSetMemberIds.add(fields[0]);

      // Get the description - fields[5] is the referenced component id
      Description description = null;
      if (descriptionCache.containsKey(fields[5])) {
        description = descriptionCache.get(fields[5]);
      } else {
        description =
            contentService.getDescription(fields[5], terminology, version);
      }

      if (description == null) {
        // skip
        // FIX: log the description id (fields[5]); previously this logged
        // fields[4], which is the refset id, making skips untraceable.
        log.info("SKIP LANG with desc " + fields[5]);
        continue;
      }

      // get the concept
      Concept concept = description.getConcept();
      // description should have concept (unless cached descriptions
      // don't have them)
      if (concept == null) {
        // FIX: report the description id (fields[5], the id actually used
        // for the lookup above) and include the missing separator space.
        throw new Exception(
            "Description " + fields[5] + " does not have concept");
      }

      // Cache concept and description
      cacheConcept(concept);
      cacheDescription(description);

      // Ensure effective time is set on all appropriate objects
      LanguageRefSetMember languageRefSetMember = null;
      if (languageRefSetMemberCache.containsKey(fields[0])) {
        languageRefSetMember = languageRefSetMemberCache.get(fields[0]);
        // to investigate if there will be an update
      } else if (existingLanguageRefSetMemberIds.contains(fields[0])) {
        // retrieve languageRefSetMember
        languageRefSetMember = contentService
            .getLanguageRefSetMember(fields[0], terminology, version);
      }

      if (languageRefSetMember == null
          && existingLanguageRefSetMemberIds.contains(fields[0])) {
        throw new Exception("LanguageRefSetMember " + fields[0]
            + " is in existing id cache, but was not precached via description "
            + description.getTerminologyId());
      }

      // Setup delta language entry (either new or based on existing one)
      LanguageRefSetMember newLanguageRefSetMember = null;
      if (languageRefSetMember == null) {
        newLanguageRefSetMember = new LanguageRefSetMemberJpa();
      } else {
        newLanguageRefSetMember =
            new LanguageRefSetMemberJpa(languageRefSetMember, false);
      }

      newLanguageRefSetMember.setDescription(description);

      // Universal RefSet attributes
      newLanguageRefSetMember.setTerminologyId(fields[0]);
      newLanguageRefSetMember.setEffectiveTime(deltaLoaderStartDate);
      // simplified from "fields[2].equals(\"1\") ? true : false"
      newLanguageRefSetMember.setActive(fields[2].equals("1"));
      newLanguageRefSetMember.setModuleId(Long.valueOf(fields[3]));
      newLanguageRefSetMember.setRefSetId(fields[4]);
      // Language unique attributes
      newLanguageRefSetMember.setAcceptabilityId(Long.valueOf(fields[6]));
      // Terminology attributes
      newLanguageRefSetMember.setTerminology(terminology);
      newLanguageRefSetMember.setTerminologyVersion(version);

      // If language refset entry is new, add it
      if (languageRefSetMember == null) {
        log.info(" add language "
            + newLanguageRefSetMember.getTerminologyId());
        recomputePnConceptIds
            .add(description.getConcept().getTerminologyId());
        contentService.addLanguageRefSetMember(newLanguageRefSetMember);
        cacheLanguageRefSetMember(newLanguageRefSetMember);
        objectsAdded++;
      }

      // If language refset entry is changed, update it
      else if (!newLanguageRefSetMember.equals(languageRefSetMember)) {
        log.info(" update language "
            + newLanguageRefSetMember.getTerminologyId());
        recomputePnConceptIds
            .add(description.getConcept().getTerminologyId());
        contentService.updateLanguageRefSetMember(newLanguageRefSetMember);
        cacheLanguageRefSetMember(newLanguageRefSetMember);
        objectsUpdated++;
      }

      // Otherwise, reset effective time (for modified check later)
      else {
        newLanguageRefSetMember
            .setEffectiveTime(languageRefSetMember.getEffectiveTime());
      }
    }
  }
  log.info(" new = " + objectsAdded);
  log.info(" updated = " + objectsUpdated);
}
/**
 * Load relationships from the RF2 delta file.
 *
 * RF2 relationship columns: id(0), effectiveTime(1), active(2), moduleId(3),
 * sourceId(4), destinationId(5), relationshipGroup(6), typeId(7),
 * characteristicTypeId(8), modifierId(9). Rows whose source or destination
 * concept cannot be resolved are skipped. Work is committed in batches.
 *
 * @param reader the reader positioned at the relationship delta file
 * @throws Exception if a relationship is in the existing id cache but was
 *           not precached via its concepts
 */
private void loadRelationships(BufferedReader reader) throws Exception {

  // Setup variables
  String line = "";
  objectCt = 0;
  int objectsAdded = 0;
  int objectsUpdated = 0;

  // Iterate through relationships reader
  while ((line = reader.readLine()) != null) {

    // Split line
    String fields[] = line.split("\t");

    // If not header
    if (!fields[0].equals("id")) {
      deltaRelationshipIds.add(fields[0]);

      // Retrieve source concept
      Concept sourceConcept = null;
      Concept destinationConcept = null;
      if (conceptCache.containsKey(fields[4])) {
        sourceConcept = conceptCache.get(fields[4]);
      } else if (existingConceptCache.containsKey(fields[4])) {
        sourceConcept = contentService
            .getConcept(existingConceptCache.get(fields[4]).getId());
      } else {
        sourceConcept =
            contentService.getConcept(fields[4], terminology, version);
      }
      if (sourceConcept == null) {
        // skip
        log.info("SKIP REL with source concept " + fields[4]);
        continue;
      }

      // Retrieve destination concept
      if (conceptCache.containsKey(fields[5])) {
        destinationConcept = conceptCache.get(fields[5]);
      } else if (existingConceptCache.containsKey(fields[5])) {
        destinationConcept = contentService
            .getConcept(existingConceptCache.get(fields[5]).getId());
      } else {
        destinationConcept =
            contentService.getConcept(fields[5], terminology, version);
      }
      if (destinationConcept == null) {
        // skip
        // FIX: this is the destination lookup; the message previously said
        // "source concept", which made log triage misleading.
        log.info("SKIP REL with destination concept " + fields[5]);
        continue;
      }

      // Cache concepts
      cacheConcept(sourceConcept);
      cacheConcept(destinationConcept);

      // Retrieve relationship
      Relationship relationship = null;
      if (relationshipCache.containsKey(fields[0])) {
        relationship = relationshipCache.get(fields[0]);
      } else if (existingRelationshipIds.contains(fields[0])) {
        relationship =
            contentService.getRelationship(fields[0], terminology, version);
      }

      // Verify cache
      if (relationship == null
          && existingRelationshipIds.contains(fields[0])) {
        throw new Exception("** Relationship " + fields[0]
            + " is in existing id cache, but was not precached via concepts "
            + sourceConcept.getTerminologyId() + " or "
            + destinationConcept.getTerminologyId());
      }

      // Setup delta relationship (either new or based on existing one)
      Relationship newRelationship = null;
      if (relationship == null) {
        newRelationship = new RelationshipJpa();
      } else {
        newRelationship = new RelationshipJpa(relationship, false);
      }

      // Set fields
      newRelationship.setTerminologyId(fields[0]);
      newRelationship.setEffectiveTime(deltaLoaderStartDate);
      // simplified from "fields[2].equals(\"1\") ? true : false"
      newRelationship.setActive(fields[2].equals("1")); // active
      newRelationship.setModuleId(Long.valueOf(fields[3])); // moduleId
      newRelationship.setRelationshipGroup(Integer.valueOf(fields[6])); // relationshipGroup
      newRelationship.setTypeId(Long.valueOf(fields[7])); // typeId
      newRelationship.setCharacteristicTypeId(Long.valueOf(fields[8])); // characteristicTypeId
      newRelationship.setTerminology(terminology);
      newRelationship.setTerminologyVersion(version);
      newRelationship.setModifierId(Long.valueOf(fields[9]));
      newRelationship.setSourceConcept(sourceConcept);
      newRelationship.setDestinationConcept(destinationConcept);

      // If relationship is new, add it
      // NOTE(review): other loaders branch on "relationship == null" here;
      // this one branches on the existing-id set - confirm intended.
      if (!existingRelationshipIds.contains(fields[0])) {
        log.info(
            " add relationship " + newRelationship.getTerminologyId());
        contentService.addRelationship(newRelationship);
        cacheRelationship(newRelationship);
        objectsAdded++;
      }

      // If relationship is changed, update it
      else if (relationship != null
          && !newRelationship.equals(relationship)) {
        log.info(" update relationship "
            + newRelationship.getTerminologyId());
        contentService.updateRelationship(newRelationship);
        cacheRelationship(newRelationship);
        objectsUpdated++;
      }

      // Otherwise, reset effective time (for modified check later)
      else {
        if (relationship != null) {
          newRelationship.setEffectiveTime(relationship.getEffectiveTime());
        }
      }

      // FIX: objectCt was never incremented, so "objectCt % 2000 == 0" was
      // always true (0 % 2000 == 0) and a commit was issued for every single
      // row instead of once per 2000-row batch as clearly intended.
      objectCt++;
      if (objectCt % 2000 == 0) {
        contentService.commit();
        contentService.beginTransaction();
      }
    }
  }
  log.info(" new = " + objectsAdded);
  log.info(" updated = " + objectsUpdated);
}
/**
 * Calculates default preferred names for any concept that has changed. Note:
 * at this time computes for concepts that have only changed due to
 * relationships, which is unnecessary.
 *
 * A description contributes the preferred name when it is active, has the
 * configured type (dpnTypeId), and carries an active language refset member
 * in the configured refset (dpnRefsetId) with the configured acceptability
 * (dpnAcceptabilityId). If several match, the last one processed wins.
 *
 * @throws Exception the exception
 */
private void computeDefaultPreferredNames() throws Exception {

  // Setup vars - counters reported at the end of the run
  int dpnNotFoundCt = 0;
  int dpnFoundCt = 0;
  int dpnSkippedCt = 0;

  // Compute default preferred names for any concept in the delta
  for (String terminologyId : recomputePnConceptIds) {
    Concept concept =
        contentService.getConcept(terminologyId, terminology, version);

    // Skip if inactive - inactive concepts keep their current name
    if (!concept.isActive()) {
      dpnSkippedCt++;
      continue;
    }
    log.info("Checking concept " + concept.getTerminologyId());

    boolean dpnFound = false;

    // Iterate over descriptions
    for (Description description : concept.getDescriptions()) {
      log.info(" Checking description " + description.getTerminologyId()
          + ", active = " + description.isActive() + ", typeId = "
          + description.getTypeId());

      // If active and preferred type
      if (description.isActive()
          && description.getTypeId().equals(dpnTypeId)) {

        // Iterate over language refset members
        for (LanguageRefSetMember language : description
            .getLanguageRefSetMembers()) {
          log.info(" Checking language " + language.getTerminologyId()
              + ", active = " + language.isActive() + ", refsetId = "
              + language.getRefSetId() + ", acceptabilityId = "
              + language.getAcceptabilityId());

          // If preferred and has correct refset
          if (Long.valueOf(language.getRefSetId()).equals(dpnRefsetId)
              && language.isActive()
              && language.getAcceptabilityId().equals(dpnAcceptabilityId)) {
            log.info(" MATCH FOUND: " + description.getTerm());

            // print warning for multiple names found (last match wins)
            if (dpnFound) {
              log.warn("Multiple default preferred names found for concept "
                  + concept.getTerminologyId());
              log.warn(
                  " " + "Existing: " + concept.getDefaultPreferredName());
              log.warn(" " + "Replaced with: " + description.getTerm());
            }
            // Set preferred name
            // NOTE(review): the concept is mutated but updateConcept is not
            // called here - presumably persisted by the open transaction;
            // confirm against the caller.
            concept.setDefaultPreferredName(description.getTerm());
            // set found to true
            dpnFound = true;
          }
        }
      }
    }

    // Pref name not found - install a placeholder so the field is never empty
    if (!dpnFound) {
      dpnNotFoundCt++;
      log.warn("Could not find defaultPreferredName for concept "
          + concept.getTerminologyId());
      concept.setDefaultPreferredName("[Could not be determined]");
    } else {
      dpnFoundCt++;
    }
  }
  log.info(" found = " + dpnFoundCt);
  log.info(" not found = " + dpnNotFoundCt);
  log.info(" skipped = " + dpnSkippedCt);
}
/**
 * Retires concepts that were removed from prior deltas. Find concepts in the
 * DB that are not in the current delta and which have effective times greater
 * than the latest release date. The latest release date is the
 * "terminologyVersion" in this case.
 *
 * NOTE: this does not handle a retraction of a change because we don't
 * preserve a static copy of the previous release to compare against. What
 * this really needs is a daily incremental delta relative to the snapshot
 * from the previous day.
 *
 * @throws Exception if the last publication date cannot be parsed or a
 *           content-service operation fails
 */
public void retireRemovedContent() throws Exception {

  // Base this algorithm on the last publication date.
  // If editing resumes before last publication date
  // this will essentially do nothing until afterwards
  // which is fine, it just means some things will remain
  // in scope longer than they should.
  // (SimpleDateFormat is method-local here, so its lack of thread-safety
  // is not a concern.)
  DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd");
  Date rf2Version = dateFormat.parse(lastPublicationDate);

  // Now remove retired concepts
  // These are concepts created after rf2Version that are no longer in
  // the drip feed
  int ct = 0;
  log.info(" Retire removed concepts");
  for (Concept concept : existingConceptCache.values()) {
    if (concept.getEffectiveTime().after(rf2Version)
        && !deltaConceptIds.contains(concept.getTerminologyId())
        && concept.isActive()) {
      // Re-read by primary key to get a managed, fully-loaded copy.
      concept = contentService.getConcept(concept.getId());

      // Because it's possible that a concept element changed and that
      // change was retracted, we need to double-check whether all of
      // the concept elements are also new. If so, proceed. It is
      // possible that ALL descriptions and relationships changed and all
      // of those changes were retracted. in that case the worst thing
      // that happens is the record has to be remapped.
      boolean proceed = true;
      for (Description description : concept.getDescriptions()) {
        if (!description.getEffectiveTime().after(rf2Version)) {
          proceed = false;
          break;
        }
      }
      if (proceed) {
        for (Relationship relationship : concept.getRelationships()) {
          if (!relationship.getEffectiveTime().after(rf2Version)) {
            proceed = false;
            break;
          }
        }
      }

      // One gap in the logic is if a concept was retired and that
      // retirement was retracted, we don't know. again, the consequence
      // is that the concept will have to be remapped.

      // Retire this concept: mark inactive with the loader start date.
      if (proceed) {
        ct++;
        concept.setActive(false);
        concept.setEffectiveTime(deltaLoaderStartDate);
        contentService.updateConcept(concept);
      }
    }
  }
  log.info(" count = " + ct);
  // Flush the retirements and start a clean transaction/session.
  contentService.commit();
  contentService.clear();
  contentService.beginTransaction();

  // Also retire inferred relationships added after the last release
  // but not in the current delta. Relationships do not change
  // they are created or retired - so we likely do not need to worry
  // about retractions of changes here
  // OK, so after experimenting with this, we can't effectively identify
  // what kind of change was retracted, and so can't assume that it was
  // an addition. Every attempt to model this logic has failed because
  // we simply do not have the intermediate information
  //
  /**
   * ct = 0; log.info(" Retire removed relationships"); List<Relationship>
   * relationships =
   * contentService.getRelationshipsModifiedSinceDate(terminology,
   * rf2Version).getRelationships(); contentService.clear();
   *
   * for (Relationship relationship : relationships) {
   *
   * if (relationship.getEffectiveTime().after(rf2Version) &&
   * !deltaRelationshipIds.contains(relationship.getTerminologyId()) &&
   * relationship.isActive()) { log.info(" retire " +
   * relationship.getTerminologyId()); ct++; relationship.setActive(false);
   * relationship.setEffectiveTime(deltaLoaderStartDate);
   * contentService.updateRelationship(relationship); } } log.info(" count = "
   * + ct);
   **/
  contentService.commit();
  contentService.clear();
  contentService.beginTransaction();

  // Identifying the difference between a change in a description that
  // was retracted and an addition of a description that was retracted
  // is difficult and likely very error prone. Failing to properly
  // handle retractions of changes or additions has very minor effect.
  // So, it is recommended to be skipped.
  // As are retracted changes or additions of language refset member
  // entries.
}
// helper function to update and store concept
// as well as putting all descendant objects in the cache
// for easy retrieval
/**
 * Caches a concept together with all of its relationships, descriptions,
 * and language refset members so later lookups hit memory rather than the
 * database. A concept that is already cached is left untouched.
 *
 * @param c the concept to cache
 */
private void cacheConcept(Concept c) {
  // Guard clause: already cached (children were cached at the same time).
  if (conceptCache.containsKey(c.getTerminologyId())) {
    return;
  }
  for (Relationship rel : c.getRelationships()) {
    relationshipCache.put(rel.getTerminologyId(), rel);
  }
  for (Description desc : c.getDescriptions()) {
    for (LanguageRefSetMember member : desc.getLanguageRefSetMembers()) {
      languageRefSetMemberCache.put(member.getTerminologyId(), member);
    }
    descriptionCache.put(desc.getTerminologyId(), desc);
  }
  conceptCache.put(c.getTerminologyId(), c);
}
/**
 * Caches a description together with all of its language refset members.
 * A description that is already cached is left untouched.
 *
 * @param d the description to cache
 */
private void cacheDescription(Description d) {
  // Guard clause: nothing to do when this description id is already cached.
  if (descriptionCache.containsKey(d.getTerminologyId())) {
    return;
  }
  for (LanguageRefSetMember member : d.getLanguageRefSetMembers()) {
    languageRefSetMemberCache.put(member.getTerminologyId(), member);
  }
  descriptionCache.put(d.getTerminologyId(), d);
}
/**
 * Caches a relationship keyed by its terminology id, overwriting any
 * previously cached entry for the same id.
 *
 * @param r the relationship to cache
 */
private void cacheRelationship(Relationship r) {
  final String key = r.getTerminologyId();
  relationshipCache.put(key, r);
}
// helper function to cache and update a language ref set member
/**
 * Caches a language refset member keyed by its terminology id, overwriting
 * any previously cached entry for the same id.
 *
 * @param l the language refset member to cache
 */
private void cacheLanguageRefSetMember(LanguageRefSetMember l) {
  final String key = l.getTerminologyId();
  languageRefSetMemberCache.put(key, l);
}
/**
 * Registers a listener to be notified of load progress events.
 *
 * @param l the listener to add
 */
@Override
public void addProgressListener(ProgressListener l) {
  listeners.add(l);
}
/**
 * Unregisters a previously added progress listener.
 *
 * @param l the listener to remove
 */
@Override
public void removeProgressListener(ProgressListener l) {
  listeners.remove(l);
}
/**
 * Resets the algorithm. Not applicable for this loader.
 *
 * @throws Exception never thrown by this implementation
 */
@Override
public void reset() throws Exception {
  // n/a
}
/**
 * Checks algorithm preconditions. Not applicable for this loader.
 *
 * @throws Exception never thrown by this implementation
 */
@Override
public void checkPreconditions() throws Exception {
  // n/a
}
/**
 * Requests cancellation of the running load; the worker is expected to
 * poll the requestCancel flag cooperatively.
 *
 * @throws Exception never thrown by this implementation
 */
@Override
public void cancel() throws Exception {
  this.requestCancel = true;
}
}
| |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.android;
import android.app.*;
import android.content.*;
import android.content.res.*;
import android.hardware.*;
import android.media.*;
import android.os.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.util.*;
import org.jitsi.*;
import org.jitsi.android.gui.*;
import org.jitsi.android.gui.LauncherActivity;
import org.jitsi.android.gui.account.*;
import org.jitsi.android.gui.chat.*;
import org.jitsi.android.gui.util.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.log.*;
import org.jitsi.service.osgi.*;
import org.osgi.framework.*;
/**
 * <tt>JitsiApplication</tt> is used, as a global context and utility class for
 * global actions(like EXIT broadcast).
 *
 * @author Pawel Domas
 */
public class JitsiApplication
    extends Application
{
    /**
     * The logger
     */
    private static final Logger logger
        = Logger.getLogger(JitsiApplication.class);

    /**
     * Name of config property that indicates whether foreground icon should be
     * displayed.
     */
    public static final String SHOW_ICON_PROPERTY_NAME
        = "org.jitsi.android.show_icon";

    /**
     * The EXIT action name that is broadcasted to all OSGiActivities
     */
    public static final String ACTION_EXIT = "org.jitsi.android.exit";

    /**
     * Static instance holder. Assigned in {@link #onCreate()} and cleared in
     * {@link #onTerminate()}; the static helpers below assume it is non-null.
     */
    private static JitsiApplication instance;

    /**
     * The currently shown activity.
     */
    private static Activity currentActivity = null;

    /**
     * Bitmap cache instance.
     */
    private final DrawableCache drawableCache = new DrawableCache();

    /**
     * Used to keep the track of GUI activity. Holds -1 while an Activity is
     * visible, otherwise the wall-clock time at which the last one went away.
     */
    private static long lastGuiActivity;

    /**
     * Used to track current <tt>Activity</tt>.
     * This monitor is notified each time current <tt>Activity</tt> changes.
     */
    private static final Object currentActivityMonitor = new Object();

    /**
     * {@inheritDoc}
     */
    @Override
    public void onCreate()
    {
        super.onCreate();

        instance = this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void onTerminate()
    {
        instance = null;

        super.onTerminate();
    }

    /**
     * Shuts down the app by stopping <tt>OSGiService</tt> and broadcasting
     * {@link #ACTION_EXIT}.
     */
    public static void shutdownApplication()
    {
        instance.doShutdownApplication();
    }

    /**
     * Shuts down the OSGI service and sends the EXIT action broadcast.
     */
    private void doShutdownApplication()
    {
        // Shutdown the OSGi service
        stopService(new Intent(this, OSGiService.class));

        // Broadcast the exit action
        Intent exitIntent = new Intent();
        exitIntent.setAction(ACTION_EXIT);

        sendBroadcast(exitIntent);
    }

    /**
     * Returns global bitmap cache of the application.
     * @return global bitmap cache of the application.
     */
    public static DrawableCache getImageCache()
    {
        return instance.drawableCache;
    }

    /**
     * Retrieves <tt>AudioManager</tt> instance using application context.
     *
     * @return <tt>AudioManager</tt> service instance.
     */
    public static AudioManager getAudioManager()
    {
        return (AudioManager) getGlobalContext()
            .getSystemService(Context.AUDIO_SERVICE);
    }

    /**
     * Retrieves <tt>PowerManager</tt> instance using application context.
     *
     * @return <tt>PowerManager</tt> service instance.
     */
    public static PowerManager getPowerManager()
    {
        return (PowerManager) getGlobalContext()
            .getSystemService(Context.POWER_SERVICE);
    }

    /**
     * Retrieves <tt>SensorManager</tt> instance using application context.
     *
     * @return <tt>SensorManager</tt> service instance.
     */
    public static SensorManager getSensorManager()
    {
        return (SensorManager) getGlobalContext()
            .getSystemService(Context.SENSOR_SERVICE);
    }

    /**
     * Retrieves <tt>NotificationManager</tt> instance using application
     * context.
     *
     * @return <tt>NotificationManager</tt> service instance.
     */
    public static NotificationManager getNotificationManager()
    {
        return (NotificationManager) getGlobalContext()
            .getSystemService(Context.NOTIFICATION_SERVICE);
    }

    /**
     * Retrieves <tt>DownloadManager</tt> instance using application
     * context.
     *
     * @return <tt>DownloadManager</tt> service instance.
     */
    public static DownloadManager getDownloadManager()
    {
        return (DownloadManager) getGlobalContext()
            .getSystemService(Context.DOWNLOAD_SERVICE);
    }

    /**
     * Returns global application context.
     *
     * @return Returns global application <tt>Context</tt>.
     */
    public static Context getGlobalContext()
    {
        return instance.getApplicationContext();
    }

    /**
     * Returns application <tt>Resources</tt> object.
     * @return application <tt>Resources</tt> object.
     */
    public static Resources getAppResources()
    {
        return instance.getResources();
    }

    /**
     * Returns Android string resource for given <tt>id</tt>.
     * @param id the string identifier.
     * @return Android string resource for given <tt>id</tt>.
     */
    public static String getResString(int id)
    {
        return getAppResources().getString(id);
    }

    /**
     * Returns Android string resource for given <tt>id</tt> and format
     * arguments that will be used for substitution.
     * @param id the string identifier.
     * @param arg the format arguments that will be used for substitution.
     * @return Android string resource for given <tt>id</tt> and format
     *         arguments.
     */
    public static String getResString(int id, Object ... arg)
    {
        return getAppResources().getString(id, arg);
    }

    /**
     * Returns home <tt>Activity</tt> class.
     * @return Returns home <tt>Activity</tt> class.
     */
    public static Class<?> getHomeScreenActivityClass()
    {
        BundleContext osgiContext = AndroidGUIActivator.bundleContext;
        if(osgiContext == null)
        {
            // If OSGI has not started show splash screen as home
            return LauncherActivity.class;
        }

        AccountManager accountManager
            = ServiceUtils.getService(osgiContext, AccountManager.class);

        // If account manager is null it means that OSGI has not started yet
        if(accountManager == null)
            return LauncherActivity.class;

        final int accountCount = accountManager.getStoredAccounts().size();
        if (accountCount == 0)
        {
            // Start new account Activity
            return AccountLoginActivity.class;
        }
        else
        {
            // Start main view
            return Jitsi.class;
        }
    }

    /**
     * Creates the home <tt>Activity</tt> <tt>Intent</tt>.
     * @return the home <tt>Activity</tt> <tt>Intent</tt>.
     */
    public static Intent getHomeIntent()
    {
        Intent homeIntent = new Intent(instance, getHomeScreenActivityClass());
        // Home is singleTask anyway, but this way it can be started from
        // non Activity context.
        homeIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        return homeIntent;
    }

    /**
     * Creates pending <tt>Intent</tt> to be started, when Jitsi icon is
     * clicked.
     * @return new pending <tt>Intent</tt> to be started, when Jitsi icon is
     *         clicked.
     */
    public static PendingIntent getJitsiIconIntent()
    {
        Intent intent = ChatSessionManager.getLastChatIntent();
        if(intent == null)
        {
            intent = getHomeIntent();
        }
        // NOTE(review): on Android 12+ (API 31) PendingIntent creation
        // requires FLAG_IMMUTABLE or FLAG_MUTABLE to be specified explicitly;
        // confirm the app's target SDK before adding the flag here.
        return PendingIntent.getActivity(
                getGlobalContext(), 0,
                intent,
                PendingIntent.FLAG_UPDATE_CURRENT);
    }

    /**
     * Returns <tt>ConfigurationService</tt> instance.
     * @return <tt>ConfigurationService</tt> instance.
     */
    public static ConfigurationService getConfig()
    {
        return ServiceUtils.getService(
                AndroidGUIActivator.bundleContext,
                ConfigurationService.class);
    }

    /**
     * Returns <tt>true</tt> if Jitsi notification icon should be displayed.
     * @return <tt>true</tt> if Jitsi notification icon should be displayed.
     */
    public static boolean isIconEnabled()
    {
        return getConfig().getBoolean(SHOW_ICON_PROPERTY_NAME, true);
    }

    /**
     * Sets the current activity.
     *
     * @param a the current activity to set
     */
    public static void setCurrentActivity(Activity a)
    {
        synchronized (currentActivityMonitor)
        {
            logger.info("Current activity set to "+a);
            currentActivity = a;

            if(currentActivity == null)
            {
                lastGuiActivity = System.currentTimeMillis();
            }
            else
            {
                lastGuiActivity = -1;
            }
            // Notify listening threads
            currentActivityMonitor.notifyAll();
        }
    }

    /**
     * Returns monitor object that will be notified each time current
     * <tt>Activity</tt> changes.
     * @return monitor object that will be notified each time current
     *         <tt>Activity</tt> changes.
     */
    static public Object getCurrentActivityMonitor()
    {
        return currentActivityMonitor;
    }

    /**
     * Returns the current activity.
     *
     * @return the current activity
     */
    public static Activity getCurrentActivity()
    {
        return currentActivity;
    }

    /**
     * Returns the time elapsed since last Jitsi <tt>Activity</tt> was open in
     * milliseconds.
     *
     * @return the time elapsed since last Jitsi <tt>Activity</tt> was open in
     *         milliseconds.
     */
    public static long getLastGuiActivityInterval()
    {
        // GUI is currently active
        if(lastGuiActivity == -1)
        {
            return 0;
        }
        return System.currentTimeMillis() - lastGuiActivity;
    }

    /**
     * Checks if current <tt>Activity</tt> is the home one.
     * @return <tt>true</tt> if the home <tt>Activity</tt> is currently active.
     */
    public static boolean isHomeActivityActive()
    {
        return currentActivity != null
            && currentActivity.getClass().equals(
                    getHomeScreenActivityClass());
    }

    /**
     * Displays the send logs dialog.
     */
    public static void showSendLogsDialog()
    {
        LogUploadService logUpload
            = ServiceUtils.getService(AndroidGUIActivator.bundleContext,
                                      LogUploadService.class);

        // FIX: the service may not be registered yet (OSGi still starting or
        // shutting down); previously logUpload was dereferenced
        // unconditionally, which could throw a NullPointerException.
        if(logUpload == null)
        {
            logger.error("Log upload service is not available");
            return;
        }

        String defaultEmail
            = getConfig().getString("org.jitsi.android.LOG_REPORT_EMAIL");

        logUpload.sendLogs(
                new String[]{ defaultEmail },
                getResString(R.string.service_gui_SEND_LOGS_SUBJECT),
                getResString(R.string.service_gui_SEND_LOGS_TITLE));
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.transaction.buffer;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.mockito.Mockito.mock;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import com.google.common.collect.Lists;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.Unpooled;
import java.util.List;
import java.util.concurrent.ExecutionException;
import org.apache.pulsar.broker.service.persistent.PersistentTopic;
import org.apache.pulsar.broker.transaction.exception.buffer.TransactionBufferException;
import org.apache.pulsar.broker.transaction.buffer.impl.InMemTransactionBufferProvider;
import org.apache.pulsar.client.api.transaction.TxnID;
import org.apache.pulsar.transaction.coordinator.proto.TxnStatus;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Factory;
import org.testng.annotations.Test;
/**
* Unit test different {@link TransactionBufferProvider}.
*/
@Test(groups = "broker")
public class TransactionBufferTest {
/**
 * Supplies the transaction buffer provider implementations under test,
 * one row per provider class name.
 *
 * @return provider class names consumed by the {@code @Factory} constructor
 */
@DataProvider(name = "providers")
public static Object[][] providers() {
    Object[][] providerClassNames = {
        { InMemTransactionBufferProvider.class.getName() }
    };
    return providerClassNames;
}
// Fixed transaction id shared by the single-transaction test cases.
private final TxnID txnId = new TxnID(1234L, 2345L);
// Fully-qualified provider class name supplied by the @Factory data provider.
private final String providerClassName;
// Provider instantiated from providerClassName; used to create buffers.
private final TransactionBufferProvider provider;
// Buffer under test; recreated before each test method in setup().
private TransactionBuffer buffer;
/**
 * Creates a test fixture for one provider implementation.
 *
 * @param providerClassName fully-qualified provider class to instantiate
 * @throws Exception if the provider cannot be instantiated
 */
@Factory(dataProvider = "providers")
public TransactionBufferTest(String providerClassName) throws Exception {
    this.providerClassName = providerClassName;
    this.provider = TransactionBufferProvider.newProvider(providerClassName);
}
/**
 * Creates a fresh transaction buffer backed by a mocked persistent topic
 * before every test method.
 *
 * @throws Exception if the buffer cannot be created
 */
@BeforeMethod
public void setup() throws Exception {
    final PersistentTopic mockedTopic = mock(PersistentTopic.class);
    buffer = provider.newTransactionBuffer(mockedTopic);
}
/**
 * Closes the buffer created by {@link #setup()} after every test method.
 * NOTE(review): the future returned by closeAsync() is not awaited, so a
 * close failure would go unnoticed - confirm whether this is intentional.
 */
@AfterMethod(alwaysRun = true)
public void teardown() throws Exception {
    this.buffer.closeAsync();
}
/**
 * Opening a reader for a transaction id that was never appended to must
 * fail with {@code TransactionNotFoundException}.
 */
@Test
public void testOpenReaderOnNonExistentTxn() throws Exception {
    try {
        buffer.openTransactionBufferReader(txnId, 0L).get();
        fail("Should fail to open reader if a transaction doesn't exist");
    } catch (ExecutionException cause) {
        // The async failure surfaces as the ExecutionException's cause.
        assertTrue(cause.getCause()
            instanceof TransactionBufferException.TransactionNotFoundException);
    }
}
/**
 * A reader may only be opened on a sealed transaction; opening one while
 * the transaction is still OPEN must fail with
 * {@code TransactionNotSealedException}.
 */
@Test
public void testOpenReaderOnAnOpenTxn() throws Exception {
    final int numEntries = 10;
    // Appending entries implicitly creates the transaction in OPEN state.
    appendEntries(txnId, numEntries, 0L);
    TransactionMeta txnMeta = buffer.getTransactionMeta(txnId).get();
    assertEquals(txnId, txnMeta.id());
    assertEquals(TxnStatus.OPEN, txnMeta.status());

    try {
        buffer.openTransactionBufferReader(txnId, 0L).get();
        fail("Should fail to open a reader on an OPEN transaction");
    } catch (ExecutionException e) {
        // The async failure surfaces as the ExecutionException's cause.
        assertTrue(e.getCause() instanceof TransactionBufferException.TransactionNotSealedException);
    }
}
/**
 * After a transaction is committed, a reader can be opened and the appended
 * entries read back and verified. (Currently disabled.)
 */
@Test(enabled = false)
public void testOpenReaderOnCommittedTxn() throws Exception {
    final int numEntries = 10;
    appendEntries(txnId, numEntries, 0L);
    TransactionMeta txnMeta = buffer.getTransactionMeta(txnId).get();
    assertEquals(txnId, txnMeta.id());
    assertEquals(TxnStatus.OPEN, txnMeta.status());

    // commit the transaction
    // FIX: await the commit future so the status check below cannot race
    // an asynchronous provider implementation.
    buffer.commitTxn(txnId, Long.MIN_VALUE).get();
    txnMeta = buffer.getTransactionMeta(txnId).get();
    assertEquals(txnId, txnMeta.id());
    assertEquals(TxnStatus.COMMITTED, txnMeta.status());

    // open reader and read back all appended entries
    try (TransactionBufferReader reader = buffer.openTransactionBufferReader(
        txnId, 0L
    ).get()) {
        // read 10 entries
        List<TransactionEntry> txnEntries = reader.readNext(numEntries).get();
        verifyAndReleaseEntries(txnEntries, txnId, 0L, numEntries);
    }
}
/**
 * Committing a transaction id that was never appended to must fail with
 * {@code TransactionNotFoundException}.
 */
@Test
public void testCommitNonExistentTxn() throws Exception {
    try {
        buffer.commitTxn(txnId, Long.MIN_VALUE).get();
        fail("Should fail to commit a transaction if it doesn't exist");
    } catch (ExecutionException cause) {
        // The async failure surfaces as the ExecutionException's cause.
        assertTrue(cause.getCause()
            instanceof TransactionBufferException.TransactionNotFoundException);
    }
}
/**
 * Committing an OPEN transaction transitions its status to COMMITTED.
 */
@Test
public void testCommitTxn() throws Exception {
    final int numEntries = 10;
    appendEntries(txnId, numEntries, 0L);
    TransactionMeta txnMeta = buffer.getTransactionMeta(txnId).get();
    assertEquals(txnId, txnMeta.id());
    assertEquals(TxnStatus.OPEN, txnMeta.status());

    // commit the transaction
    // FIX: await the commit future so the status assertion below cannot race
    // an asynchronous provider implementation (and commit failures surface).
    buffer.commitTxn(txnId, Long.MIN_VALUE).get();
    txnMeta = buffer.getTransactionMeta(txnId).get();
    assertEquals(txnId, txnMeta.id());
    assertEquals(TxnStatus.COMMITTED, txnMeta.status());
}
/**
 * Aborting a transaction id that was never appended to must fail with
 * {@code TransactionNotFoundException}.
 */
@Test
public void testAbortNonExistentTxn() throws Exception {
    try {
        buffer.abortTxn(txnId, Long.MIN_VALUE).get();
        fail("Should fail to abort a transaction if it doesn't exist");
    } catch (ExecutionException cause) {
        // The async failure surfaces as the ExecutionException's cause.
        assertTrue(cause.getCause()
            instanceof TransactionBufferException.TransactionNotFoundException);
    }
}
/**
 * Aborting an already COMMITTED transaction must fail with
 * {@code TransactionStatusException} and leave the status COMMITTED.
 */
@Test
public void testAbortCommittedTxn() throws Exception {
    final int numEntries = 10;
    appendEntries(txnId, numEntries, 0L);
    TransactionMeta txnMeta = buffer.getTransactionMeta(txnId).get();
    assertEquals(txnId, txnMeta.id());
    assertEquals(TxnStatus.OPEN, txnMeta.status());

    // commit the transaction
    // FIX: await the commit future so the abort below is guaranteed to see
    // the COMMITTED state on an asynchronous provider implementation.
    buffer.commitTxn(txnId, Long.MIN_VALUE).get();
    txnMeta = buffer.getTransactionMeta(txnId).get();
    assertEquals(txnId, txnMeta.id());
    assertEquals(TxnStatus.COMMITTED, txnMeta.status());

    // abort the transaction. it should be rejected, not discarded
    try {
        buffer.abortTxn(txnId, Long.MIN_VALUE).get();
        fail("Should fail to abort a committed transaction");
    } catch (ExecutionException e) {
        assertTrue(e.getCause() instanceof TransactionBufferException.TransactionStatusException);
    }

    // the transaction must still be present and COMMITTED
    txnMeta = buffer.getTransactionMeta(txnId).get();
    assertEquals(txnId, txnMeta.id());
    assertEquals(TxnStatus.COMMITTED, txnMeta.status());
}
/**
 * Aborting an OPEN transaction discards it from the buffer entirely.
 */
@Test
public void testAbortTxn() throws Exception {
    final int numEntries = 10;
    appendEntries(txnId, numEntries, 0L);
    TransactionMeta txnMeta = buffer.getTransactionMeta(txnId).get();
    assertEquals(txnId, txnMeta.id());
    assertEquals(TxnStatus.OPEN, txnMeta.status());

    // abort the transaction. it should be discarded from the buffer
    buffer.abortTxn(txnId, Long.MIN_VALUE).get();
    verifyTxnNotExist(txnId);
}
/**
 * Purging by committed-at ledger id removes only the committed transactions
 * on that ledger, leaving OPEN transactions and transactions committed on
 * other ledgers intact. (Currently disabled.)
 */
@Test(enabled = false)
public void testPurgeTxns() throws Exception {
    final int numEntries = 10;

    // create an OPEN txn
    TxnID txnId1 = new TxnID(1234L, 3456L);
    appendEntries(txnId1, numEntries, 0L);
    TransactionMeta txnMeta = buffer.getTransactionMeta(txnId1).get();
    assertEquals(txnId1, txnMeta.id());
    assertEquals(TxnStatus.OPEN, txnMeta.status());

    // create two committed txns
    // FIX: await the commit futures so the purge below cannot race an
    // asynchronous provider implementation.
    TxnID txnId2 = new TxnID(1234L, 4567L);
    appendEntries(txnId2, numEntries, 0L);
    buffer.commitTxn(txnId2, Long.MIN_VALUE).get();
    TransactionMeta txnMeta2 = buffer.getTransactionMeta(txnId2).get();
    assertEquals(txnId2, txnMeta2.id());
    assertEquals(TxnStatus.COMMITTED, txnMeta2.status());

    TxnID txnId3 = new TxnID(1234L, 5678L);
    appendEntries(txnId3, numEntries, 0L);
    buffer.commitTxn(txnId3, Long.MIN_VALUE).get();
    TransactionMeta txnMeta3 = buffer.getTransactionMeta(txnId3).get();
    assertEquals(txnId3, txnMeta3.id());
    assertEquals(TxnStatus.COMMITTED, txnMeta3.status());

    // purge the transactions committed on ledger `0L`
    // (NOTE: the previous comment said ledger `22L`, but `0L` is purged)
    buffer.purgeTxns(Lists.newArrayList(0L)).get();

    // txnId2 should be purged
    verifyTxnNotExist(txnId2);

    // txnId1 should still be OPEN
    txnMeta = buffer.getTransactionMeta(txnId1).get();
    assertEquals(txnId1, txnMeta.id());
    assertEquals(TxnStatus.OPEN, txnMeta.status());

    // txnId3 should still be COMMITTED
    txnMeta3 = buffer.getTransactionMeta(txnId3).get();
    assertEquals(txnId3, txnMeta3.id());
    assertEquals(TxnStatus.COMMITTED, txnMeta3.status());
}
// Appends {@code numEntries} payloads ("message-<sequenceId>") to the given
// transaction, with consecutive sequence ids starting at
// {@code startSequenceId}. Each append is awaited before the next.
private void appendEntries(TxnID txnId, int numEntries, long startSequenceId) {
    final long endSequenceId = startSequenceId + numEntries;
    for (long sequenceId = startSequenceId; sequenceId < endSequenceId; sequenceId++) {
        buffer.appendBufferToTxn(
            txnId,
            sequenceId,
            Unpooled.copiedBuffer("message-" + sequenceId, UTF_8)
        ).join();
    }
}
// Asserts that the read entries carry the expected commit position, txn id,
// sequence ids and payloads, releasing each entry via try-with-resources.
// BUGFIX: the payload assertion compared against "message-" + i, but
// appendEntries writes "message-" + sequenceId where
// sequenceId = startSequenceId + i — the old check was wrong for any
// startSequenceId != 0 (the sequenceId() assertion already used the sum).
private void verifyAndReleaseEntries(List<TransactionEntry> txnEntries,
                                     TxnID txnID,
                                     long startSequenceId,
                                     int numEntriesToRead) {
    assertEquals(txnEntries.size(), numEntriesToRead);
    for (int i = 0; i < numEntriesToRead; i++) {
        try (TransactionEntry txnEntry = txnEntries.get(i)) {
            assertEquals(txnEntry.committedAtLedgerId(), 22L);
            assertEquals(txnEntry.committedAtEntryId(), 33L);
            assertEquals(txnEntry.txnId(), txnID);
            assertEquals(txnEntry.sequenceId(), startSequenceId + i);
            assertEquals(new String(
                ByteBufUtil.getBytes(txnEntry.getEntry().getDataBuffer()),
                UTF_8
            ), "message-" + (startSequenceId + i));
        }
    }
}
// Asserts that the buffer no longer knows the given transaction: a metadata
// lookup must fail with a wrapped TransactionNotFoundException.
private void verifyTxnNotExist(TxnID txnID) throws Exception {
    try {
        buffer.getTransactionMeta(txnID).get();
        fail("Should fail to get transaction metadata if it doesn't exist");
    } catch (ExecutionException ee) {
        Throwable cause = ee.getCause();
        assertTrue(cause instanceof TransactionBufferException.TransactionNotFoundException);
    }
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.file;
import com.intellij.codeInsight.completion.scope.JavaCompletionHints;
import com.intellij.core.CoreJavaDirectoryService;
import com.intellij.lang.Language;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.navigation.ItemPresentation;
import com.intellij.navigation.ItemPresentationProviders;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.ui.Queryable;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.JavaPsiFacadeImpl;
import com.intellij.psi.impl.source.tree.java.PsiCompositeModifierList;
import com.intellij.psi.scope.ElementClassHint;
import com.intellij.psi.scope.NameHint;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.search.EverythingGlobalScope;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiSearchScopeUtil;
import com.intellij.psi.util.*;
import com.intellij.reference.SoftReference;
import com.intellij.util.ArrayUtil;
import com.intellij.util.CommonProcessors;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * PSI implementation of a Java package. Resolves the package's directories,
 * classes, sub-packages and package-level annotations through
 * {@link JavaPsiFacadeImpl}, caching the results in soft-referenced /
 * CachedValue-backed fields below.
 */
public class PsiPackageImpl extends PsiPackageBase implements PsiPackage, Queryable {
private static final Logger LOG = Logger.getInstance(PsiPackageImpl.class);
// Lazily-created caches. NOTE(review): the null-check-then-assign pattern on
// these volatile fields is unsynchronized, so two threads may each build a
// cache and one wins — presumably benign because CachedValue creation is
// idempotent; confirm against platform threading conventions.
private volatile CachedValue<PsiModifierList> myAnnotationList;
// Directories of this package, excluding library sources.
private volatile CachedValue<Collection<PsiDirectory>> myDirectories;
// Directories of this package, including library sources.
private volatile CachedValue<Collection<PsiDirectory>> myDirectoriesWithLibSources;
// Short name -> classes, used outside dumb mode (see getCachedClassesByName).
private volatile SoftReference<Map<String, PsiClass[]>> myClassCache;
// Dumb-mode caches: full per-scope name->classes map, plus a partial
// heuristic cache keyed by (scope, name) filled by findClassesHeuristically.
private volatile SoftReference<Map<GlobalSearchScope, Map<String, PsiClass[]>>> myDumbModeFullCache;
private volatile SoftReference<Map<Pair<GlobalSearchScope, String>, PsiClass[]>> myDumbModePartialCache;
public PsiPackageImpl(PsiManager manager, String qualifiedName) {
super(manager, qualifiedName);
}
// Returns this package's directories, lazily building and caching the
// appropriate CachedValue depending on whether library sources are wanted.
@Override
protected Collection<PsiDirectory> getAllDirectories(boolean includeLibrarySources) {
if (includeLibrarySources) {
if (myDirectoriesWithLibSources == null) {
myDirectoriesWithLibSources = createCachedDirectories(true);
}
return myDirectoriesWithLibSources.getValue();
}
else {
if (myDirectories == null) {
myDirectories = createCachedDirectories(false);
}
return myDirectories.getValue();
}
}
// Builds a CachedValue that collects the package's directories via the
// facade; invalidated by the implementation helper's dependencies.
@NotNull
private CachedValue<Collection<PsiDirectory>> createCachedDirectories(final boolean includeLibrarySources) {
return CachedValuesManager.getManager(myManager.getProject()).createCachedValue(new CachedValueProvider<Collection<PsiDirectory>>() {
@Override
public Result<Collection<PsiDirectory>> compute() {
final CommonProcessors.CollectProcessor<PsiDirectory> processor = new CommonProcessors.CollectProcessor<PsiDirectory>();
getFacade().processPackageDirectories(PsiPackageImpl.this, allScope(), processor, includeLibrarySources);
return Result.create(processor.getResults(), PsiPackageImplementationHelper.getInstance().getDirectoryCachedValueDependencies(
PsiPackageImpl.this));
}
}, false);
}
@Override
protected PsiElement findPackage(String qName) {
return getFacade().findPackage(qName);
}
@Override
public void handleQualifiedNameChange(@NotNull final String newQualifiedName) {
PsiPackageImplementationHelper.getInstance().handleQualifiedNameChange(this, newQualifiedName);
}
@Override
public VirtualFile[] occursInPackagePrefixes() {
return PsiPackageImplementationHelper.getInstance().occursInPackagePrefixes(this);
}
@Override
public PsiPackageImpl getParentPackage() {
return (PsiPackageImpl)super.getParentPackage();
}
// NOTE(review): the `manager` parameter is ignored; the instance's own
// myManager is used instead. They are presumably always the same manager
// here — confirm before relying on the parameter.
@Override
protected PsiPackageImpl createInstance(PsiManager manager, String qName) {
return new PsiPackageImpl(myManager, qName);
}
@Override
@NotNull
public Language getLanguage() {
return JavaLanguage.INSTANCE;
}
// A package is valid if it exists as a package prefix or has at least one
// directory (library sources included).
@Override
public boolean isValid() {
return PsiPackageImplementationHelper.getInstance().packagePrefixExists(this) || !getAllDirectories(true).isEmpty();
}
@Override
public void accept(@NotNull PsiElementVisitor visitor) {
if (visitor instanceof JavaElementVisitor) {
((JavaElementVisitor)visitor).visitPackage(this);
}
else {
visitor.visitElement(this);
}
}
@Override
public String toString() {
return "PsiPackage:" + getQualifiedName();
}
@Override
@NotNull
public PsiClass[] getClasses() {
return getClasses(allScope());
}
// Project-wide scope, possibly adjusted by the implementation helper.
@NotNull
protected GlobalSearchScope allScope() {
return PsiPackageImplementationHelper.getInstance().adjustAllScope(this, GlobalSearchScope.allScope(getProject()));
}
@Override
@NotNull
public PsiClass[] getClasses(@NotNull GlobalSearchScope scope) {
return getFacade().getClasses(this, scope);
}
@Override
public PsiFile[] getFiles(@NotNull GlobalSearchScope scope) {
return getFacade().getPackageFiles(this, scope);
}
// Lazily computed, cached list of package-level annotations (package-info).
@Override
@Nullable
public PsiModifierList getAnnotationList() {
if (myAnnotationList == null) {
myAnnotationList = CachedValuesManager.getManager(myManager.getProject()).createCachedValue(new PackageAnnotationValueProvider(), false);
}
return myAnnotationList.getValue();
}
@Override
@NotNull
public PsiPackage[] getSubPackages() {
return getSubPackages(allScope());
}
@Override
@NotNull
public PsiPackage[] getSubPackages(@NotNull GlobalSearchScope scope) {
return getFacade().getSubPackages(this, scope);
}
private JavaPsiFacadeImpl getFacade() {
return (JavaPsiFacadeImpl)JavaPsiFacadeImpl.getInstance(myManager.getProject()) instanceof JavaPsiFacadeImpl ? (JavaPsiFacadeImpl)JavaPsiFacade.getInstance(myManager.getProject()) : (JavaPsiFacadeImpl)JavaPsiFacade.getInstance(myManager.getProject());
}
// Looks up classes by short name, consulting (and filling) the soft cache.
// NOTE(review): outside dumb mode the `scope` parameter is NOT part of the
// cache key — the lookup is done in EverythingGlobalScope and callers such
// as findClassByShortName filter by scope afterwards.
@NotNull
private PsiClass[] getCachedClassesByName(@NotNull String name, GlobalSearchScope scope) {
if (DumbService.getInstance(getProject()).isDumb()) {
return getCachedClassInDumbMode(name, scope);
}
Map<String, PsiClass[]> map = SoftReference.dereference(myClassCache);
if (map == null) {
myClassCache = new SoftReference<Map<String, PsiClass[]>>(map = ContainerUtil.createConcurrentSoftValueMap());
}
PsiClass[] classes = map.get(name);
if (classes != null) {
return classes;
}
final String qName = getQualifiedName();
final String classQName = !qName.isEmpty() ? qName + "." + name : name;
map.put(name, classes = getFacade().findClasses(classQName, new EverythingGlobalScope(getProject())));
return classes;
}
// Dumb-mode variant: indexes are unavailable, so build a per-scope map of
// all classes in the package (after trying cheap heuristics first).
private PsiClass[] getCachedClassInDumbMode(final String name, GlobalSearchScope scope) {
Map<GlobalSearchScope, Map<String, PsiClass[]>> scopeMap = SoftReference.dereference(myDumbModeFullCache);
if (scopeMap == null) {
myDumbModeFullCache = new SoftReference<Map<GlobalSearchScope, Map<String, PsiClass[]>>>(scopeMap = ContainerUtil.newConcurrentMap());
}
Map<String, PsiClass[]> map = scopeMap.get(scope);
if (map == null) {
// before parsing all files in this package, try cheap heuristics: check if 'name' is a subpackage, check files named like 'name'
PsiClass[] array = findClassesHeuristically(name, scope);
if (array != null) return array;
map = new HashMap<String, PsiClass[]>();
for (PsiClass psiClass : getClasses(scope)) {
String psiClassName = psiClass.getName();
if (psiClassName != null) {
PsiClass[] existing = map.get(psiClassName);
map.put(psiClassName, existing == null ? new PsiClass[]{psiClass} : ArrayUtil.append(existing, psiClass));
}
}
scopeMap.put(scope, map);
}
PsiClass[] classes = map.get(name);
return classes == null ? PsiClass.EMPTY_ARRAY : classes;
}
// Cheap dumb-mode lookup: if 'name' is a subpackage it cannot be a class;
// otherwise scan only files whose name contains 'name'. Returns null when
// the heuristics found nothing (caller falls back to the full scan).
@Nullable
private PsiClass[] findClassesHeuristically(final String name, GlobalSearchScope scope) {
if (findSubPackageByName(name) != null) {
return PsiClass.EMPTY_ARRAY;
}
Map<Pair<GlobalSearchScope, String>, PsiClass[]> partial = SoftReference.dereference(myDumbModePartialCache);
if (partial == null) {
myDumbModePartialCache = new SoftReference<Map<Pair<GlobalSearchScope, String>, PsiClass[]>>(partial = ContainerUtil.newConcurrentMap());
}
PsiClass[] result = partial.get(Pair.create(scope, name));
if (result == null) {
List<PsiClass> fastClasses = ContainerUtil.newArrayList();
for (PsiDirectory directory : getDirectories(scope)) {
List<PsiFile> sameNamed = ContainerUtil.filter(directory.getFiles(), new Condition<PsiFile>() {
@Override
public boolean value(PsiFile file) {
return file.getName().contains(name);
}
});
Collections.addAll(fastClasses, CoreJavaDirectoryService.getPsiClasses(directory, sameNamed.toArray(new PsiFile[sameNamed.size()])));
}
if (!fastClasses.isEmpty()) {
partial.put(Pair.create(scope, name), result = fastClasses.toArray(new PsiClass[fastClasses.size()]));
}
}
return result;
}
@Override
public boolean containsClassNamed(@NotNull String name) {
return getCachedClassesByName(name, new EverythingGlobalScope(getProject())).length > 0;
}
// Finds classes with the given short name, restricted to `scope`, preferring
// classes the scope ranks higher (note the reversed compare arguments:
// file2 before file1 sorts best matches first).
@NotNull
@Override
public PsiClass[] findClassByShortName(@NotNull String name, @NotNull final GlobalSearchScope scope) {
PsiClass[] allClasses = getCachedClassesByName(name, scope);
if (allClasses.length == 0) return allClasses;
if (allClasses.length == 1) {
return PsiSearchScopeUtil.isInScope(scope, allClasses[0]) ? allClasses : PsiClass.EMPTY_ARRAY;
}
PsiClass[] array = ContainerUtil.findAllAsArray(allClasses, new Condition<PsiClass>() {
@Override
public boolean value(PsiClass aClass) {
return PsiSearchScopeUtil.isInScope(scope, aClass);
}
});
Arrays.sort(array, new Comparator<PsiClass>() {
@Override
public int compare(PsiClass o1, PsiClass o2) {
VirtualFile file1 = o1.getContainingFile().getVirtualFile();
VirtualFile file2 = o2.getContainingFile().getVirtualFile();
if (file1 == null) return file2 == null ? 0 : -1;
if (file2 == null) return 1;
return scope.compare(file2, file1);
}
});
return array;
}
@Nullable
private PsiPackage findSubPackageByName(@NotNull String name) {
final String qName = getQualifiedName();
final String subpackageQName = qName.isEmpty() ? name : qName + "." + name;
return getFacade().findPackage(subpackageQName);
}
// Feeds this package's classes and sub-packages to a scope processor (used
// by resolve/completion). Honors NameHint / ElementClassHint / name-filter
// hints to avoid enumerating everything when a specific name is requested.
@Override
public boolean processDeclarations(@NotNull PsiScopeProcessor processor,
@NotNull ResolveState state,
PsiElement lastParent,
@NotNull PsiElement place) {
GlobalSearchScope scope = place.getResolveScope();
processor.handleEvent(PsiScopeProcessor.Event.SET_DECLARATION_HOLDER, this);
ElementClassHint classHint = processor.getHint(ElementClassHint.KEY);
final Condition<String> nameCondition = processor.getHint(JavaCompletionHints.NAME_FILTER);
NameHint providedNameHint = processor.getHint(NameHint.KEY);
final String providedName = providedNameHint == null ? null : providedNameHint.getName(state);
if (classHint == null || classHint.shouldProcess(ElementClassHint.DeclarationKind.CLASS)) {
if (providedName != null) {
final PsiClass[] classes = findClassByShortName(providedName, scope);
if (!processClasses(processor, state, classes, Conditions.<String>alwaysTrue())) return false;
}
else {
PsiClass[] classes = getClasses(scope);
if (!processClasses(processor, state, classes, nameCondition != null ? nameCondition : Conditions.<String>alwaysTrue())) return false;
}
}
if (classHint == null || classHint.shouldProcess(ElementClassHint.DeclarationKind.PACKAGE)) {
if (providedName != null) {
PsiPackage aPackage = findSubPackageByName(providedName);
if (aPackage != null) {
if (!processor.execute(aPackage, state)) return false;
}
}
else {
PsiPackage[] packs = getSubPackages(scope);
for (PsiPackage pack : packs) {
final String packageName = pack.getName();
if (packageName == null) continue;
// skip sub-packages whose name is not a valid identifier at the
// current language level
if (!PsiNameHelper.getInstance(myManager.getProject()).isIdentifier(packageName, PsiUtil.getLanguageLevel(this))) {
continue;
}
if (!processor.execute(pack, state)) {
return false;
}
}
}
}
return true;
}
// Runs the processor over classes matching nameCondition; exceptions from
// individual classes are logged and swallowed so one bad class does not
// abort the whole resolve (ProcessCanceledException is rethrown).
private static boolean processClasses(@NotNull PsiScopeProcessor processor,
@NotNull ResolveState state,
@NotNull PsiClass[] classes,
@NotNull Condition<String> nameCondition) {
for (PsiClass aClass : classes) {
String name = aClass.getName();
if (name != null && nameCondition.value(name)) {
try {
if (!processor.execute(aClass, state)) return false;
}
catch (ProcessCanceledException e) {
throw e;
}
catch (Exception e) {
LOG.error(e);
}
}
}
return true;
}
@Override
public boolean canNavigate() {
return isValid();
}
@Override
public ItemPresentation getPresentation() {
return ItemPresentationProviders.getItemPresentation(this);
}
@Override
public void navigate(final boolean requestFocus) {
PsiPackageImplementationHelper.getInstance().navigate(this, requestFocus);
}
// Collects package-level modifier lists: package-info.java statements in
// each directory plus compiled package-info classes, combined into one
// composite list. Invalidated on out-of-code-block changes.
private class PackageAnnotationValueProvider implements CachedValueProvider<PsiModifierList> {
@Override
public Result<PsiModifierList> compute() {
List<PsiModifierList> modifiers = ContainerUtil.newArrayList();
for(PsiDirectory directory: getDirectories()) {
PsiFile file = directory.findFile(PACKAGE_INFO_FILE);
PsiPackageStatement stmt = file == null ? null : PsiTreeUtil.getChildOfType(file, PsiPackageStatement.class);
PsiModifierList modifierList = stmt == null ? null : stmt.getAnnotationList();
ContainerUtil.addIfNotNull(modifiers, modifierList);
}
for (PsiClass aClass : getFacade().findClasses(getQualifiedName() + ".package-info", allScope())) {
ContainerUtil.addIfNotNull(modifiers, aClass.getModifierList());
}
PsiCompositeModifierList result = modifiers.isEmpty() ? null : new PsiCompositeModifierList(getManager(), modifiers);
return new Result<PsiModifierList>(result, PsiModificationTracker.OUT_OF_CODE_BLOCK_MODIFICATION_COUNT);
}
}
// Package annotations double as the package's modifier list.
@Override
@Nullable
public PsiModifierList getModifierList() {
return getAnnotationList();
}
@Override
public boolean hasModifierProperty(@NonNls @NotNull final String name) {
return false;
}
@Override
public PsiQualifiedNamedElement getContainer() {
return getParentPackage();
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.